1
0
mirror of synced 2025-12-19 18:14:56 -05:00

Live tests: add validation tests (#38711)

This commit is contained in:
Catherine Noll
2024-05-31 11:34:49 -04:00
committed by GitHub
parent ba6ba8ee28
commit cf26fa3008
25 changed files with 2223 additions and 63 deletions

View File

@@ -152,6 +152,46 @@ files = [
[package.dependencies]
frozenlist = ">=1.1.0"
[[package]]
name = "airbyte-cdk"
version = "1.1.3"
description = "A framework for writing Airbyte Connectors."
optional = false
python-versions = "<4.0,>=3.9"
files = [
{file = "airbyte_cdk-1.1.3-py3-none-any.whl", hash = "sha256:d72c8a26ed41dac11b2b945b98dd81fb868f31bed150c5a2495c2dd68c61df86"},
{file = "airbyte_cdk-1.1.3.tar.gz", hash = "sha256:8d2a331a4a61f7d7ec1ff5ba76ca5d4fd70c2e24146e4b12673568c08484dece"},
]
[package.dependencies]
airbyte-protocol-models = ">=0.9.0,<1.0"
backoff = "*"
cachetools = "*"
cryptography = ">=42.0.5,<43.0.0"
Deprecated = ">=1.2,<1.3"
dpath = ">=2.1.6,<3.0.0"
genson = "1.2.2"
isodate = ">=0.6.1,<0.7.0"
Jinja2 = ">=3.1.2,<3.2.0"
jsonref = ">=0.2,<0.3"
jsonschema = ">=3.2.0,<3.3.0"
langchain_core = "0.1.42"
pendulum = "<3.0.0"
pydantic = ">=1.10.8,<2.0.0"
pyjwt = ">=2.8.0,<3.0.0"
pyrate-limiter = ">=3.1.0,<3.2.0"
python-dateutil = "*"
pytz = "2024.1"
PyYAML = ">=6.0.1,<7.0.0"
requests = "*"
requests_cache = "*"
wcmatch = "8.4"
[package.extras]
file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"]
sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"]
vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"]
[[package]]
name = "airbyte-protocol-models"
version = "0.11.0"
@@ -179,13 +219,13 @@ files = [
[[package]]
name = "anyio"
version = "4.3.0"
version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
{file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
{file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
{file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
]
[package.dependencies]
@@ -342,6 +382,17 @@ files = [
{file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
]
[[package]]
name = "bracex"
version = "2.4"
description = "Bash style brace expander."
optional = false
python-versions = ">=3.8"
files = [
{file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"},
{file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"},
]
[[package]]
name = "brotli"
version = "1.1.0"
@@ -721,7 +772,7 @@ tqdm = "^4.66.2"
type = "git"
url = "git@github.com:airbytehq/airbyte-platform-internal"
reference = "HEAD"
resolved_reference = "a9ff6a91f11d799ff87f99483f7d2a678548b87a"
resolved_reference = "7c886731fcf100bfdb0f57ce4c14dafb121ba263"
subdirectory = "tools/connection-retriever"
[[package]]
@@ -817,6 +868,17 @@ packaging = ">=17.0"
pandas = ">=0.24.2"
pyarrow = ">=3.0.0"
[[package]]
name = "decorator"
version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "deepdiff"
version = "6.7.1"
@@ -835,6 +897,23 @@ ordered-set = ">=4.0.2,<4.2.0"
cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"]
optimize = ["orjson"]
[[package]]
name = "deprecated"
version = "1.2.14"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
{file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
]
[package.dependencies]
wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
[[package]]
name = "docker"
version = "6.1.3"
@@ -1061,13 +1140,12 @@ files = [
[[package]]
name = "genson"
version = "1.3.0"
version = "1.2.2"
description = "GenSON is a powerful, user-friendly JSON Schema generator."
optional = false
python-versions = "*"
files = [
{file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"},
{file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"},
{file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"},
]
[[package]]
@@ -1085,12 +1163,12 @@ files = [
google-auth = ">=2.14.1,<3.0.dev0"
googleapis-common-protos = ">=1.56.2,<2.0.dev0"
grpcio = [
{version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
{version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
{version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
grpcio-status = [
{version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
{version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
{version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
proto-plus = ">=1.22.3,<2.0.0dev"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
@@ -1260,8 +1338,8 @@ google-cloud-audit-log = ">=0.1.0,<1.0.0dev"
google-cloud-core = ">=2.0.0,<3.0.0dev"
grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
proto-plus = [
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
@@ -1730,6 +1808,20 @@ blessed = ">=1.19.0"
editor = ">=1.6.0"
readchar = ">=3.0.6"
[[package]]
name = "isodate"
version = "0.6.1"
description = "An ISO 8601 date/time/duration parser and formatter"
optional = false
python-versions = "*"
files = [
{file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
{file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
]
[package.dependencies]
six = "*"
[[package]]
name = "itsdangerous"
version = "2.2.0"
@@ -1772,6 +1864,63 @@ files = [
[package.dependencies]
ansicon = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "jsonpatch"
version = "1.33"
description = "Apply JSON-Patches (RFC 6902)"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
files = [
{file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"},
{file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"},
]
[package.dependencies]
jsonpointer = ">=1.9"
[[package]]
name = "jsonpointer"
version = "2.4"
description = "Identify specific nodes in a JSON document (RFC 6901)"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
files = [
{file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"},
{file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"},
]
[[package]]
name = "jsonref"
version = "0.2"
description = "An implementation of JSON Reference for Python"
optional = false
python-versions = "*"
files = [
{file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"},
{file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"},
]
[[package]]
name = "jsonschema"
version = "3.2.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = "*"
files = [
{file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"},
{file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"},
]
[package.dependencies]
attrs = ">=17.4.0"
pyrsistent = ">=0.14.0"
setuptools = "*"
six = ">=1.11.0"
[package.extras]
format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"]
format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"]
[[package]]
name = "kaitaistruct"
version = "0.10"
@@ -1783,6 +1932,44 @@ files = [
{file = "kaitaistruct-0.10.tar.gz", hash = "sha256:a044dee29173d6afbacf27bcac39daf89b654dd418cfa009ab82d9178a9ae52a"},
]
[[package]]
name = "langchain-core"
version = "0.1.42"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"},
{file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"},
]
[package.dependencies]
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
packaging = ">=23.2,<24.0"
pydantic = ">=1,<3"
PyYAML = ">=5.3"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langsmith"
version = "0.1.65"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.65-py3-none-any.whl", hash = "sha256:ab4487029240e69cca30da1065f1e9138e5a7ca2bbe8c697f0bd7d5839f71cf7"},
{file = "langsmith-0.1.65.tar.gz", hash = "sha256:d3c2eb2391478bd79989f02652cf66e29a7959d677614b6993a47cef43f7f43b"},
]
[package.dependencies]
orjson = ">=3.9.14,<4.0.0"
pydantic = ">=1,<3"
requests = ">=2,<3"
[[package]]
name = "ldap3"
version = "2.9.1"
@@ -2278,15 +2465,70 @@ files = [
[package.extras]
dev = ["black", "mypy", "pytest"]
[[package]]
name = "orjson"
version = "3.10.3"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
{file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
{file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
{file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
{file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
{file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
{file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
{file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
{file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
{file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
{file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
{file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
{file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
{file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
{file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
{file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
{file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
]
[[package]]
name = "packaging"
version = "24.0"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
{file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
@@ -2327,8 +2569,8 @@ files = [
[package.dependencies]
numpy = [
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
@@ -2419,6 +2661,40 @@ bcrypt = ["bcrypt (>=3.1.0)"]
build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"]
totp = ["cryptography"]
[[package]]
name = "pendulum"
version = "2.1.2"
description = "Python datetimes made easy"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"},
{file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"},
{file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"},
{file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"},
{file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"},
{file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"},
{file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"},
{file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"},
{file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"},
{file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"},
{file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"},
{file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"},
{file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"},
{file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"},
{file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"},
{file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"},
{file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"},
{file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"},
{file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"},
{file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"},
{file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"},
]
[package.dependencies]
python-dateutil = ">=2.6,<3.0"
pytzdata = ">=2020.1"
[[package]]
name = "pg8000"
version = "1.31.2"
@@ -2797,6 +3073,62 @@ files = [
{file = "pyperclip-1.8.2.tar.gz", hash = "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57"},
]
[[package]]
name = "pyrate-limiter"
version = "3.1.1"
description = "Python Rate-Limiter using Leaky-Bucket Algorithm"
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"},
{file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"},
]
[package.extras]
all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"]
docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"]
[[package]]
name = "pyrsistent"
version = "0.20.0"
description = "Persistent/Functional/Immutable data structures"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
{file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
{file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
{file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
{file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
{file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
{file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
{file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
{file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
{file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
{file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
{file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
{file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
{file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
{file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
{file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
{file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
]
[[package]]
name = "pytest"
version = "8.2.1"
@@ -2912,6 +3244,17 @@ files = [
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
]
[[package]]
name = "pytzdata"
version = "2020.1"
description = "The Olson timezone database for Python."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"},
{file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"},
]
[[package]]
name = "pywin32"
version = "306"
@@ -3027,6 +3370,36 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-cache"
version = "1.2.0"
description = "A persistent cache for python requests"
optional = false
python-versions = ">=3.8"
files = [
{file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"},
{file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"},
]
[package.dependencies]
attrs = ">=21.2"
cattrs = ">=22.2"
platformdirs = ">=2.5"
requests = ">=2.22"
url-normalize = ">=1.4"
urllib3 = ">=1.25.5"
[package.extras]
all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
bson = ["bson (>=0.5)"]
docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
json = ["ujson (>=5.4)"]
mongodb = ["pymongo (>=3)"]
redis = ["redis (>=3)"]
security = ["itsdangerous (>=2.0)"]
yaml = ["pyyaml (>=6.0.1)"]
[[package]]
name = "requests-oauthlib"
version = "2.0.0"
@@ -3387,6 +3760,21 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "tenacity"
version = "8.3.0"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
files = [
{file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"},
{file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"},
]
[package.extras]
doc = ["reno", "sphinx"]
test = ["pytest", "tornado (>=4.5)", "typeguard"]
[[package]]
name = "termcolor"
version = "2.4.0"
@@ -3498,13 +3886,13 @@ files = [
[[package]]
name = "types-requests"
version = "2.32.0.20240521"
version = "2.32.0.20240523"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-requests-2.32.0.20240521.tar.gz", hash = "sha256:c5c4a0ae95aad51f1bf6dae9eed04a78f7f2575d4b171da37b622e08b93eb5d3"},
{file = "types_requests-2.32.0.20240521-py3-none-any.whl", hash = "sha256:ab728ba43ffb073db31f21202ecb97db8753ded4a9dc49cb480d8a5350c5c421"},
{file = "types-requests-2.32.0.20240523.tar.gz", hash = "sha256:26b8a6de32d9f561192b9942b41c0ab2d8010df5677ca8aa146289d11d505f57"},
{file = "types_requests-2.32.0.20240523-py3-none-any.whl", hash = "sha256:f19ed0e2daa74302069bbbbf9e82902854ffa780bc790742a810a9aaa52f65ec"},
]
[package.dependencies]
@@ -3512,13 +3900,13 @@ urllib3 = ">=2"
[[package]]
name = "typing-extensions"
version = "4.11.0"
version = "4.12.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
{file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
{file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"},
{file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"},
]
[[package]]
@@ -3532,6 +3920,20 @@ files = [
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
[[package]]
name = "url-normalize"
version = "1.4.3"
description = "URL normalization for Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
{file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
{file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
]
[package.dependencies]
six = "*"
[[package]]
name = "urllib3"
version = "2.2.1"
@@ -3565,6 +3967,20 @@ files = [
{file = "urwid_mitmproxy-2.1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:d2d536ad412022365b5e1974cde9029b86cfc30f3960ae073f959630f0c27c21"},
]
[[package]]
name = "wcmatch"
version = "8.4"
description = "Wildcard/glob file name matcher."
optional = false
python-versions = ">=3.7"
files = [
{file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"},
{file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"},
]
[package.dependencies]
bracex = ">=2.1.1"
[[package]]
name = "wcwidth"
version = "0.2.13"
@@ -3609,6 +4025,85 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "wrapt"
version = "1.16.0"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.6"
files = [
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
{file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
{file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
{file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
{file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
{file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
{file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
{file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
{file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
{file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
{file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
{file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
{file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
{file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
{file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
{file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
{file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
{file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
{file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
{file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
{file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
{file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
{file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
{file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
{file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
{file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
{file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
]
[[package]]
name = "wsproto"
version = "1.2.0"
@@ -3801,4 +4296,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10,<3.12"
content-hash = "02d813aa0511a7a4d2ebcec7ac1f430d15b10cfd5e268b357374b86db8ae7661"
content-hash = "5a9b833a4fd53cd81d5d767ea99a2ca694c421c8b0e85cbd551e98b10ec3cd5a"

View File

@@ -16,10 +16,13 @@ packages = [
[tool.poetry.dependencies]
python = "^3.10,<3.12"
airbyte-cdk = "*"
airbyte-protocol-models = "<1.0.0"
cachetools = "~=5.3.3"
dagger-io = "==0.9.6"
decorator = ">=5.1.1"
deepdiff = "6.7.1"
jsonschema = "*"
pydantic = "*"
pytest-asyncio = "~=0.23.5"
pytest = "^8.1.1"
@@ -35,7 +38,7 @@ asyncer = "^0.0.5"
rich = "^13.7.1"
mitmproxy = "^10.2.4"
requests = "<=2.31.1" # Pinned due to this issue https://github.com/docker/docker-py/issues/3256#issuecomment-2127688011
pyyaml = "^6.0.1"
pyyaml = "~=6.0.1"
dpath = "^2.1.6"
genson = "^1.2.2"
segment-analytics-python = "^2.3.2"

View File

@@ -0,0 +1,25 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
from __future__ import annotations
from enum import Enum
class TestEvaluationMode(Enum):
    """Evaluation modes controlling how test assertion failures are reported.

    Two modes are supported:

    * ``DIAGNOSTIC`` — ``AssertionError``s do not fail the test run; errors are
      still surfaced in the test report. Used in live tests for checks that
      describe an ideal connector state (currently validation tests) without
      affecting the connector's overall functionality.
    * ``STRICT`` — tests pass or fail as usual.

    A test opts into diagnostic handling with the
    ``@pytest.mark.allow_diagnostic_mode`` decorator, activated by passing the
    ``--validation-test-mode=diagnostic`` flag.
    """

    DIAGNOSTIC = "diagnostic"
    STRICT = "strict"

View File

@@ -0,0 +1,265 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from functools import reduce
from typing import Any, Dict, List, Mapping, Optional, Set, Text, Union
import dpath.util
import pendulum
from jsonref import JsonRef
class CatalogField:
"""Field class to represent cursor/pk fields.
It eases the read of values from records according to schema definition.
"""
def __init__(self, schema: Mapping[str, Any], path: List[str]):
self.schema = schema
self.path = path
self.formats = self._detect_formats()
def _detect_formats(self) -> Set[str]:
"""Extract set of formats/types for this field"""
format_ = []
try:
format_ = self.schema.get("format", self.schema["type"])
if not isinstance(format_, List):
format_ = [format_]
except KeyError:
pass
return set(format_)
def _parse_value(self, value: Any) -> Any:
"""Do actual parsing of the serialized value"""
if self.formats.intersection({"datetime", "date-time", "date"}):
if value is None and "null" not in self.formats:
raise ValueError(f"Invalid field format. Value: {value}. Format: {self.formats}")
# handle beautiful MySQL datetime, i.e. NULL datetime
if value.startswith("0000-00-00"):
value = value.replace("0000-00-00", "0001-01-01")
return pendulum.parse(value)
return value
def parse(self, record: Mapping[str, Any], path: Optional[List[Union[int, str]]] = None) -> Any:
"""Extract field value from the record and cast it to native type"""
path = path or self.path
value = reduce(lambda data, key: data[key], path, record)
return self._parse_value(value)
class JsonSchemaHelper:
    """Helper class to simplify schema validation and read of records according to their schema."""

    def __init__(self, schema):
        # Schema is only read, never mutated, by this helper.
        self._schema = schema

    def get_ref(self, path: str) -> Any:
        """Resolve a JSON reference within this schema.

        :param path: reference (#/definitions/SomeClass, etc)
        :return: part of schema that is the definition of the reference
        :raises KeyError: in case path can't be followed
        """
        node = self._schema
        # Skip the leading "#" fragment marker, then walk segment by segment.
        for segment in path.split("/")[1:]:
            node = node[segment]
        return node

    def get_property(self, path: List[str]) -> Mapping[str, Any]:
        """Get any part of schema according to the provided path, resolving $refs if necessary.

        Example::

            schema = {
                "properties": {
                    "field1": {
                        "properties": {
                            "nested_field": {
                                <inner_object>
                            }
                        }
                    },
                    "field2": ...
                }
            }
            helper = JsonSchemaHelper(schema)
            helper.get_property(["field1", "nested_field"]) == <inner_object>

        :param path: list of fields in the order of navigation
        :return: discovered part of schema
        :raises KeyError: in case path can't be followed
        """
        node = self._schema
        for segment in path:
            # Resolve a reference at this level before descending into "properties".
            if "$ref" in node:
                node = self.get_ref(node["$ref"])
            node = node["properties"][segment]
        return node

    def field(self, path: List[str]) -> CatalogField:
        """Get a schema property and wrap it into a CatalogField.

        CatalogField is a helper to ease the read of values from records according to schema definition.

        :param path: list of fields in the order of navigation
        :return: discovered part of schema wrapped in CatalogField
        :raises KeyError: in case path can't be followed
        """
        return CatalogField(schema=self.get_property(path), path=path)

    def get_node(self, path: List[Union[str, int]]) -> Any:
        """Return part of the schema by the specified path.

        Unlike get_property, this navigates raw keys/indices (not "properties"),
        so it can descend into arrays and arbitrary schema keywords.

        :param path: list of fields in the order of navigation
        :raises KeyError: in case path can't be followed
        """
        node = self._schema
        for segment in path:
            if "$ref" in node:
                node = self.get_ref(node["$ref"])
            node = node[segment]
        return node

    def get_parent_path(self, path: str, separator="/") -> Any:
        """
        Return the parent path of the supplied path.

        The path is first made absolute (prefixed with the separator if needed),
        then everything after the last separator is dropped. The root's parent
        is the empty string.
        """
        absolute_path = f"{separator}{path}" if not path.startswith(separator) else path
        parent_path, _ = absolute_path.rsplit(sep=separator, maxsplit=1)
        return parent_path

    def get_parent(self, path: str, separator="/") -> Any:
        """
        Return the parent dict of a given path within the schema.

        Returns the whole schema when the path's parent is the root.
        """
        parent_path = self.get_parent_path(path, separator=separator)
        if parent_path == "":
            return self._schema
        return dpath.util.get(self._schema, parent_path, separator=separator)

    def find_nodes(self, keys: List[str]) -> List[List[Union[str, int]]]:
        """Find all paths that lead to nodes with the specified keys.

        :param keys: list of keys
        :return: list of json object paths (each path is a list of dict keys / list indices)
        """
        variant_paths = []

        def traverse_schema(_schema: Union[Dict[Text, Any], List], path=None):
            path = path or []
            # Record the path whenever its final segment is one of the wanted keys.
            if path and path[-1] in keys:
                variant_paths.append(path)
            if isinstance(_schema, dict):
                for item in _schema:
                    traverse_schema(_schema[item], [*path, item])
            elif isinstance(_schema, list):
                for i, item in enumerate(_schema):
                    traverse_schema(_schema[i], [*path, i])

        traverse_schema(self._schema)
        return variant_paths
def get_object_structure(obj: dict) -> List[str]:
    """
    Traverse an object and compose the list of its property key paths, nested keys included.

    The returned list reflects the object's structure as slash-separated key
    paths. When an object is nested inside an array, only the first element is
    inspected and it is assumed to be representative of the rest.

    :param obj: data object to get its structure
    :returns: list of object property key paths
    """
    collected: List[str] = []

    def _walk(node, prefix=""):
        if prefix:
            collected.append(prefix)
        if isinstance(node, dict):
            for key, child in node.items():
                _walk(child, f"{prefix}/{key}")
        elif isinstance(node, list) and len(node) > 0:
            # Arrays contribute a "[]" segment; only the first element is sampled.
            _walk(node[0], f"{prefix}/[]")

    _walk(obj)
    return collected
def get_expected_schema_structure(schema: dict, annotate_one_of: bool = False) -> List[str]:
    """
    Traverse a json schema and compose the list of property key paths an object is expected to have.

    NOTE(review): the input schema is mutated in place when it has a root "$ref"
    (the reference is inlined and popped) — confirm callers do not reuse the dict.

    :param annotate_one_of: Generate one_of index in path
    :param schema: jsonschema to get expected paths
    :returns: list of object property key paths
    """
    paths = []
    if "$ref" in schema:
        """
        JsonRef doesn't work correctly with schemas that have references in the root, e.g.
        {
            "$ref": "#/definitions/ref"
            "definitions": {
                "ref": ...
            }
        }
        Considering this schema already processed by a resolver, it should
        contain only references to the definitions section, so replace the root
        reference manually before processing it with the JsonRef library.
        """
        ref = schema["$ref"].split("/")[-1]
        schema.update(schema["definitions"][ref])
        schema.pop("$ref")
    # Resolve all references to simplify schema processing.
    schema = JsonRef.replace_refs(schema)

    def _scan_schema(subschema, path=""):
        # oneOf/anyOf: scan each variant; optionally annotate the path with the variant index.
        if "oneOf" in subschema or "anyOf" in subschema:
            if annotate_one_of:
                return [
                    _scan_schema({"type": "object", **s}, path + f"({num})")
                    for num, s in enumerate(subschema.get("oneOf") or subschema.get("anyOf"))
                ]
            return [_scan_schema({"type": "object", **s}, path) for s in subschema.get("oneOf") or subschema.get("anyOf")]
        # A missing "type" is treated as a nullable object.
        schema_type = subschema.get("type", ["object", "null"])
        if not isinstance(schema_type, list):
            schema_type = [schema_type]
        if "object" in schema_type:
            props = subschema.get("properties")
            if not props:
                # Handle objects with arbitrary properties:
                # {"type": "object", "additionalProperties": {"type": "string"}}
                if path:
                    paths.append(path)
                return
            return {k: _scan_schema(v, path + "/" + k) for k, v in props.items()}
        elif "array" in schema_type:
            items = subschema.get("items", {})
            return [_scan_schema(items, path + "/[]")]
        # Scalar leaf: record its path.
        paths.append(path)

    _scan_schema(schema)
    return paths
def flatten_tuples(to_flatten):
    """Flatten an arbitrarily nested tuple of tuples into a single flat tuple.

    Non-tuple input is treated as a one-element tuple. Duplicates are collapsed
    (elements pass through a set), so element order in the result is not
    guaranteed.
    """
    collected = set()
    elements = to_flatten if isinstance(to_flatten, tuple) else (to_flatten,)
    for element in elements:
        if isinstance(element, tuple):
            collected.update(flatten_tuples(element))
        else:
            collected.add(element)
    return tuple(collected)
def get_paths_in_connector_config(schema: dict) -> List[str]:
    """
    Extract the path_in_connector_config paths from the provided schema's values.

    Each value is expected to carry a "path_in_connector_config" list of
    segments, which is rendered as a slash-prefixed, slash-joined path.

    :param schema: jsonschema whose values carry path_in_connector_config attributes
    :returns: list of path_in_connector_config paths
    """
    paths = []
    for value in schema.values():
        segments = value["path_in_connector_config"]
        paths.append("/" + "/".join(segments))
    return paths

View File

@@ -9,14 +9,17 @@ from collections.abc import Iterable, Iterator, MutableMapping
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Any, Optional
from typing import Any, Dict, List, Optional
import _collections_abc
import dagger
import requests
from airbyte_protocol.models import AirbyteCatalog # type: ignore
from airbyte_protocol.models import AirbyteMessage # type: ignore
from airbyte_protocol.models import AirbyteStateMessage # type: ignore
from airbyte_protocol.models import AirbyteStreamStatusTraceMessage # type: ignore
from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore
from airbyte_protocol.models import TraceType # type: ignore
from airbyte_protocol.models import Type as AirbyteMessageType
from genson import SchemaBuilder # type: ignore
from live_tests.commons.backends import DuckDbBackend, FileBackend
@@ -329,6 +332,22 @@ class ExecutionResult:
if message.type is AirbyteMessageType.RECORD:
yield message
    def get_states_per_stream(self, stream: str) -> Dict[str, List[AirbyteStateMessage]]:
        """Group all STATE messages emitted during the execution by stream name.

        :param stream: stream name — only used in the log line; the returned
            mapping covers every stream found in the output, not just this one.
        :return: mapping of stream name to the list of its state messages,
            in emission order.
        """
        self.logger.info(f"Reading state messages for stream {stream}")
        states = defaultdict(list)
        for message in self.airbyte_messages:
            if message.type is AirbyteMessageType.STATE:
                # NOTE(review): assumes stream-scoped state (message.state.stream
                # is set); global/legacy state formats would raise AttributeError
                # here — confirm upstream guarantees.
                states[message.state.stream.stream_descriptor.name].append(message.state)
        return states
def get_status_messages_per_stream(self, stream: str) -> Dict[str, List[AirbyteStreamStatusTraceMessage]]:
self.logger.info(f"Reading state messages for stream {stream}")
statuses = defaultdict(list)
for message in self.airbyte_messages:
if message.type is AirbyteMessageType.TRACE and message.trace.type == TraceType.STREAM_STATUS:
statuses[message.trace.stream_status.stream_descriptor.name].append(message.trace.stream_status)
return statuses
def get_message_count_per_type(self) -> dict[AirbyteMessageType, int]:
message_count: dict[AirbyteMessageType, int] = defaultdict(int)
for message in self.airbyte_messages:

View File

@@ -12,11 +12,13 @@ from typing import TYPE_CHECKING, Optional
import dagger
import pytest
from airbyte_protocol.models import ConfiguredAirbyteCatalog # type: ignore
from airbyte_protocol.models import AirbyteCatalog, AirbyteStateMessage, ConfiguredAirbyteCatalog, ConnectorSpecification # type: ignore
from connection_retriever.audit_logging import get_user_email # type: ignore
from connection_retriever.retrieval import ConnectionNotFoundError, NotPermittedError # type: ignore
from live_tests import stash_keys
from live_tests.commons.connection_objects_retrieval import ConnectionObject, get_connection_objects
from live_tests.commons.connector_runner import ConnectorRunner, Proxy
from live_tests.commons.evaluation_modes import TestEvaluationMode
from live_tests.commons.models import (
ActorType,
Command,
@@ -30,26 +32,25 @@ from live_tests.commons.models import (
from live_tests.commons.secret_access import get_airbyte_api_key
from live_tests.commons.segment_tracking import track_usage
from live_tests.commons.utils import build_connection_url, clean_up_artifacts
from live_tests.regression_tests import stash_keys
from live_tests.report import Report, ReportState
from live_tests.utils import get_catalog, get_spec
from rich.prompt import Confirm, Prompt
from .report import Report, ReportState
if TYPE_CHECKING:
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.fixtures import SubRequest
from pytest_sugar import SugarTerminalReporter # type: ignore
## CONSTS
LOGGER = logging.getLogger("regression_tests")
# CONSTS
LOGGER = logging.getLogger("regression")
MAIN_OUTPUT_DIRECTORY = Path("/tmp/regression_tests_artifacts")
# It's used by Dagger and its very verbose
logging.getLogger("httpx").setLevel(logging.ERROR)
## PYTEST HOOKS
# PYTEST HOOKS
def pytest_addoption(parser: Parser) -> None:
parser.addoption(
"--connector-image",
@@ -83,6 +84,12 @@ def pytest_addoption(parser: Parser) -> None:
"We recommend reading with state to properly test incremental sync. \n"
"But if the target version introduces a breaking change in the state, you might want to run without state. \n",
)
parser.addoption(
"--test-evaluation-mode",
choices=[e.value for e in TestEvaluationMode],
default=TestEvaluationMode.STRICT.value,
help='If "diagnostic" mode is selected, all tests will pass as long as there is no exception; warnings will be logged. In "strict" mode, tests may fail.',
)
def pytest_configure(config: Config) -> None:
@@ -124,6 +131,7 @@ def pytest_configure(config: Config) -> None:
custom_configured_catalog_path = config.getoption("--catalog-path")
custom_state_path = config.getoption("--state-path")
config.stash[stash_keys.SELECTED_STREAMS] = set(config.getoption("--stream") or [])
config.stash[stash_keys.TEST_EVALUATION_MODE] = TestEvaluationMode(config.getoption("--test-evaluation-mode", "strict"))
if config.stash[stash_keys.RUN_IN_AIRBYTE_CI]:
config.stash[stash_keys.SHOULD_READ_WITH_STATE] = bool(get_option_or_fail(config, "--should-read-with-state"))
@@ -234,6 +242,17 @@ def pytest_keyboard_interrupt(excinfo: Exception) -> None:
def pytest_runtest_makereport(item: pytest.Item, call: pytest.CallInfo) -> Generator:
outcome = yield
report = outcome.get_result()
# Overwrite test failures with passes for tests being run in diagnostic mode
if (
item.config.stash.get(stash_keys.TEST_EVALUATION_MODE, TestEvaluationMode.STRICT) == TestEvaluationMode.DIAGNOSTIC
and "allow_diagnostic_mode" in item.keywords
):
if call.when == "call":
if call.excinfo:
if report.outcome == "failed":
report.outcome = "passed"
# This is to add skipped or failed tests due to upstream fixture failures on setup
if report.outcome in ["failed", "skipped"] or report.when == "call":
item.config.stash[stash_keys.REPORT].add_test_result(
@@ -242,7 +261,7 @@ def pytest_runtest_makereport(item: pytest.Item, call: pytest.CallInfo) -> Gener
)
## HELPERS
# HELPERS
def get_option_or_fail(config: pytest.Config, option: str) -> str:
@@ -288,7 +307,7 @@ def prompt_for_read_with_or_without_state() -> bool:
return Prompt.ask(message) == "1"
## FIXTURES
# FIXTURES
@pytest.fixture(scope="session")
@@ -354,6 +373,16 @@ def configured_catalog(connection_objects: ConnectionObjects, selected_streams:
return connection_objects.configured_catalog
@pytest.fixture(scope="session")
def target_discovered_catalog(discover_target_execution_result: ExecutionResult) -> AirbyteCatalog:
return get_catalog(discover_target_execution_result)
@pytest.fixture(scope="session")
def target_spec(spec_target_execution_result: ExecutionResult) -> ConnectorSpecification:
return get_spec(spec_target_execution_result)
@pytest.fixture(scope="session", autouse=True)
def primary_keys_per_stream(
configured_catalog: ConfiguredAirbyteCatalog,

View File

@@ -4,5 +4,6 @@ console_output_style = progress
log_cli = True
log_cli_level= INFO
markers =
allow_diagnostic_mode: mark a test as eligible for diagnostic mode.
with_state: mark test as running a read command with state.
without_state: mark test as running a read command without state.

View File

@@ -6,9 +6,8 @@ from collections.abc import Callable
import pytest
from airbyte_protocol.models import Status, Type # type: ignore
from live_tests.commons.models import ExecutionResult
from live_tests.regression_tests.consts import MAX_LINES_IN_REPORT
from .utils import fail_test_on_failing_execution_results, tail_file
from live_tests.consts import MAX_LINES_IN_REPORT
from live_tests.utils import fail_test_on_failing_execution_results, is_successful_check, tail_file
pytestmark = [
pytest.mark.anyio,
@@ -32,12 +31,6 @@ async def test_check_passes_on_both_versions(
],
)
def is_successful_check(execution_result: ExecutionResult) -> bool:
for message in execution_result.airbyte_messages:
if message.type is Type.CONNECTION_STATUS and message.connectionStatus.status is Status.SUCCEEDED:
return True
return False
successful_control_check: bool = is_successful_check(check_control_execution_result)
successful_target_check: bool = is_successful_check(check_target_execution_result)
error_messages = []

View File

@@ -8,8 +8,7 @@ import pytest
from _pytest.fixtures import SubRequest
from airbyte_protocol.models import AirbyteCatalog, AirbyteStream, Type # type: ignore
from live_tests.commons.models import ExecutionResult
from .utils import fail_test_on_failing_execution_results, get_and_write_diff
from live_tests.utils import fail_test_on_failing_execution_results, get_and_write_diff, get_catalog
pytestmark = [
pytest.mark.anyio,
@@ -34,12 +33,6 @@ async def test_catalog_are_the_same(
],
)
def get_catalog(execution_result: ExecutionResult) -> AirbyteCatalog:
for message in execution_result.airbyte_messages:
if message.type is Type.CATALOG and message.catalog:
return message.catalog
return None
control_catalog = get_catalog(discover_control_execution_result)
target_catalog = get_catalog(discover_target_execution_result)

View File

@@ -9,8 +9,7 @@ import pytest
from airbyte_protocol.models import AirbyteMessage # type: ignore
from deepdiff import DeepDiff # type: ignore
from live_tests.commons.models import ExecutionResult
from .utils import fail_test_on_failing_execution_results, get_and_write_diff, get_test_logger, write_string_to_test_artifact
from live_tests.utils import fail_test_on_failing_execution_results, get_and_write_diff, get_test_logger, write_string_to_test_artifact
if TYPE_CHECKING:
from _pytest.fixtures import SubRequest
@@ -400,6 +399,7 @@ class TestDataIntegrity:
read_target_execution_result,
)
@pytest.mark.allow_diagnostic_mode
@pytest.mark.with_state()
async def test_all_records_are_the_same_with_state(
self,
@@ -431,6 +431,7 @@ class TestDataIntegrity:
read_with_state_target_execution_result,
)
@pytest.mark.allow_diagnostic_mode
@pytest.mark.without_state()
async def test_all_records_are_the_same_without_state(
self,

View File

@@ -6,8 +6,7 @@ from collections.abc import Callable
import pytest
from airbyte_protocol.models import Type # type: ignore
from live_tests.commons.models import ExecutionResult
from .utils import fail_test_on_failing_execution_results
from live_tests.utils import fail_test_on_failing_execution_results
pytestmark = [
pytest.mark.anyio,

View File

@@ -14,9 +14,8 @@ from typing import TYPE_CHECKING, Any, Optional
import requests
import yaml
from jinja2 import Environment, PackageLoader, select_autoescape
from live_tests.regression_tests import stash_keys
from .consts import MAX_LINES_IN_REPORT
from live_tests import stash_keys
from live_tests.consts import MAX_LINES_IN_REPORT
if TYPE_CHECKING:
import pytest

View File

@@ -4,8 +4,9 @@ from __future__ import annotations
from pathlib import Path
import pytest
from live_tests.commons.evaluation_modes import TestEvaluationMode
from live_tests.commons.models import ConnectionObjects
from live_tests.regression_tests.report import Report
from live_tests.report import Report
AIRBYTE_API_KEY = pytest.StashKey[str]()
AUTO_SELECT_CONNECTION = pytest.StashKey[bool]()
@@ -30,3 +31,4 @@ TARGET_VERSION = pytest.StashKey[str]()
TEST_ARTIFACT_DIRECTORY = pytest.StashKey[Path]()
USER = pytest.StashKey[str]()
WORKSPACE_ID = pytest.StashKey[str]()
TEST_EVALUATION_MODE = pytest.StashKey[TestEvaluationMode]

View File

@@ -7,13 +7,15 @@ from collections.abc import Callable, Iterable
from pathlib import Path
from typing import TYPE_CHECKING, Optional, Union
import docker # type: ignore
import pytest
from airbyte_protocol.models import AirbyteMessage, Type # type: ignore
from airbyte_protocol.models import AirbyteCatalog, AirbyteMessage, ConnectorSpecification, Status, Type # type: ignore
from deepdiff import DeepDiff # type: ignore
from live_tests import stash_keys
from live_tests.commons.models import ExecutionResult
from . import stash_keys
from .consts import MAX_LINES_IN_REPORT
from live_tests.consts import MAX_LINES_IN_REPORT
from mitmproxy import http, io # type: ignore
from mitmproxy.addons.savehar import SaveHar # type: ignore
if TYPE_CHECKING:
from _pytest.fixtures import SubRequest
@@ -122,3 +124,39 @@ def tail_file(file_path: Path, n: int = MAX_LINES_IN_REPORT) -> list[str]:
# Return the last n lines
return lines[-n:]
def is_successful_check(execution_result: ExecutionResult) -> bool:
    """Return True iff the execution emitted a CONNECTION_STATUS message with status SUCCEEDED."""
    return any(
        message.type is Type.CONNECTION_STATUS and message.connectionStatus.status is Status.SUCCEEDED
        for message in execution_result.airbyte_messages
    )
def get_catalog(execution_result: ExecutionResult) -> AirbyteCatalog:
    """Return the single catalog contained in the execution result's messages.

    Raises:
        ValueError: if the execution emitted no catalog message.
    """
    catalogs = [m.catalog for m in execution_result.airbyte_messages if m.type is Type.CATALOG and m.catalog]
    try:
        return catalogs[0]
    # Bug fix: indexing an empty list raises IndexError, not ValueError, so the
    # original `except ValueError` never fired and callers saw a bare IndexError
    # instead of the intended diagnostic message.
    except IndexError:
        raise ValueError(f"Expected exactly one catalog in the execution result, but got {len(catalogs)}.")
def get_spec(execution_result: ExecutionResult) -> ConnectorSpecification:
    """Return the single connector specification contained in the execution result's messages.

    Raises:
        ValueError: if the execution emitted no spec message.
    """
    specs = [m.spec for m in execution_result.airbyte_messages if m.type is Type.SPEC]
    try:
        return specs[0]
    # Bug fix: indexing an empty list raises IndexError, not ValueError, so the
    # original `except ValueError` never fired and the intended error message
    # was unreachable.
    except IndexError:
        raise ValueError(f"Expected exactly one spec in the execution result, but got {len(specs)}.")
def find_all_values_for_key_in_schema(schema: dict, searched_key: str):
    """Recursively yield every value stored under `searched_key` anywhere in a (nested) schema."""
    if isinstance(schema, list):
        for element in schema:
            yield from find_all_values_for_key_in_schema(element, searched_key)
    elif isinstance(schema, dict):
        for key, value in schema.items():
            if key == searched_key:
                yield value
            # Descend into nested containers; scalar values cannot hold the key.
            if isinstance(value, (dict, list)):
                yield from find_all_values_for_key_in_schema(value, searched_key)

View File

@@ -0,0 +1,43 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from typing import Callable
import pytest
from airbyte_protocol.models import Type
from live_tests.commons.models import ExecutionResult
from live_tests.consts import MAX_LINES_IN_REPORT
from live_tests.utils import fail_test_on_failing_execution_results, is_successful_check, tail_file
pytestmark = [
pytest.mark.anyio,
]
@pytest.mark.allow_diagnostic_mode
async def test_check_succeeds(
    record_property: Callable,
    check_target_execution_result: ExecutionResult,
) -> None:
    """
    Verify that the check command succeeds on the target connection.
    Success is determined by the presence of a connection status message with a status of SUCCEEDED.
    """
    fail_test_on_failing_execution_results(
        record_property,
        [check_target_execution_result],
    )
    # Exactly one CONNECTION_STATUS message is expected from a check run.
    status_message_count = sum(1 for msg in check_target_execution_result.airbyte_messages if msg.type == Type.CONNECTION_STATUS)
    assert status_message_count == 1
    error_messages = []
    if not is_successful_check(check_target_execution_result):
        # Surface the tail of stdout in the report to help debug the failure.
        record_property(
            f"Target CHECK standard output [Last {MAX_LINES_IN_REPORT} lines]",
            tail_file(check_target_execution_result.stdout_file_path, n=MAX_LINES_IN_REPORT),
        )
        error_messages.append("The target check did not succeed. Check the test artifacts for more information.")
    if error_messages:
        pytest.fail("\n".join(error_messages))

View File

@@ -0,0 +1,164 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from typing import TYPE_CHECKING, Callable, List, Union
import dpath.util
import jsonschema
import pytest
from airbyte_protocol.models import AirbyteCatalog
from live_tests.commons.models import ExecutionResult
from live_tests.utils import fail_test_on_failing_execution_results, find_all_values_for_key_in_schema
pytestmark = [
pytest.mark.anyio,
]
@pytest.mark.allow_diagnostic_mode
async def test_discover(
    record_property: Callable,
    discover_target_execution_result: ExecutionResult,
    target_discovered_catalog: AirbyteCatalog,
):
    """
    Verify that the discover command succeeds on the target connection.
    Success is determined by the presence of a catalog with one or more streams, all with unique names.
    """
    fail_test_on_failing_execution_results(
        record_property,
        [discover_target_execution_result],
    )
    # Bug fix: validate the catalog before dereferencing `.streams`. The original
    # computed duplicates first, so a missing catalog surfaced as an
    # AttributeError and the `is not None` assertion below was dead code.
    assert target_discovered_catalog is not None, "Message should have catalog"
    assert hasattr(target_discovered_catalog, "streams") and target_discovered_catalog.streams, "Catalog should contain streams"
    duplicated_stream_names = _duplicated_stream_names(target_discovered_catalog.streams)
    assert len(duplicated_stream_names) == 0, f"Catalog should have uniquely named streams, duplicates are: {duplicated_stream_names}"
def _duplicated_stream_names(streams) -> List[str]:
"""Counts number of times a stream appears in the catalog"""
name_counts = dict()
for stream in streams:
count = name_counts.get(stream.name, 0)
name_counts[stream.name] = count + 1
return [k for k, v in name_counts.items() if v > 1]
@pytest.mark.allow_diagnostic_mode
async def test_streams_have_valid_json_schemas(target_discovered_catalog: AirbyteCatalog):
    """Check if all stream schemas are valid json schemas."""
    for stream in target_discovered_catalog.streams:
        # check_schema raises jsonschema.SchemaError on an invalid schema,
        # which pytest reports as a test failure.
        jsonschema.Draft7Validator.check_schema(stream.json_schema)
@pytest.mark.allow_diagnostic_mode
async def test_defined_cursors_exist_in_schema(target_discovered_catalog: AirbyteCatalog):
    """Check if all of the source defined cursor fields exist on stream's json schema."""
    streams_with_cursor = (stream for stream in target_discovered_catalog.streams if stream.default_cursor_field)
    for stream in streams_with_cursor:
        schema = stream.json_schema
        assert "properties" in schema, f"Top level item should have an 'object' type for {stream.name} stream schema"
        # A nested cursor like ["a", "b"] becomes the dpath query "a/properties/b".
        cursor_path = "/properties/".join(stream.default_cursor_field)
        assert dpath.util.search(schema["properties"], cursor_path), (
            f"Some of defined cursor fields {stream.default_cursor_field} are not specified in discover schema "
            f"properties for {stream.name} stream"
        )
@pytest.mark.allow_diagnostic_mode
async def test_defined_refs_exist_in_schema(target_discovered_catalog: AirbyteCatalog):
    """Check the presence of unresolved `$ref`s values within each json schema."""
    schemas_errors = [
        {stream.name: unresolved_refs}
        for stream in target_discovered_catalog.streams
        if (unresolved_refs := list(find_all_values_for_key_in_schema(stream.json_schema, "$ref")))
    ]
    assert not schemas_errors, f"Found unresolved `$refs` values for selected streams: {tuple(schemas_errors)}."
@pytest.mark.allow_diagnostic_mode
@pytest.mark.parametrize("keyword", ["allOf", "not"])
async def test_defined_keyword_exist_in_schema(keyword, target_discovered_catalog: AirbyteCatalog):
    """Check for the presence of not allowed keywords within each json schema"""
    schemas_errors = [
        stream.name
        for stream in target_discovered_catalog.streams
        if _find_keyword_schema(stream.json_schema, key=keyword)
    ]
    assert not schemas_errors, f"Found not allowed `{keyword}` keyword for selected streams: {schemas_errors}."
def _find_keyword_schema(schema: Union[dict, list, str], key: str) -> bool:
"""Find at least one keyword in a schema, skip object properties"""
def _find_keyword(schema, key, _skip=False):
if isinstance(schema, list):
for v in schema:
_find_keyword(v, key)
elif isinstance(schema, dict):
for k, v in schema.items():
if k == key and not _skip:
raise StopIteration
rec_skip = k == "properties" and schema.get("type") == "object"
_find_keyword(v, key, rec_skip)
try:
_find_keyword(schema, key)
except StopIteration:
return True
return False
@pytest.mark.allow_diagnostic_mode
async def test_primary_keys_exist_in_schema(target_discovered_catalog: AirbyteCatalog):
    """Check that all primary keys are present in catalog."""
    for stream in target_discovered_catalog.streams:
        schema = stream.json_schema
        for pk in stream.source_defined_primary_key or []:
            # A composite key path ["a", "b"] becomes the dpath query "a/properties/b".
            pk_path = "/properties/".join(pk)
            assert dpath.util.search(schema["properties"], pk_path), f"One of the PKs ({pk}) is not specified in discover schema for {stream.name} stream"
@pytest.mark.allow_diagnostic_mode
async def test_streams_has_sync_modes(target_discovered_catalog: AirbyteCatalog):
    """Check that the supported_sync_modes is a not empty field in streams of the catalog."""
    for stream in target_discovered_catalog.streams:
        sync_modes = stream.supported_sync_modes
        assert sync_modes is not None, f"The stream {stream.name} is missing supported_sync_modes field declaration."
        assert len(sync_modes) > 0, f"supported_sync_modes list on stream {stream.name} should not be empty."
@pytest.mark.allow_diagnostic_mode
async def test_additional_properties_is_true(target_discovered_catalog: AirbyteCatalog):
    """
    Check that value of the "additionalProperties" field is always true.
    A stream schema declaring "additionalProperties": false introduces the risk of accidental breaking changes.
    Specifically, when removing a property from the stream schema, existing connector catalog will no longer be valid.
    False value introduces the risk of accidental breaking changes.
    Read https://github.com/airbytehq/airbyte/issues/14196 for more details.
    """
    for stream in target_discovered_catalog.streams:
        # `all()` over an empty iterable is True, so streams without any
        # additionalProperties occurrences pass trivially.
        values = find_all_values_for_key_in_schema(stream.json_schema, "additionalProperties")
        assert all(
            value is True for value in values
        ), "When set, additionalProperties field value must be true for backward compatibility."
@pytest.mark.allow_diagnostic_mode
@pytest.mark.skip("This a placeholder for a CAT which has too many failures. We need to fix the connectors at scale first.")
async def test_catalog_has_supported_data_types(target_discovered_catalog: AirbyteCatalog):
    """
    Check that all streams have supported data types, format and airbyte_types.
    Supported data types are listed there: https://docs.airbyte.com/understanding-airbyte/supported-data-types/
    """
    # Intentionally unimplemented: see the skip marker above.
    pass

View File

@@ -0,0 +1,139 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from collections import defaultdict
from functools import reduce
from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional, Tuple
import pytest
from airbyte_cdk.sources.file_based.schema_helpers import conforms_to_schema
from airbyte_protocol.models import (
AirbyteStateMessage,
AirbyteStateStats,
AirbyteStateType,
AirbyteStreamStatus,
AirbyteStreamStatusTraceMessage,
ConfiguredAirbyteCatalog,
)
from live_tests.commons.models import ExecutionResult
from live_tests.utils import fail_test_on_failing_execution_results, get_test_logger
if TYPE_CHECKING:
from _pytest.fixtures import SubRequest
pytestmark = [
pytest.mark.anyio,
]
@pytest.mark.allow_diagnostic_mode
async def test_read(
    request: "SubRequest",
    record_property: Callable,
    configured_catalog: ConfiguredAirbyteCatalog,
    read_target_execution_result: ExecutionResult,
    primary_keys_per_stream: dict[str, Optional[list[str]]],
):
    """
    Verify that the read command succeeds on the target connection.
    Also makes assertions about the validity of the read command output:
    - At least one state message is emitted per stream
    - Appropriate stream status messages are emitted for each stream
    - If a primary key exists for the stream, it is present in the records emitted
    """
    has_records = False
    errors = []
    warnings = []
    fail_test_on_failing_execution_results(
        record_property,
        [read_target_execution_result],
    )
    for stream in configured_catalog.streams:
        records = read_target_execution_result.get_records_per_stream(stream.stream.name)
        state_messages = read_target_execution_result.get_states_per_stream(stream.stream.name)
        statuses = read_target_execution_result.get_status_messages_per_stream(stream.stream.name)
        primary_key = primary_keys_per_stream.get(stream.stream.name)
        for record in records:
            has_records = True
            if not conforms_to_schema(record.record.data, stream.schema()):
                errors.append(f"A record was encountered that does not conform to the schema. stream={stream.stream.name} record={record}")
            if primary_key:
                # Bug fix: _extract_primary_key_value always returns a dict, so the
                # original `is None` comparison was never true and this check was
                # dead code. Flag records where any key component is missing/null.
                # NOTE(review): the lookup is rooted at the full message dict
                # (record.dict()); confirm the primary key paths are relative to
                # it rather than to record.record.data.
                pk_values = _extract_primary_key_value(record.dict(), primary_key)
                if any(value is None for value in pk_values.values()):
                    errors.append(
                        f"Primary key subkeys {repr(primary_key)} have null values or not present in {stream.stream.name} stream records."
                    )
        if stream.stream.name not in state_messages:
            errors.append(
                f"At least one state message should be emitted per stream, but no state messages were emitted for {stream.stream.name}."
            )
        else:
            # Bug fix: the original indexed state_messages[...] unconditionally and
            # crashed with a KeyError when a stream emitted no state messages, even
            # though the problem had just been recorded above.
            try:
                _validate_state_messages(state_messages=state_messages[stream.stream.name], configured_catalog=configured_catalog)
            except AssertionError as exc:
                warnings.append(
                    f"Invalid state message for stream {stream.stream.name}. exc={exc} state_messages={state_messages[stream.stream.name]}"
                )
        if stream.stream.name not in statuses:
            warnings.append(f"No stream statuses were emitted for stream {stream.stream.name}.")
        # Bug fix: guard the status validation the same way to avoid a KeyError
        # when no statuses were emitted for the stream.
        elif not _validate_stream_statuses(configured_catalog=configured_catalog, statuses=statuses[stream.stream.name]):
            errors.append(f"Invalid statuses for stream {stream.stream.name}. statuses={statuses[stream.stream.name]}")
    if not has_records:
        errors.append("At least one record should be read using provided catalog.")
    if errors:
        # NOTE(review): errors are only logged here, never failed on, and
        # `warnings` is collected but never reported — confirm whether this test
        # should pytest.fail on collected errors.
        logger = get_test_logger(request)
        for error in errors:
            logger.info(error)
def _extract_primary_key_value(record: Mapping[str, Any], primary_key: List[List[str]]) -> dict[Tuple[str], Any]:
pk_values = {}
for pk_path in primary_key:
pk_value: Any = reduce(lambda data, key: data.get(key) if isinstance(data, dict) else None, pk_path, record)
pk_values[tuple(pk_path)] = pk_value
return pk_values
def _validate_stream_statuses(configured_catalog: ConfiguredAirbyteCatalog, statuses: List[AirbyteStreamStatusTraceMessage]):
    """Validate all statuses for all streams in the catalogs were emitted in correct order:
    1. STARTED
    2. RUNNING (can be >1)
    3. COMPLETE
    """
    # Group observed statuses by "<namespace>-<name>" so they can be compared
    # against the configured streams.
    stream_statuses = defaultdict(list)
    for status in statuses:
        stream_statuses[f"{status.stream_descriptor.namespace}-{status.stream_descriptor.name}"].append(status.status)
    # Every configured stream must have emitted at least one status, and no
    # unexpected stream may appear.
    assert set(f"{x.stream.namespace}-{x.stream.name}" for x in configured_catalog.streams) == set(
        stream_statuses
    ), "All stream must emit status"
    for stream_name, status_list in stream_statuses.items():
        # Minimum valid sequence is STARTED -> RUNNING -> COMPLETE, hence >= 3.
        assert (
            len(status_list) >= 3
        ), f"Stream `{stream_name}` statuses should be emitted in the next order: `STARTED`, `RUNNING`,... `COMPLETE`"
        assert status_list[0] == AirbyteStreamStatus.STARTED
        assert status_list[-1] == AirbyteStreamStatus.COMPLETE
        # Everything between the first and last status must be RUNNING.
        assert all(x == AirbyteStreamStatus.RUNNING for x in status_list[1:-1])
def _validate_state_messages(state_messages: List[AirbyteStateMessage], configured_catalog: ConfiguredAirbyteCatalog):
    """Assert the emitted state messages are well-formed: enough of them, no LEGACY state type, and source stats attached."""
    # Ensure that at least one state message is emitted for each stream
    # NOTE(review): callers pass the state messages of a single stream, yet this
    # compares against the total configured stream count — confirm intent.
    assert len(state_messages) >= len(
        configured_catalog.streams
    ), "At least one state message should be emitted for each configured stream."
    for state_message in state_messages:
        stream_name = state_message.stream.stream_descriptor.name
        state_type = state_message.type
        # Ensure legacy state type is not emitted anymore
        assert state_type != AirbyteStateType.LEGACY, (
            f"Ensure that statuses from the {stream_name} stream are emitted using either "
            "`STREAM` or `GLOBAL` state types, as the `LEGACY` state type is now deprecated."
        )
        # Check if stats are of the correct type and present in state message
        assert isinstance(state_message.sourceStats, AirbyteStateStats), "Source stats should be in state message."

View File

@@ -0,0 +1,492 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Set, Tuple
import dpath.util
import jsonschema
import pytest
from airbyte_protocol.models import ConnectorSpecification
from live_tests.commons.json_schema_helper import JsonSchemaHelper, get_expected_schema_structure, get_paths_in_connector_config
from live_tests.commons.models import ExecutionResult, SecretDict
from live_tests.utils import fail_test_on_failing_execution_results, find_all_values_for_key_in_schema, get_test_logger
pytestmark = [
pytest.mark.anyio,
]
if TYPE_CHECKING:
from _pytest.fixtures import SubRequest
@pytest.fixture(name="secret_property_names")
def secret_property_names_fixture():
    """Property names that conventionally hold credentials; used to verify they are marked `airbyte_secret` in the spec."""
    return (
        "client_token",
        "access_token",
        "api_token",
        "token",
        "secret",
        "client_secret",
        "password",
        "key",
        "service_account_info",
        "service_account",
        "tenant_id",
        "certificate",
        "jwt",
        "credentials",
        "app_id",
        "appid",
        "refresh_token",
    )
# Expected `pattern` values for spec fields with format "date" / "date-time";
# they mirror what the UI datepicker component produces (time part optional for
# date-time).
DATE_PATTERN = "^[0-9]{2}-[0-9]{2}-[0-9]{4}$"
DATETIME_PATTERN = "^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2})?$"
async def test_spec(
    record_property: Callable,
    spec_target_execution_result: ExecutionResult,
):
    """Check that the spec call succeeds on the target connector version."""
    fail_test_on_failing_execution_results(record_property, [spec_target_execution_result])
@pytest.mark.allow_diagnostic_mode
async def test_config_match_spec(
    target_spec: ConnectorSpecification,
    connector_config: Optional[SecretDict],
):
    """Check that config matches the actual schema from the spec call"""
    # Technical variables prefixed with an underscore are not part of the spec.
    stripped_config = {key: value for key, value in connector_config.data.items() if not key.startswith("_")}
    try:
        jsonschema.validate(instance=stripped_config, schema=target_spec.connectionSpecification)
    except jsonschema.exceptions.SchemaError as err:
        pytest.fail(f"Spec is invalid: {err}")
    except jsonschema.exceptions.ValidationError as err:
        pytest.fail(f"Config invalid: {err}")
async def test_enum_usage(target_spec: ConnectorSpecification):
    """Check that enum lists in specs contain distinct values."""
    docs_url = "https://docs.airbyte.io/connector-development/connector-specification-reference"
    docs_msg = f"See specification reference at {docs_url}."
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    for path in schema_helper.find_nodes(keys=["enum"]):
        enum_list = schema_helper.get_node(path)
        # A set collapses duplicates, so equal lengths mean all values are distinct.
        assert len(set(enum_list)) == len(
            enum_list
        ), f"Enum lists should not contain duplicate values. Misconfigured enum array: {enum_list}. {docs_msg}"
async def test_oneof_usage(target_spec: ConnectorSpecification):
    """Check that if spec contains oneOf it follows the rules according to reference
    https://docs.airbyte.io/connector-development/connector-specification-reference
    """
    docs_url = "https://docs.airbyte.io/connector-development/connector-specification-reference"
    docs_msg = f"See specification reference at {docs_url}."
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    variant_paths = schema_helper.find_nodes(keys=["oneOf", "anyOf"])
    for variant_path in variant_paths:
        # The node holding the oneOf/anyOf array (its parent path).
        top_level_obj = schema_helper.get_node(variant_path[:-1])
        assert (
            top_level_obj.get("type") == "object"
        ), f"The top-level definition in a `oneOf` block should have type: object. misconfigured object: {top_level_obj}. {docs_msg}"
        variants = schema_helper.get_node(variant_path)
        for variant in variants:
            assert "properties" in variant, f"Each item in the oneOf array should be a property with type object. {docs_msg}"
        oneof_path = ".".join(map(str, variant_path))
        # Properties shared by every variant.
        variant_props = [set(v["properties"].keys()) for v in variants]
        common_props = set.intersection(*variant_props)
        assert common_props, f"There should be at least one common property for {oneof_path} subobjects. {docs_msg}"
        # Of the shared properties, those declared `const` in every variant act
        # as the discriminator; exactly one is required.
        const_common_props = set()
        for common_prop in common_props:
            if all(["const" in variant["properties"][common_prop] for variant in variants]):
                const_common_props.add(common_prop)
        assert (
            len(const_common_props) == 1
        ), f"There should be exactly one common property with 'const' keyword for {oneof_path} subobjects. {docs_msg}"
        const_common_prop = const_common_props.pop()
        for n, variant in enumerate(variants):
            prop_obj = variant["properties"][const_common_prop]
            # `default`/`enum` on the discriminator must agree with its `const`.
            assert (
                "default" not in prop_obj or prop_obj["default"] == prop_obj["const"]
            ), f"'default' needs to be identical to const in common property {oneof_path}[{n}].{const_common_prop}. It's recommended to just use `const`. {docs_msg}"
            assert "enum" not in prop_obj or (
                len(prop_obj["enum"]) == 1 and prop_obj["enum"][0] == prop_obj["const"]
            ), f"'enum' needs to be an array with a single item identical to const in common property {oneof_path}[{n}].{const_common_prop}. It's recommended to just use `const`. {docs_msg}"
def _is_spec_property_name_secret(path: str, secret_property_names) -> Tuple[Optional[str], bool]:
"""
Given a path to a type field, extract a field name and decide whether it is a name of secret or not
based on a provided list of secret names.
Split the path by `/`, drop the last item and make list reversed.
Then iterate over it and find the first item that's not a reserved keyword or an index.
Example:
properties/credentials/oneOf/1/properties/api_key/type -> [api_key, properties, 1, oneOf, credentials, properties] -> api_key
"""
reserved_keywords = ("anyOf", "oneOf", "allOf", "not", "properties", "items", "type", "prefixItems")
for part in reversed(path.split("/")[:-1]):
if part.isdigit() or part in reserved_keywords:
continue
return part, part.lower() in secret_property_names
return None, False
def _property_can_store_secret(prop: dict) -> bool:
"""
Some fields can not hold a secret by design, others can.
Null type as well as boolean can not hold a secret value.
A string, a number or an integer type can always store secrets.
Secret objects and arrays can not be rendered correctly in the UI:
A field with a constant value can not hold a secret as well.
"""
unsecure_types = {"string", "integer", "number"}
type_ = prop["type"]
is_property_constant_value = bool(prop.get("const"))
can_store_secret = any(
[
isinstance(type_, str) and type_ in unsecure_types,
isinstance(type_, list) and (set(type_) & unsecure_types),
]
)
if not can_store_secret:
return False
# if a property can store a secret, additional check should be done if it's a constant value
return not is_property_constant_value
async def test_secret_is_properly_marked(target_spec: ConnectorSpecification, secret_property_names):
    """
    Each field has a type, therefore we can make a flat list of fields from the returned specification.
    Iterate over the list, check if a field name is a secret name, can potentially hold a secret value
    and make sure it is marked as `airbyte_secret`.
    """
    secrets_exposed = []
    non_secrets_hidden = []
    spec_properties = target_spec.connectionSpecification["properties"]
    # Every property has a `type` key, so searching for "**/type" flattens the
    # spec into one entry per property.
    for type_path, type_value in dpath.util.search(spec_properties, "**/type", yielded=True):
        _, is_property_name_secret = _is_spec_property_name_secret(type_path, secret_property_names)
        if not is_property_name_secret:
            continue
        # Strip the trailing "/type" segment to get the path of the property itself.
        absolute_path = f"/{type_path}"
        property_path, _ = absolute_path.rsplit(sep="/", maxsplit=1)
        property_definition = dpath.util.get(spec_properties, property_path)
        marked_as_secret = property_definition.get("airbyte_secret", False)
        possibly_a_secret = _property_can_store_secret(property_definition)
        # Marked secret but cannot actually hold one (e.g. boolean/const field).
        if marked_as_secret and not possibly_a_secret:
            non_secrets_hidden.append(property_path)
        # Looks like a secret by name and can hold one, but is not marked.
        if not marked_as_secret and possibly_a_secret:
            secrets_exposed.append(property_path)
    if non_secrets_hidden:
        properties = "\n".join(non_secrets_hidden)
        pytest.fail(
            f"""Some properties are marked with `airbyte_secret` although they probably should not be.
            Please double check them. If they're okay, please fix this test.
            {properties}"""
        )
    if secrets_exposed:
        properties = "\n".join(secrets_exposed)
        pytest.fail(
            f"""The following properties should be marked with `airbyte_secret!`
            {properties}"""
        )
def _fail_on_errors(errors: List[str]):
    """Fail the current test with all collected error messages (one per line), if any."""
    if errors:
        pytest.fail("\n".join(errors))
def test_property_type_is_not_array(target_spec: ConnectorSpecification):
    """
    Each field has one or multiple types, but the UI only supports a single type and optionally "null" as a second type.
    """
    errors = []
    for type_path, type_value in dpath.util.search(target_spec.connectionSpecification, "**/properties/*/type", yielded=True):
        # Idiom fix: isinstance checks against typing.List are deprecated; the
        # builtin `list` is the correct runtime check (behavior is identical).
        if isinstance(type_value, list):
            number_of_types = len(type_value)
            if number_of_types not in (1, 2):
                errors.append(
                    f"{type_path} is not either a simple type or an array of a simple type plus null: {type_value} (for example: type: [string, null])"
                )
            if number_of_types == 2 and type_value[1] != "null":
                errors.append(
                    f"Second type of {type_path} is not null: {type_value}. Type can either be a simple type or an array of a simple type plus null (for example: type: [string, null])"
                )
    _fail_on_errors(errors)
def test_object_not_empty(target_spec: ConnectorSpecification):
    """
    Each object field needs to have at least one property as the UI won't be able to show them otherwise.
    If the whole spec is empty, it's allowed to have a single empty object at the top level
    """
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    errors = []
    for type_path, type_value in dpath.util.search(target_spec.connectionSpecification, "**/type", yielded=True):
        if type_path == "type":
            # allow empty root object
            continue
        if type_value != "object":
            continue
        parent = schema_helper.get_parent(type_path)
        has_properties = "properties" in parent and len(parent["properties"]) > 0
        # oneOf objects carry their fields in the variants, so they are exempt.
        if "oneOf" not in parent and not has_properties:
            errors.append(
                f"{type_path} is an empty object which will not be represented correctly in the UI. Either remove or add specific properties"
            )
    _fail_on_errors(errors)
async def test_array_type(target_spec: ConnectorSpecification):
    """
    Each array has one or multiple types for its items, but the UI only supports a single type which can either be object, string or an enum
    """
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    errors = []
    for type_path, type_type in dpath.util.search(target_spec.connectionSpecification, "**/type", yielded=True):
        if type_type != "array":
            # unrelated "items", not an array definition
            continue
        # Moved below the array check so the parent is only resolved when needed.
        property_definition = schema_helper.get_parent(type_path)
        items_value = property_definition.get("items", None)
        if items_value is None:
            continue
        # Idiom fix: isinstance against typing.List is deprecated; use the builtin `list`.
        if isinstance(items_value, list):
            errors.append(f"{type_path} is not just a single item type: {items_value}")
        elif items_value.get("type") not in ["object", "string", "number", "integer"] and "enum" not in items_value:
            errors.append(f"Items of {type_path} has to be either object or string or define an enum")
    _fail_on_errors(errors)
async def test_forbidden_complex_types(target_spec: ConnectorSpecification):
    """
    not, anyOf, patternProperties, prefixItems, allOf, if, then, else, dependentSchemas and dependentRequired are not allowed
    """
    forbidden_keys = [
        "not",
        "anyOf",
        "patternProperties",
        "prefixItems",
        "allOf",
        "if",
        "then",
        "else",
        "dependentSchemas",
        "dependentRequired",
    ]
    found_keys = {
        path
        for forbidden_key in forbidden_keys
        for path, _value in dpath.util.search(target_spec.connectionSpecification, f"**/{forbidden_key}", yielded=True)
    }
    for forbidden_key in forbidden_keys:
        # remove forbidden keys if they are used as properties directly
        for path, _value in dpath.util.search(target_spec.connectionSpecification, f"**/properties/{forbidden_key}", yielded=True):
            found_keys.remove(path)
    if found_keys:
        key_list = ", ".join(found_keys)
        pytest.fail(f"Found the following disallowed JSON schema features: {key_list}")
async def test_date_pattern(request: "SubRequest", target_spec: ConnectorSpecification):
    """
    Properties with format date or date-time should always have a pattern defined how the date/date-time should be formatted
    that corresponds with the format the datepicker component is creating.
    """
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    # Hoisted out of the loop: the logger is loop-invariant; the original
    # re-created it for every matching property.
    logger = get_test_logger(request)
    for format_path, format in dpath.util.search(target_spec.connectionSpecification, "**/format", yielded=True):
        if not isinstance(format, str):
            # format is not a format definition here but a property named format
            continue
        property_definition = schema_helper.get_parent(format_path)
        pattern = property_definition.get("pattern")
        if format == "date" and not pattern == DATE_PATTERN:
            logger.warning(
                f"{format_path} is defining a date format without the corresponding pattern. Consider setting the pattern to {DATE_PATTERN} to make it easier for users to edit this field in the UI."
            )
        if format == "date-time" and not pattern == DATETIME_PATTERN:
            logger.warning(
                f"{format_path} is defining a date-time format without the corresponding pattern Consider setting the pattern to {DATETIME_PATTERN} to make it easier for users to edit this field in the UI."
            )
async def test_date_format(request: "SubRequest", target_spec: ConnectorSpecification):
    """
    Properties with a pattern that looks like a date should have their format set to date or date-time.

    Emits warnings (does not fail) for properties missing the expected format.
    """
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    # loop-invariant; fetch once up front
    logger = get_test_logger(request)
    for pattern_path, pattern in dpath.util.search(target_spec.connectionSpecification, "**/pattern", yielded=True):
        if not isinstance(pattern, str):
            # not a pattern definition here but a property named "pattern"
            continue
        if pattern == DATE_PATTERN or pattern == DATETIME_PATTERN:
            property_definition = schema_helper.get_parent(pattern_path)
            # renamed from `format` to avoid shadowing the builtin
            fmt = property_definition.get("format")
            if fmt != "date" and pattern == DATE_PATTERN:
                logger.warning(
                    f"{pattern_path} is defining a pattern that looks like a date without setting the format to `date`. Consider specifying the format to make it easier for users to edit this field in the UI."
                )
            if fmt != "date-time" and pattern == DATETIME_PATTERN:
                logger.warning(
                    f"{pattern_path} is defining a pattern that looks like a date-time without setting the format to `date-time`. Consider specifying the format to make it easier for users to edit this field in the UI."
                )
async def test_duplicate_order(target_spec: ConnectorSpecification):
    """
    Custom ordering of field (via the "order" property defined in the field) is not allowed to have duplicates within the same group.
    `{ "a": { "order": 1 }, "b": { "order": 1 } }` is invalid because there are two fields with order 1
    `{ "a": { "order": 1 }, "b": { "order": 1, "group": "x" } }` is valid because the fields with the same order are in different groups
    """
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    errors = []
    for properties_path, properties in dpath.util.search(target_spec.connectionSpecification, "**/properties", yielded=True):
        definition = schema_helper.get_parent(properties_path)
        if definition.get("type") != "object":
            # unrelated "properties", not an actual object definition
            continue
        # orders already seen per group; fields without a "group" share the "" bucket
        used_orders: Dict[str, Set[int]] = {}
        for prop in properties.values():  # `prop`, not `property`, to avoid shadowing the builtin
            if "order" not in prop:
                continue
            order = prop.get("order")
            group = prop.get("group", "")
            orders_for_group = used_orders.setdefault(group, set())
            if order in orders_for_group:
                errors.append(f"{properties_path} has duplicate order: {order}")
            orders_for_group.add(order)
    _fail_on_errors(errors)
async def test_nested_group(target_spec: ConnectorSpecification):
    """
    Groups can only be defined on the top level properties
    `{ "a": { "group": "x" }}` is valid because field "a" is a top level field
    `{ "a": { "oneOf": [{ "type": "object", "properties": { "b": { "group": "x" } } }] }}` is invalid because field "b" is nested in a oneOf
    """
    errors = []
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    for group_path, _ in dpath.util.search(target_spec.connectionSpecification, "/properties/**/group", yielded=True):
        parent = schema_helper.get_parent_path(group_path)
        if parent.endswith("properties"):
            # not a group annotation — just a property that happens to be named "group"
            continue
        if schema_helper.get_parent_path(parent) != "/properties":
            errors.append(f"Groups can only be defined on top level, is defined at {group_path}")
    _fail_on_errors(errors)
async def test_display_type(target_spec: ConnectorSpecification):
    """
    The display_type property can only be set on fields which have a oneOf property, and must be either "dropdown" or "radio"
    """
    errors = []
    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
    for display_type_path, _ in dpath.util.search(target_spec.connectionSpecification, "/properties/**/display_type", yielded=True):
        parent_path = schema_helper.get_parent_path(display_type_path)
        if parent_path.endswith("properties"):
            # a regular property that happens to be named "display_type", not the UI annotation
            continue
        parent_object = schema_helper.get_parent(display_type_path)
        if "oneOf" not in parent_object:
            errors.append(f"display_type is only allowed on fields which have a oneOf property, but is set on {parent_path}")
        display_type_value = parent_object.get("display_type")
        if display_type_value not in ("dropdown", "radio"):
            errors.append(f"display_type must be either 'dropdown' or 'radio', but is set to '{display_type_value}' at {display_type_path}")
    _fail_on_errors(errors)
async def test_defined_refs_exist_in_json_spec_file(target_spec: ConnectorSpecification):
    """Checking for the presence of unresolved `$ref`s values within each json spec file"""
    unresolved_refs = list(find_all_values_for_key_in_schema(target_spec.connectionSpecification["properties"], "$ref"))
    assert not unresolved_refs, "Found unresolved `$refs` value in spec.json file"
async def test_oauth_flow_parameters(target_spec: ConnectorSpecification):
    """Check if connector has correct oauth flow parameters according to
    https://docs.airbyte.io/connector-development/connector-specification-reference
    """
    advanced_auth = target_spec.advanced_auth
    if not advanced_auth:
        return
    spec_schema = target_spec.connectionSpecification
    paths_to_validate = set()
    if advanced_auth.predicate_key:
        paths_to_validate.add("/" + "/".join(advanced_auth.predicate_key))
    oauth_config_specification = advanced_auth.oauth_config_specification
    if oauth_config_specification:
        # Each of these optional sub-specifications declares "properties" that must
        # also exist in the connector config schema.
        sub_specifications = (
            oauth_config_specification.oauth_user_input_from_connector_config_specification,
            oauth_config_specification.complete_oauth_output_specification,
            oauth_config_specification.complete_oauth_server_output_specification,
        )
        for sub_specification in sub_specifications:
            if sub_specification:
                paths_to_validate.update(get_paths_in_connector_config(sub_specification["properties"]))
    diff = paths_to_validate - set(get_expected_schema_structure(spec_schema))
    assert diff == set(), f"Specified oauth fields are missed from spec schema: {diff}"
async def test_oauth_is_default_method(target_spec: ConnectorSpecification):
    """
    OAuth is default check.
    If credentials do have oneOf: we check that the OAuth is listed at first.
    If there is no oneOf and Oauth: OAuth is only option to authenticate the source and no check is needed.
    """
    advanced_auth = target_spec.advanced_auth
    if not advanced_auth:
        pytest.skip("Source does not have OAuth method.")
    if not advanced_auth.predicate_key:
        pytest.skip("Advanced Auth object does not have predicate_key, only one option to authenticate.")
    spec_schema = target_spec.connectionSpecification
    credentials = advanced_auth.predicate_key[0]
    try:
        one_of_default_method = dpath.util.get(spec_schema, f"/**/{credentials}/oneOf/0")
    except KeyError:  # KeyError means oneOf is not in the credentials object; the exception value itself is not needed
        pytest.skip("Credentials object does not have oneOf option.")
    path_in_credentials = "/".join(advanced_auth.predicate_key[1:])
    auth_method_predicate_const = dpath.util.get(one_of_default_method, f"/**/{path_in_credentials}/const")
    assert (
        auth_method_predicate_const == advanced_auth.predicate_value
    ), f"Oauth method should be a default option. Current default method is {auth_method_predicate_const}."
async def test_additional_properties_is_true(target_spec: ConnectorSpecification):
    """Check that value of the "additionalProperties" field is always true.
    A spec declaring "additionalProperties": false introduces the risk of accidental breaking changes.
    Specifically, when removing a property from the spec, existing connector configs will no longer be valid.
    False value introduces the risk of accidental breaking changes.
    Read https://github.com/airbytehq/airbyte/issues/14196 for more details"""
    # Materialize the result: if the helper returns a lazy iterator, its truthiness would
    # always be True and the emptiness guard below would be meaningless.
    additional_properties_values = list(
        find_all_values_for_key_in_schema(target_spec.connectionSpecification, "additionalProperties")
    )
    if additional_properties_values:
        assert all(
            additional_properties_value is True for additional_properties_value in additional_properties_values
        ), "When set, additionalProperties field value must be true for backward compatibility."

View File

@@ -0,0 +1,282 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from enum import Enum
from typing import Any, Iterable, List, Text, Tuple, Union
import pendulum
import pytest
from airbyte_protocol.models import (
AirbyteMessage,
AirbyteRecordMessage,
AirbyteStream,
ConfiguredAirbyteStream,
DestinationSyncMode,
SyncMode,
Type,
)
from live_tests.commons.json_schema_helper import JsonSchemaHelper, get_expected_schema_structure, get_object_structure
from pydantic import BaseModel
def records_with_state(records, state, stream_mapping, state_cursor_paths) -> Iterable[Tuple[Any, Any, Any]]:
    """Iterate over records and return cursor value with corresponding cursor value from state.

    :param records: AirbyteMessage records to inspect
    :param state: state object, either keyed by stream name or holding an absolute structure
        (e.g. singer-style ``bookmarks -> stream -> column``)
    :param stream_mapping: mapping of stream name -> ConfiguredAirbyteStream (provides json_schema and cursor_field)
    :param state_cursor_paths: mapping of stream name -> path of the cursor value inside the state object
    :yield: ``(record_value, state_value, stream_name)`` triples; records whose stream has no
        resolvable state entry are skipped silently
    """
    for record in records:
        stream_name = record.record.stream
        stream = stream_mapping[stream_name]
        helper = JsonSchemaHelper(schema=stream.stream.json_schema)
        # Resolve the cursor field against the stream's JSON schema so values are parsed consistently.
        cursor_field = helper.field(stream.cursor_field)
        record_value = cursor_field.parse(record=record.record.data)
        try:
            if state[stream_name] is None:
                # stream key is present in the state but carries no value yet -> nothing to compare
                continue
            # first attempt to parse the state value assuming the state object is namespaced on stream names
            state_value = cursor_field.parse(record=state[stream_name], path=state_cursor_paths[stream_name])
        except KeyError:
            try:
                # try second time as an absolute path in state file (i.e. bookmarks -> stream_name -> column -> value)
                state_value = cursor_field.parse(record=state, path=state_cursor_paths[stream_name])
            except KeyError:
                # no state entry found under either layout -> skip this record
                continue
        yield record_value, state_value, stream_name
@pytest.fixture(name="simple_state")
def simple_state_fixture():
    """Flat per-stream state with one id and two timestamp cursor candidates."""
    stream_state = {
        "id": 11,
        "ts_created": "2014-01-01T22:03:11",
        "ts_updated": "2015-01-01T22:03:11",
    }
    return {"my_stream": stream_state}
@pytest.fixture(name="none_state")
def none_state_fixture():
    """State where the stream key exists but no cursor value has been recorded yet."""
    return {"my_stream": None}
@pytest.fixture(name="nested_state")
def nested_state_fixture(simple_state):
    """State namespaced one level deeper (per account id) under the stream name."""
    return {"my_stream": {"some_account_id": simple_state["my_stream"]}}
@pytest.fixture(name="singer_state")
def singer_state_fixture(simple_state):
    """Singer-style state: per-stream state nested under a top-level "bookmarks" key."""
    return {"bookmarks": simple_state}
@pytest.fixture(name="stream_schema")
def stream_schema_fixture():
    """JSON schema exposing a top-level cursor candidate and a nested one."""
    nested_properties = {"ts_updated": {"type": "string", "format": "date"}}
    return {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "properties": {
            "id": {"type": "integer"},
            "ts_created": {"type": "string", "format": "datetime"},
            "nested": {"type": "object", "properties": nested_properties},
        },
    }
@pytest.fixture(name="stream_mapping")
def stream_mapping_fixture(stream_schema):
    """Single configured full-refresh stream named "my_stream" using the stream_schema fixture."""
    stream = AirbyteStream(name="my_stream", json_schema=stream_schema, supported_sync_modes=[SyncMode.full_refresh])
    configured_stream = ConfiguredAirbyteStream(
        stream=stream,
        sync_mode=SyncMode.full_refresh,
        destination_sync_mode=DestinationSyncMode.append,
    )
    return {"my_stream": configured_stream}
@pytest.fixture(name="records")
def records_fixture():
    """One record for "my_stream" carrying both a top-level and a nested cursor value."""
    record = AirbyteRecordMessage(
        stream="my_stream",
        data={"id": 1, "ts_created": "2015-11-01T22:03:11", "nested": {"ts_updated": "2015-05-01"}},
        emitted_at=0,
    )
    return [AirbyteMessage(type=Type.RECORD, record=record)]
def test_simple_path(records, stream_mapping, simple_state):
    """Cursor located at the top level of both the record and the state."""
    stream_mapping["my_stream"].cursor_field = ["id"]
    state_cursor_paths = {"my_stream": ["id"]}
    record_value, state_value, _ = next(
        records_with_state(records=records, state=simple_state, stream_mapping=stream_mapping, state_cursor_paths=state_cursor_paths)
    )
    assert record_value == 1, "record value must be correctly found"
    assert state_value == 11, "state value must be correctly found"
def test_nested_path(records, stream_mapping, nested_state):
    """Cursor nested one level deep in the record; state namespaced by account id."""
    stream_mapping["my_stream"].cursor_field = ["nested", "ts_updated"]
    state_cursor_paths = {"my_stream": ["some_account_id", "ts_updated"]}
    record_value, state_value, _ = next(
        records_with_state(records=records, state=nested_state, stream_mapping=stream_mapping, state_cursor_paths=state_cursor_paths)
    )
    assert record_value == pendulum.datetime(2015, 5, 1), "record value must be correctly found"
    assert state_value == pendulum.datetime(2015, 1, 1, 22, 3, 11), "state value must be correctly found"
def test_absolute_path(records, stream_mapping, singer_state):
    """Cursor resolved via an absolute (singer-style "bookmarks") path in the state."""
    stream_mapping["my_stream"].cursor_field = ["ts_created"]
    state_cursor_paths = {"my_stream": ["bookmarks", "my_stream", "ts_created"]}
    record_value, state_value, _ = next(
        records_with_state(records=records, state=singer_state, stream_mapping=stream_mapping, state_cursor_paths=state_cursor_paths)
    )
    assert record_value == pendulum.datetime(2015, 11, 1, 22, 3, 11), "record value must be correctly found"
    assert state_value == pendulum.datetime(2014, 1, 1, 22, 3, 11), "state value must be correctly found"
def test_none_state(records, stream_mapping, none_state):
    """A stream whose state is None yields no record/state pairs at all."""
    stream_mapping["my_stream"].cursor_field = ["ts_created"]
    state_cursor_paths = {"my_stream": ["unknown", "ts_created"]}
    pairs = records_with_state(records=records, state=none_state, stream_mapping=stream_mapping, state_cursor_paths=state_cursor_paths)
    assert next(pairs, None) is None
def test_json_schema_helper_pydantic_generated():
    """find_nodes should locate variant keywords (anyOf/oneOf) in a pydantic-generated schema."""

    class E(str, Enum):
        A = "dda"
        B = "dds"
        C = "ddf"

    class E2(BaseModel):
        e2: str

    class C(BaseModel):
        aaa: int
        e: Union[E, E2]  # Union renders as "anyOf" in the generated schema

    class A(BaseModel):
        sdf: str
        sss: str
        c: C

    class B(BaseModel):
        name: str
        surname: str

    class Root(BaseModel):
        f: Union[A, B]  # second "anyOf", at the schema's top level

    js_helper = JsonSchemaHelper(Root.schema())
    variant_paths = js_helper.find_nodes(keys=["anyOf", "oneOf"])
    assert len(variant_paths) == 2
    assert variant_paths == [["properties", "f", "anyOf"], ["definitions", "C", "properties", "e", "anyOf"]]
    # TODO: implement validation for pydantic generated objects as well
    # js_helper.validate_variant_paths(variant_paths)
@pytest.mark.parametrize(
    "object, paths",
    [
        ({}, []),
        ({"a": 12}, ["/a"]),
        ({"a": {"b": 12}}, ["/a", "/a/b"]),
        ({"a": {"b": 12}, "c": 45}, ["/a", "/a/b", "/c"]),
        (
            {"a": [{"b": 12}]},
            ["/a", "/a/[]", "/a/[]/b"],
        ),
        # list items collapse into a single "[]" path segment regardless of their count
        ({"a": [{"b": 12}, {"b": 15}]}, ["/a", "/a/[]", "/a/[]/b"]),
        ({"a": [[[{"b": 12}, {"b": 15}]]]}, ["/a", "/a/[]", "/a/[]/[]", "/a/[]/[]/[]", "/a/[]/[]/[]/b"]),
    ],
)
def test_get_object_strucutre(object, paths):
    # NOTE(review): "strucutre" is a typo in the test name; renaming would change the
    # collected test id, so it is left as-is. `object` also shadows the builtin but must
    # match the parametrize argument name.
    assert get_object_structure(object) == paths
@pytest.mark.parametrize(
    "schema, paths",
    [
        ({"type": "object", "properties": {"a": {"type": "string"}}}, ["/a"]),
        # "type" may be omitted; "properties" alone implies an object
        ({"properties": {"a": {"type": "string"}}}, ["/a"]),
        ({"type": "object", "properties": {"a": {"type": "string"}, "b": {"type": "number"}}}, ["/a", "/b"]),
        (
            # $ref targets in "definitions" are resolved before paths are extracted
            {
                "type": "object",
                "properties": {"a": {"type": "string"}, "b": {"$ref": "#definitions/b_type"}},
                "definitions": {"b_type": {"type": "number"}},
            },
            ["/a", "/b"],
        ),
        ({"type": "object", "oneOf": [{"properties": {"a": {"type": "string"}}}, {"properties": {"b": {"type": "string"}}}]}, ["/a", "/b"]),
        # Some of pydantic generated schemas have anyOf keyword
        ({"type": "object", "anyOf": [{"properties": {"a": {"type": "string"}}}, {"properties": {"b": {"type": "string"}}}]}, ["/a", "/b"]),
        (
            {"type": "array", "items": {"oneOf": [{"properties": {"a": {"type": "string"}}}, {"properties": {"b": {"type": "string"}}}]}},
            ["/[]/a", "/[]/b"],
        ),
        # There could be an object with any properties with specific type
        ({"type": "object", "properties": {"a": {"type": "object", "additionalProperties": {"type": "string"}}}}, ["/a"]),
        # Array with no item type specified
        ({"type": "array"}, ["/[]"]),
        ({"type": "array", "items": {"type": "object", "additionalProperties": {"type": "string"}}}, ["/[]"]),
    ],
)
def test_get_expected_schema_structure(schema, paths):
    # Each JSON schema must expand to exactly the expected set of config paths.
    assert paths == get_expected_schema_structure(schema)
@pytest.mark.parametrize(
    "keys, num_paths, last_value",
    [
        (["description"], 1, "Tests that keys can be found inside lists of dicts"),
        (["option1"], 2, {"a_key": "a_value"}),
        (["option2"], 1, ["value1", "value2"]),
        (["nonexistent_key"], 0, None),
        (["option1", "option2"], 3, ["value1", "value2"]),
    ],
)
def test_find_and_get_nodes(keys: List[Text], num_paths: int, last_value: Any):
    """find_nodes must locate keys anywhere in the schema (including inside oneOf lists),
    and get_node must return the value stored at each discovered path.

    :param keys: key names to search for
    :param num_paths: expected number of matching paths
    :param last_value: a value that must appear among the nodes found (unused when num_paths is 0)
    """
    schema = {
        "title": "Key_inside_oneOf",
        "description": "Tests that keys can be found inside lists of dicts",
        "type": "object",
        "properties": {
            "credentials": {
                "type": "object",
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "common": {"type": "string", "const": "option1", "default": "option1"},
                            "option1": {"type": "string"},
                        },
                    },
                    {
                        "type": "object",
                        "properties": {
                            "common": {"type": "string", "const": "option2", "default": "option2"},
                            "option1": {"a_key": "a_value"},
                            "option2": ["value1", "value2"],
                        },
                    },
                ],
            }
        },
    }
    schema_helper = JsonSchemaHelper(schema)
    variant_paths = schema_helper.find_nodes(keys=keys)
    assert len(variant_paths) == num_paths
    if variant_paths:
        # every discovered path must be retrievable; the expected value must be among them
        values_at_nodes = []
        for path in variant_paths:
            values_at_nodes.append(schema_helper.get_node(path))
        assert last_value in values_at_nodes

181
poetry.lock generated
View File

@@ -1,4 +1,23 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "attrs"
version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "black"
@@ -86,6 +105,41 @@ colors = ["colorama (>=0.4.3,<0.5.0)"]
pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]
[[package]]
name = "jsonschema"
version = "4.22.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
{file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
]
[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.03.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
[[package]]
name = "jsonschema-specifications"
version = "2023.12.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
{file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
]
[package.dependencies]
referencing = ">=0.31.0"
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -153,6 +207,129 @@ tomli = ">=1.2.2"
[package.extras]
poetry-plugin = ["poetry (>=1.0,<2.0)"]
[[package]]
name = "referencing"
version = "0.35.1"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
{file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
]
[package.dependencies]
attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
[[package]]
name = "rpds-py"
version = "0.18.1"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.8"
files = [
{file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"},
{file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"},
{file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"},
{file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"},
{file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"},
{file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"},
{file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"},
{file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"},
{file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"},
{file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"},
{file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"},
{file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"},
{file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"},
]
[[package]]
name = "ruff"
version = "0.4.3"
@@ -193,4 +370,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "~3.10"
content-hash = "86b7578e744e8b71526d947edba4c42a687b4aade96dde24ec0dbc1c3b245eb0"
content-hash = "be63dcfecf979317f9470f51c80ac658687cd949c77018744277b0bce7c348bd"

View File

@@ -6,6 +6,7 @@ authors = ["Airbyte <contact@airbyte.io>"]
[tool.poetry.dependencies]
python = "~3.10"
jsonschema = "^4.22.0"
[tool.poetry.group.dev.dependencies]
isort = "5.6.4"