1
0
mirror of repository, synced 2025-12-23 21:03:15 -05:00

[airbyte-cdk] Fix dpath.util.* deprecation warnings (#38847)

Co-authored-by: Octavia Squidington III <octavia-squidington-iii@users.noreply.github.com>
This commit is contained in:
Natik Gadzhi
2024-06-03 12:51:48 -07:00
committed by GitHub
parent 12f637330e
commit 8b82caa4df
15 changed files with 43 additions and 45 deletions

View File

@@ -4,7 +4,7 @@
from typing import Any, Dict, List, Literal, Optional, Union
import dpath.util
import dpath
from airbyte_cdk.utils.oneof_option_config import OneOfOptionConfig
from airbyte_cdk.utils.spec_schema_transformations import resolve_refs
from pydantic import BaseModel, Field
@@ -264,7 +264,7 @@ class VectorDBConfigModel(BaseModel):
@staticmethod
def remove_discriminator(schema: Dict[str, Any]) -> None:
"""pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references"""
dpath.util.delete(schema, "properties/**/discriminator")
dpath.delete(schema, "properties/**/discriminator")
@classmethod
def schema(cls, by_alias: bool = True, ref_template: str = "") -> Dict[str, Any]:

View File

@@ -7,7 +7,7 @@ import logging
from dataclasses import dataclass
from typing import Any, Dict, List, Mapping, Optional, Tuple
import dpath.util
import dpath
from airbyte_cdk.destinations.vector_db_based.config import ProcessingConfigModel, SeparatorSplitterConfigModel, TextSplitterConfigModel
from airbyte_cdk.destinations.vector_db_based.utils import create_stream_identifier
from airbyte_cdk.models import AirbyteRecordMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode
@@ -137,7 +137,7 @@ class DocumentProcessor:
relevant_fields = {}
if fields and len(fields) > 0:
for field in fields:
values = dpath.util.values(record.data, field, separator=".")
values = dpath.values(record.data, field, separator=".")
if values and len(values) > 0:
relevant_fields[field] = values if len(values) > 1 else values[0]
else:
@@ -162,7 +162,7 @@ class DocumentProcessor:
primary_key = []
for key in current_stream.primary_key:
try:
primary_key.append(str(dpath.util.get(record.data, key)))
primary_key.append(str(dpath.get(record.data, key)))
except KeyError:
primary_key.append("__not_found__")
stringified_primary_key = "_".join(primary_key)

View File

@@ -27,7 +27,7 @@ class SelectiveAuthenticator(DeclarativeAuthenticator):
**kwargs: Any,
) -> DeclarativeAuthenticator:
try:
selected_key = str(dpath.util.get(config, authenticator_selection_path))
selected_key = str(dpath.get(config, authenticator_selection_path))
except KeyError as err:
raise ValueError("The path from `authenticator_selection_path` is not found in the config.") from err

View File

@@ -8,7 +8,7 @@ from abc import abstractmethod
from dataclasses import InitVar, dataclass, field
from typing import Any, List, Mapping, Optional, Union
import dpath.util
import dpath
import pendulum
from airbyte_cdk.sources.declarative.decoders.decoder import Decoder
from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder
@@ -62,7 +62,7 @@ class SessionTokenProvider(TokenProvider):
)
if response is None:
raise ReadException("Failed to get session token, response got ignored by requester")
session_token = dpath.util.get(self._decoder.decode(response), self.session_token_path)
session_token = dpath.get(self._decoder.decode(response), self.session_token_path)
if self.expiration_duration is not None:
self._next_expiration_time = pendulum.now() + self.expiration_duration
self._token = session_token

View File

@@ -5,7 +5,7 @@
from dataclasses import InitVar, dataclass, field
from typing import Any, Iterable, List, Mapping, Union
import dpath.util
import dpath
import requests
from airbyte_cdk.sources.declarative.decoders.decoder import Decoder
from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder
@@ -71,9 +71,9 @@ class DpathExtractor(RecordExtractor):
else:
path = [path.eval(self.config) for path in self._field_path]
if "*" in path:
extracted = dpath.util.values(response_body, path)
extracted = dpath.values(response_body, path)
else:
extracted = dpath.util.get(response_body, path, default=[])
extracted = dpath.get(response_body, path, default=[])
if isinstance(extracted, list):
yield from extracted
elif extracted:

View File

@@ -5,7 +5,7 @@
from dataclasses import InitVar, dataclass
from typing import TYPE_CHECKING, Any, Iterable, List, Mapping, Optional, Union
import dpath.util
import dpath
from airbyte_cdk.models import AirbyteMessage, SyncMode, Type
from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType
@@ -145,7 +145,7 @@ class SubstreamPartitionRouter(StreamSlicer):
elif isinstance(parent_record, Record):
parent_record = parent_record.data
try:
partition_value = dpath.util.get(parent_record, parent_field)
partition_value = dpath.get(parent_record, parent_field)
except KeyError:
pass
else:

View File

@@ -5,7 +5,7 @@
from dataclasses import InitVar, dataclass, field
from typing import Any, List, Mapping, Optional, Type, Union
import dpath.util
import dpath
from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString
from airbyte_cdk.sources.declarative.transformations import RecordTransformation
from airbyte_cdk.sources.types import Config, FieldPointer, Record, StreamSlice, StreamState
@@ -122,7 +122,7 @@ class AddFields(RecordTransformation):
for parsed_field in self._parsed_fields:
valid_types = (parsed_field.value_type,) if parsed_field.value_type else None
value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs)
dpath.util.new(record, parsed_field.path, value)
dpath.new(record, parsed_field.path, value)
return record

View File

@@ -5,8 +5,8 @@
from dataclasses import InitVar, dataclass
from typing import Any, List, Mapping, Optional
import dpath
import dpath.exceptions
import dpath.util
from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean
from airbyte_cdk.sources.declarative.transformations import RecordTransformation
from airbyte_cdk.sources.types import Config, FieldPointer, StreamSlice, StreamState
@@ -60,7 +60,7 @@ class RemoveFields(RecordTransformation):
for pointer in self.field_pointers:
# the dpath library by default doesn't delete fields from arrays
try:
dpath.util.delete(
dpath.delete(
record,
pointer,
afilter=(lambda x: self._filter_interpolator.eval(config or {}, property=x)) if self.condition else None,

View File

@@ -18,7 +18,7 @@ def get_defined_id(stream: AirbyteStream, data: Dict[str, Any]) -> Optional[str]
primary_key = []
for key in stream.source_defined_primary_key:
try:
primary_key.append(str(dpath.util.get(data, key)))
primary_key.append(str(dpath.get(data, key)))
except KeyError:
primary_key.append("__not_found__")
return "_".join(primary_key)

View File

@@ -6,7 +6,7 @@ import copy
from abc import abstractmethod
from typing import Any, Dict, List, Optional
import dpath.util
import dpath
from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig
from airbyte_cdk.sources.utils import schema_helpers
from pydantic import AnyUrl, BaseModel, Field
@@ -57,7 +57,7 @@ class AbstractFileBasedSpec(BaseModel):
@staticmethod
def remove_discriminator(schema: Dict[str, Any]) -> None:
"""pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references"""
dpath.util.delete(schema, "properties/**/discriminator")
dpath.delete(schema, "properties/**/discriminator")
@staticmethod
def replace_enum_allOf_and_anyOf(schema: Dict[str, Any]) -> Dict[str, Any]:

View File

@@ -8,7 +8,7 @@ from io import BytesIO, IOBase
from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union
import backoff
import dpath.util
import dpath
import requests
from airbyte_cdk.models import FailureType
from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig
@@ -335,15 +335,15 @@ class UnstructuredParser(FileTypeParser):
return "\n\n".join((self._convert_to_markdown(el) for el in elements))
def _convert_to_markdown(self, el: Dict[str, Any]) -> str:
if dpath.util.get(el, "type") == "Title":
heading_str = "#" * (dpath.util.get(el, "metadata/category_depth", default=1) or 1)
return f"{heading_str} {dpath.util.get(el, 'text')}"
elif dpath.util.get(el, "type") == "ListItem":
return f"- {dpath.util.get(el, 'text')}"
elif dpath.util.get(el, "type") == "Formula":
return f"```\n{dpath.util.get(el, 'text')}\n```"
if dpath.get(el, "type") == "Title":
heading_str = "#" * (dpath.get(el, "metadata/category_depth", default=1) or 1)
return f"{heading_str} {dpath.get(el, 'text')}"
elif dpath.get(el, "type") == "ListItem":
return f"- {dpath.get(el, 'text')}"
elif dpath.get(el, "type") == "Formula":
return f"```\n{dpath.get(el, 'text')}\n```"
else:
return str(dpath.util.get(el, "text", default=""))
return str(dpath.get(el, "text", default=""))
@property
def file_read_mode(self) -> FileReadMode:

View File

@@ -153,10 +153,8 @@ class SingleUseRefreshTokenOauth2Authenticator(Oauth2Authenticator):
token_expiry_is_time_of_expiration bool: set True it if expires_in is returned as time of expiration instead of the number seconds until expiration
message_repository (MessageRepository): the message repository used to emit logs on HTTP requests and control message on config update
"""
self._client_id = client_id if client_id is not None else dpath.util.get(connector_config, ("credentials", "client_id"))
self._client_secret = (
client_secret if client_secret is not None else dpath.util.get(connector_config, ("credentials", "client_secret"))
)
self._client_id = client_id if client_id is not None else dpath.get(connector_config, ("credentials", "client_id"))
self._client_secret = client_secret if client_secret is not None else dpath.get(connector_config, ("credentials", "client_secret"))
self._access_token_config_path = access_token_config_path
self._refresh_token_config_path = refresh_token_config_path
self._token_expiry_date_config_path = token_expiry_date_config_path
@@ -193,24 +191,24 @@ class SingleUseRefreshTokenOauth2Authenticator(Oauth2Authenticator):
@property
def access_token(self) -> str:
return dpath.util.get(self._connector_config, self._access_token_config_path, default="")
return dpath.get(self._connector_config, self._access_token_config_path, default="")
@access_token.setter
def access_token(self, new_access_token: str):
dpath.util.new(self._connector_config, self._access_token_config_path, new_access_token)
dpath.new(self._connector_config, self._access_token_config_path, new_access_token)
def get_refresh_token(self) -> str:
return dpath.util.get(self._connector_config, self._refresh_token_config_path, default="")
return dpath.get(self._connector_config, self._refresh_token_config_path, default="")
def set_refresh_token(self, new_refresh_token: str):
dpath.util.new(self._connector_config, self._refresh_token_config_path, new_refresh_token)
dpath.new(self._connector_config, self._refresh_token_config_path, new_refresh_token)
def get_token_expiry_date(self) -> pendulum.DateTime:
expiry_date = dpath.util.get(self._connector_config, self._token_expiry_date_config_path, default="")
expiry_date = dpath.get(self._connector_config, self._token_expiry_date_config_path, default="")
return pendulum.now().subtract(days=1) if expiry_date == "" else pendulum.parse(expiry_date)
def set_token_expiry_date(self, new_token_expiry_date):
dpath.util.new(self._connector_config, self._token_expiry_date_config_path, str(new_token_expiry_date))
dpath.new(self._connector_config, self._token_expiry_date_config_path, str(new_token_expiry_date))
def token_has_expired(self) -> bool:
"""Returns True if the token is expired"""

View File

@@ -4,7 +4,7 @@
from typing import Any, List, Mapping
import dpath.util
import dpath
def get_secret_paths(spec: Mapping[str, Any]) -> List[List[str]]:
@@ -45,7 +45,7 @@ def get_secrets(connection_specification: Mapping[str, Any], config: Mapping[str
result = []
for path in secret_paths:
try:
result.append(dpath.util.get(config, path))
result.append(dpath.get(config, path))
except KeyError:
# Since we try to get paths to all known secrets in the spec, in the case of oneOfs, some secret fields may not be present
# In that case, a KeyError is thrown. This is expected behavior.

View File

@@ -4,7 +4,7 @@
from typing import Union
import dpath.util
import dpath
from airbyte_cdk.destinations.vector_db_based.config import (
AzureOpenAIEmbeddingConfigModel,
CohereEmbeddingConfigModel,
@@ -57,7 +57,7 @@ class ConfigModel(BaseModel):
@staticmethod
def remove_discriminator(schema: dict) -> None:
"""pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references"""
dpath.util.delete(schema, "properties/**/discriminator")
dpath.delete(schema, "properties/**/discriminator")
@classmethod
def schema(cls):

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
import dpath.util
import dpath
import pytest
from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import InterpolatedNestedMapping
@@ -42,4 +42,4 @@ def test(test_name, path, expected_value):
interpolated = mapping.eval(config, **{"kwargs": kwargs})
assert dpath.util.get(interpolated, path) == expected_value
assert dpath.get(interpolated, path) == expected_value