1
0
Mirror of upstream repository, synced 2025-12-25 02:09:19 -05:00

🐛 Source salesforce: processing of failed jobs (#10141)

This commit is contained in:
Maksym Pavlenok
2022-02-10 19:49:08 +02:00
committed by GitHub
parent b230543ba2
commit 5bc8ec205e
13 changed files with 263 additions and 90 deletions

View File

@@ -4,10 +4,13 @@
import json
import logging
import re
from pathlib import Path
from typing import Any, Mapping
import pytest
import requests_mock
from airbyte_cdk.models import SyncMode
from airbyte_cdk.sources.streams import Stream
from source_salesforce.source import SourceSalesforce
@@ -20,6 +23,12 @@ def parse_input_config():
return json.loads(file.read())
@pytest.fixture(name="input_sandbox_config")
def parse_input_sandbox_config():
    """Load the Salesforce sandbox connector config from the local secrets directory."""
    config_path = HERE.parent / "secrets/config_sandbox.json"
    with open(config_path, "r") as config_file:
        return json.load(config_file)
def get_stream(input_config: Mapping[str, Any], stream_name: str) -> Stream:
stream_cls = type("a", (object,), {"name": stream_name})
configured_stream_cls = type("b", (object,), {"stream": stream_cls()})
@@ -42,3 +51,42 @@ def test_not_queryable_stream(caplog, input_config):
# check logs
assert "is not queryable" in caplog.records[-1].message
@pytest.mark.parametrize(
    "stream_name,log_messages",
    (
        (
            "Dashboard",
            ["switch to STANDARD(non-BULK) sync"],
        ),
        # CategoryNode has access limitation thus SF returns failed job statuses
        (
            "CategoryNode",
            ["insufficient access rights on cross-reference id", "switch to STANDARD(non-BULK) sync"],
        ),
    ),
    ids=["successful_switching", "failed_switching"],
)
def test_failed_jobs_with_successful_switching(caplog, input_sandbox_config, stream_name, log_messages):
    """Verify a failed BULK job makes the stream fall back to the standard (non-BULK) sync.

    The stream is first read without any mocking to capture the baseline record ids.
    Then the BULK job endpoints are mocked so every created job reports ``Failed``;
    the second read must emit the expected warnings and still yield the same records
    through the standard sync path.
    """
    stream = get_stream(input_sandbox_config, stream_name)
    # Baseline read (no mocks): the records the fallback path must reproduce.
    expected_record_ids = {record["Id"] for record in stream.read_records(sync_mode=SyncMode.full_refresh)}

    create_query_matcher = re.compile(r"jobs/query$")
    job_matcher = re.compile(r"jobs/query/fake_id$")
    # real_http=True lets every non-mocked request (auth, standard-sync reads) reach the real API.
    with requests_mock.Mocker(real_http=True) as m:
        # Job creation "succeeds" with a fake id whose status endpoint is mocked below.
        m.register_uri(
            "POST",
            create_query_matcher,
            json={
                "id": "fake_id",
            },
        )
        # Every status poll reports the job as failed, forcing the switch to the non-BULK sync.
        m.register_uri("GET", job_matcher, json={"state": "Failed", "errorMessage": "unknown error"})
        m.register_uri("DELETE", job_matcher, json={})

        with caplog.at_level(logging.WARNING):
            loaded_record_ids = {record["Id"] for record in stream.read_records(sync_mode=SyncMode.full_refresh)}

        # log_messages is matched against the newest log records walking backwards
        # (records[-1], records[-2], ...), i.e. in reverse order of emission.
        for i, log_message in enumerate(log_messages, 1):
            assert log_message in caplog.records[-i].message
    # Despite the forced BULK failures, the fallback must return the exact same records.
    assert loaded_record_ids == expected_record_ids