.github,tools: clean up dead files (#30011)

.github/ISSUE_TEMPLATE/2-issue-platform.yaml
@@ -1,62 +0,0 @@
name: 🐛 Report a platform, infra or deployment bug
description: Use this template when you have a problem operating the Airbyte platform
labels: [type/bug, area/platform, needs-triage]
body:
  - type: markdown
    attributes:
      value: >
        <p align="center">
          <a target="_blank" href="https://airbyte.com">
            <image>
              <source srcset="https://raw.githubusercontent.com/airbytehq/airbyte/master/.github/octavia-issue-template.svg">
              <img alt="octavia-welcome" src="https://raw.githubusercontent.com/airbytehq/airbyte/master/.github/octavia-issue-template.svg" width="auto" height="120">
            </image>
          </a>
        </p>
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report...
        Make sure to update this issue with a concise title and provide all information you have to
        help us debug the problem together. Issues not following the template will be closed.
  - type: dropdown
    id: deploy
    validations:
      required: true
    attributes:
      label: What method are you using to run Airbyte?
      multiple: false
      options:
        - Docker
        - Kubernetes
  - type: input
    id: platform-version
    attributes:
      label: Platform Version or Helm Chart Version
      description: "Some examples are 0.44.1 or 0.30.0; you can find the version at the bottom left of the Airbyte UI or in the .env / values.yaml file"
    validations:
      required: true
  - type: dropdown
    id: step
    attributes:
      label: At what step did the error happen?
      multiple: false
      options:
        - On deploy
        - During the Sync
        - Upgrading the Platform or Helm Chart
        - Other
  - type: textarea
    id: description
    attributes:
      label: Relevant information
      description: Please give any additional information you have and steps to reproduce the problem.
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output.
        This will be automatically formatted into code, so no need for backticks.
        We strongly recommend uploading the log file for further debugging.
      render: shell

.github/actions/ci-java-tests/action.yml
@@ -1,23 +0,0 @@
name: "Runner CI Java Tests"
description: "Runner CI Java Tests"
inputs:
  module-name:
    required: true
  module-folder:
    required: true

runs:
  using: "composite"
  steps:
    - name: "Build"
      shell: bash
      run: |
        rm -rf ${{ inputs.module-folder }}/.venv ${{ inputs.module-folder }}/build
        ROOT_DIR=$(git rev-parse --show-toplevel)
        ARG=:$(python -c "import os; print(os.path.relpath('${{ inputs.module-folder }}', start='${ROOT_DIR}').replace('/', ':') )")
        echo "./gradlew --no-daemon $ARG:build"
        ./gradlew --no-daemon "$ARG:clean"
        ./gradlew --no-daemon "$ARG:build"

.github/actions/ci-py-tests/action.yml
@@ -1,54 +0,0 @@
name: "Runner CI Python Tests"
description: "Runner CI Python Tests"
inputs:
  module-name:
    required: true
  module-folder:
    required: true
outputs:
  coverage-paths:
    description: "Coverage Paths"
    value: ${{ steps.build-coverage-reports.outputs.coverage-paths }}
  flake8-logs:
    description: "Flake8 Logs"
    value: ${{ steps.build-linter-reports.outputs.flake8-logs }}
  mypy-logs:
    description: "MyPy Logs"
    value: ${{ steps.build-linter-reports.outputs.mypy-logs }}
  black-diff:
    description: "Black Diff"
    value: ${{ steps.build-linter-reports.outputs.black-diff }}
  isort-diff:
    description: "Isort Diff"
    value: ${{ steps.build-linter-reports.outputs.isort-diff }}
runs:
  using: "composite"
  steps:
    - name: Build Coverage Reports
      id: build-coverage-reports
      shell: bash
      run: |
        GRADLE_JOB=$(source ./tools/lib/lib.sh; full_path_to_gradle_path ${{ inputs.module-folder }} "unitTest")
        REPORT_FOLDER="${{ inputs.module-folder }}/coverage/"
        ./gradlew --no-daemon -Preports_folder=${REPORT_FOLDER} ${GRADLE_JOB}

        echo "coverage-paths=coverage/coverage.xml" >> $GITHUB_OUTPUT

    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v2
      with:
        file: ${{ steps.build-coverage-reports.outputs.coverage-paths }}
        name: "UnitTests of ${{ inputs.module-name }}"

    - name: Build Linter Reports
      id: build-linter-reports
      shell: bash
      run: |
        GRADLE_JOB=$(source ./tools/lib/lib.sh; full_path_to_gradle_path ${{ inputs.module-folder }} "airbytePythonReport")
        REPORT_FOLDER="${{ inputs.module-folder }}/reports/"
        ./gradlew --no-daemon -Preports_folder=${REPORT_FOLDER} ${GRADLE_JOB}

        echo "mypy-logs=reports/mypy.log" >> $GITHUB_OUTPUT
        echo "black-diff=reports/black.diff" >> $GITHUB_OUTPUT
        echo "isort-diff=reports/isort.diff" >> $GITHUB_OUTPUT
        echo "flake8-logs=reports/flake.txt" >> $GITHUB_OUTPUT

.github/labeler.yml
@@ -1,36 +1,8 @@
# union of frontend, api, server, scheduler, protocol, worker, kubernetes
area/platform:
  - airbyte-api/*
  - airbyte-api/**/*
  - airbyte-persistence/*
  - airbyte-persistence/**/*
  - airbyte-server/*
  - airbyte-server/**/*
  - airbyte-workers/*
  - airbyte-workers/**/*
  - kube/*
  - kube/**/*
  - charts/*
  - charts/**/*

# Union of api, connectors, documentation, octavia-cli, CDK and normalization.
area/api:
  - airbyte-api/*
  - airbyte-api/**/*

area/server:
  - airbyte-server/*
  - airbyte-server/**/*

area/worker:
  - airbyte-workers/*
  - airbyte-workers/**/*

kubernetes:
  - kube/*
  - kube/**/*
  - charts/*
  - charts/**/*

area/connectors:
  - airbyte-integrations/connectors/*
  - airbyte-integrations/connectors/**/*
@@ -39,6 +11,10 @@ area/documentation:
  - docs/*
  - docs/**/*

area/octavia-cli:
  - octavia-cli/*
  - octavia-cli/**/*

CDK:
  - airbyte-cdk/*
  - airbyte-cdk/**/*

.github/workflows/slash-commands.yml
@@ -25,13 +25,8 @@ jobs:
          test
          legacy-test
          test-performance
          build-connector
          publish-connector
          publish
          legacy-publish
          publish-external
          gke-kube-test
          run-specific-test
          connector-performance
        static-args: |
          repo=${{ steps.getref.outputs.repo }}

CONTRIBUTORS.md
@@ -1,138 +1,420 @@
# Contributors

* [69mb](https://github.com/69mb)
* [addack](https://github.com/addack)
* [a-honcharenko](https://github.com/a-honcharenko)
* [aadityasinha-dotcom](https://github.com/aadityasinha-dotcom)
* [aaronsteers](https://github.com/aaronsteers)
* [aazam-gh](https://github.com/aazam-gh)
* [abaerptc](https://github.com/abaerptc)
* [aballiet](https://github.com/aballiet)
* [achaussende](https://github.com/achaussende)
* [ad-m](https://github.com/ad-m)
* [adam-bloom](https://github.com/adam-bloom)
* [adamf](https://github.com/adamf)
* [adamschmidt](https://github.com/adamschmidt)
* [AetherUnbound](https://github.com/AetherUnbound)
* [afranzi](https://github.com/afranzi)
* [agrass](https://github.com/agrass)
* [ahmed-buksh](https://github.com/ahmed-buksh)
* [airbyte-jenny](https://github.com/airbyte-jenny)
* [ajmhatch](https://github.com/ajmhatch)
* [ajzo90](https://github.com/ajzo90)
* [akashkulk](https://github.com/akashkulk)
* [akulgoel96](https://github.com/akulgoel96)
* [alafanechere](https://github.com/alafanechere)
* [alallema](https://github.com/alallema)
* [albert-marrero](https://github.com/albert-marrero)
* [alex-danilin](https://github.com/alex-danilin)
* [alex-gron](https://github.com/alex-gron)
* [alexander-marquardt](https://github.com/alexander-marquardt)
* [AlexanderBatoulis](https://github.com/AlexanderBatoulis)
* [alexandertsukanov](https://github.com/alexandertsukanov)
* [alexandr-shegeda](https://github.com/alexandr-shegeda)
* [alexchouraki](https://github.com/alexchouraki)
* [AlexJameson](https://github.com/AlexJameson)
* [alexnikitchuk](https://github.com/alexnikitchuk)
* [Alihassanc5](https://github.com/Alihassanc5)
* [Allexik](https://github.com/Allexik)
* [alovew](https://github.com/alovew)
* [AM-I-Human](https://github.com/AM-I-Human)
* [amaliaroye](https://github.com/amaliaroye)
* [ambirdsall](https://github.com/ambirdsall)
* [aminamos](https://github.com/aminamos)
* [amitku](https://github.com/amitku)
* [Amruta-Ranade](https://github.com/Amruta-Ranade)
* [anamargaridarl](https://github.com/anamargaridarl)
* [andnig](https://github.com/andnig)
* [andresbravog](https://github.com/andresbravog)
* [andrewlreeve](https://github.com/andrewlreeve)
* [andreyAtBB](https://github.com/andreyAtBB)
* [andriikorotkov](https://github.com/andriikorotkov)
* [andrzejdackiewicz](https://github.com/andrzejdackiewicz)
* [andyjih](https://github.com/andyjih)
* [AndyTwiss](https://github.com/AndyTwiss)
* [animer3009](https://github.com/animer3009)
* [anna-geller](https://github.com/anna-geller)
* [annalvova05](https://github.com/annalvova05)
* [antixar](https://github.com/antixar)
* [antonioneto-hotmart](https://github.com/antonioneto-hotmart)
* [anujgupta0711](https://github.com/anujgupta0711)
* [Anurag870](https://github.com/Anurag870)
* [anushree-agrawal](https://github.com/anushree-agrawal)
* [apostoltego](https://github.com/apostoltego)
* [archangelic](https://github.com/archangelic)
* [arimbr](https://github.com/arimbr)
* [arnaudjnn](https://github.com/arnaudjnn)
* [ArneZsng](https://github.com/ArneZsng)
* [arsenlosenko](https://github.com/arsenlosenko)
* [artem1205](https://github.com/artem1205)
* [artusiep](https://github.com/artusiep)
* [asafepy](https://github.com/asafepy)
* [asyarif93](https://github.com/asyarif93)
* [augan-rymkhan](https://github.com/augan-rymkhan)
* [Auric-Manteo](https://github.com/Auric-Manteo)
* [avaidyanatha](https://github.com/avaidyanatha)
* [avida](https://github.com/avida)
* [avirajsingh7](https://github.com/avirajsingh7)
* [axaysagathiya](https://github.com/axaysagathiya)
* [azhard](https://github.com/azhard)
* [b4stien](https://github.com/b4stien)
* [bala-ceg](https://github.com/bala-ceg)
* [bazarnov](https://github.com/bazarnov)
* [bbugh](https://github.com/bbugh)
* [bcbeidel](https://github.com/bcbeidel)
* [bdashrad](https://github.com/bdashrad)
* [benmoriceau](https://github.com/benmoriceau)
* [BenoitFayolle](https://github.com/BenoitFayolle)
* [BenoitHugonnard](https://github.com/BenoitHugonnard)
* [bgroff](https://github.com/bgroff)
* [Bhupesh-V](https://github.com/Bhupesh-V)
* [BirdboyBolu](https://github.com/BirdboyBolu)
* [bjgbeelen](https://github.com/bjgbeelen)
* [bkrausz](https://github.com/bkrausz)
* [bleonard](https://github.com/bleonard)
* [bnchrch](https://github.com/bnchrch)
* [bobvanluijt](https://github.com/bobvanluijt)
* [brebuanirello-equinix](https://github.com/brebuanirello-equinix)
* [BrentSouza](https://github.com/BrentSouza)
* [brianjlai](https://github.com/brianjlai)
* [brunofaustino](https://github.com/brunofaustino)
* [bstrawson](https://github.com/bstrawson)
* [btkcodedev](https://github.com/btkcodedev)
* [burmecia](https://github.com/burmecia)
* [bzAmin](https://github.com/bzAmin)
* [calebfornari](https://github.com/calebfornari)
* [cameronwtaylor](https://github.com/cameronwtaylor)
* [camro](https://github.com/camro)
* [carlkibler](https://github.com/carlkibler)
* [carlonuccio](https://github.com/carlonuccio)
* [catpineapple](https://github.com/catpineapple)
* [cgardens](https://github.com/cgardens)
* [chandrasekharan98](https://github.com/chandrasekharan98)
* [ChristoGrab](https://github.com/ChristoGrab)
* [ChristopheDuong](https://github.com/ChristopheDuong)
* [ciancullinan](https://github.com/ciancullinan)
* [cirdes](https://github.com/cirdes)
* [cjwooo](https://github.com/cjwooo)
* [clnoll](https://github.com/clnoll)
* [cobobrien](https://github.com/cobobrien)
* [coetzeevs](https://github.com/coetzeevs)
* [coeurdestenebres](https://github.com/coeurdestenebres)
* [colesnodgrass](https://github.com/colesnodgrass)
* [collinscangarella](https://github.com/collinscangarella)
* [cpdeethree](https://github.com/cpdeethree)
* [CrafterKolyan](https://github.com/CrafterKolyan)
* [cstruct](https://github.com/cstruct)
* [ct-martin](https://github.com/ct-martin)
* [cuyk](https://github.com/cuyk)
* [cynthiaxyin](https://github.com/cynthiaxyin)
* [CyprienBarbault](https://github.com/CyprienBarbault)
* [czuares](https://github.com/czuares)
* [Daemonxiao](https://github.com/Daemonxiao)
* [dainiussa](https://github.com/dainiussa)
* [dalo390](https://github.com/dalo390)
* [damianlegawiec](https://github.com/damianlegawiec)
* [dandpz](https://github.com/dandpz)
* [daniel-cortez-stevenson](https://github.com/daniel-cortez-stevenson)
* [danieldiamond](https://github.com/danieldiamond)
* [Danucas](https://github.com/Danucas)
* [danvass](https://github.com/danvass)
* [darian-heede](https://github.com/darian-heede)
* [darynaishchenko](https://github.com/darynaishchenko)
* [DavidSpek](https://github.com/DavidSpek)
* [davinchia](https://github.com/davinchia)
* [davydov-d](https://github.com/davydov-d)
* [dbyzero](https://github.com/dbyzero)
* [ddoyediran](https://github.com/ddoyediran)
* [deepansh96](https://github.com/deepansh96)
* [delenamalan](https://github.com/delenamalan)
* [denis-sokolov](https://github.com/denis-sokolov)
* [dependabot[bot]](https://github.com/apps/dependabot)
* [dictcp](https://github.com/dictcp)
* [didistars328](https://github.com/didistars328)
* [digambar-t7](https://github.com/digambar-t7)
* [dijonkitchen](https://github.com/dijonkitchen)
* [dizel852](https://github.com/dizel852)
* [dmateusp](https://github.com/dmateusp)
* [DominusKelvin](https://github.com/DominusKelvin)
* [domzae](https://github.com/domzae)
* [DoNotPanicUA](https://github.com/DoNotPanicUA)
* [Dracyr](https://github.com/Dracyr)
* [drrest](https://github.com/drrest)
* [dtt101](https://github.com/dtt101)
* [edbizarro](https://github.com/edbizarro)
* [edgao](https://github.com/edgao)
* [edmundito](https://github.com/edmundito)
* [efimmatytsin](https://github.com/efimmatytsin)
* [eliziario](https://github.com/eliziario)
* [elliottrabac](https://github.com/elliottrabac)
* [emmaling27](https://github.com/emmaling27)
* [erica-airbyte](https://github.com/erica-airbyte)
* [erohmensing](https://github.com/erohmensing)
* [etsybaev](https://github.com/etsybaev)
* [eugene-kulak](https://github.com/eugene-kulak)
* [evantahler](https://github.com/evantahler)
* [ffabss](https://github.com/ffabss)
* [flash1293](https://github.com/flash1293)
* [franviera92](https://github.com/franviera92)
* [freimer](https://github.com/freimer)
* [FUT](https://github.com/FUT)
* [gaart](https://github.com/gaart)
* [gasparakos](https://github.com/gasparakos)
* [geekwhocodes](https://github.com/geekwhocodes)
* [gingeard](https://github.com/gingeard)
* [ganpatagarwal](https://github.com/ganpatagarwal)
* [gargatuma](https://github.com/gargatuma)
* [gergelylendvai](https://github.com/gergelylendvai)
* [girarda](https://github.com/girarda)
* [git-phu](https://github.com/git-phu)
* [github-actions[bot]](https://github.com/apps/github-actions)
* [Gitznik](https://github.com/Gitznik)
* [gordalina](https://github.com/gordalina)
* [gosusnp](https://github.com/gosusnp)
* [grebessi](https://github.com/grebessi)
* [grishick](https://github.com/grishick)
* [grubberr](https://github.com/grubberr)
* [gvillafanetapia](https://github.com/gvillafanetapia)
* [h7kanna](https://github.com/h7kanna)
* [haliva-firmbase](https://github.com/haliva-firmbase)
* [haithem-souala](https://github.com/haithem-souala)
* [haoranyu](https://github.com/haoranyu)
* [harshithmullapudi](https://github.com/harshithmullapudi)
* [heade](https://github.com/heade)
* [hehex9](https://github.com/hehex9)
* [helderco](https://github.com/helderco)
* [henriblancke](https://github.com/henriblancke)
* [Hesperide](https://github.com/Hesperide)
* [hillairet](https://github.com/hillairet)
* [himanshuc3](https://github.com/himanshuc3)
* [hntan](https://github.com/hntan)
* [htrueman](https://github.com/htrueman)
* [hudsondba](https://github.com/hudsondba)
* [hydrosquall](https://github.com/hydrosquall)
* [iberchid](https://github.com/iberchid)
* [igrankova](https://github.com/igrankova)
* [igsaf2](https://github.com/igsaf2)
* [Imbruced](https://github.com/Imbruced)
* [irynakruk](https://github.com/irynakruk)
* [isaacharrisholt](https://github.com/isaacharrisholt)
* [isalikov](https://github.com/isalikov)
* [itaiad200](https://github.com/itaiad200)
* [itaseskii](https://github.com/itaseskii)
* [jacqueskpoty](https://github.com/jacqueskpoty)
* [jaimefr](https://github.com/jaimefr)
* [Jamakase](https://github.com/Jamakase)
* [Janardhanpoola](https://github.com/Janardhanpoola)
* [jinnig](https://github.com/jinnig)
* [Jagrutiti](https://github.com/Jagrutiti)
* [jamakase](https://github.com/jamakase)
* [jartek](https://github.com/jartek)
* [jbfbell](https://github.com/jbfbell)
* [jcowanpdx](https://github.com/jcowanpdx)
* [jdclarke5](https://github.com/jdclarke5)
* [jdpgrailsdev](https://github.com/jdpgrailsdev)
* [jeremySrgt](https://github.com/jeremySrgt)
* [jhajajaas](https://github.com/jhajajaas)
* [jhammarstedt](https://github.com/jhammarstedt)
* [jnr0790](https://github.com/jnr0790)
* [joelluijmes](https://github.com/joelluijmes)
* [johnlafleur](https://github.com/johnlafleur)
* [jonathan-duval](https://github.com/jonathan-duval)
* [JonsSpaghetti](https://github.com/JonsSpaghetti)
* [jonstacks](https://github.com/jonstacks)
* [jordan-glitch](https://github.com/jordan-glitch)
* [josephkmh](https://github.com/josephkmh)
* [jrhizor](https://github.com/jrhizor)
* [juliachvyrova](https://github.com/juliachvyrova)
* [JulianRommel](https://github.com/JulianRommel)
* [juliatournant](https://github.com/juliatournant)
* [justinbchau](https://github.com/justinbchau)
* [juweins](https://github.com/juweins)
* [jzcruiser](https://github.com/jzcruiser)
* [kaklakariada](https://github.com/kaklakariada)
* [karinakuz](https://github.com/karinakuz)
* [kattos-aws](https://github.com/kattos-aws)
* [KayakinKoder](https://github.com/KayakinKoder)
* [keu](https://github.com/keu)
* [kgrover](https://github.com/kgrover)
* [kimerinn](https://github.com/kimerinn)
* [koconder](https://github.com/koconder)
* [koji-m](https://github.com/koji-m)
* [krishnaglick](https://github.com/krishnaglick)
* [krisjan-oldekamp](https://github.com/krisjan-oldekamp)
* [ksengers](https://github.com/ksengers)
* [kzzzr](https://github.com/kzzzr)
* [lazebnyi](https://github.com/lazebnyi)
* [leo-schick](https://github.com/leo-schick)
* [letiescanciano](https://github.com/letiescanciano)
* [lgomezm](https://github.com/lgomezm)
* [lideke](https://github.com/lideke)
* [lizdeika](https://github.com/lizdeika)
* [lmeyerov](https://github.com/lmeyerov)
* [luizgribeiro](https://github.com/luizgribeiro)
* [m-ronchi](https://github.com/m-ronchi)
* [lmossman](https://github.com/lmossman)
* [maciej-nedza](https://github.com/maciej-nedza)
* [macmv](https://github.com/macmv)
* [Mainara](https://github.com/Mainara)
* [makalaaneesh](https://github.com/makalaaneesh)
* [manavkohli](https://github.com/manavkohli)
* [makyash](https://github.com/makyash)
* [malikdiarra](https://github.com/malikdiarra)
* [marcelopio](https://github.com/marcelopio)
* [marcosmarxm](https://github.com/marcosmarxm)
* [mariamthiam](https://github.com/mariamthiam)
* [masonwheeler](https://github.com/masonwheeler)
* [MatheusdiPaula](https://github.com/MatheusdiPaula)
* [masyagin1998](https://github.com/masyagin1998)
* [matter-q](https://github.com/matter-q)
* [maxi297](https://github.com/maxi297)
* [MaxKrog](https://github.com/MaxKrog)
* [MaxwellJK](https://github.com/MaxwellJK)
* [mbbroberg](https://github.com/mbbroberg)
* [mhamas](https://github.com/mhamas)
* [mdibaiee](https://github.com/mdibaiee)
* [mfsiega-airbyte](https://github.com/mfsiega-airbyte)
* [michaelnguyen26](https://github.com/michaelnguyen26)
* [michel-tricot](https://github.com/michel-tricot)
* [mickaelandrieu](https://github.com/mickaelandrieu)
* [midavadim](https://github.com/midavadim)
* [mid](https://github.com/mid)
* [mildbyte](https://github.com/mildbyte)
* [minimax75](https://github.com/minimax75)
* [mjirv](https://github.com/mjirv)
* [misteryeo](https://github.com/misteryeo)
* [mkhokh-33](https://github.com/mkhokh-33)
* [mlavoie-sm360](https://github.com/mlavoie-sm360)
* [mmolimar](https://github.com/mmolimar)
* [MohamadHaziq](https://github.com/MohamadHaziq)
* [mohammad-bolt](https://github.com/mohammad-bolt)
* [moszutij](https://github.com/moszutij)
* [mohamagdy](https://github.com/mohamagdy)
* [mohitreddy1996](https://github.com/mohitreddy1996)
* [monai](https://github.com/monai)
* [mrhallak](https://github.com/mrhallak)
* [Muriloo](https://github.com/Muriloo)
* [muutech](https://github.com/muutech)
* [nclsbayona](https://github.com/nclsbayona)
* [nicholasbull](https://github.com/nicholasbull)
* [mustangJaro](https://github.com/mustangJaro)
* [Mykyta-Serbynevskyi](https://github.com/Mykyta-Serbynevskyi)
* [n0rritt](https://github.com/n0rritt)
* [nastra](https://github.com/nastra)
* [nataliekwong](https://github.com/nataliekwong)
* [natalyjazzviolin](https://github.com/natalyjazzviolin)
* [nauxliu](https://github.com/nauxliu)
* [nguyenaiden](https://github.com/nguyenaiden)
* [NipunaPrashan](https://github.com/NipunaPrashan)
* [Nmaxime](https://github.com/Nmaxime)
* [noahkawasaki-airbyte](https://github.com/noahkawasaki-airbyte)
* [noahkawasakigoogle](https://github.com/noahkawasakigoogle)
* [novotl](https://github.com/novotl)
* [ntucker](https://github.com/ntucker)
* [numphileo](https://github.com/numphileo)
* [nyergler](https://github.com/nyergler)
* [octavia-squidington-iii](https://github.com/octavia-squidington-iii)
* [olivermeyer](https://github.com/olivermeyer)
* [omid](https://github.com/omid)
* [oreopot](https://github.com/oreopot)
* [pabloescoder](https://github.com/pabloescoder)
* [panhavad](https://github.com/panhavad)
* [pecalleja](https://github.com/pecalleja)
* [pedroslopez](https://github.com/pedroslopez)
* [perangel](https://github.com/perangel)
* [peter279k](https://github.com/peter279k)
* [PhilipCorr](https://github.com/PhilipCorr)
* [philippeboyd](https://github.com/philippeboyd)
* [Phlair](https://github.com/Phlair)
* [pmossman](https://github.com/pmossman)
* [po3na4skld](https://github.com/po3na4skld)
* [ppatali](https://github.com/ppatali)
* [PoCTo](https://github.com/PoCTo)
* [postamar](https://github.com/postamar)
* [prasrvenkat](https://github.com/prasrvenkat)
* [rclmenezes](https://github.com/rclmenezes)
* [prateekmukhedkar](https://github.com/prateekmukhedkar)
* [proprefenetre](https://github.com/proprefenetre)
* [Pwaldi](https://github.com/Pwaldi)
* [rach-r](https://github.com/rach-r)
* [ramonvermeulen](https://github.com/ramonvermeulen)
* [ReptilianBrain](https://github.com/ReptilianBrain)
* [roshan](https://github.com/roshan)
* [rileybrook](https://github.com/rileybrook)
* [RobertoBonnet](https://github.com/RobertoBonnet)
* [robgleason](https://github.com/robgleason)
* [RobLucchi](https://github.com/RobLucchi)
* [rodireich](https://github.com/rodireich)
* [roisinbolt](https://github.com/roisinbolt)
* [roman-romanov-o](https://github.com/roman-romanov-o)
* [roman-yermilov-gl](https://github.com/roman-yermilov-gl)
* [ron-damon](https://github.com/ron-damon)
* [rparrapy](https://github.com/rparrapy)
* [sabifranjo](https://github.com/sabifranjo)
* [ryankfu](https://github.com/ryankfu)
* [sajarin](https://github.com/sajarin)
* [samos123](https://github.com/samos123)
* [sarafonseca-123](https://github.com/sarafonseca-123)
* [sashaNeshcheret](https://github.com/sashaNeshcheret)
* [SatishChGit](https://github.com/SatishChGit)
* [sbjorn](https://github.com/sbjorn)
* [schlattk](https://github.com/schlattk)
* [scottleechua](https://github.com/scottleechua)
* [sdairs](https://github.com/sdairs)
* [sergei-solonitcyn](https://github.com/sergei-solonitcyn)
* [sergio-ropero](https://github.com/sergio-ropero)
* [sh4sh](https://github.com/sh4sh)
* [shadabshaukat](https://github.com/shadabshaukat)
* [sherifnada](https://github.com/sherifnada)
* [subhaklp](https://github.com/subhaklp)
* [Shishir-rmv](https://github.com/Shishir-rmv)
* [shrodingers](https://github.com/shrodingers)
* [shyngysnurzhan](https://github.com/shyngysnurzhan)
* [siddhant3030](https://github.com/siddhant3030)
* [sivankumar86](https://github.com/sivankumar86)
* [snyk-bot](https://github.com/snyk-bot)
* [SofiiaZaitseva](https://github.com/SofiiaZaitseva)
* [sophia-wiley](https://github.com/sophia-wiley)
* [SPTKL](https://github.com/SPTKL)
* [subhamX](https://github.com/subhamX)
* [subodh1810](https://github.com/subodh1810)
* [tgiardina](https://github.com/tgiardina)
* [suhomud](https://github.com/suhomud)
* [supertopher](https://github.com/supertopher)
* [swyxio](https://github.com/swyxio)
* [tbcdns](https://github.com/tbcdns)
* [tealjulia](https://github.com/tealjulia)
* [terencecho](https://github.com/terencecho)
* [thanhlmm](https://github.com/thanhlmm)
* [thomas-vl](https://github.com/thomas-vl)
* [troyharvey](https://github.com/troyharvey)
* [timroes](https://github.com/timroes)
* [tirth7777777](https://github.com/tirth7777777)
* [tjirab](https://github.com/tjirab)
* [tkorenko](https://github.com/tkorenko)
* [tolik0](https://github.com/tolik0)
* [topefolorunso](https://github.com/topefolorunso)
* [trowacat](https://github.com/trowacat)
* [tryangul](https://github.com/tryangul)
* [TSkrebe](https://github.com/TSkrebe)
* [tuanchris](https://github.com/tuanchris)
* [tuliren](https://github.com/tuliren)
* [tyagi-data-wizard](https://github.com/tyagi-data-wizard)
* [tybernstein](https://github.com/tybernstein)
* [TymoshokDmytro](https://github.com/TymoshokDmytro)
* [tyschroed](https://github.com/tyschroed)
* [varunbpatil](https://github.com/varunbpatil)
* [vinhloc30796](https://github.com/vinhloc30796)
* [ufou](https://github.com/ufou)
* [Upmitt](https://github.com/Upmitt)
* [VitaliiMaltsev](https://github.com/VitaliiMaltsev)
* [vitaliizazmic](https://github.com/vitaliizazmic)
* [vladimir-remar](https://github.com/vladimir-remar)
* [vovavovavovavova](https://github.com/vovavovavovavova)
* [vsayer](https://github.com/vsayer)
* [wallies](https://github.com/wallies)
* [winar-jin](https://github.com/winar-jin)
* [wissevrowl](https://github.com/wissevrowl)
* [Wittiest](https://github.com/Wittiest)
* [wjwatkinson](https://github.com/wjwatkinson)
* [Xabilahu](https://github.com/Xabilahu)
* [xiaohansong](https://github.com/xiaohansong)
* [xpuska513](https://github.com/xpuska513)
* [yahu98](https://github.com/yahu98)
* [yannibenoit](https://github.com/yannibenoit)
* [yaroslav-dudar](https://github.com/yaroslav-dudar)
* [yaroslav-hrytsaienko](https://github.com/yaroslav-hrytsaienko)
* [YatsukBogdan1](https://github.com/YatsukBogdan1)
* [ycherniaiev](https://github.com/ycherniaiev)
* [yevhenii-ldv](https://github.com/yevhenii-ldv)
* [YiyangLi](https://github.com/YiyangLi)
* [YowanR](https://github.com/YowanR)
* [yuhuishi-convect](https://github.com/yuhuishi-convect)
* [yurii-bidiuk](https://github.com/yurii-bidiuk)
* [Zawar92](https://github.com/Zawar92)
* [zestyping](https://github.com/zestyping)
* [Zirochkaa](https://github.com/Zirochkaa)
* [zkid18](https://github.com/zkid18)
* [zuc](https://github.com/zuc)
* [zzstoatzz](https://github.com/zzstoatzz)
* [zzztimbo](https://github.com/zzztimbo)

@@ -1,56 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

assert_root

## Helper functions

get_epoch_time() {
  date +'%s'
}

check_success() {
  docker compose ps --all | grep "^$1" | grep -ie 'exit 0' -ie 'exited (0)' >/dev/null || (echo "$1 didn't run successfully"; exit 1)
}

##

echo "Starting app..."

# Detach so we can run subsequent commands
# NOTE: this passes APPLY_FIELD_SELECTION=true, which enables a feature -- field selection -- that is currently disabled by default.
# We want to run our CI tests against the new feature while we prepare to release it.
VERSION=dev TRACKING_STRATEGY=logging USE_STREAM_CAPABLE_STATE=true BASIC_AUTH_USERNAME="" BASIC_AUTH_PASSWORD="" APPLY_FIELD_SELECTION=true docker compose -f docker-compose.yaml -f docker-compose.acceptance-test.yaml up -d

# Sometimes source/dest containers using airbyte volumes survive shutdown, and need to be killed in order to shut down properly.
shutdown_cmd="docker compose down -v || docker kill \$(docker ps -a -f volume=airbyte_workspace -f volume=airbyte_data -f volume=airbyte_db -q) && docker compose down -v"
# Uncomment for debugging. Warning, this is verbose.
# trap "echo 'docker compose logs:' && docker compose logs -t --tail 1000 && $shutdown_cmd" EXIT

echo "Waiting for services to begin"
starttime=`get_epoch_time`
maxtime=300
while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8000/api/v1/health)" != "200" ]];
do
  echo "Waiting for docker deployment.."
  currenttime=`get_epoch_time`
  if [[ $(( $currenttime - $starttime )) -gt $maxtime ]]; then
    docker compose ps
    echo "Platform is taking more than ${maxtime}s to start. Aborting..."
    exit 1
  fi
  sleep 5
done

# Get a snapshot of the docker compose state
docker compose ps

# Make sure init containers ran successfully
check_success 'init'
check_success 'airbyte-bootloader'

echo "Running e2e tests via gradle"
SUB_BUILD=PLATFORM USE_EXTERNAL_DEPLOYMENT=true ./gradlew :airbyte-tests:acceptanceTests --rerun-tasks --scan

@@ -1,107 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

assert_root

echo "Getting docker internal host ip"
DOCKER_HOST_IP=$(ip -f inet add show docker0 | sed -En -e 's/.*inet ([0-9.]+).*/\1/p')

echo "Patching coredns configmap NodeHosts with new entry for docker host"
kubectl patch configmap/coredns \
  -n kube-system \
  --type merge \
  -p '{"data":{"NodeHosts": "${DOCKER_HOST_IP} host.docker.internal" }}'

if [ -n "$CI" ]; then
  echo "Deploying fluentbit"
  helm repo add fluent https://fluent.github.io/helm-charts
  helm repo update fluent
  sed -i "s/PLACEHOLDER/${WORKFLOW_RUN_ID}/" tools/bin/fluent_values.yaml
  helm install --values tools/bin/fluent_values.yaml --set env[1].name="AWS_ACCESS_KEY_ID" --set env[1].value=$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_access_key_id) \
    --set env[2].name="AWS_SECRET_ACCESS_KEY" --set env[2].value=$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_secret_access_key) \
    --set env[3].name="AWS_S3_BUCKET" --set env[3].value=${AWS_S3_BUCKET} \
    --set env[4].name="SUITE_TYPE" --set env[4].value="helm-logs" \
    --generate-name fluent/fluent-bit
fi

echo "Replacing default Chart.yaml and values.yaml with a test one"
mv charts/airbyte/Chart.yaml charts/airbyte/Chart.yaml.old
mv charts/airbyte/Chart.yaml.test charts/airbyte/Chart.yaml
mv charts/airbyte/values.yaml charts/airbyte/values.yaml.old
mv charts/airbyte/values.yaml.test charts/airbyte/values.yaml

echo "Starting app..."

echo "Check if kind cluster is running..."
sudo docker ps

echo "Applying dev-integration-test manifests to kubernetes..."
cd charts/airbyte && helm repo add bitnami https://charts.bitnami.com/bitnami && helm dep update && cd -
helm upgrade --install --debug airbyte charts/airbyte

echo "Waiting for server to be ready..."
kubectl wait --for=condition=Available deployment/airbyte-server --timeout=300s || (kubectl describe pods && exit 1)

echo "Scale up workers by 2"
kubectl scale --replicas=2 deployment airbyte-worker

echo "Listing nodes scheduled for pods..."
kubectl describe pods | grep "Name\|Node"

# allocates a lot of time to start kube. takes a while for postgres+temporal to work things out
sleep 120

if [ -n "$CI" ]; then
  server_logs () { kubectl logs deployment.apps/airbyte-server > /tmp/kubernetes_logs/server.txt; }
  pod_sweeper_logs () { kubectl logs deployment.apps/airbyte-pod-sweeper > /tmp/kubernetes_logs/pod_sweeper.txt; }
  worker_logs () { kubectl logs deployment.apps/airbyte-worker > /tmp/kubernetes_logs/worker.txt; }
  db_logs () { kubectl logs deployment.apps/airbyte-db > /tmp/kubernetes_logs/db.txt; }
  temporal_logs () { kubectl logs deployment.apps/airbyte-temporal > /tmp/kubernetes_logs/temporal.txt; }
  describe_pods () { kubectl describe pods > /tmp/kubernetes_logs/describe_pods.txt; }
  describe_nodes () { kubectl describe nodes > /tmp/kubernetes_logs/describe_nodes.txt; }
  write_all_logs () {
    server_logs;
    worker_logs;
    db_logs;
    temporal_logs;
    pod_sweeper_logs;
    describe_nodes;
    describe_pods;
  }
  # Uncomment for debugging. Warning, this is verbose.
  # trap "mkdir -p /tmp/kubernetes_logs && write_all_logs" EXIT
fi

kubectl expose $(kubectl get po -l app.kubernetes.io/name=server -o name) --name exposed-server-svc --type NodePort --overrides '{ "apiVersion": "v1","spec":{"ports": [{"port":8001,"protocol":"TCP","targetPort":8001,"nodePort":8001}]}}'

echo "Running worker integration tests..."
KUBE=true SUB_BUILD=PLATFORM ./gradlew :airbyte-workers:integrationTest --scan

echo "Printing system disk usage..."
df -h

echo "Printing docker disk usage..."
docker system df

if [ -n "$CI" ]; then
  echo "Pruning all images..."
  docker image prune --all --force

  echo "Printing system disk usage after pruning..."
  df -h

  echo "Printing docker disk usage after pruning..."
fi
docker system df

echo "Running e2e tests via gradle..."
KUBE=true SUB_BUILD=PLATFORM USE_EXTERNAL_DEPLOYMENT=true ./gradlew :airbyte-tests:acceptanceTests --scan

echo "Reverting changes back"
mv charts/airbyte/Chart.yaml charts/airbyte/Chart.yaml.test
mv charts/airbyte/Chart.yaml.old charts/airbyte/Chart.yaml

@@ -1,52 +0,0 @@
#!/usr/bin/env bash

# ------------- Import some defaults for the shell

# Source shell defaults
# $0 is the currently running program (this file)
this_file_directory=$(dirname $0)
relative_path_to_defaults=$this_file_directory/../shell_defaults

# if a file exists there, source it. otherwise complain
if test -f $relative_path_to_defaults; then
  # source and '.' are the same program
  source $relative_path_to_defaults
else
  echo -e "\033[31m\nFAILED TO SOURCE TEST RUNNING OPTIONS.\033[39m"
  echo -e "\033[31mTried $relative_path_to_defaults\033[39m"
  exit 1
fi

# ------------- Start Main
set +o xtrace
echo -e "$blue_text""This test ensures no changes result from running docusaurus build""$default_text"
set -o xtrace

# Generate static files
cd $this_file_directory # lets us run this without a relative path dependency
cd ../../docusaurus
yarn install
yarn run build

# +o counterintuitively unsets the option
set +o errexit # exit 1 expected below in normal operation
# this line is the test
git diff-index --quiet HEAD --
clean=$? # $? is the return status of the last command
set -o errexit

# ------------- User communication on testing results

set +o xtrace

if test $clean -eq 0; then
  echo -e "$blue_text""\n\n\nDocusaurus has no changes to commit!""$default_text"
  echo -e "$blue_text""Generated documentation should be the same as local testing""$default_text"
else
  echo -e "$red_text""\n\n\ndocusaurus build resulted in changes from this commit.""$default_text"
  echo -e "$red_text""Run docusaurus build locally (yarn run build), commit, and try again""$default_text"
fi

@@ -1,30 +0,0 @@
#!/usr/bin/env bash

# ------------- Import some defaults for the shell

# Source shell defaults
# $0 is the currently running program (this file)
this_file_directory=$(dirname $0)
relative_path_to_defaults=$this_file_directory/../shell_defaults

# if a file exists there, source it. otherwise complain
if test -f $relative_path_to_defaults; then
  # source and '.' are the same program
  source $relative_path_to_defaults
else
  echo -e "\033[31m\nFAILED TO SOURCE TEST RUNNING OPTIONS.\033[39m"
  echo -e "\033[31mTried $relative_path_to_defaults\033[39m"
  exit 1
fi

diff_output="$(git --no-pager diff)"

set +o xtrace
if test -n "$diff_output"; then
  echo -e "$red_text""File changes were detected! Lame!""$default_text"
  echo -e "$red_text""This is usually due to forgetting to run a command locally before committing""$default_text"
  echo -e "$red_text""try running the format command and see if you get a diff""$default_text"
  echo -e "$red_text""ie: SUB_BUILD=PLATFORM ./gradlew format --scan --info --stacktrace""$default_text"
else
  echo -e "$blue_text""No git changes detected! Yay!""$default_text"
fi

@@ -1,68 +0,0 @@
printf "Docker ";
if [[ $(which docker) && $(docker --version) ]]; then
  printf "is installed"
else
  printf "needs to be installed"
fi;
printf "\n";
desired="14"
printf "Java ";
if [[ "$(which java)" && "$(java --version)" ]];
then
  printf "installed"
  str="$(java --version)"
  IFS=' ' read -ra array <<< "${str}"
  version="${array[1]}"
  if [[ "${version}" > "${desired}" || "${version}" == "${desired}" ]];
  then
    printf " and functional"
  else
    printf " but not functional, must have version ${desired} at least"
  fi
else
  printf "not installed, must have version ${desired} at least"
fi;
printf "\n";
desired="20.1"
printf "Pip ";
if [[ "$(which pip)" && "$(pip --version)" ]];
then
  printf "installed"
  str="$(pip --version)"
  IFS=' ' read -ra array <<< "${str}"
  version="${array[1]}"
  if [[ "${version}" > "${desired}" || "${version}" == "${desired}" ]];
  then
    printf " and functional"
  else
    printf " but not functional, must have version ${desired} at least"
  fi
else
  printf "not installed, must have version ${desired} at least"
fi;
printf "\n";
desired="3.9.11"
printf "Python ";
if [[ "$(which python3)" && "$(python3 --version)" ]];
then
  printf "installed"
  str="$(python3 --version)"
  IFS=' ' read -ra array <<< "${str}"
  version="${array[1]}"
  if [[ "${version}" > "${desired}" || "${version}" == "${desired}" ]];
  then
    printf " and functional"
  else
    printf " but not functional, must have version ${desired} at least"
  fi
else
  printf "not installed, must have version ${desired} at least"
fi;
printf "\n";
printf "JQ ";
if [[ $(which jq) && $(jq --version) ]]; then
  printf "is installed"
else
  printf "needs to be installed"
fi;
printf "\n";

@@ -1,86 +0,0 @@
#!/usr/bin/env bash

set -e

# GCS resources for the following tests are located in the dataline-integration-testing GCP project.
# GCS testing creds can be found in the "google cloud storage ( gcs ) test creds" secret in the `Shared-integration-tests`
# folder in Lastpass.

# AWS resources for the following tests are located in the dev account.
# S3 testing creds can be found in the `AWS_S3_INTEGRATION_TEST_CREDS` secret in the `Shared-integration-tests`
# folder in Lastpass.

echo "Writing cloud storage credentials.."

# S3
export AWS_ACCESS_KEY_ID="$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_access_key_id)"
export AWS_SECRET_ACCESS_KEY="$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_secret_access_key)"
export S3_LOG_BUCKET=airbyte-kube-integration-logging-test
export S3_LOG_BUCKET_REGION=us-west-2

# GCS
echo "$GOOGLE_CLOUD_STORAGE_TEST_CREDS" > "/tmp/gcs.json"
export GOOGLE_APPLICATION_CREDENTIALS="/tmp/gcs.json"
export GCS_LOG_BUCKET=airbyte-kube-integration-logging-test

# Run the logging test first since the same client is used in the log4j2 integration test.
echo "Running log client tests.."
SUB_BUILD=PLATFORM ./gradlew :airbyte-config:models:logClientsIntegrationTest --scan

echo "Running cloud storage tests.."
SUB_BUILD=PLATFORM ./gradlew :airbyte-workers:cloudStorageIntegrationTest --scan

# Reset existing configurations and run this for each possible configuration
# These configurations mirror the configurations documented in https://docs.airbyte.io/deploying-airbyte/on-kubernetes#configure-logs.
# Some duplication here for clarity.
export WORKER_ENVIRONMENT=kubernetes

echo "Setting S3 configuration.."
export AWS_ACCESS_KEY_ID="$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_access_key_id)"
export AWS_SECRET_ACCESS_KEY="$(echo "$AWS_S3_INTEGRATION_TEST_CREDS" | jq -r .aws_secret_access_key)"
export S3_LOG_BUCKET=airbyte-kube-integration-logging-test
export S3_LOG_BUCKET_REGION=us-west-2
export S3_MINIO_ENDPOINT=
export S3_PATH_STYLE_ACCESS=

export GOOGLE_APPLICATION_CREDENTIALS=
export GCS_LOG_BUCKET=

echo "Running logging to S3 test.."
SUB_BUILD=PLATFORM ./gradlew :airbyte-config:models:log4j2IntegrationTest --scan --rerun-tasks -i

echo "Setting GCS configuration.."
export AWS_ACCESS_KEY_ID=
export AWS_SECRET_ACCESS_KEY=
export S3_LOG_BUCKET=
export S3_LOG_BUCKET_REGION=
export S3_MINIO_ENDPOINT=
export S3_PATH_STYLE_ACCESS=

export GOOGLE_APPLICATION_CREDENTIALS="/tmp/gcs.json"
export GCS_LOG_BUCKET=airbyte-kube-integration-logging-test

echo "Running logging to GCS test.."
SUB_BUILD=PLATFORM ./gradlew :airbyte-config:models:log4j2IntegrationTest --scan --rerun-tasks -i

echo "Starting Minio service.."
wget https://dl.min.io/server/minio/release/linux-amd64/minio
chmod +x minio

export MINIO_ROOT_USER=minio
export MINIO_ROOT_PASSWORD=miniostorage
./minio server /tmp/desktop &

echo "Setting Minio configuration.."
export AWS_ACCESS_KEY_ID=minio
export AWS_SECRET_ACCESS_KEY=miniostorage
export S3_LOG_BUCKET=airbyte-kube-integration-logging-test
export S3_LOG_BUCKET_REGION=
export S3_MINIO_ENDPOINT=http://localhost:9000
export S3_PATH_STYLE_ACCESS=true

export GOOGLE_APPLICATION_CREDENTIALS=
export GCS_LOG_BUCKET=

echo "Running logging to Minio test.."
SUB_BUILD=PLATFORM ./gradlew :airbyte-config:models:log4j2IntegrationTest --scan --rerun-tasks -i

@@ -1,15 +0,0 @@
config:
  outputs: |
    [OUTPUT]
        Name s3
        Match kube.*
        bucket ${AWS_S3_BUCKET}
        region us-east-2
        total_file_size 1M
        upload_timeout 2m
        use_put_object On
        log_key log
        s3_key_format /${SUITE_TYPE}/${WORKFLOW_RUN_ID}/$TAG[4]/$UUID.log
env:
  - name: WORKFLOW_RUN_ID
    value: 'PLACEHOLDER'

@@ -1,75 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

assert_root
kubectl --help

NAMESPACE=$(openssl rand -hex 12)
echo "Namespace" $NAMESPACE

TAG=$(openssl rand -hex 12)
echo "Tag" $TAG

docker login -u "$DOCKER_HUB_USERNAME" -p "$DOCKER_HUB_PASSWORD"
VERSION=$TAG ./gradlew build
# VERSION=$TAG docker compose -f docker-compose.build.yaml push

# For running on Mac
#sed -i .bak 's/default/'$NAMESPACE'/g' kube/overlays/dev/kustomization.yaml
#sed -i .bak 's/dev/'$TAG'/g' kube/overlays/dev/kustomization.yaml

# sed -i 's/default/'$NAMESPACE'/g' kube/overlays/dev/kustomization.yaml
# sed -i 's/dev/'$TAG'/g' kube/overlays/dev/kustomization.yaml

helm upgrade --install --debug --namespace $NAMESPACE \
  --set airbyte-bootloader.image.tag=$TAG \
  --set webapp.image.tag=$TAG \
  --set worker.image.tag=$TAG \
  --set server.image.tag=$TAG \
  ab charts/airbyte
kubectl apply -f tools/bin/gke-kube-helm-acceptance-test/postgres-source.yaml --namespace=$NAMESPACE
kubectl apply -f tools/bin/gke-kube-helm-acceptance-test/postgres-destination.yaml --namespace=$NAMESPACE

sleep 180s

function findAndDeleteTag () {
  if [ $(curl --fail -LI --request GET 'https://hub.docker.com/v2/repositories/airbyte/'$1'/tags/'$TAG'/' --header 'Authorization: JWT '$2'' -o /dev/null -w '%{http_code}\n' -s) == "200" ];
  then
    echo "FOUND TAG" $TAG "in repository" $1 ", DELETING IT"
    curl --request DELETE 'https://hub.docker.com/v2/repositories/airbyte/'$1'/tags/'$TAG'/' --header 'Authorization: JWT '$2'';
  else
    echo "NOT FOUND TAG" $TAG "in repository" $1;
  fi
}

function cleanUpImages () {
  TOKEN=$(curl --request POST 'https://hub.docker.com/v2/users/login/' --header 'Content-Type: application/json' --data-raw '{"username":"'$DOCKER_HUB_USERNAME'","password":"'$DOCKER_HUB_PASSWORD'"}' | jq '.token')
  TOKEN="${TOKEN%\"}"
  TOKEN="${TOKEN#\"}"

  findAndDeleteTag "init" $TOKEN
  findAndDeleteTag "db" $TOKEN
  findAndDeleteTag "seed" $TOKEN
  findAndDeleteTag "scheduler" $TOKEN
  findAndDeleteTag "server" $TOKEN
  findAndDeleteTag "webapp" $TOKEN
  findAndDeleteTag "migration" $TOKEN
  findAndDeleteTag "cron" $TOKEN
}

trap "kubectl delete namespaces $NAMESPACE --grace-period=0 --force" EXIT

kubectl port-forward svc/airbyte-server-svc 8001:8001 --namespace=$NAMESPACE &

kubectl port-forward svc/postgres-source-svc 2000:5432 --namespace=$NAMESPACE &

kubectl port-forward svc/postgres-destination-svc 4000:5432 --namespace=$NAMESPACE &

sleep 10s

echo "Running e2e tests via gradle..."
KUBE=true IS_GKE=true SUB_BUILD=PLATFORM USE_EXTERNAL_DEPLOYMENT=true ./gradlew :airbyte-tests:acceptanceTests --scan -i

@@ -1,47 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: postgres-destination-config
  labels:
    app: postgres-destination
data:
  POSTGRES_DB: postgresdb
  POSTGRES_USER: postgresadmin
  POSTGRES_PASSWORD: admin123
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: postgres-destination
spec:
  replicas: 1
  selector:
    matchLabels:
      app: postgres-destination
  template:
    metadata:
      labels:
        app: postgres-destination
    spec:
      containers:
        - name: postgres-destination
          image: postgres:13-alpine
          imagePullPolicy: "IfNotPresent"
          ports:
            - containerPort: 5432
          envFrom:
            - configMapRef:
                name: postgres-destination-config
---
apiVersion: v1
kind: Service
metadata:
  name: postgres-destination-svc
  labels:
    app: postgres-destination
spec:
  type: ClusterIP
  ports:
    - port: 5432
  selector:
    app: postgres-destination

@@ -1,47 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: postgres-source-config
  labels:
    app: postgres-source
data:
  POSTGRES_DB: postgresdb
  POSTGRES_USER: postgresadmin
  POSTGRES_PASSWORD: admin123
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: postgres-source
spec:
  replicas: 1
  selector:
    matchLabels:
      app: postgres-source
  template:
    metadata:
      labels:
        app: postgres-source
    spec:
      containers:
        - name: postgres-source
          image: debezium/postgres:13-alpine
          imagePullPolicy: "IfNotPresent"
          ports:
            - containerPort: 5432
          envFrom:
            - configMapRef:
                name: postgres-source-config
---
apiVersion: v1
kind: Service
metadata:
  name: postgres-source-svc
  labels:
    app: postgres-source
spec:
  type: ClusterIP
  ports:
    - port: 5432
  selector:
    app: postgres-source

@@ -1,33 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

IMG_NAME=airbyte/build-project:dev
TMP_VOLUME_NAME=gradlew-tmp

main() {
  assert_root

  if [[ $# -gt 0 ]]; then
    OPTS=
    CMD="./gradlew $@"
  else
    OPTS=-it
    CMD=/bin/bash
  fi
  local args=${@:-/bin/bash}

  docker build -f Dockerfile.build . -t $IMG_NAME --target build-project

  docker run $OPTS --rm \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v /tmp:/tmp \
    -v $(pwd):/code \
    -p 5005:5005 \
    -e GRADLE_OPTS="-Dorg.gradle.daemon=false" \
    $IMG_NAME $CMD
}

main "$@"

tools/bin/load_test/.gitignore
@@ -1 +0,0 @@
cleanup/

@@ -1,103 +0,0 @@
# Load Testing Airbyte

## Overview
To perform a stress test of an Airbyte deployment, the `load_test_airbyte.sh` shell script is useful to quickly and easily create many connections.
This script creates a new E2E Test Source, E2E Test Destination, and a configurable number of connections in the indicated workspace.

## Instructions
From your top-level `/airbyte` directory, run the following to perform a load test:

```
./tools/bin/load_test/load_test_airbyte.sh -W <workspace id> -C <num_connections>
```

By default, the script assumes that the Airbyte instance's server is accessible at `localhost:8001`. This is the default server location when
deploying Airbyte with `docker compose up`.

Additionally, the E2E Test Source created by the script will take 10 minutes to complete a sync by default.

These defaults can be overridden with flags. All available flags are described as follows:

```
-h
  Display help

-W <workspace id>
  Specify the workspace ID where new connectors and connections should be created.
  Required.

-H <hostname>
  Specify the Airbyte API server hostname that the script should call to create new connectors and connections.
  Defaults to 'localhost'.

-P <port>
  Specify the port for the Airbyte server.
  Defaults to '8001'.

-X <header>
  Specify the X-Endpoint-API-UserInfo header value for API authentication.
  For Google Cloud Endpoint authentication only.

-C <count>
  Specify the number of connections that should be created by the script.
  Defaults to '1'.

-T <minutes>
  Specify the time in minutes that each connection should sync for.
  Defaults to '10'.
```
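For illustration, a run that overrides several of these defaults might look like the following (the hostname and workspace ID here are placeholders, not real values):

```
./tools/bin/load_test/load_test_airbyte.sh \
  -W 00000000-0000-0000-0000-000000000000 \
  -H airbyte.internal.example.com \
  -P 8001 \
  -C 50 \
  -T 5
```

This would create 50 connections against the given server, each syncing for 5 minutes.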
### Load Testing on Kubernetes
|
||||
|
||||
To load test a deployment of Airbyte running on Kubernetes, you will need to set up port-forwarding to the `airbyte-server` deployment.
|
||||
This can be accomplished with the following command:
|
||||
|
||||
```
|
||||
kubectl port-forward deployment/airbyte-server -n ab 8001:8001
|
||||
```
|
||||
|
||||
This will make the Airbyte server available at `localhost:8001`
|
||||
|
||||
|
||||
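
Before kicking off a run, you can sanity check the forwarded connection. A minimal sketch, assuming the standard Airbyte server health endpoint is enabled:

```
curl -s localhost:8001/api/v1/health
```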

### Authentication

If your deployment of Airbyte happens to use Google Cloud Endpoints for authentication, you can use the `-X` option to pass
an `X-Endpoint-API-UserInfo` header value.
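
Google Cloud Endpoints proxies typically expect this header to be a base64-encoded JSON object of user-info claims. A minimal sketch of passing one to the script (the claim fields shown are illustrative assumptions, not a documented contract):

```
# illustrative claims only; match whatever your Endpoints config expects
USER_INFO=$(echo -n '{"user_id":"load-test","email":"load-test@example.com"}' | base64)
./tools/bin/load_test/load_test_airbyte.sh -W <workspace id> -X "$USER_INFO"
```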

## Cleanup
The `load_test_airbyte.sh` script writes created IDs to files in the script's `/cleanup` directory. To delete resources that were created by the load
test script, you can run `cleanup_load_test.sh`, which reads IDs from the `/cleanup` directory and calls the Airbyte API to delete them.
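
Given how `load_test_utils.sh` names these files (one file per resource type, keyed by workspace ID), the cleanup directory after a run looks like this:

```
$ ls tools/bin/load_test/cleanup
<workspace_id>_connection_ids.txt
<workspace_id>_destination_ids.txt
<workspace_id>_source_ids.txt
```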

### Cleanup Instructions
To run the cleanup script, from the top-level `airbyte` directory, run the following:

```
./tools/bin/load_test/cleanup_load_test.sh -W <workspace_id>
```

All available cleanup script flags are described as follows:

```
-h
  Display help

-W <workspace id>
  Specify the workspace ID from which connectors and connections should be deleted.
  Required.

-H <hostname>
  Specify the Airbyte API server hostname that the script should call to delete connectors and connections.
  Defaults to 'localhost'.

-P <port>
  Specify the port for the Airbyte server.
  Defaults to '8001'.

-X <header>
  Specify the X-Endpoint-API-UserInfo header value for API authentication.
  For Google Cloud Endpoint authentication only.
```
@@ -1,158 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset

<<comment
This script cleans up an earlier load test. It reads from the cleanup files that the load test script writes to
in order to determine which IDs to delete.
comment

echo "Sourcing environment variables from .env"
source .env

cd "$(dirname "$0")"
source load_test_utils.sh

function showhelp {
  echo -e """Usage $(dirname $0)/cleanup_load_test [OPTIONS]

  cleanup_load_test deletes resources that were created from an earlier load test.

  Available OPTIONs:

  ${CLEAR}-h
    ${GREEN}Display help

  ${CLEAR}-W <workspace id>
    ${GREEN}Specify the workspace ID from which connectors and connections should be deleted.
    Required.

  ${CLEAR}-H <hostname>
    ${GREEN}Specify the Airbyte API server hostname that the script should call to delete connectors and connections.
    Defaults to 'localhost'.

  ${CLEAR}-P <port>
    ${GREEN}Specify the port for the Airbyte server.
    Defaults to '8001'.

  ${CLEAR}-X <header>
    ${GREEN}Specify the X-Endpoint-API-UserInfo header value for API authentication.
    For Google Cloud Endpoint authentication only.
  """ && exit 1
}

hostname=localhost
api_port=8001
x_endpoint_header=""

while getopts "hW:H:P:X:kN:" options ; do
  case "${options}" in
    h)
      showhelp
      ;;
    W)
      workspace_id="${OPTARG}"
      ;;
    H)
      hostname="${OPTARG}"
      ;;
    P)
      api_port="${OPTARG}"
      ;;
    X)
      x_endpoint_header="${OPTARG}"
      ;;
    *)
      showhelp
      ;;
  esac
done

function setup {
  if test -z "$workspace_id"; then
    echo "error: must set a workspace id with -W"
    exit 1
  fi

  echo "set workspace_id to ${workspace_id}"
  echo "set hostname to ${hostname}"
  echo "set api_port to ${api_port}"

  setCleanupFilesForWorkspace $workspace_id
}

function deleteConnections {
  while test -s $CONNECTION_CLEANUP_FILE
  do
    connectionId=$(readFirstLineFromFile $CONNECTION_CLEANUP_FILE)
    callApi "connections/delete" "{\"connectionId\":\"$connectionId\"}"
    echo "deleted connection with ID $connectionId"

    # deletion succeeded, so remove the ID from the cleanup file
    removeFirstLineFromFile $CONNECTION_CLEANUP_FILE
  done

  # if the file exists and is empty, remove it
  if test -e $CONNECTION_CLEANUP_FILE && ! test -s $CONNECTION_CLEANUP_FILE
  then
    rm $CONNECTION_CLEANUP_FILE
    echo "removed cleanup file $CONNECTION_CLEANUP_FILE"
  fi
}

function deleteSources {
  while test -s $SOURCE_CLEANUP_FILE
  do
    sourceId=$(readFirstLineFromFile $SOURCE_CLEANUP_FILE)
    callApi "sources/delete" "{\"sourceId\":\"$sourceId\"}"
    echo "deleted source with ID $sourceId"

    # deletion succeeded, so remove the ID from the cleanup file
    removeFirstLineFromFile $SOURCE_CLEANUP_FILE
  done

  # if the file exists and is empty, remove it
  if test -e $SOURCE_CLEANUP_FILE && ! test -s $SOURCE_CLEANUP_FILE
  then
    rm $SOURCE_CLEANUP_FILE
    echo "removed cleanup file $SOURCE_CLEANUP_FILE"
  fi
}

function deleteDestinations {
  while test -s $DESTINATION_CLEANUP_FILE
  do
    destinationId=$(readFirstLineFromFile $DESTINATION_CLEANUP_FILE)
    callApi "destinations/delete" "{\"destinationId\":\"$destinationId\"}"
    echo "deleted destination with ID $destinationId"

    # deletion succeeded, so remove the ID from the cleanup file
    removeFirstLineFromFile $DESTINATION_CLEANUP_FILE
  done

  # if the file exists and is empty, remove it
  if test -e $DESTINATION_CLEANUP_FILE && ! test -s $DESTINATION_CLEANUP_FILE
  then
    rm $DESTINATION_CLEANUP_FILE
    echo "removed cleanup file $DESTINATION_CLEANUP_FILE"
  fi
}

############
## MAIN ##
############

if [[ $# -eq 0 ]] ; then
  showhelp
  exit 0
fi

setup

deleteConnections

deleteSources

deleteDestinations

echo "Finished!"
@@ -1,47 +0,0 @@
{
  "sourceId": "replace_source_id",
  "destinationId": "replace_destination_id",
  "syncCatalog": {
    "streams": [
      {
        "config": {
          "syncMode": "full_refresh",
          "cursorField": [],
          "destinationSyncMode": "overwrite",
          "primaryKey": [],
          "aliasName": "data_stream",
          "selected": true
        },
        "stream": {
          "name": "data_stream",
          "jsonSchema": {
            "type": "object",
            "properties": {
              "column1": {
                "type": "string"
              }
            }
          },
          "supportedSyncModes": [
            "full_refresh"
          ],
          "defaultCursorField": [],
          "sourceDefinedPrimaryKey": []
        }
      }
    ]
  },
  "prefix": "",
  "namespaceDefinition": "destination",
  "namespaceFormat": "${SOURCE_NAMESPACE}",
  "scheduleType": "basic",
  "scheduleData": {
    "basicSchedule": {
      "units": 24,
      "timeUnit": "hours"
    }
  },
  "name": "replace_connection_name",
  "operations": [],
  "status": "active"
}
@@ -1,8 +0,0 @@
{
  "name": "End-to-End Testing (/dev/null)",
  "destinationDefinitionId": "replace_destination_definition_id",
  "workspaceId": "replace_workspace_id",
  "connectionConfiguration": {
    "type": "SILENT"
  }
}
@@ -1,243 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset

<<comment
This script performs a load test against an existing Airbyte instance by calling the instance's API to create and sync new connections.
It is intended to work with any Airbyte instance (local or remote, docker or kube, OSS or Cloud). It authenticates using a special auth header
that requires port-forwarding. It stores connector and connection IDs that it creates in a local file. The script can be run in cleanup mode,
which means the script will delete every connector and connection ID that it created and stored in that file.
comment

echo "Sourcing environment variables from .env"
source .env

cd "$(dirname "$0")"
source load_test_utils.sh

function showhelp {
  echo -e """Usage $(dirname $0)/load_test_airbyte [OPTIONS]

  load_test_airbyte performs a load-test against an existing Airbyte instance.

  Available OPTIONs:

  ${CLEAR}-h
    ${GREEN}Display help

  ${CLEAR}-W <workspace id>
    ${GREEN}Specify the workspace ID where new connectors and connections should be created.
    Required.

  ${CLEAR}-H <hostname>
    ${GREEN}Specify the Airbyte API server hostname that the script should call to create new connectors and connections.
    Defaults to 'localhost'.

  ${CLEAR}-P <port>
    ${GREEN}Specify the port for the Airbyte server.
    Defaults to '8001'.

  ${CLEAR}-X <header>
    ${GREEN}Specify the X-Endpoint-API-UserInfo header value for API authentication.
    For Google Cloud Endpoint authentication only.

  ${CLEAR}-C <count>
    ${GREEN}Specify the number of connections that should be created by the script.
    Defaults to '1'.

  ${CLEAR}-T <minutes>
    ${GREEN}Specify the time in minutes that each connection should sync for.
    Defaults to '10'.
  """
}

hostname=localhost
api_port=8001
x_endpoint_header=
num_connections=1
sync_minutes=10

while getopts "hW:H:P:X:C:T:kN:-:" options ; do
  case "${options}" in
    -)
      case "${OPTARG}" in
        debug)
          PS4="$GREEN"'${BASH_SOURCE}:${LINENO}:$CLEAR '
          set -o xtrace # xtrace uses the PS4 string and shows all lines as executed
          ;;
        *)
          showhelp
          exit 0
          ;;
      esac;;
    h)
      showhelp
      ;;
    W)
      workspace_id="${OPTARG}"
      ;;
    H)
      hostname="${OPTARG}"
      ;;
    P)
      api_port="${OPTARG}"
      ;;
    X)
      x_endpoint_header="${OPTARG}"
      ;;
    C)
      num_connections="${OPTARG}"
      ;;
    T)
      sync_minutes="${OPTARG}"
      ;;
    *)
      showhelp
      exit 1
      ;;
  esac
done

function setup {
  set -e
  if test -z "$workspace_id"; then
    echo "error: must set a workspace id with -W"
    exit 1
  fi

  echo "set workspace_id to ${workspace_id}"
  echo "set hostname to ${hostname}"
  echo "set api_port to ${api_port}"
  echo "set x_endpoint_header to ${x_endpoint_header}"
  echo "set num_connections to ${num_connections}"
  echo "set sync_minutes to ${sync_minutes}"

  setCleanupFilesForWorkspace $workspace_id

  mkdir -p cleanup

  touch $CONNECTION_CLEANUP_FILE
  touch $SOURCE_CLEANUP_FILE
  touch $DESTINATION_CLEANUP_FILE
}

function getE2ETestSourceDefinitionId {
  # call source_definitions/list and search the response for the E2E Test dockerRepository to get the ID.
  # local uses `source-e2e-test`, while cloud uses `source-e2e-test-cloud`
  sourceDefinitionId=$(
    callApi "source_definitions/list" |
      jq -r '.sourceDefinitions[] |
        select(
          (.dockerRepository == "airbyte/source-e2e-test") or
          (.dockerRepository == "airbyte/source-e2e-test-cloud")
        ) |
        .sourceDefinitionId'
  )
  export sourceDefinitionId
}

function getE2ETestDestinationDefinition {
  # call destination_definitions/list and search the response for the E2E Test dockerRepository to get the ID.
  # local uses `destination-dev-null`, while cloud uses `destination-e2e-test-cloud`
  destinationDefinitionId=$(
    callApi "destination_definitions/list" |
      jq -r '.destinationDefinitions[] |
        select(
          (.dockerRepository == "airbyte/destination-e2e-test") or
          (.dockerRepository == "airbyte/destination-dev-null")
        ) |
        .destinationDefinitionId'
  )
  export destinationDefinitionId
}

function createSource {
  body=$(
    sed "
      s/replace_source_read_secs/$(( 60*sync_minutes ))/g ;
      s/replace_source_definition_id/$sourceDefinitionId/g ;
      s/replace_workspace_id/$workspace_id/g" source_spec.json |
      tr -d '\n' |
      tr -d ' '
  )

  sourceId=$(
    callApi "sources/create" $body |
      jq -r '.sourceId'
  )
  export sourceId
  echo $sourceId >> $SOURCE_CLEANUP_FILE
}

function createDestination {
  body=$(
    sed "
      s/replace_destination_definition_id/$destinationDefinitionId/g ;
      s/replace_workspace_id/$workspace_id/g" destination_spec.json |
      tr -d '\n' |
      tr -d ' '
  )
  destinationId=$(
    callApi "destinations/create" $body |
      jq -r '.destinationId'
  )
  export destinationId
  echo $destinationId >> $DESTINATION_CLEANUP_FILE
}

function createMultipleConnections {
  for i in $(seq 1 $num_connections)
  do
    echo "Creating connection number $i (out of $num_connections)..."
    createConnection $i
  done
  echo "Finished creating $num_connections connections."
}

# Call the API to create a connection. Replace strings in connection_spec.json with real IDs.
# The $1 arg is the connection count, which is used in the name of the created connection.
# The connection spec might change and this function could break in the future. If that happens, we need
# to update the connection spec.
function createConnection {
  body=$(
    sed "
      s/replace_source_id/$sourceId/g ;
      s/replace_destination_id/$destinationId/g ;
      s/replace_connection_name/load_test_connection_$1/g" connection_spec.json |
      tr -d '\n' |
      tr -d ' '
  )

  connectionId=$(
    callApi "web_backend/connections/create" $body |
      jq -r '.connectionId'
  )
  echo $connectionId >> $CONNECTION_CLEANUP_FILE
}

############
## MAIN ##
############

if [[ $# -eq 0 ]] ; then
  showhelp
  exit 0
fi

setup

getE2ETestSourceDefinitionId
echo "Retrieved E2E Test Source Definition ID: ${sourceDefinitionId}"

getE2ETestDestinationDefinition
echo "Retrieved E2E Test Destination Definition ID: ${destinationDefinitionId}"

createSource
echo "Created Source with ID: ${sourceId}"

createDestination
echo "Created Destination with ID: ${destinationId}"

createMultipleConnections

echo "Finished!"
@@ -1,46 +0,0 @@
<<comment
This file contains common util functions for use in load testing scripts.
comment

echo "Loading utils from $0"

GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
CLEAR='\033[0m'

function callApi {
  # call the API with the endpoint passed as arg $1, and (optional) payload passed as arg $2
  # example of calling the API with a payload:
  # callApi "destinations/list" "{\"workspaceId\":\"${workspace}\"}"
  endpoint=$1
  payload=${2:-""}

  curl --silent \
    --request POST \
    --fail-with-body \
    --show-error \
    --header 'Content-Type: application/json' \
    --header "X-Endpoint-API-UserInfo: ${x_endpoint_header}" \
    --user "${BASIC_AUTH_USERNAME}:${BASIC_AUTH_PASSWORD}" \
    --data "${payload}" \
    "${hostname}:${api_port}/api/v1/${endpoint}"
}

function readFirstLineFromFile {
  echo "$(head -1 $1)"
}

function removeFirstLineFromFile {
  # note: `sed -i ''` is the BSD/macOS form of in-place editing
  echo "$(sed -i '' -e '1d' $1)"
}

function setCleanupFilesForWorkspace {
  export CONNECTION_CLEANUP_FILE="cleanup/${1}_connection_ids.txt"
  export DESTINATION_CLEANUP_FILE="cleanup/${1}_destination_ids.txt"
  export SOURCE_CLEANUP_FILE="cleanup/${1}_source_ids.txt"

  echo "set connection cleanup file to $CONNECTION_CLEANUP_FILE"
  echo "set destination cleanup file to $DESTINATION_CLEANUP_FILE"
  echo "set source cleanup file to $SOURCE_CLEANUP_FILE"
}
@@ -1,17 +0,0 @@
{
  "name": "End-to-End Testing (Mock API)",
  "sourceDefinitionId": "replace_source_definition_id",
  "workspaceId": "replace_workspace_id",
  "connectionConfiguration": {
    "type": "CONTINUOUS_FEED",
    "mock_catalog": {
      "type": "SINGLE_STREAM",
      "stream_name": "data_stream",
      "stream_schema": "{ \"type\": \"object\", \"properties\": { \"column1\": { \"type\": \"string\" } } }",
      "stream_duplication": 1
    },
    "seed": 0,
    "max_messages": replace_source_read_secs,
    "message_interval_ms": 1000
  }
}
@@ -1,30 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

[[ -z "$GIT_REVISION" ]] && echo "Couldn't get the git revision..." && exit 1

echo "*IMPORTANT: Only merge if the platform build is passing!*"
echo
# Do not change the following line - the Create Release Github action relies on it
echo "Changelog:"
echo
PAGER=cat git log v${PREV_VERSION}..${GIT_REVISION} --oneline --decorate=no
# The following empty 'echo' is also important for marking the end of the changelog for the Create Release Github action
echo
echo "### Instructions"
echo "1) *SQUASH MERGE* this PR - this is necessary to ensure the automated Create Release action is triggered."
echo "2) Double check that the [Create Release](https://github.com/airbytehq/airbyte/actions/workflows/create-release.yml) action was triggered and ran successfully on the commit to master \
(this should only take a few seconds)."
echo "3) If the Create Release action failed due to a transient issue, retry the action. If it failed due to \
a non-transient issue, create a release manually by following the instructions below."
echo
echo "<details>"
echo "<summary>Create the GitHub release manually</summary>"
echo
echo "1. Pull the most recent version of master"
echo "2. Run ./tools/bin/tag_version.sh"
echo "3. Create a GitHub release with the changelog"
echo "</details>"
@@ -1,60 +0,0 @@
#!/bin/bash
set -e

# todo (cgardens) - remove this file. used in platform build script only.
# List of directories without the "airbyte-" prefix.
projectDir=(
  "bootloader"
  "container-orchestrator"
  "cron"
  "connector-builder-server"
  "metrics/reporter"
  "proxy"
  "server"
  "temporal"
  "webapp"
  "workers"
)

# Set default values for required vars. If set in env, values will be taken from there.
# Primarily for testing.
JDK_VERSION=${JDK_VERSION:-17.0.4}
ALPINE_IMAGE=${ALPINE_IMAGE:-alpine:3.14}
POSTGRES_IMAGE=${POSTGRES_IMAGE:-postgres:13-alpine}

# Iterate over all directories in the list to build them one by one.
# metrics/reporter is an exception due to its mismatched artifact naming.
for workdir in "${projectDir[@]}"
do
  case $workdir in
    "metrics/reporter")
      artifactName="metrics-reporter"
      ;;

    "config/init")
      artifactName="init"
      ;;

    "workers")
      artifactName="worker"
      ;;

    *)
      artifactName=${workdir%/*}
      ;;
  esac

  echo "Publishing airbyte/$artifactName..."
  sleep 1

  docker buildx create --use --name $artifactName && \
  docker buildx build -t "airbyte/$artifactName:$VERSION" \
    --platform linux/amd64,linux/arm64 \
    --build-arg VERSION=$VERSION \
    --build-arg ALPINE_IMAGE=$ALPINE_IMAGE \
    --build-arg POSTGRES_IMAGE=$POSTGRES_IMAGE \
    --build-arg JDK_VERSION=$JDK_VERSION \
    --push \
    airbyte-$workdir/build/docker
  docker buildx rm $artifactName
done
@@ -1,42 +0,0 @@
#!/usr/bin/env bash

retries=3

function pull_dockerhub_image_with_retries() {
  local image=$1
  local retries=$2

  for (( i=1; i<=$retries; i++ )); do
    docker pull $image
    # NOTE: this does not discriminate on the failure, any failure will retry
    test "$?" -eq 0 && return || echo "Docker pull failed, sleeping for 5 seconds before retrying ($i/$retries)" && sleep 5
  done
}

function main() {
  while getopts ':i:r:' OPTION; do
    case "$OPTION" in
      i)
        image="$OPTARG"
        ;;
      r)
        if [[ "$OPTARG" =~ ^(-)?[0-9]+$ ]]; then
          retries="$OPTARG"
        else
          echo "retries (-r) must be a number" && exit 1
        fi
        ;;
      ?)
        echo "script usage: $(basename "$0") [-i image] [-r retries]" >&2
        exit 1
        ;;
    esac
  done
  shift "$(($OPTIND -1))"

  pull_dockerhub_image_with_retries $image $retries
}

main "$@"
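
A sketch of how this helper would be invoked (the script's path and name are assumptions, since only its body appears in the diff):

```
./tools/bin/pull_dockerhub_image.sh -i postgres:13-alpine -r 5
```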
@@ -1,21 +0,0 @@
#!/usr/bin/env bash

# This script allows building all of the connector images used in the acceptance tests.
# When one of these images is missing, it often manifests as a connector sometimes getting stuck during acceptance tests for no discernible reason.
# We will need to update this file if the versions used for source-e2e-test and destination-e2e-test change before we start publishing ARM images.

set -e

. tools/lib/lib.sh

assert_root

unset SUB_BUILD

LATEST_POSTGRES_SOURCE=$(grep -A0 'dockerImageTag' ./airbyte-integrations/connectors/source-postgres/metadata.yaml | cut -d ' ' -f 4)
LATEST_POSTGRES_DESTINATION=$(grep -A0 'dockerImageTag' ./airbyte-integrations/connectors/destination-postgres/metadata.yaml | cut -d ' ' -f 4)

git checkout master && ./gradlew clean :airbyte-integrations:connectors:source-postgres:build -x test && docker tag airbyte/source-postgres:dev airbyte/source-postgres:"$LATEST_POSTGRES_SOURCE"
git checkout master && ./gradlew clean :airbyte-integrations:connectors:destination-postgres:build -x test && docker tag airbyte/destination-postgres:dev airbyte/destination-postgres:"$LATEST_POSTGRES_DESTINATION"
git checkout 464c485b94c9f023b4c5929610f60a6b53bf657b && ./gradlew clean :airbyte-integrations:connectors:source-e2e-test:build -x test && docker tag airbyte/source-e2e-test:dev airbyte/source-e2e-test:0.1.1
git checkout 464c485b94c9f023b4c5929610f60a6b53bf657b && ./gradlew clean :airbyte-integrations:connectors:destination-e2e-test:build -x test && docker tag airbyte/destination-e2e-test:dev airbyte/destination-e2e-test:0.1.1
@@ -1,39 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

if [[ -z "${CLOUDREPO_USER}" ]]; then
  echo 'CLOUDREPO_USER env var not set. Please retrieve the user email from the CloudRepo lastpass secret and run export CLOUDREPO_USER=<user_from_secret>.';
  exit 1;
fi

if [[ -z "${CLOUDREPO_PASSWORD}" ]]; then
  echo 'CLOUDREPO_PASSWORD env var not set. Please retrieve the password from the CloudRepo lastpass secret and run export CLOUDREPO_PASSWORD=<password_from_secret>.';
  exit 1;
fi

if [[ -z "${DOCKER_HUB_USERNAME}" ]]; then
  echo 'DOCKER_HUB_USERNAME not set.';
  exit 1;
fi

if [[ -z "${DOCKER_HUB_PASSWORD}" ]]; then
  echo 'DOCKER_HUB_PASSWORD for docker user not set.';
  exit 1;
fi

docker login -u "${DOCKER_HUB_USERNAME}" -p "${DOCKER_HUB_PASSWORD}"

source ./tools/bin/bump_version.sh

echo "Building and publishing PLATFORM version $NEW_VERSION for git revision $GIT_REVISION..."
VERSION=$NEW_VERSION SUB_BUILD=PLATFORM ./gradlew clean build --scan
VERSION=$NEW_VERSION SUB_BUILD=PLATFORM ./gradlew publish --scan

# The qemu container should be running before the build starts;
# it provides the binaries needed to build images for other CPU architectures.
docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
VERSION=$NEW_VERSION ./tools/bin/publish_docker.sh
echo "Completed building and publishing PLATFORM..."
@@ -1,23 +0,0 @@
#!/usr/bin/env bash

set -e

. tools/lib/lib.sh

BRANCH=$(git rev-parse --abbrev-ref HEAD)
if [[ "$BRANCH" != "master" ]]; then
  echo 'This script should be run from master after merging the changes from release_version.sh!';
  exit 1;
fi

[[ ! -z "$(git status --porcelain)" ]] && echo "Cannot tag revision if all changes aren't checked in..." && exit 1

# make sure your master branch is up to date
git pull --rebase

VERSION=$(cat .env | grep -w VERSION | cut -d= -f 2)
[[ -z "$VERSION" ]] && echo "Couldn't find version in env file..." && exit 1

TAG_NAME="v$VERSION"
git tag -a "$TAG_NAME" -m "Version $VERSION"
git push origin "$TAG_NAME"
@@ -1,30 +0,0 @@
#! /bin/bash

set -ex

SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

if ! command -v timeout &> /dev/null
then
  echo "timeout could not be found, installing it"
  brew install coreutils
fi

NEW_HASH="$( git rev-parse HEAD )"

git checkout master
git pull --no-rebase

SUB_BUILD=PLATFORM "$SCRIPT_DIR"/../../gradlew -p "$SCRIPT_DIR"/../.. generate-docker

cd "$SCRIPT_DIR"/../..
VERSION=dev docker compose -f "$SCRIPT_DIR"/../../docker-compose.yaml up &

sleep 75
VERSION=dev docker compose down

git stash
git checkout $NEW_HASH
SUB_BUILD=PLATFORM "$SCRIPT_DIR"/../../gradlew -p "$SCRIPT_DIR"/../.. generate-docker

VERSION=dev docker compose -f "$SCRIPT_DIR"/../../docker-compose.yaml up
@@ -1,11 +0,0 @@
FROM mcr.microsoft.com/mssql/server:2019-latest

COPY mssql.key /etc/ssl/private/mssql.key
COPY mssql.pem /etc/ssl/certs/mssql.pem
COPY mssql.conf /var/opt/mssql/mssql.conf

EXPOSE 1433

USER root
RUN chmod 755 /etc/ssl/private
USER mssql
@@ -1,8 +0,0 @@
FROM postgres:11-alpine

COPY server.key /var/lib/postgresql/server.key
COPY server.crt /var/lib/postgresql/server.crt

# update the privileges on the .key, no need to touch the .crt
RUN chmod 600 /var/lib/postgresql/server.key
RUN chown postgres:postgres /var/lib/postgresql/server.key
@@ -1,7 +0,0 @@
#!/bin/bash

set -euo pipefail

# create a certificate signing request and a passphrase-protected private key for localhost
openssl req -new -text -passout pass:abcd -subj /CN=localhost -out server.req -keyout privkey.pem
# strip the passphrase from the private key
openssl rsa -in privkey.pem -passin pass:abcd -out server.key
# self-sign the request to produce the server certificate
openssl req -x509 -in server.req -text -key server.key -out server.crt
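
Taken together with the `postgres:11-alpine` Dockerfile above, a plausible way to exercise these certs locally (the image tag is an assumption; the `ssl_*` flags are standard PostgreSQL settings passed through the image's entrypoint):

```
docker build -t postgres-ssl-test:dev .
docker run -d -e POSTGRES_PASSWORD=password -p 5432:5432 postgres-ssl-test:dev \
  -c ssl=on \
  -c ssl_cert_file=/var/lib/postgresql/server.crt \
  -c ssl_key_file=/var/lib/postgresql/server.key
```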
@@ -1,7 +0,0 @@
#!/bin/bash

set -euo pipefail

# generate a self-signed certificate and unencrypted private key for the test SQL Server
openssl req -x509 -nodes -newkey rsa:2048 -subj '/CN=sqltest.airbyte.com' -keyout mssql.key -out mssql.pem
chmod 440 mssql.pem
chmod 440 mssql.key
@@ -1,5 +0,0 @@
[network]
tlscert = /etc/ssl/certs/mssql.pem
tlskey = /etc/ssl/private/mssql.key
tlsprotocols = 1.2
forceencryption = 1
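
With the generated key, cert, and this `mssql.conf` in the build context, the SQL Server Dockerfile above could be built and started roughly like so (the tag and SA password are illustrative; `ACCEPT_EULA` and `SA_PASSWORD` are the standard env vars for the `mcr.microsoft.com/mssql/server` image):

```
docker build -t mssql-ssl-test:dev .
docker run -d -p 1433:1433 -e ACCEPT_EULA=Y -e SA_PASSWORD='A_Str0ng_P4ssword' mssql-ssl-test:dev
```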
@@ -1,28 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC5bUoMylrFiaAC
1sYCQlMqqX/yvGwZAIoWq5mo4Kc1PB1JDS8+vB7s8SMsJSMldB1udSEsv0P4k1sV
pymRZp0Y2IAepcJ1qpFEUJGH5E1A6JIDW3EyvddO5u4BoA+ZDUFvlRyozZumSbP8
hvBtFv8OR/guB1938XB185CM5EFZydKeTkKiPhvTA2fbqmmi96DAxDMI2ggBBKDI
UVTUWJqJMOcj/sLGY87jPiltgQGknSxKZoJYeqFeyhYPrzvUrzLJJQPBJ9L7HwoO
In4jxIN8sDhl/4yXSmX8bGRRXOLhrspGp0KRIQXUeClZLswz+YT07pPl6oMXQnD/
dl/WcSuZAgMBAAECggEABbzoCbVJUcuMdAoJXpCG2k8ccnp6LdviagktXBh3lCIk
Fdqel6ZinppnqDoN+F67emuNd0ED7XFB5E2j76fpPJeWf1xJxDJfBGop1rat3VBV
FF2EBznwq7RhsRMu6GGMoNNQa7jRFDg7pZjXX8jSY7K+b04zGhcSj9PVqUZ27zxO
AwKsRVDwbJ3SkQKhUTMvzgJiaIinvE8qMjFby+ar5R7HrQOXKKKwZDuTlLQGinMF
Nskb6e9uI5T4KVnp7JDGPw/rojBYEJnCnUv3nL51UYjRDl1lX+KPEf8a3nK37Nk5
Xalj7IKAbyw6vbqr8qPcRWsOloC3KF/O5v+5nxSAMQKBgQDzahPceQxeZHpeWpC3
9nUIHBt9zqkg9eszNZ9MyVjUbBraBRwYX1sBjZz+l5ohMA5VqyPlv31LFp9B6V/A
GI6b1qFWQ0RnzNT7DT0xSq5Ble6DKmksJjcONEVy6oROhj/GGQuufx1+sstARxk8
H3A6uFtL53Hjgf8vGQEeFjIqcwKBgQDDA6sW41xwfGPGfu355oN6s4fVrfA8wocv
zNGXSMtJbAXLAC1fpOhbWzAWGRt8KqQWhTcpmBAClotNi4ollWlzHit6u34l4tL+
dba7Vo2qbEs6IggE4zLHznEO/yKBOXVNB2PV+fnQo90kZ76AxEbGywz4BJ5ImO2C
rpV+flZSwwKBgQCbCyY7eJ74QOfw0Z78jm9dCwo3yDrSU9HMfItLTbTXGUTBOh/7
JkHBa4JkaAw0t3dp+eiTnrUf7vjh8tSadwnfGYcKey5HL6E5h+VCUF9OR0H1Kj5z
cKQA2CqkV9yOZ9SXSby3GSCgYyIzfxYDxcKmpGcCohlY4KS6SyL7Fwg9IQKBgQCk
ktTw1ODvANqG6hlU+ubcRuQMPOTvsc66VSRPgpwkEyh0X2rrO1Tnu/XBwGCEkcu2
QagCzxQ7yuY2g9sKyqOaBcz1n4Le4CPloFucj3ewagG2Rn/z9/Sj0CFzYXayDVZj
sifbrUDYhWEb1v1a18lO/I6uQ998LqrJzSHWBTI+VwKBgBek9td3SMUCjQ49toer
WKb69wqHIwgniOZmemX7EStfI7oQUtUZi0ZRvQd220oJI/jmarNlmi4Myuj2E8bI
pn3TRi3tAsxZ8y59BFNZkeHNAv4//ei0+6Y9HN43Ie7rpXIvEmL/8QMDUsfOXkto
s4bzmT5NcOMZuDr7fjKic/hl
-----END PRIVATE KEY-----
@@ -1,19 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDHTCCAgWgAwIBAgIUXEXm761ENtHmsrdb12sJyPedfW8wDQYJKoZIhvcNAQEL
BQAwHjEcMBoGA1UEAwwTc3FsdGVzdC5haXJieXRlLmNvbTAeFw0yMTA1MDcyMzA3
NDBaFw0yMTA2MDYyMzA3NDBaMB4xHDAaBgNVBAMME3NxbHRlc3QuYWlyYnl0ZS5j
b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC5bUoMylrFiaAC1sYC
QlMqqX/yvGwZAIoWq5mo4Kc1PB1JDS8+vB7s8SMsJSMldB1udSEsv0P4k1sVpymR
Zp0Y2IAepcJ1qpFEUJGH5E1A6JIDW3EyvddO5u4BoA+ZDUFvlRyozZumSbP8hvBt
Fv8OR/guB1938XB185CM5EFZydKeTkKiPhvTA2fbqmmi96DAxDMI2ggBBKDIUVTU
WJqJMOcj/sLGY87jPiltgQGknSxKZoJYeqFeyhYPrzvUrzLJJQPBJ9L7HwoOIn4j
xIN8sDhl/4yXSmX8bGRRXOLhrspGp0KRIQXUeClZLswz+YT07pPl6oMXQnD/dl/W
cSuZAgMBAAGjUzBRMB0GA1UdDgQWBBQO7E0qXXC6/C63Ph02pmr1DGhTfjAfBgNV
HSMEGDAWgBQO7E0qXXC6/C63Ph02pmr1DGhTfjAPBgNVHRMBAf8EBTADAQH/MA0G
CSqGSIb3DQEBCwUAA4IBAQA9uejsECQbFqNw2oXnOZfcHSvfslWq11GdPLqMWFYW
NBWAEw2PS5uiB3B8Q+sFZ/7sXcGyK7e55JPMUxnsH3yIiE0NB0S56pcfFBL9k9xB
4zL7h1LMnmYTueIhUWOInbc1VNrdycMjpTqkVjNYabiXwvza/iWG+EQfxh3bABtE
+t1omtwMGtOB/XF7jPndfBk7Tdj2PgsTGBru3HVP7hTwHOSlhpGt+p5hsWQVAbBl
PSQyvP1xX/KfjGOs8WtKtpwc6RMNbreJfA4ktqvYTYPCvVm9+LpdLZ1jj0OAxKe6
dcshkMOfJUPBb4HmDX0RZrDJH+4UKbqQ9vC6sTvPQtDO
-----END CERTIFICATE-----