airbyte/airbyte-integrations/infrastructure/ssh_tunnel/module/main.tf
Sherif A. Nada 2e3dfccc68 Setup terraform structure for connector dev infra for GCP and AWS (#4641)
* setup GCP terraform structure for connector dev infra

* fixes

* Created aws terraform bootstrap infrastructure

* Corrected path for S3 state file

* Creating ssh tunnel testing infrastructure

* Creating a bastion host

* Docs

* Created bastion host with airbyte unprivileged account for tunneling

* Added yum updates

* Create a private subnet and a postgres database within it

* Identifier for db

* Create postgres and bastion within a module.

* Set up postgres infrastructure

* Don't need this terraform wrapper when we run with tfenv

* Dropped incomplete WIP from GCP setup for this PR.

* Touchups to how to use terraform

* Updated to resolve merge conflict

* More separated top level structure to avoid monorepo problems with terraform destroy.

* Clarifying directory structure

* Migrated directory structure for testing infrastructure

Co-authored-by: Jenny Brown <jenny@airbyte.io>
2021-08-03 16:39:26 -05:00


# Create a bastion host with a user account we can ssh in as, for using an ssh tunnel.
# The standard amazon-linux-2 ami will work fine. Don't care about version except stay recent-ish.
data "aws_ami" "amazon-linux-2" {
owners = [137112412989]
filter {
name = "owner-alias"
values = ["amazon"]
}
filter {
name = "name"
values = ["amzn2-ami-hvm-2.0.20210701.0-x86_64-gp2"]
}
}
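
# Editorial sketch (assumed name pattern, not referenced by any resource below): since the
# comment above only asks for a recent-ish Amazon Linux 2 build, an alternative is to let
# the lookup track the newest matching AMI instead of pinning a specific build.
data "aws_ami" "amazon-linux-2-latest" {
  most_recent = true
  owners      = ["amazon"]

  filter {
    name   = "name"
    values = ["amzn2-ami-hvm-2.0.*-x86_64-gp2"]
  }
}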
# Create a host we can ssh into for database ssh tunnel connections from airbyte connectors.
resource "aws_instance" "dbtunnel-bastion" {
ami = data.aws_ami.amazon-linux-2.id
instance_type = "t3.small"
subnet_id = aws_subnet.main-subnet-public-dbtunnel.id
vpc_security_group_ids = [aws_security_group.ssh-and-egress-allowed.id]
key_name = var.sudo_keypair_name
user_data = file("${path.module}/userdata.sh")
lifecycle {
ignore_changes = [associate_public_ip_address]
}
tags = {
Name = "dbtunnel-bastion"
}
provisioner "file" {
source = var.airbyte_user_authorized_keys_local_filepath
destination = "/tmp/airbyte_authorized_keys"
connection {
type = "ssh"
user = "ec2-user" # presumes you have the ssh key in your ssh-agent already
host = aws_instance.dbtunnel-bastion.public_ip
}
}
provisioner "remote-exec" {
inline = [
"sudo bash -cx \"adduser airbyte -m && mkdir /home/airbyte/.ssh && chmod 700 /home/airbyte/.ssh && touch /home/airbyte/.ssh/authorized_keys && chmod 600 /home/airbyte/.ssh/authorized_keys && chown -R airbyte.airbyte /home/airbyte/.ssh && cat /tmp/airbyte_authorized_keys > /home/airbyte/.ssh/authorized_keys && rm /tmp/airbyte_authorized_keys\""
]
connection {
type = "ssh"
user = "ec2-user" # presumes you have the ssh private key in your ssh-agent already
host = aws_instance.dbtunnel-bastion.public_ip
}
}
}
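
# Usage note (editorial, placeholder values): once this bastion is up, a connector test can
# reach the private-subnet postgres through a local port forward over the airbyte account,
# roughly: ssh -i <airbyte_private_key> -L 5432:<postgres_private_address>:5432 airbyte@<bastion_public_ip>
# The airbyte user created above is the unprivileged tunnel account; ec2-user keeps sudo.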
# We're using a static IP for connector testing for now since DNS isn't usable for this.
# We would prefer DNS someday.
resource "aws_eip" "dbtunnel-eip" {
  vpc = true
}

resource "aws_eip_association" "dbtunnel-eip-assoc" {
  instance_id   = aws_instance.dbtunnel-bastion.id
  allocation_id = aws_eip.dbtunnel-eip.id
}
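
# Editorial sketch (assumed output name, not part of the original module): exposing the static
# address lets test harness configuration pick up the bastion endpoint directly.
output "dbtunnel_bastion_public_ip" {
  description = "Static public IP of the ssh tunnel bastion host."
  value       = aws_eip.dbtunnel-eip.public_ip
}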