Managing AWS Transfer Family with Terraform
Learn how to set up and manage AWS Transfer Family for secure file transfers using SFTP, FTPS, and FTP protocols with Terraform
AWS Transfer Family is a fully managed service that enables secure file transfers over SFTP, FTPS, and FTP protocols. This guide demonstrates how to set up and manage Transfer Family using Terraform.
Prerequisites
- AWS CLI configured with appropriate permissions
- Terraform installed (version 1.0.0 or later)
- S3 bucket for file storage
- Route 53 hosted zone (optional, for custom domain)
Project Structure
terraform-transfer/
├── main.tf
├── variables.tf
├── outputs.tf
├── modules/
│   └── transfer/
│       ├── main.tf
│       ├── variables.tf
│       └── outputs.tf
└── config/
    └── users.json
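Before diving into the module itself, the root main.tf wires everything together. A minimal sketch, using the variable names assumed throughout this guide; all values are placeholders to replace with your own:

module "transfer" {
  source = "./modules/transfer"

  project_name        = "demo-transfer"
  vpc_id              = "vpc-0123456789abcdef0"      # your VPC
  subnet_ids          = ["subnet-0123456789abcdef0"] # your subnets
  s3_bucket           = "my-transfer-bucket"
  transfer_user_name  = "alice"
  ssh_public_key      = file("~/.ssh/id_rsa.pub")
  allowed_cidr_blocks = ["203.0.113.0/24"]
  route53_zone_id     = "Z0123456789ABCDEF"
  domain_name         = "example.com"

  tags = {
    Environment = "dev"
  }
}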
Transfer Family Configuration
Create modules/transfer/main.tf:
# Transfer Server
resource "aws_transfer_server" "main" {
  identity_provider_type = "SERVICE_MANAGED"
  protocols              = ["SFTP", "FTPS"]
  endpoint_type          = "VPC"
  security_policy_name   = "TransferSecurityPolicy-2020-06"

  # FTPS requires a certificate managed in ACM; the server takes the
  # certificate ARN (see the ACM resource under "Custom Hostname" below).
  certificate = aws_acm_certificate.transfer.arn

  endpoint_details {
    vpc_id             = var.vpc_id
    subnet_ids         = var.subnet_ids
    security_group_ids = [aws_security_group.transfer.id]
  }

  protocol_details {
    passive_ip                  = "AUTO"
    set_stat_option             = "ENABLE_NO_OP"
    tls_session_resumption_mode = "ENFORCED"
  }

  structured_log_destinations = [
    "${aws_cloudwatch_log_group.transfer.arn}:*"
  ]

  tags = merge(
    var.tags,
    {
      Name = "${var.project_name}-server"
    }
  )
}
# Transfer User
resource "aws_transfer_user" "main" {
  server_id           = aws_transfer_server.main.id
  user_name           = var.transfer_user_name
  role                = aws_iam_role.transfer_user.arn
  home_directory_type = "LOGICAL"

  # ${transfer:UserName} is substituted with the logged-in user's name.
  home_directory_mappings {
    entry  = "/"
    target = "/${var.s3_bucket}/$${transfer:UserName}"
  }

  posix_profile {
    gid = 1000
    uid = 1000
  }

  tags = merge(
    var.tags,
    {
      Name = "${var.project_name}-user"
    }
  )
}

# SSH Key
resource "aws_transfer_ssh_key" "main" {
  server_id = aws_transfer_server.main.id
  user_name = aws_transfer_user.main.user_name
  body      = var.ssh_public_key
}
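# Optional: if you do not already have a key pair, the hashicorp/tls
# provider can generate one. A sketch; note that the private key is then
# stored in Terraform state, so protect the state accordingly.
resource "tls_private_key" "transfer_user" {
  algorithm = "RSA"
  rsa_bits  = 4096
}

# The generated public key can replace var.ssh_public_key above:
#   body = tls_private_key.transfer_user.public_key_openssh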
# Security Group
resource "aws_security_group" "transfer" {
  name        = "${var.project_name}-transfer"
  description = "Security group for Transfer server"
  vpc_id      = var.vpc_id

  ingress {
    description = "SFTP"
    from_port   = 22
    to_port     = 22
    protocol    = "tcp"
    cidr_blocks = var.allowed_cidr_blocks
  }

  ingress {
    description = "FTPS control"
    from_port   = 21
    to_port     = 21
    protocol    = "tcp"
    cidr_blocks = var.allowed_cidr_blocks
  }

  ingress {
    description = "FTPS passive data ports"
    from_port   = 8192
    to_port     = 8200
    protocol    = "tcp"
    cidr_blocks = var.allowed_cidr_blocks
  }

  egress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1"
    cidr_blocks = ["0.0.0.0/0"]
  }

  tags = merge(
    var.tags,
    {
      Name = "${var.project_name}-transfer"
    }
  )
}

# IAM Roles
resource "aws_iam_role" "transfer_user" {
  name = "${var.project_name}-transfer-user"

  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Action = "sts:AssumeRole"
        Effect = "Allow"
        Principal = {
          Service = "transfer.amazonaws.com"
        }
      }
    ]
  })
}
resource "aws_iam_role_policy" "transfer_user" {
name = "${var.project_name}-transfer-user"
role = aws_iam_role.transfer_user.id
policy = jsonencode({
Version = "2012-10-17"
Statement = [
{
Sid = "AllowListingOfUserFolder"
Effect = "Allow"
Action = [
"s3:ListBucket"
]
Resource = [
"arn:aws:s3:::${var.s3_bucket}"
]
Condition = {
StringLike = {
"s3:prefix": [
"$${aws:username}/*",
"$${aws:username}"
]
}
}
},
{
Sid = "HomeDirObjectAccess"
Effect = "Allow"
Action = [
"s3:PutObject",
"s3:GetObject",
"s3:DeleteObject",
"s3:DeleteObjectVersion"
]
Resource = [
"arn:aws:s3:::${var.s3_bucket}/$${aws:username}/*"
]
}
]
})
}
# CloudWatch Log Group
resource "aws_cloudwatch_log_group" "transfer" {
  name              = "/aws/transfer/${var.project_name}"
  retention_in_days = 30

  tags = merge(
    var.tags,
    {
      Name = "${var.project_name}-logs"
    }
  )
}
# Custom Hostname
# FTPS certificates are managed through ACM. This imports an existing
# certificate; alternatively, issue one with aws_acm_certificate and
# DNS validation.
resource "aws_acm_certificate" "transfer" {
  certificate_body  = file("${path.module}/certs/certificate.pem")
  certificate_chain = file("${path.module}/certs/chain.pem")
  private_key       = file("${path.module}/certs/private-key.pem")

  tags = merge(
    var.tags,
    {
      Name = "${var.project_name}-certificate"
    }
  )
}

resource "aws_route53_record" "transfer" {
  zone_id = var.route53_zone_id
  name    = "transfer.${var.domain_name}"
  type    = "CNAME"
  ttl     = 300
  records = [aws_transfer_server.main.endpoint]
}
Monitoring and Alerts
- CloudWatch Alarms
resource "aws_cloudwatch_metric_alarm" "bytes_transferred" {
alarm_name = "${var.project_name}-bytes-transferred"
comparison_operator = "GreaterThanThreshold"
evaluation_periods = "1"
metric_name = "BytesTransferred"
namespace = "AWS/Transfer"
period = "300"
statistic = "Sum"
threshold = "1000000000"
alarm_description = "Large amount of data transferred"
alarm_actions = [aws_sns_topic.alerts.arn]
dimensions = {
ServerId = aws_transfer_server.main.id
}
}
resource "aws_cloudwatch_metric_alarm" "failed_transfers" {
alarm_name = "${var.project_name}-failed-transfers"
comparison_operator = "GreaterThanThreshold"
evaluation_periods = "1"
metric_name = "FilesTransferredFailed"
namespace = "AWS/Transfer"
period = "300"
statistic = "Sum"
threshold = "10"
alarm_description = "High number of failed transfers"
alarm_actions = [aws_sns_topic.alerts.arn]
dimensions = {
ServerId = aws_transfer_server.main.id
}
}
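Both alarms publish to an SNS topic that is not shown above. A minimal sketch, assuming email delivery and a hypothetical var.alert_email variable:

resource "aws_sns_topic" "alerts" {
  name = "${var.project_name}-alerts"
}

resource "aws_sns_topic_subscription" "alerts_email" {
  topic_arn = aws_sns_topic.alerts.arn
  protocol  = "email"
  endpoint  = var.alert_email # hypothetical variable; AWS emails a confirmation link
}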
Access Control
- Custom Identity Provider
resource "aws_transfer_server" "custom_idp" {
identity_provider_type = "AWS_LAMBDA"
function = aws_lambda_function.auth.arn
protocols = ["SFTP"]
# ... other configuration ...
}
resource "aws_lambda_function" "auth" {
filename = "auth_function.zip"
function_name = "${var.project_name}-auth"
role = aws_iam_role.lambda.arn
handler = "index.handler"
runtime = "nodejs18.x"
environment {
variables = {
USER_TABLE = aws_dynamodb_table.users.name
}
}
}
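The authorizer reads credentials from a DynamoDB table that is not defined above. A minimal sketch, assuming the username is the partition key:

resource "aws_dynamodb_table" "users" {
  name         = "${var.project_name}-users"
  billing_mode = "PAY_PER_REQUEST"
  hash_key     = "username"

  attribute {
    name = "username"
    type = "S"
  }
}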
- Workflow
resource "aws_transfer_workflow" "main" {
description = "File processing workflow"
steps {
type = "COPY"
copy_step_details {
destination_file_location {
s3_file_location {
bucket = var.processing_bucket
key = "$${original.file}"
}
}
name = "CopyToProcessing"
overwrite_existing = true
}
}
steps {
type = "CUSTOM"
custom_step_details {
name = "ProcessFile"
target = aws_lambda_function.process.arn
timeout_seconds = 900
}
}
tags = merge(
var.tags,
{
Name = "${var.project_name}-workflow"
}
)
}
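A workflow only runs once it is attached to a server. A sketch of the workflow_details block to add inside the aws_transfer_server resource, assuming a hypothetical aws_iam_role.workflow with permission to run the steps:

# Inside the aws_transfer_server resource:
workflow_details {
  on_upload {
    execution_role = aws_iam_role.workflow.arn # hypothetical role for the workflow steps
    workflow_id    = aws_transfer_workflow.main.id
  }
}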
Integration Examples
- S3 Event Notification
resource "aws_s3_bucket_notification" "transfer" {
bucket = var.s3_bucket
lambda_function {
lambda_function_arn = aws_lambda_function.process.arn
events = ["s3:ObjectCreated:*"]
filter_prefix = "uploads/"
}
}
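S3 also needs permission to invoke the function, or notifications will silently fail to deliver. A minimal sketch:

resource "aws_lambda_permission" "allow_s3" {
  statement_id  = "AllowS3Invoke"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.process.function_name
  principal     = "s3.amazonaws.com"
  source_arn    = "arn:aws:s3:::${var.s3_bucket}"
}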
- EventBridge Rule
resource "aws_cloudwatch_event_rule" "transfer" {
name = "${var.project_name}-transfer-events"
description = "Capture Transfer events"
event_pattern = jsonencode({
source = ["aws.transfer"]
detail-type = ["AWS API Call via CloudTrail"]
detail = {
eventSource = ["transfer.amazonaws.com"]
eventName = ["CreateUser", "DeleteUser", "UpdateUser"]
}
})
}
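The rule does nothing until it has a target. A sketch routing matched events to the alerts topic defined earlier:

resource "aws_cloudwatch_event_target" "transfer" {
  rule      = aws_cloudwatch_event_rule.transfer.name
  target_id = "SendToSNS"
  arn       = aws_sns_topic.alerts.arn
  # SNS targets also need a topic policy allowing events.amazonaws.com to publish.
}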
Best Practices
- Security
  - Use VPC endpoints to keep traffic off the public internet
  - Enable encryption at rest on the S3 bucket (see the sketch after this list)
  - Enable structured logging to CloudWatch
  - Rotate SSH keys and certificates regularly
- Access Control
  - Grant least-privilege IAM policies scoped to each user's home directory
  - Use a custom identity provider for external credential stores
  - Monitor access patterns for anomalies
  - Audit users and permissions regularly
- Performance
  - Monitor transfer throughput and concurrency
  - Optimize S3 storage classes for access patterns
  - Use managed workflows for post-upload processing
- Cost Optimization
  - Monitor data transfer volumes
  - Use lifecycle rules to tier or expire old uploads
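To make the encryption recommendation concrete, a minimal sketch of default encryption on the storage bucket, assuming the bucket is managed in the same configuration:

resource "aws_s3_bucket_server_side_encryption_configuration" "transfer" {
  bucket = var.s3_bucket

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "aws:kms"
    }
  }
}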
Conclusion
You’ve learned how to set up and manage AWS Transfer Family using Terraform. This setup provides:
- Secure file transfer protocols
- User management
- Access control
- Monitoring capabilities
Remember to:
- Monitor transfer activity
- Implement security best practices
- Optimize performance
- Maintain access controls