I created a terraform deployment for gcp cloudfunction which should assume AWS role to manipulate on AWS s3. The problem that flow which looks like created correctly causes to (AccessDenied) when calling the AssumeRoleWithWebIdentity operation: Not authorized to perform sts:AssumeRoleWithWebIdentity error. Maybe somebody knows what I do wrongly?
requirements.txt:
google-auth
google-auth-oauthlib
google-auth-httplib2
boto3
functions-framework==3.0.0
flask>=1.0,<3.0
requests
main.py.tmpl:
from flask import jsonify
import google.auth
import google.oauth2.id_token
import google.auth.transport.requests
from google.auth import compute_engine
import boto3
import os
import traceback
import requests
def get_service_account_email():
# Fetch the service account email from the metadata server
metadata_url = 'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/email'
headers = {'Metadata-Flavor': 'Google'}
response = requests.get(metadata_url, headers=headers)
return response.text
def get_google_identity_token(audience):
auth_req = google.auth.transport.requests.Request()
id_token = google.oauth2.id_token.fetch_id_token(auth_req, audience)
return id_token
def assume_role_with_web_identity(role_arn, identity_token):
sts_client = boto3.client('sts')
response = sts_client.assume_role_with_web_identity(
RoleArn=role_arn,
RoleSessionName='GCPAssumeRoleSession',
WebIdentityToken=identity_token,
)
return response['Credentials']
def transfer_to_s3(request):
service_account_email = get_service_account_email()
print(f"Service Account Email: {service_account_email}")
try:
print("Starting the function execution...")
role_arn = "${gcp_assume_role_arn}"
identity_token = get_google_identity_token(service_account_email)
credentials = assume_role_with_web_identity(role_arn, identity_token)
print(f"Credentials were successfully assumed")
return jsonify({
"status": "success",
"message": f"File {file_name} successfully transferred from GCS to S3"
}), 200
except Exception as e:
print(f"Exception: {e}\n{traceback.print_exc()}")
return jsonify({
"status": "error",
"message": str(e)
}), 500
main.tf
variable "gcp_project_id" {
type = string
description = "refers to the client GCP project"
}
##############################################################################
# aws PROVIDER
##############################################################################
provider "aws" {
region = "us-east-1"
}
##############################################################################
# gcp PROVIDER
##############################################################################
provider "google" {
credentials = file("${path.module}/service-account.json")
project = var.gcp_project_id
region = "us-central1"
}
##############################################################################
# DATA & locals
##############################################################################
data "aws_caller_identity" "current" {}
data "google_project" "client-project" {
project_id = var.gcp_project_id
}
locals {
client_deploy_dir = "${path.module}/deployment/"
gcp_service_account_email = "${var.gcp_project_id}@appspot.gserviceaccount.com"
}
##############################################################################
# AWS S3
##############################################################################
resource "aws_s3_bucket" "client-cur-gcp-bucket" {
bucket = "client-gcp-bucket"
force_destroy = true
}
# Enable versioning
resource "aws_s3_bucket_versioning" "client-cur-gcp-bucket-versioning" {
bucket = aws_s3_bucket.client-cur-gcp-bucket.id
# enable versionning
versioning_configuration {
status = "Enabled"
}
}
# Enable logging on s3 actioins
resource "aws_s3_bucket_acl" "client-cur-gcp-bucket-acl" {
bucket = aws_s3_bucket.client-cur-gcp-bucket.id
acl = "private"
depends_on = [aws_s3_bucket_ownership_controls.client-cur-gcp-bucket-acl-ownership]
}
# Resource to avoid error "AccessControlListNotSupported: The bucket does not allow ACLs"
resource "aws_s3_bucket_ownership_controls" "client-cur-gcp-bucket-acl-ownership" {
bucket = aws_s3_bucket.client-cur-gcp-bucket.id
rule {
object_ownership = "ObjectWriter"
}
}
##############################################################################
# AWS Permissions for GCP
##############################################################################
resource "aws_iam_role" "gcp_assume_role" {
name = "GCPAssumeRole"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Federated": "accounts.google.com"
},
"Action": "sts:AssumeRoleWithWebIdentity",
"Condition": {
"StringEquals": {
"accounts.google.com:aud": "${local.gcp_service_account_email}"
}
}
}
]
}
EOF
}
resource "aws_iam_policy" "s3_access_policy" {
name = "S3AccessPolicy"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": "s3:*",
"Resource": "${aws_s3_bucket.client-cur-gcp-bucket.arn}/*"
}
]
}
EOF
}
resource "aws_iam_role_policy_attachment" "attach_s3_access_policy" {
role = aws_iam_role.gcp_assume_role.name
policy_arn = aws_iam_policy.s3_access_policy.arn
}
##############################################################################
# BUCKET FOR BigQuery results CSV
##############################################################################
resource "google_storage_bucket" "client-cloud-functions-source-bucket" {
name = "max-test-cloud-functions-source-bucket"
location = "US"
force_destroy = true
}
##############################################################################
# GCP to AWS Transfer Function
##############################################################################
# fille the template
data "template_file" "transwer-to-s3-function" {
template = file("${path.module}/main.py.tmpl")
vars = {
gcp_assume_role_arn = aws_iam_role.gcp_assume_role.arn
}
}
# save file
resource "local_file" "transwer-to-s3-function" {
content = data.template_file.transwer-to-s3-function.rendered
filename = "${local.client_deploy_dir}/transwer_to_s3/main.py"
}
# zip files
resource "null_resource" "zip_transwer_to_s3_code" {
depends_on = [local_file.transwer-to-s3-function]
provisioner "local-exec" {
command = "7z a ${local.client_deploy_dir}/transwer_to_s3.zip ${abspath(local.client_deploy_dir)}/transwer_to_s3/main.py ${abspath(path.module)}/requirements.txt"
}
}
####################
# cloud function
resource "google_storage_bucket_object" "transwer-to-s3-function-zip" {
depends_on = [null_resource.zip_transwer_to_s3_code]
name = "transwer_to_s3.zip"
bucket = google_storage_bucket.client-cloud-functions-source-bucket.name
source = "${local.client_deploy_dir}/transwer_to_s3.zip"
}
resource "google_cloudfunctions_function" "transwer-to-s3-function" {
name = "transwer-to-s3-function"
description = "Run transwer to s3 function"
runtime = "python39"
source_archive_bucket = google_storage_bucket.client-cloud-functions-source-bucket.name
source_archive_object = google_storage_bucket_object.transwer-to-s3-function-zip.name
entry_point = "transfer_to_s3"
trigger_http = true
}
resource "google_project_iam_member" "transwer-to-s3-function-invoker" {
project = var.gcp_project_id
role = "roles/pubsub.subscriber"
member = "serviceAccount:${local.gcp_service_account_email}"
}
CMD
you need to have your gcp service-account.json in the current dir
terraform init
terraform apply -auto-approve -var gcp_project_id=<gcp project id>