
import boto3
from datetime import datetime

def lambda_handler(event, context):

    # Source and destination S3 bucket names and prefixes
    source_bucket_name = 'tpc-aws-ted-tst-edpp-mart-rnd-us-east-1'
    destination_bucket_name = '3vue-takeda-tpc-ted-tst'
    source_bucket_path = 'rnd_us_mart/gma/outbound/'
    destination_bucket_path = '/'

    # Initialize STS client to assume the cross-account transfer role
    sts_client = boto3.client('sts')
    role_to_assume_arn = 'arn:aws:iam::600316963474:role/tst-apms-92187-3vue-transfer-role'
    role_session_name = 'AssumedRoleSession'

    # Assume IAM role
    assumed_role_object = sts_client.assume_role(
        RoleArn=role_to_assume_arn,
        RoleSessionName=role_session_name
    )

    # Extract assumed role credentials
    assumed_credentials = assumed_role_object['Credentials']

    # Initialize S3 clients with assumed role credentials
    s3_source = boto3.client(
        's3',
        aws_access_key_id=assumed_credentials['AccessKeyId'],
        aws_secret_access_key=assumed_credentials['SecretAccessKey'],
        aws_session_token=assumed_credentials['SessionToken']
    )

    s3_destination = boto3.client(
        's3',
        aws_access_key_id=assumed_credentials['AccessKeyId'],
        aws_secret_access_key=assumed_credentials['SecretAccessKey'],
        aws_session_token=assumed_credentials['SessionToken']
    )

    # Get list of objects in source and destination buckets
    source_objects = get_objects(s3_source, source_bucket_name, source_bucket_path)
    dest_objects = get_objects(s3_destination, destination_bucket_name, destination_bucket_path)

    # Identify out-of-sync objects
    out_of_sync_objects = find_out_of_sync(source_objects, dest_objects)

    # Copy out-of-sync objects to destination bucket
    copy_objects(s3_source, s3_destination, source_bucket_name, destination_bucket_name, out_of_sync_objects)

    # Send email based on sync status
    send_email(out_of_sync_objects, source_bucket_name, destination_bucket_name)

    return {'statusCode': 200, 'body': 'S3 Sync Completed'}

def get_objects(s3_client, bucket_name, bucket_path):
    """Retrieve all object keys under a bucket prefix."""
    paginator = s3_client.get_paginator('list_objects_v2')
    object_keys = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=bucket_path):
        contents = page.get('Contents', [])
        object_keys.extend([obj['Key'] for obj in contents])
    return object_keys

def find_out_of_sync(source_objects, dest_objects):
    """Identify objects present only in the source or destination bucket."""
    out_of_sync = []
    for obj in source_objects:
        if obj not in dest_objects:
            out_of_sync.append(obj)
    for obj in dest_objects:
        if obj not in source_objects:
            out_of_sync.append(obj)
    return out_of_sync
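
# Example: find_out_of_sync(['a.csv', 'b.csv'], ['b.csv', 'c.csv']) returns
# ['a.csv', 'c.csv']: 'a.csv' exists only in the source listing and 'c.csv'
# only in the destination listing.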

# Earlier variant that stripped the source path prefix before copying:
# def copy_objects(s3_source, s3_destination, source_bucket, dest_bucket, objects, source_path):
#     """Copy out-of-sync objects from the source bucket path to the destination bucket path."""
#     for obj in objects:
#         source_key = obj[len(source_path):]  # Remove the source path from the key
#         s3_source.copy_object(Bucket=dest_bucket, Key=obj, CopySource={'Bucket': source_bucket, 'Key': source_key})

def copy_objects(s3_source, s3_destination, source_bucket, dest_bucket, objects):
    """Copy out-of-sync objects from the source bucket to the destination bucket."""
    for obj in objects:
        # The destination client performs the copy: it reads the object from
        # the source bucket via CopySource and writes it under the same key.
        s3_destination.copy_object(
            Bucket=dest_bucket,
            Key=obj,
            CopySource={'Bucket': source_bucket, 'Key': obj}
        )

def send_email(out_of_sync_objects, source_bucket, dest_bucket):
    """Send an email with a message based on the sync status."""
    sender_email = "[email protected]"
    recipient_email = "[email protected]"
    subject = f"S3 Sync Report - {datetime.now().strftime('%Y-%m-%d')}"

    # Prepare email body based on sync status
    if not out_of_sync_objects:
        body = (
            f"All objects in source bucket '{source_bucket}' are synced with "
            f"destination bucket '{dest_bucket}'. No new objects need to be copied."
        )
    else:
        body = "The following objects are out of sync and have been copied to the destination bucket:\n"
        body += "\n".join(out_of_sync_objects)

    # Send email using SES
    ses_client = boto3.client('ses')
    response = ses_client.send_email(
        Source=sender_email,
        Destination={'ToAddresses': [recipient_email]},
        Message={
            'Subject': {'Data': subject},
            'Body': {'Text': {'Data': body}}
        }
    )
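
# --- Local test sketch (not part of the Lambda deployment) ---
# Minimal way to exercise the handler outside Lambda. This assumes the local
# AWS credentials are allowed to call sts:AssumeRole on the transfer role and
# that SES is configured for the (redacted) sender address; the empty event
# and None context mirror the fact that the handler ignores both arguments.
if __name__ == "__main__":
    print(lambda_handler({}, None))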
