This commit is contained in:
2026-01-25 13:20:58 -06:00
parent 3884abc695
commit 2d9aba7e04
37 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env python3
"""
AWS service handlers

Package façade: re-exports one handler class per AWS service so callers
can import them from the package root.
"""
from .s3_handler import S3Handler
from .sqs_handler import SQSHandler
from .ses_handler import SESHandler
from .dynamodb_handler import DynamoDBHandler
# Explicit public API of the package.
__all__ = ['S3Handler', 'SQSHandler', 'SESHandler', 'DynamoDBHandler']

View File

@@ -0,0 +1,192 @@
#!/usr/bin/env python3
"""
DynamoDB operations handler
"""
import time
from typing import Optional, Dict, Any, List
import boto3
from botocore.exceptions import ClientError
from logger import log
from config import config
class DynamoDBHandler:
    """Handles all DynamoDB operations (email rules, bounce records, blocklists).

    Degrades gracefully: if the tables cannot be reached at construction time,
    ``self.available`` stays False and every lookup returns an empty/None
    result instead of raising.
    """

    def __init__(self):
        self.resource = boto3.resource('dynamodb', region_name=config.aws_region)
        self.available = False
        self.rules_table = None
        self.messages_table = None
        self.blocked_table = None
        self._initialize_tables()

    def _initialize_tables(self):
        """Initialize DynamoDB table connections and probe that they exist."""
        try:
            self.rules_table = self.resource.Table(config.rules_table)
            self.messages_table = self.resource.Table(config.messages_table)
            self.blocked_table = self.resource.Table(config.blocked_table)
            # Accessing table_status forces a DescribeTable round-trip, so this
            # verifies both connectivity and table existence.
            self.rules_table.table_status
            self.messages_table.table_status
            self.blocked_table.table_status
            self.available = True
            log("✓ DynamoDB tables connected successfully")
        except Exception as e:
            # Broad catch is deliberate: any failure here means "run without
            # DynamoDB features" rather than crash the worker.
            log(f"⚠ DynamoDB not fully available: {e}", 'WARNING')
            self.available = False

    def get_email_rules(self, email_address: str) -> Optional[Dict[str, Any]]:
        """
        Get email rules for recipient (OOO, Forwarding)

        Args:
            email_address: Recipient email address

        Returns:
            Rule dictionary or None if not found
        """
        if not self.available or not self.rules_table:
            return None
        try:
            response = self.rules_table.get_item(Key={'email_address': email_address})
            return response.get('Item')
        except ClientError as e:
            # A missing table is expected in partial deployments; only log
            # other AWS-side errors.
            if e.response['Error']['Code'] != 'ResourceNotFoundException':
                log(f"⚠ DynamoDB error for {email_address}: {e}", 'ERROR')
            return None
        except Exception as e:
            log(f"⚠ DynamoDB error for {email_address}: {e}", 'WARNING')
            return None

    def get_bounce_info(self, message_id: str, worker_name: str = 'unified') -> Optional[Dict]:
        """
        Get bounce information from DynamoDB with retry logic

        Retries because the bounce record is written asynchronously by a
        separate SES notification pipeline and may lag behind the message.

        Args:
            message_id: SES Message ID
            worker_name: Worker name for logging

        Returns:
            Bounce info dictionary or None
        """
        if not self.available or not self.messages_table:
            return None
        for attempt in range(config.bounce_lookup_retries):
            try:
                response = self.messages_table.get_item(Key={'MessageId': message_id})
                item = response.get('Item')
                if item:
                    # Normalize with defaults so callers never see KeyError.
                    return {
                        'original_source': item.get('original_source', ''),
                        'bounceType': item.get('bounceType', 'Unknown'),
                        'bounceSubType': item.get('bounceSubType', 'Unknown'),
                        'bouncedRecipients': item.get('bouncedRecipients', []),
                        'timestamp': item.get('timestamp', '')
                    }
                if attempt < config.bounce_lookup_retries - 1:
                    log(
                        f"  Bounce record not found yet, retrying in {config.bounce_lookup_delay}s "
                        f"(attempt {attempt + 1}/{config.bounce_lookup_retries})...",
                        'INFO',
                        worker_name
                    )
                    time.sleep(config.bounce_lookup_delay)
                else:
                    log(
                        f"⚠ No bounce record found after {config.bounce_lookup_retries} attempts "
                        f"for Message-ID: {message_id}",
                        'WARNING',
                        worker_name
                    )
                    return None
            except Exception as e:
                log(
                    f"⚠ DynamoDB Error (attempt {attempt + 1}/{config.bounce_lookup_retries}): {e}",
                    'ERROR',
                    worker_name
                )
                if attempt < config.bounce_lookup_retries - 1:
                    time.sleep(config.bounce_lookup_delay)
                else:
                    return None
        return None

    def get_blocked_patterns(self, email_address: str) -> List[str]:
        """
        Get blocked sender patterns for recipient

        Args:
            email_address: Recipient email address

        Returns:
            List of blocked patterns (may include wildcards)
        """
        if not self.available or not self.blocked_table:
            return []
        try:
            response = self.blocked_table.get_item(Key={'email_address': email_address})
            item = response.get('Item', {})
            return item.get('blocked_patterns', [])
        except Exception as e:
            log(f"⚠ Error getting block list for {email_address}: {e}", 'ERROR')
            return []

    def batch_get_blocked_patterns(self, email_addresses: List[str]) -> Dict[str, List[str]]:
        """
        Batch get blocked patterns for multiple recipients (more efficient)

        Args:
            email_addresses: List of recipient email addresses

        Returns:
            Dictionary mapping email_address -> list of blocked patterns
            (addresses with no record map to [])
        """
        if not self.available or not self.blocked_table:
            return {addr: [] for addr in email_addresses}
        try:
            # NOTE(review): BatchGetItem accepts at most 100 keys per request
            # and can return UnprocessedKeys under throttling; neither is
            # handled here — confirm callers only pass small batches.
            keys = [{'email_address': addr} for addr in email_addresses]
            response = self.resource.batch_get_item(
                RequestItems={
                    config.blocked_table: {'Keys': keys}
                }
            )
            items = response.get('Responses', {}).get(config.blocked_table, [])
            # Index the response once by address instead of scanning the item
            # list for every recipient (was O(n*m), now O(n+m)).
            by_address = {item['email_address']: item for item in items}
            return {
                addr: by_address.get(addr, {}).get('blocked_patterns', [])
                for addr in email_addresses
            }
        except Exception as e:
            log(f"⚠ Batch blocklist check error: {e}", 'ERROR')
            return {addr: [] for addr in email_addresses}

View File

@@ -0,0 +1,193 @@
#!/usr/bin/env python3
"""
S3 operations handler
"""
import time
from typing import Optional, List
import boto3
from botocore.exceptions import ClientError
from logger import log
from config import config, domain_to_bucket_name
class S3Handler:
    """Handles all S3 operations: email download, status metadata, deletion."""

    def __init__(self):
        self.client = boto3.client('s3', region_name=config.aws_region)

    def get_email(self, domain: str, message_id: str, receive_count: int) -> Optional[bytes]:
        """
        Download email from S3

        Args:
            domain: Email domain
            message_id: SES Message ID
            receive_count: Number of times this message was received from queue

        Returns:
            Raw email bytes, or None when the object is not there yet and a
            retry is still allowed (receive_count < 5)

        Raises:
            ClientError / Exception: on permanent misses or any other S3 error.
        """
        bucket = domain_to_bucket_name(domain)
        try:
            response = self.client.get_object(Bucket=bucket, Key=message_id)
            return response['Body'].read()
        except ClientError as e:
            # client.exceptions.NoSuchKey is itself a ClientError subclass, so
            # one error-code check covers both forms (the original had two
            # duplicate branches for the same condition).
            if e.response['Error']['Code'] == 'NoSuchKey':
                if receive_count < 5:
                    # S3 is eventually consistent with the SES->SQS pipeline;
                    # leave the message on the queue for a later attempt.
                    log(f"⏳ S3 Object not found yet (Attempt {receive_count}). Retrying...", 'WARNING')
                    return None
                log("❌ S3 Object missing permanently after retries.", 'ERROR')
                raise
            log(f"❌ S3 Download Error: {e}", 'ERROR')
            raise
        except Exception as e:
            log(f"❌ S3 Download Error: {e}", 'ERROR')
            raise

    @staticmethod
    def _processed_stamp(worker_name: str, status: str) -> dict:
        """Build the metadata fields common to every terminal state."""
        return {
            'processed': 'true',
            'processed_at': str(int(time.time())),
            'processed_by': worker_name,
            'status': status,
        }

    def _replace_metadata(self, bucket: str, message_id: str, updates: dict):
        """Merge *updates* into the object's metadata and rewrite it in place.

        Drops the transient in-flight markers and performs a self-copy with
        MetadataDirective=REPLACE (the only way to change metadata on an
        existing S3 object). Raises on any S3 failure; callers decide whether
        to swallow or propagate.
        """
        head = self.client.head_object(Bucket=bucket, Key=message_id)
        metadata = head.get('Metadata', {}) or {}
        metadata.update(updates)
        for transient in ('processing_started', 'queued_at'):
            metadata.pop(transient, None)
        self.client.copy_object(
            Bucket=bucket,
            Key=message_id,
            CopySource={'Bucket': bucket, 'Key': message_id},
            Metadata=metadata,
            MetadataDirective='REPLACE'
        )

    def mark_as_processed(
        self,
        domain: str,
        message_id: str,
        worker_name: str,
        invalid_inboxes: Optional[List[str]] = None
    ):
        """Mark email as successfully delivered (best-effort: failures are logged)."""
        bucket = domain_to_bucket_name(domain)
        try:
            updates = self._processed_stamp(worker_name, 'delivered')
            if invalid_inboxes:
                updates['invalid_inboxes'] = ','.join(invalid_inboxes)
                log(f"⚠ Invalid inboxes recorded: {', '.join(invalid_inboxes)}", 'WARNING', worker_name)
            self._replace_metadata(bucket, message_id, updates)
        except Exception as e:
            # Metadata is an audit trail, not delivery-critical: never fail
            # the worker over it.
            log(f"Failed to mark as processed: {e}", 'WARNING', worker_name)

    def mark_as_all_invalid(
        self,
        domain: str,
        message_id: str,
        invalid_inboxes: List[str],
        worker_name: str
    ):
        """Mark email as failed because all recipients are invalid (best-effort)."""
        bucket = domain_to_bucket_name(domain)
        try:
            updates = self._processed_stamp(worker_name, 'failed')
            updates['error'] = 'All recipients are invalid (mailboxes do not exist)'
            updates['invalid_inboxes'] = ','.join(invalid_inboxes)
            self._replace_metadata(bucket, message_id, updates)
        except Exception as e:
            log(f"Failed to mark as all invalid: {e}", 'WARNING', worker_name)

    def mark_as_blocked(
        self,
        domain: str,
        message_id: str,
        blocked_recipients: List[str],
        sender: str,
        worker_name: str
    ):
        """
        Mark email as blocked by sender blacklist

        This sets metadata BEFORE deletion for audit trail.

        Raises:
            Exception: propagated so callers skip deletion when marking fails.
        """
        bucket = domain_to_bucket_name(domain)
        try:
            updates = self._processed_stamp(worker_name, 'blocked')
            updates['blocked_recipients'] = ','.join(blocked_recipients)
            updates['blocked_sender'] = sender
            self._replace_metadata(bucket, message_id, updates)
            log("✓ Marked as blocked in S3 metadata", 'INFO', worker_name)
        except Exception as e:
            log(f"⚠ Failed to mark as blocked: {e}", 'ERROR', worker_name)
            raise

    def delete_blocked_email(
        self,
        domain: str,
        message_id: str,
        worker_name: str
    ):
        """
        Delete email after marking as blocked

        Only call this after mark_as_blocked() succeeded.

        Raises:
            Exception: propagated so the SQS message is retried, not lost.
        """
        bucket = domain_to_bucket_name(domain)
        try:
            self.client.delete_object(Bucket=bucket, Key=message_id)
            log("🗑 Deleted blocked email from S3", 'SUCCESS', worker_name)
        except Exception as e:
            log(f"⚠ Failed to delete blocked email: {e}", 'ERROR', worker_name)
            raise

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""
SES operations handler
"""
import boto3
from botocore.exceptions import ClientError
from logger import log
from config import config
class SESHandler:
    """Thin wrapper around the boto3 SES client."""

    def __init__(self):
        # One shared client, pinned to the configured region.
        self.client = boto3.client('ses', region_name=config.aws_region)

    def send_raw_email(
        self,
        source: str,
        destination: str,
        raw_message: bytes,
        worker_name: str
    ) -> bool:
        """
        Send a raw MIME message via SES.

        Args:
            source: From address
            destination: To address
            raw_message: Raw MIME message bytes
            worker_name: Worker name for logging

        Returns:
            True when SES accepted the message; False on any failure
            (failures are logged, never raised).
        """
        try:
            self.client.send_raw_email(
                Source=source,
                Destinations=[destination],
                RawMessage={'Data': raw_message},
            )
        except ClientError as e:
            code = e.response['Error']['Code']
            log(f"⚠ SES send failed to {destination} ({code}): {e}", 'ERROR', worker_name)
            return False
        except Exception as e:
            log(f"⚠ SES send failed to {destination}: {e}", 'ERROR', worker_name)
            return False
        return True

View File

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""
SQS operations handler
"""
from typing import Optional, List, Dict, Any
import boto3
from botocore.exceptions import ClientError
from logger import log
from config import config, domain_to_queue_name
class SQSHandler:
    """Thin wrapper around the boto3 SQS client."""

    def __init__(self):
        self.client = boto3.client('sqs', region_name=config.aws_region)

    def get_queue_url(self, domain: str) -> Optional[str]:
        """
        Resolve the SQS queue URL for a domain.

        Args:
            domain: Email domain

        Returns:
            Queue URL, or None when the queue is missing or lookup fails.
        """
        name = domain_to_queue_name(domain)
        try:
            return self.client.get_queue_url(QueueName=name)['QueueUrl']
        except ClientError as e:
            code = e.response['Error']['Code']
            if code == 'AWS.SimpleQueueService.NonExistentQueue':
                log(f"Queue not found for domain: {domain}", 'WARNING')
            else:
                log(f"Error getting queue URL for {domain}: {e}", 'ERROR')
            return None

    def receive_messages(self, queue_url: str) -> List[Dict[str, Any]]:
        """
        Long-poll the queue for a batch of messages.

        Args:
            queue_url: SQS Queue URL

        Returns:
            List of message dictionaries ([] when empty or on error).
        """
        request = dict(
            QueueUrl=queue_url,
            MaxNumberOfMessages=config.max_messages,
            WaitTimeSeconds=config.poll_interval,
            VisibilityTimeout=config.visibility_timeout,
            AttributeNames=['ApproximateReceiveCount', 'SentTimestamp'],
        )
        try:
            return self.client.receive_message(**request).get('Messages', [])
        except Exception as e:
            log(f"Error receiving messages: {e}", 'ERROR')
            return []

    def delete_message(self, queue_url: str, receipt_handle: str):
        """
        Delete a message from the queue.

        Args:
            queue_url: SQS Queue URL
            receipt_handle: Message receipt handle

        Raises:
            Exception: re-raised after logging so callers can retry.
        """
        try:
            self.client.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)
        except Exception as e:
            log(f"Error deleting message: {e}", 'ERROR')
            raise

    def get_queue_size(self, queue_url: str) -> int:
        """
        Get the approximate number of messages in the queue.

        Args:
            queue_url: SQS Queue URL

        Returns:
            Approximate message count (0 on any error).
        """
        try:
            attributes = self.client.get_queue_attributes(
                QueueUrl=queue_url,
                AttributeNames=['ApproximateNumberOfMessages'],
            )['Attributes']
            return int(attributes.get('ApproximateNumberOfMessages', 0))
        except Exception:
            return 0