cleanwork

This commit is contained in:
2026-01-12 17:19:44 -06:00
parent 87e00ae867
commit be9c5b4ceb
17 changed files with 0 additions and 324 deletions

106
queue_helper/check-dlq.py Normal file
View File

@@ -0,0 +1,106 @@
#!/usr/bin/env python3
# check-dlq.py
"""
Überprüft Dead Letter Queues und zeigt fehlgeschlagene E-Mails
"""
import boto3
import json
from datetime import datetime
sqs = boto3.client('sqs', region_name='eu-central-1')
s3 = boto3.client('s3', region_name='eu-central-1')
DOMAINS = ['andreasknuth.de', 'bizmatch.net']
def check_dlq_for_domain(domain):
    """Inspect the dead-letter queue for one domain and print any failed mails.

    The DLQ name is derived from the domain by replacing dots with dashes
    and appending '-queue-dlq'. Each SQS message body is parsed as JSON;
    when it references an S3 object, that object's metadata is read for
    error details. Messages are NOT deleted, so they remain available for
    the requeue/purge tooling.

    Args:
        domain: Mail domain, e.g. 'andreasknuth.de'.
    """
    dlq_name = domain.replace('.', '-') + '-queue-dlq'
    try:
        dlq_url = sqs.get_queue_url(QueueName=dlq_name)['QueueUrl']
    except sqs.exceptions.QueueDoesNotExist:
        # Was a bare `except:`; narrowed so auth/network errors are no
        # longer silently reported as "not found".
        print(f"⚠️ DLQ not found for {domain}")
        return
    except Exception as e:
        print(f"⚠️ Could not resolve DLQ for {domain}: {e}")
        return
    # DLQ stats
    attrs = sqs.get_queue_attributes(
        QueueUrl=dlq_url,
        AttributeNames=['ApproximateNumberOfMessages']
    )['Attributes']
    count = int(attrs.get('ApproximateNumberOfMessages', 0))
    if count == 0:
        print(f"{domain}: No failed messages")
        return
    print(f"\n{'='*70}")
    print(f"⚠️ {domain}: {count} failed message(s)")
    print(f"{'='*70}\n")
    # Fetch up to 10 messages (the AWS per-call maximum)
    response = sqs.receive_message(
        QueueUrl=dlq_url,
        MaxNumberOfMessages=10,
        WaitTimeSeconds=0,
        AttributeNames=['All']
    )
    messages = response.get('Messages', [])
    for i, msg in enumerate(messages, 1):
        try:
            body = json.loads(msg['Body'])
            print(f"{i}. Failed Message:")
            print(f" MessageId: {body.get('message_id', 'unknown')}")
            print(f" From: {body.get('from', 'unknown')}")
            print(f" To: {body.get('recipient', 'unknown')}")
            print(f" Subject: {body.get('subject', 'unknown')}")
            # S3 metadata for error details (best effort)
            bucket = body.get('bucket')
            key = body.get('key')
            if bucket and key:
                try:
                    head = s3.head_object(Bucket=bucket, Key=key)
                    metadata = head.get('Metadata', {})
                    error = metadata.get('error', 'Unknown error')
                    failed_at = metadata.get('failed_at', 'unknown')
                    if failed_at != 'unknown':
                        # failed_at is presumably a Unix timestamp — TODO confirm producer
                        failed_dt = datetime.fromtimestamp(int(failed_at))
                        print(f" Failed at: {failed_dt}")
                    print(f" Error: {error}")
                except Exception:  # narrowed from bare except: keep best-effort behavior
                    print(f" (Could not retrieve error details)")
            print()
        except Exception as e:
            print(f" Error parsing message: {e}\n")
def main():
    """Run the DLQ check for every configured domain, then print next steps."""
    separator = '=' * 70
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print(f"\n{separator}")
    print(f"Dead Letter Queue Check - {timestamp}")
    print(separator)
    for domain in DOMAINS:
        check_dlq_for_domain(domain)
    # Footer: what an operator can do about failed messages.
    print(f"\n{separator}")
    print("Options:")
    print(" - Fix SMTP server issues")
    print(" - Re-queue: python requeue-dlq.py <domain>")
    print(" - Delete: python purge-dlq.py <domain>")
    print(f"{separator}\n")
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,4 @@
#!/bin/bash
# get-queue-urls.sh
# List the URLs of all main email queues in us-east-2.
# The JMESPath filter keeps URLs containing '-queue' but drops the
# matching dead-letter queues (those containing '-dlq').
aws sqs list-queues --region us-east-2 --query 'QueueUrls[?contains(@, `-queue`) && !contains(@, `-dlq`)]' --output table

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import boto3
from datetime import datetime
# Region fest auf us-east-2
sqs = boto3.client('sqs', region_name='us-east-2')
def get_all_queues():
    """Return (name, url) pairs for every SQS queue whose name ends in '-queue'.

    Dead-letter queues carry the '-queue-dlq' suffix and therefore never
    match, so they are excluded automatically.
    """
    pages = sqs.get_paginator('list_queues').paginate()
    all_urls = (u for page in pages for u in page.get('QueueUrls', []))
    named = ((u.rsplit('/', 1)[-1], u) for u in all_urls)
    return [(name, url) for name, url in named if name.endswith('-queue')]
def main():
    """Print pending / in-flight / DLQ message counts for every '*-queue'.

    Queues are discovered via get_all_queues() (region us-east-2) and
    reported sorted by name. A missing or inaccessible DLQ is shown as
    'Not found / No access' rather than failing the whole run.
    """
    print(f"\n{'='*70}")
    print(f"Email Queue Monitoring (us-east-2) - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print(f"{'='*70}\n")
    queues = get_all_queues()
    if not queues:
        print("No queues found matching '*-queue'. Check your region or permissions.")
        return
    # Sort by name for stable, readable output
    queues.sort(key=lambda x: x[0])
    for name, url in queues:
        dlq_name = name + '-dlq'
        try:
            # Main queue stats — request only attributes valid for this call
            attrs = sqs.get_queue_attributes(
                QueueUrl=url,
                AttributeNames=['ApproximateNumberOfMessages', 'ApproximateNumberOfMessagesNotVisible']
            )['Attributes']
            # DLQ stats (the DLQ may not exist or may be inaccessible)
            try:
                dlq_url = sqs.get_queue_url(QueueName=dlq_name)['QueueUrl']
                dlq_attrs = sqs.get_queue_attributes(QueueUrl=dlq_url, AttributeNames=['ApproximateNumberOfMessages'])['Attributes']
                dlq_count = int(dlq_attrs.get('ApproximateNumberOfMessages', 0))
            except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
                dlq_count = -1  # sentinel: no DLQ found, or access error
            available = int(attrs.get('ApproximateNumberOfMessages', 0))
            flight = int(attrs.get('ApproximateNumberOfMessagesNotVisible', 0))
            # Status icon — a main-queue backlog deliberately outranks DLQ errors
            status = ""
            if dlq_count > 0:
                status = "⚠️ "  # DLQ not empty
            if available > 50:
                status = "🔥"  # backlog in the main queue
            print(f"{status} Queue: {name}")
            print(f" Pending: {available:<5} (Waiting for worker)")
            print(f" Processing: {flight:<5} (Currently in worker)")
            if dlq_count >= 0:
                if dlq_count > 0:
                    print(f" DLQ Errors: \033[91m{dlq_count:<5}\033[0m (In {dlq_name})")  # highlighted in red
                else:
                    print(f" DLQ Errors: {dlq_count:<5} (In {dlq_name})")
            else:
                print(f" DLQ: Not found / No access")
            print("-" * 30)
        except Exception as e:
            print(f"❌ Error checking {name}: {e}")
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3
import sys
import boto3
# WICHTIG: Region auf us-east-2 gesetzt
sqs = boto3.client('sqs', region_name='us-east-2')
def requeue_dlq(domain, max_messages=10):
    """Move messages from a domain's DLQ back into its main queue.

    Prompts for confirmation, then re-sends each DLQ message body to the
    main queue and deletes it from the DLQ.

    Args:
        domain: Mail domain; dots become dashes to derive the queue names
            '<domain>-queue' and '<domain>-queue-dlq'.
        max_messages: Upper bound on how many messages to move.
    """
    # Normalize names (dots to dashes)
    queue_name = domain.replace('.', '-') + '-queue'
    dlq_name = queue_name + '-dlq'
    print(f"Connecting to AWS in us-east-2 for domain: {domain}")
    try:
        # Resolve both queue URLs up front
        q_url = sqs.get_queue_url(QueueName=queue_name)['QueueUrl']
        dlq_url = sqs.get_queue_url(QueueName=dlq_name)['QueueUrl']
    except Exception as e:
        print(f"❌ Error finding queues: {e}")
        return
    # Check current DLQ depth
    attrs = sqs.get_queue_attributes(QueueUrl=dlq_url, AttributeNames=['ApproximateNumberOfMessages'])
    count = int(attrs['Attributes']['ApproximateNumberOfMessages'])
    if count == 0:
        print(f"✅ No messages in DLQ ({dlq_name}).")
        return
    print(f"⚠️ Found {count} messages in {dlq_name}")
    print(f" Target: {queue_name}")
    if input(" Move messages now? (y/n): ").lower() != 'y':
        print("Cancelled.")
        return
    moved = 0
    while moved < max_messages:
        # BUG FIX: previously always requested 10 messages and moved the
        # whole batch, which could overshoot max_messages. Cap the batch
        # at the remaining budget (AWS allows at most 10 per call).
        batch_size = min(10, max_messages - moved)
        resp = sqs.receive_message(
            QueueUrl=dlq_url,
            MaxNumberOfMessages=batch_size,
            WaitTimeSeconds=1
        )
        msgs = resp.get('Messages', [])
        if not msgs:
            break
        for msg in msgs:
            # 1. Re-send the body to the main queue
            sqs.send_message(QueueUrl=q_url, MessageBody=msg['Body'])
            # 2. Delete it from the DLQ only after the send succeeded
            sqs.delete_message(QueueUrl=dlq_url, ReceiptHandle=msg['ReceiptHandle'])
            moved += 1
            print(f" ✓ Moved message {msg['MessageId']}")
    print(f"✅ Successfully moved {moved} messages.")
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Usage: python3 requeue-dlq.py <domain>")
        sys.exit(1)
    requeue_dlq(sys.argv[1])

175
queue_helper/requeue-email.sh Executable file
View File

@@ -0,0 +1,175 @@
#!/bin/bash
# requeue-email.sh - Manually re-sends an e-mail stored in S3 into the SQS queue.
# Simulates the SNS-like payload format the worker expects (normally built by Lambda).
# Extracts FROM and RECIPIENTS from the e-mail itself when not given on the command line.
set -e
# Validate parameters: DOMAIN and S3_KEY are required; FROM/RECIPIENTS are optional.
if [ $# -lt 2 ]; then
echo "Usage: $0 <DOMAIN> <S3_KEY> [FROM] [RECIPIENTS]"
echo ""
echo "RECIPIENTS kann kommagetrennt sein, z.B. user1@domain.com,user2@domain.com"
echo "Falls FROM und RECIPIENTS nicht angegeben, werden sie aus der E-Mail extrahiert."
echo "Example:"
echo " $0 bayarea-cc.com rgskc3d59dqdm6lq1scenpuvdq7ikhi3cqk382g1"
echo " oder mit manuellen Werten: $0 bayarea-cc.com rgskc3d59dqdm6lq1scenpuvdq7ikhi3cqk382g1 sender@example.com user@bayarea-cc.com"
echo ""
exit 1
fi
DOMAIN=$1
S3_KEY=$2
FROM=${3:-}
RECIPIENTS=${4:-}
# Configuration
AWS_REGION="us-east-2"
# Derive the bucket name from the domain (dots become dashes)
BUCKET="${DOMAIN//./-}-emails"
# Temporary file for the downloaded e-mail
TEMP_FILE="/tmp/email-${S3_KEY}.eml"
echo "==================================="
echo "Requeue E-Mail zu SQS Queue"
echo "==================================="
echo "Domain: $DOMAIN"
echo "Bucket: $BUCKET"
echo "S3 Key: $S3_KEY"
# Verify the S3 object exists before doing anything else
echo "Prüfe S3 Object..."
if ! aws s3api head-object \
--bucket "$BUCKET" \
--key "$S3_KEY" \
--region "$AWS_REGION" &>/dev/null; then
echo "ERROR: S3 Object nicht gefunden: s3://$BUCKET/$S3_KEY"
exit 1
fi
echo "✓ S3 Object existiert"
# If FROM or RECIPIENTS were not supplied, extract them from the e-mail headers
if [ -z "$FROM" ] || [ -z "$RECIPIENTS" ]; then
echo "Extrahiere Headers aus E-Mail..."
aws s3 cp "s3://$BUCKET/$S3_KEY" "$TEMP_FILE" --region "$AWS_REGION" --quiet
# Call the separate Python helper to parse the headers
if ! command -v python3 &> /dev/null; then
echo "ERROR: python3 ist nicht installiert."
rm -f "$TEMP_FILE"
exit 1
fi
if [ ! -f "extract_email_headers.py" ]; then
echo "ERROR: extract_email_headers.py nicht gefunden (muss im selben Verzeichnis liegen)."
rm -f "$TEMP_FILE"
exit 1
fi
PARSE_OUTPUT=$(python3 extract_email_headers.py "$TEMP_FILE")
# Parse the helper's output; it emits 'FROM:...' and 'RECIPIENTS:...' lines
EXTRACTED_FROM=$(echo "$PARSE_OUTPUT" | grep '^FROM:' | cut -d':' -f2-)
EXTRACTED_RECIPIENTS=$(echo "$PARSE_OUTPUT" | grep '^RECIPIENTS:' | cut -d':' -f2-)
# Use the extracted values only where no explicit value was given
FROM=${FROM:-$EXTRACTED_FROM}
RECIPIENTS=${RECIPIENTS:-$EXTRACTED_RECIPIENTS}
# Clean up the temp file
rm -f "$TEMP_FILE"
fi
# Still empty -> we cannot build the payload; abort
if [ -z "$FROM" ] || [ -z "$RECIPIENTS" ]; then
echo "ERROR: Konnte FROM oder RECIPIENTS nicht extrahieren oder angeben."
exit 1
fi
# Extract the domain from the first recipient and validate it against $DOMAIN
FIRST_RECIPIENT=$(echo "$RECIPIENTS" | cut -d',' -f1 | sed 's/.*<//' | sed 's/>.*//') # strip angle brackets if the address is in <email> form
EXTRACTED_DOMAIN=$(echo "$FIRST_RECIPIENT" | cut -d'@' -f2)
# Lowercase both sides for a case-insensitive comparison
EXTRACTED_DOMAIN_LOWER=$(echo "$EXTRACTED_DOMAIN" | tr '[:upper:]' '[:lower:]')
DOMAIN_LOWER=$(echo "$DOMAIN" | tr '[:upper:]' '[:lower:]')
if [ "$EXTRACTED_DOMAIN_LOWER" != "$DOMAIN_LOWER" ]; then
echo "ERROR: Extrahierte Domain ($EXTRACTED_DOMAIN) passt nicht zur angegebenen Domain ($DOMAIN)."
exit 1
fi
# Derive the queue name from the domain (dots become dashes)
QUEUE_NAME="${DOMAIN//./-}-queue"
echo "From: $FROM"
echo "Recipients: $RECIPIENTS"
echo "Queue: $QUEUE_NAME"
echo ""
# Resolve the queue URL
echo "Ermittle Queue URL..."
QUEUE_URL=$(aws sqs get-queue-url \
--queue-name "$QUEUE_NAME" \
--region "$AWS_REGION" \
--query 'QueueUrl' \
--output text 2>/dev/null)
if [ -z "$QUEUE_URL" ]; then
echo "ERROR: Queue nicht gefunden: $QUEUE_NAME"
exit 1
fi
echo "✓ Queue URL: $QUEUE_URL"
# Convert the comma-separated recipients into a JSON array for jq
RECIPIENTS_ARRAY=$(echo "$RECIPIENTS" | tr ',' '\n' | jq -R . | jq -s .)
# Build a fake SES event (the shape the worker parses out of the SNS message)
SES_DATA=$(jq -n \
--arg from "$FROM" \
--arg msgid "$S3_KEY" \
--argjson recipients "$RECIPIENTS_ARRAY" \
'{
mail: {
source: $from,
messageId: $msgid,
destination: $recipients
},
receipt: {
recipients: $recipients
}
}')
# Fake SNS payload (wrapper around the SES event; Message is the stringified event)
FAKE_SNS_PAYLOAD=$(jq -n \
--argjson message "$SES_DATA" \
--arg msgid "$(uuidgen)" \
--arg timestamp "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
'{
Type: "Notification",
MessageId: $msgid,
TopicArn: "arn:aws:sns:ses-shim:global-topic",
Subject: "Amazon SES Email Receipt Notification",
Message: ($message | tostring),
Timestamp: $timestamp
}')
# Send the message to the queue
echo "Sende Message in Queue..."
RESPONSE=$(aws sqs send-message \
--queue-url "$QUEUE_URL" \
--region "$AWS_REGION" \
--message-body "$FAKE_SNS_PAYLOAD")
MESSAGE_ID=$(echo "$RESPONSE" | jq -r '.MessageId')
echo ""
echo "==================================="
echo "✅ E-Mail erfolgreich in Queue"
echo "==================================="
echo "SQS Message ID: $MESSAGE_ID"
echo "Queue: $QUEUE_NAME"
echo ""
echo "Der Worker wird die E-Mail in Kürze verarbeiten."