Compare commits

...

227 Commits

Author SHA1 Message Date
56b7d065b8 get rid of old stuff 2026-04-10 18:31:16 -05:00
96348c17ce remove bayarea 2026-04-09 17:58:08 -05:00
02b721ff51 remove annaville 2026-04-09 10:16:09 -05:00
3a628fe676 api on port 3000 2026-04-03 15:13:24 -05:00
93535750a2 internal name 2026-04-03 14:42:07 -05:00
c949457f4c greenlens_net 2026-04-03 14:37:12 -05:00
5292e2728f caddy_net 2026-04-03 14:35:20 -05:00
8c1770882b host.docker.internal 2026-04-03 13:28:24 -05:00
6837cf4f17 greenlenspro.com 2026-04-03 13:04:38 -05:00
5c61a74e3d Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-04-03 12:49:52 -05:00
ffe6bdd0f4 greenlenspro.com 2026-04-03 12:49:42 -05:00
f391e35221 innungsapp.com 2026-03-08 09:13:24 -05:00
9f88b58f96 innungsapp 2026-03-02 14:38:15 -06:00
9d32e0962e buddelectric.bayarea-cc.com 2026-02-10 09:15:40 -06:00
4d3ca7bb14 switch back 2026-02-03 11:59:17 -06:00
27f929dfbc update for samsung 2026-02-03 11:49:58 -06:00
be0642c389 reduce caching 2026-02-02 17:52:41 -06:00
69af529410 add volume 2026-02-02 13:13:07 -06:00
c4d8e980da host.docker.internal 2026-02-02 13:11:19 -06:00
676e0a91b6 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-02-02 12:18:10 -06:00
d626b19e4a www.bizmatch.net change 2026-02-02 12:17:51 -06:00
8efc6bfcd2 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 16:48:21 -06:00
7fcc380b0f english 2026-01-28 16:48:15 -06:00
38e327c847 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 16:40:46 -06:00
5e8559ec97 QRCode 2026-01-28 16:40:40 -06:00
2ee5fc8842 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 16:06:05 -06:00
9e107cb96c email setup page 2026-01-28 16:05:59 -06:00
0ef8eb0938 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 14:59:05 -06:00
05aac691b3 dfgfdg 2026-01-28 14:59:02 -06:00
de57180976 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 14:53:29 -06:00
45ae435223 fix 2026-01-28 14:53:22 -06:00
6cd4371829 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 14:44:06 -06:00
eec4458604 apple 2026-01-28 14:43:34 -06:00
4e2369f35c Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-28 14:14:30 -06:00
c73b400f52 ruehrgedoens 2026-01-28 14:14:09 -06:00
c6ee22ef12 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 16:47:48 -06:00
4842bd7f03 imap 2026-01-27 16:47:40 -06:00
6f289424e3 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 16:43:30 -06:00
900ae6c257 only pop 2026-01-27 16:43:19 -06:00
8ef9420396 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 16:38:54 -06:00
2dd1dc21b6 fix for wrong closing .. 2026-01-27 16:38:47 -06:00
0794242198 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 16:27:59 -06:00
d926064493 pop 2026-01-27 16:27:54 -06:00
0d6bf386d0 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 16:14:58 -06:00
3ad3ab38c8 <LoginName>{header.X-Anchormailbox}</LoginName> 2026-01-27 16:14:53 -06:00
200567f23c Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 15:48:03 -06:00
ab4958dfa2 dsfdsf 2026-01-27 15:47:42 -06:00
aa75224d03 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 15:20:33 -06:00
4530a2f80e fixed loginname 2026-01-27 14:09:36 -06:00
83ae97e627 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 13:58:05 -06:00
fa2ef2b743 mail.email-srvr.com cert 2026-01-27 13:57:16 -06:00
7f9042e612 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 13:28:32 -06:00
2cfa226361 debug 2026-01-27 13:28:17 -06:00
d37109a696 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 13:18:04 -06:00
4f7dc6f8b4 autodiscover 2026-01-27 13:17:55 -06:00
afdd3d903a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 13:14:47 -06:00
6c6b4d345f autodiscover 2026-01-27 13:14:39 -06:00
28741de633 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:54:31 -06:00
7d1c0b9a6d remove debug 2026-01-27 09:54:29 -06:00
38b425e1d8 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:53:58 -06:00
b3d184259e info log 2026-01-27 09:53:54 -06:00
dfddc38c89 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:52:08 -06:00
29e35bfad6 email_autodiscover 2026-01-27 09:52:05 -06:00
fe9651409e Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:49:29 -06:00
9438eeaa75 container_name: caddy 2026-01-27 09:49:24 -06:00
286619fc62 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:48:08 -06:00
969aec9278 import 2026-01-27 09:48:04 -06:00
1bb297f0cf Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:43:11 -06:00
5dc09a5651 remove autodiscover 2026-01-27 09:43:07 -06:00
d0af616b8d Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-27 09:38:22 -06:00
7cbbaedd5e autodiscover 2026-01-27 09:38:06 -06:00
9acc06646a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-25 14:34:56 -06:00
f541ea9248 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-25 14:34:03 -06:00
96fa643095 roundcube 2026-01-25 14:33:44 -06:00
c70f031dff Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-01 09:58:47 -06:00
61820fe772 qrmaster 2026-01-01 09:58:22 -06:00
7e3fac6907 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-01 06:24:03 -06:00
013f1c8994 path to container ... 2026-01-01 06:23:54 -06:00
c24049d3fb Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-01 06:18:58 -06:00
dde671fe3d neues root 2026-01-01 06:18:51 -06:00
f340cc0a43 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2026-01-01 06:11:35 -06:00
aded85eb66 use config-email in prod mode 2026-01-01 06:11:08 -06:00
9dd22589de Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-12-31 11:37:18 -06:00
0ddfa51265 api.email-bayarea.com 2025-12-31 11:36:59 -06:00
4a5222a781 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-12-31 10:56:21 -06:00
490721c8b6 pictures in bizmatch-projects folder 2025-12-31 10:55:53 -06:00
ce6a115684 config.email-bayarea.com 2025-12-31 10:50:35 -06:00
f79dde2d1d Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-11-17 15:52:26 -06:00
08d1c0f265 new certificate generation 2025-11-17 15:37:03 -06:00
3b3d20f89a actual function 2025-10-18 16:45:56 -05:00
286de26c87 actual 2025-10-16 21:34:23 -05:00
1b899985a1 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-13 15:47:09 -05:00
cd731c502b fix 2025-10-13 15:47:06 -05:00
f0096bc27f Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-13 15:22:20 -05:00
ac008aff8e SES - Lambda Invokation added 2025-10-13 15:21:54 -05:00
432259d459 remove emails prefix 2025-10-10 17:42:30 -05:00
b9066a8f59 ac 2025-10-09 17:58:27 -05:00
39d50b7d3b Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-02 18:46:23 -05:00
5533fbff14 fghfgh 2025-10-02 18:46:18 -05:00
824fbbe3eb Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-02 18:41:24 -05:00
08657e7282 xcvxcv 2025-10-02 18:41:19 -05:00
1968faab99 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-02 18:35:00 -05:00
2617f049f3 host.docker.internal 2025-10-02 18:34:54 -05:00
3d961d6536 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-02 18:29:02 -05:00
7f071435c7 reverse_prox 2025-10-02 18:28:41 -05:00
5469a01893 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-01 21:20:53 -05:00
d04bb2f5cb mount 2025-10-01 21:20:49 -05:00
1bf893f683 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-01 17:37:27 -05:00
fecfc59988 fix 2025-10-01 17:37:20 -05:00
a2c4ac8685 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-01 17:26:09 -05:00
29181ce13b new port 2025-10-01 17:25:57 -05:00
08489162bf Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-10-01 17:20:17 -05:00
d5b7986761 annavillesda 2025-10-01 17:19:53 -05:00
92afa46d5d Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-19 17:32:05 -05:00
3da5e3c814 s3mail 2025-09-19 17:31:51 -05:00
22eadee4cd retry mechanism 2025-09-17 17:38:49 -05:00
a709172a99 Fixes 2025-09-17 17:19:34 -05:00
0f29d06653 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-15 16:43:07 -05:00
ce296ecdab iitwelders 2025-09-15 16:39:41 -05:00
a22a30ac3b Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-14 16:38:02 -05:00
798842ba9b email prefix removed 2025-09-14 16:37:55 -05:00
9b490a9233 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:24:42 -05:00
5ed6c15ba2 fghfgh 2025-09-11 15:24:40 -05:00
bf5569522a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:19:01 -05:00
b8915cb692 dfgdfg 2025-09-11 15:18:59 -05:00
3c84604de8 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:16:06 -05:00
b210e49ad4 dfgdfg 2025-09-11 15:15:59 -05:00
38a1a08c2a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:12:46 -05:00
b8dd30987e sdfsdf 2025-09-11 15:12:43 -05:00
ee26c3dc0a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:10:17 -05:00
bc038b0a70 dfgfdg 2025-09-11 15:10:11 -05:00
6331391f1c Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:06:37 -05:00
22af3a5273 sdfsd 2025-09-11 15:06:31 -05:00
ee3e5952ac Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 15:02:56 -05:00
7bd4c73306 fghfgh 2025-09-11 15:02:52 -05:00
541059c0c4 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:58:11 -05:00
cd545ee056 sdfsdf 2025-09-11 14:58:04 -05:00
ac68074178 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:53:17 -05:00
c62b72dac0 sdfsdf 2025-09-11 14:53:10 -05:00
ce0b44ac9c Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:45:50 -05:00
0d2d5d9e38 sdfsdf 2025-09-11 14:45:45 -05:00
3553cdcf59 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:43:00 -05:00
06f6ee43cc sdfdsf 2025-09-11 14:42:54 -05:00
834aa48d09 dfgdfg 2025-09-11 14:40:42 -05:00
14f6b30444 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:37:19 -05:00
e9a266534a asdsa 2025-09-11 14:37:16 -05:00
96f3ccbc1a Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:36:03 -05:00
3b3cb3aec1 sdgfsdf 2025-09-11 14:35:43 -05:00
669ef1b220 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:04:28 -05:00
ffee5c0568 replace 2025-09-11 14:04:22 -05:00
ee235d5863 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 14:00:19 -05:00
06e070f9b7 replace_response 2025-09-11 14:00:15 -05:00
dde2134d87 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 13:58:49 -05:00
0f7e8c1dd5 remove 2025-09-11 13:58:34 -05:00
bdcd57aba8 replace-response 2025-09-11 13:58:00 -05:00
57ec03cebe Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 13:35:29 -05:00
7282cbdd59 new name 2025-09-11 13:34:59 -05:00
082c465985 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 13:14:24 -05:00
101a128c9f order 2025-09-11 13:14:20 -05:00
8b5b984d22 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 13:12:03 -05:00
2562fc49b0 korrekturen 2025-09-11 13:10:53 -05:00
212fa09534 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 13:02:54 -05:00
a3eef3055e CADDY_VERSION=v2.9.1 2025-09-11 13:02:50 -05:00
f66016633e Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 12:56:39 -05:00
57fbce27f6 caddy with replace/response 2025-09-11 12:56:27 -05:00
b10f49a283 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 12:24:18 -05:00
47b5b7e8fd iitwelders 2. try 2025-09-11 12:24:15 -05:00
379cc87257 Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 12:18:13 -05:00
d80df95f43 iitwelders as proxy 2025-09-11 12:17:57 -05:00
ceaf82d5da Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-11 11:59:04 -05:00
fb5b0cc48e nqsltd & gregknoppcpa 2025-09-11 11:58:21 -05:00
dfadc74b2d :Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-09-01 13:55:13 -05:00
a3873f8649 bizmatch.net redir to www.bizmatch.net 2025-09-01 13:55:10 -05:00
cbe58d4cb2 ses2dms 2025-08-31 16:11:14 -05:00
7692aef4fc Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-08-28 17:34:11 -05:00
31aea63c61 wewer 2025-08-28 17:34:02 -05:00
973be97c70 deleted 2025-08-28 17:11:07 -05:00
bd38b9a5f2 home dir 2025-08-28 17:07:36 -05:00
09f6bf1a27 dms removed 2025-08-28 17:03:07 -05:00
b72cfdc67e sdfsdf 2025-08-28 16:52:29 -05:00
e96631bafd Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-08-28 16:47:25 -05:00
76b8f17ed3 changes 2025-08-28 16:47:18 -05:00
77ec9800aa Merge branch 'master' of git.bizmatch.net:aknuth/docker 2025-08-28 16:43:02 -05:00
3efc9ab1f1 changed port & 2025-08-28 16:43:00 -05:00
4c34709526 merge 2025-08-27 17:56:00 -05:00
b8e3cb6e1f hash 2025-08-27 17:53:54 -05:00
de7a541857 hamptonbrown 2025-08-27 17:50:16 -05:00
e8327f6824 neuer PW Hash 2025-08-25 18:10:27 -05:00
4ad79c3c29 tls 2025-08-25 17:52:36 -05:00
7e2b2ca310 sqllite 2025-08-22 18:05:34 -05:00
4be8ff61c5 change ports 2025-08-22 17:58:50 -05:00
f51e4ab44b changed formatting 2025-08-22 17:56:56 -05:00
338c630f57 docker-mailserver 2025-08-22 17:56:01 -05:00
08501f863a host.docker.internal instead of localhost 2025-08-22 15:19:54 -05:00
d5aaa64555 different comments signs 2025-08-22 15:05:03 -05:00
215d7a3978 cielectrical changed 2025-08-22 14:56:19 -05:00
217ad84815 localhost 2025-08-22 14:36:34 -05:00
e349f0142f cielectrical 2025-08-22 14:20:16 -05:00
8f1fdbfb96 change hostname 2025-08-18 17:20:20 -05:00
766f0d4a18 fancytextstuff 2025-08-18 17:17:41 -05:00
1369944996 dfgdfg 2025-08-01 15:14:23 -05:00
96950e43b0 dfgdfg 2025-08-01 15:13:12 -05:00
b9cc17b997 sdfsdf 2025-08-01 14:58:19 -05:00
e0db2595a9 dfgdfg 2025-08-01 14:55:08 -05:00
a6db7b130b sdfsdf 2025-08-01 12:43:16 -05:00
8bb05f499f sdfsd 2025-08-01 12:41:35 -05:00
0ce09ef969 updated env variables 2025-08-01 10:55:11 -05:00
e73641f258 sdfsdf 2025-07-31 17:51:52 -05:00
2e3ca446f5 asdas 2025-07-31 17:45:35 -05:00
1d88681f28 sdfdsf 2025-07-31 17:38:05 -05:00
24f6890357 sdfsdf 2025-07-31 17:37:22 -05:00
fc9102c5d3 sdfdsf 2025-07-31 17:28:10 -05:00
210148c305 sdfsdf 2025-07-31 17:20:37 -05:00
b126861406 sdfsdf 2025-07-31 16:53:20 -05:00
ee7b6fd1fb prod preparation 2025-07-30 17:30:39 -05:00
04e9f4ccec asdsad 2025-07-27 17:16:17 -05:00
e7519cc0b5 asdsad 2025-07-27 16:59:13 -05:00
3a585ea604 fsdf 2025-07-27 16:54:47 -05:00
1525cda50f sdfgds 2025-07-27 16:52:30 -05:00
80acd37da7 sdf 2025-07-27 15:59:45 -05:00
4efb69a356 asdfas 2025-07-27 15:59:17 -05:00
53827a8277 fix 2025-07-27 15:55:20 -05:00
55959ce22d test 2025-07-27 15:48:30 -05:00
d550342492 test 2025-07-27 15:20:04 -05:00
ade3a5780f no exception even if recipient list is empty 2025-07-21 15:33:04 -05:00
675c00209c check for valid Domain 2025-07-21 11:51:01 -05:00
5fadef1aac changes 2025-07-21 11:40:40 -05:00
36 changed files with 1101 additions and 1952 deletions

2
.gitignore vendored
View File

@@ -4,3 +4,5 @@ auth
.venv* .venv*
__pycache__ __pycache__
node_modules node_modules
ses-lambda-python/*
!ses-lambda-python/lambda_function.py

View File

@@ -1,6 +0,0 @@
DB_HOST=postgres
DB_PORT=5432
DB_SCHEMA=public
POSTGRES_DB=bizmatch
POSTGRES_USER=bizmatch
POSTGRES_PASSWORD=xieng7Seih

View File

@@ -0,0 +1,44 @@
services:
app:
image: node:22-alpine
working_dir: /app
volumes:
- ~/git/bizmatch-project/bizmatch-server:/app
ports:
- "3000:3000"
environment:
- NODE_ENV=development
- DB_HOST=postgres
- DB_PORT=5432
- DB_NAME=${POSTGRES_DB}
- DB_USER=${POSTGRES_USER}
- DB_PASSWORD=${POSTGRES_PASSWORD}
env_file:
- ~/git/docker/app/.env # Pfad zur .env-Datei
command: sh -c "npm install && npm run build --omit=dev && node dist/src/main.js"
restart: unless-stopped
depends_on:
- postgres
networks:
- bizmatch
postgres:
container_name: bizmatchdb
image: postgres:latest
restart: always
volumes:
- ${PWD}/bizmatchdb-data:/var/lib/postgresql/data
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
env_file:
- ~/git/docker/app/.env # Neu: Separate Env-File für Prod
ports:
- "5432:5432"
networks:
- bizmatch
networks:
bizmatch:
external: true

View File

@@ -0,0 +1,47 @@
services:
app:
image: node:22-alpine
container_name: bizmatch-app-prod # Neu: Unterscheide Namen
working_dir: /app
volumes:
- /home/aknuth/git/bizmatch-project-prod/bizmatch-server:/app # Verwende Prod-Checkout
ports:
- "3001:3000" # Neu: Host-Port 3001, Container-Port bleibt 3000
env_file:
- path: ./env.prod # Neu: Separate Env-File für Prod
required: true
environment:
- NODE_ENV=development # Neu: Production-Modus (für Nest.js-Config)
- DB_HOST=postgres-prod # Neu: Passe an neuen Service-Namen
- DB_PORT=5432
- DB_NAME=${POSTGRES_DB} # Neu: Separate DB-Name aus Env-File
- DB_USER=${POSTGRES_USER}
- DB_PASSWORD=${POSTGRES_PASSWORD}
command: sh -c "npm install && npm run build && node dist/src/main.js" # Entferne --omit=dev für Prod
restart: unless-stopped
depends_on:
- postgres-prod
networks:
- bizmatch-prod # Neu: Separates Network für Isolation
postgres-prod: # Neu: Umbenannt für Unterscheidung
container_name: bizmatchdb-prod
image: postgres:latest
restart: always
volumes:
- ${PWD}/bizmatchdb-data-prod:/var/lib/postgresql/data # Neu: Separates Daten-Volume
env_file:
- path: ./env.prod # Neu: Separate Env-File für Prod
required: true
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
ports:
- "5433:5432" # Neu: Host-Port 5433, Container-Port bleibt 5432
networks:
- bizmatch-prod
networks:
bizmatch-prod:
external: true # Neu: Erstelle es mit `docker network create bizmatch-prod`

View File

@@ -1,20 +0,0 @@
services:
postgres:
container_name: bizmatchdb
image: postgres:latest
restart: always
volumes:
- ${PWD}/bizmatchdb-data:/var/lib/postgresql/data
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
ports:
- "5432:5432"
networks:
- bizmatch
networks:
bizmatch:
external: true

File diff suppressed because it is too large Load Diff

View File

@@ -4,35 +4,59 @@
acme_ca https://acme-v02.api.letsencrypt.org/directory acme_ca https://acme-v02.api.letsencrypt.org/directory
debug debug
} }
bizmatch.net {
} # Prod: Neue Domains
www.bizmatch.net { www.bizmatch.net {
handle /pictures/* {
root * /home/aknuth/git/bizmatch-project/bizmatch-server # Prod-Ordner
file_server
}
# Statische Dateien (CSS, JS, Bilder) lange cachen, da sich der Name bei Änderungen ändert
header /assets/* Cache-Control "public, max-age=31536000, immutable"
header /*.css Cache-Control "public, max-age=31536000, immutable"
header /*.js Cache-Control "public, max-age=31536000, immutable"
# Die index.html und API-Antworten NIEMALS cachen
header /index.html Cache-Control "no-cache, no-store, must-revalidate"
handle {
reverse_proxy host.docker.internal:4200
}
log {
output file /var/log/caddy/access.prod.log # Separate Logs
}
encode gzip zstd
} }
bayarea-cc.com { bizmatch.net {
# TLS-Direktive entfernen, falls Cloudflare die Verbindung terminiert redir https://www.bizmatch.net{uri} permanent
# tls { }
# dns cloudflare {env.CLOUDFLARE_API_TOKEN} www.qrmaster.net {
# }
handle /api {
reverse_proxy host.docker.internal:3001
}
handle { handle {
root * /app reverse_proxy host.docker.internal:3050
try_files {path} /index.html
file_server
} }
log { log {
output stderr output file /var/log/caddy/qrmaster.log
format console format console
} }
encode gzip encode gzip
} }
www.bayarea-cc.com { qrmaster.net {
redir https://bayarea-cc.com{uri} permanent redir https://www.qrmaster.net{uri} permanent
} }
www.innungsapp.com {
handle {
reverse_proxy host.docker.internal:3010
}
log {
output file /var/log/caddy/innungsapp.log
format console
}
encode gzip
}
innungsapp.com {
redir https://www.innungsapp.com{uri} permanent
}
auth.bizmatch.net { auth.bizmatch.net {
reverse_proxy https://bizmatch-net.firebaseapp.com { reverse_proxy https://bizmatch-net.firebaseapp.com {
header_up Host bizmatch-net.firebaseapp.com header_up Host bizmatch-net.firebaseapp.com
@@ -45,54 +69,30 @@ gitea.bizmatch.net {
reverse_proxy gitea:3500 reverse_proxy gitea:3500
} }
dev.bizmatch.net { api.bizmatch.net {
handle /pictures/* { reverse_proxy host.docker.internal:3001 { # Neu: Proxy auf Prod-Port 3001
root * /home/aknuth/git/bizmatch-project/bizmatch-server header_up X-Real-IP {http.request.header.CF-Connecting-IP}
file_server header_up X-Forwarded-For {http.request.header.CF-Connecting-IP}
header_up X-Forwarded-Proto {http.request.header.X-Forwarded-Proto}
header_up CF-IPCountry {http.request.header.CF-IPCountry}
}
}
greenlenspro.com {
encode zstd gzip
@storage path /storage /storage/*
handle @storage {
uri strip_prefix /storage
reverse_proxy minio:9000
}
@api path /api /api/* /auth /auth/* /v1 /v1/* /health /plants /plants/*
handle @api {
reverse_proxy api:3000
} }
handle { handle {
root * /srv reverse_proxy landing:3000
try_files {path} {path}/ /index.html
file_server
}
log {
output file /var/log/caddy/access.log {
roll_size 10MB
roll_keep 5
roll_keep_for 48h
}
}
encode gzip
}
api-dev.bizmatch.net {
reverse_proxy host.docker.internal:3000 {
header_up X-Real-IP {http.request.header.CF-Connecting-IP}
header_up X-Forwarded-For {http.request.header.CF-Connecting-IP}
header_up X-Forwarded-Proto {http.request.header.X-Forwarded-Proto}
header_up CF-IPCountry {http.request.header.CF-IPCountry}
} }
} }
mailsync.bizmatch.net {
reverse_proxy host.docker.internal:5000 {
header_up X-Real-IP {http.request.header.CF-Connecting-IP}
header_up X-Forwarded-For {http.request.header.CF-Connecting-IP}
header_up X-Forwarded-Proto {http.request.header.X-Forwarded-Proto}
header_up CF-IPCountry {http.request.header.CF-IPCountry}
}
}
mail.andreasknuth.de {
reverse_proxy nginx-mailcow:8080
}
web.email-bayarea.com {
reverse_proxy nginx-mailcow:8080
}
mail.email-srvr.com autodiscover.mail.email-srvr.com autoconfig.mail.email-srvr.com {
reverse_proxy nginx-mailcow:8080
}

13
caddy/Dockerfile.caddy Normal file
View File

@@ -0,0 +1,13 @@
# Dockerfile.caddy
ARG CADDY_VERSION=2.9.1
FROM caddy:${CADDY_VERSION}-builder AS builder
# Caddy in exakt dieser Version + Plugins bauen
RUN xcaddy build ${CADDY_VERSION} \
--with github.com/caddy-dns/cloudflare \
--with github.com/caddyserver/replace-response
FROM caddy:${CADDY_VERSION}
COPY --from=builder /usr/bin/caddy /usr/bin/caddy
RUN mkdir -p /var/log/caddy

View File

@@ -1,7 +1,10 @@
services: services:
caddy: caddy:
image: custom-caddy:2.9.1-rr1
container_name: caddy container_name: caddy
image: iarekylew00t/caddy-cloudflare:latest build:
context: .
dockerfile: Dockerfile.caddy
restart: unless-stopped restart: unless-stopped
ports: ports:
- "80:80" - "80:80"
@@ -13,18 +16,25 @@ services:
- keycloak - keycloak
- gitea - gitea
- mail_network - mail_network
- greenlens_net
volumes: volumes:
- $PWD/Caddyfile:/etc/caddy/Caddyfile - $PWD/Caddyfile:/etc/caddy/Caddyfile
- $PWD/email_autodiscover:/etc/caddy/email_autodiscover
- $PWD/email.mobileconfig.tpl:/etc/caddy/email.mobileconfig.tpl
- $PWD/email-setup:/var/www/email-setup
- caddy_data:/data - caddy_data:/data
- caddy_config:/config - caddy_config:/config
#- /home/aknuth/git/bizmatch/dist/bizmatch/browser:/srv - /home/aknuth/git/bizmatch-project/bizmatch/dist/bizmatch/browser:/home/aknuth/git/bizmatch-project/bizmatch/dist/bizmatch/browser
- /home/aknuth/git/bizmatch-project/bizmatch/dist/bizmatch/browser:/srv - /home/aknuth/git/bizmatch-project-prod/bizmatch/dist/bizmatch/browser:/home/aknuth/git/bizmatch-project-prod/bizmatch/dist/bizmatch/browser
- /home/aknuth/git/bizmatch-project/bizmatch-server/pictures:/home/aknuth/git/bizmatch-project/bizmatch-server/pictures - /home/aknuth/git/bizmatch-project/bizmatch-server/pictures:/home/aknuth/git/bizmatch-project/bizmatch-server/pictures
- /home/aknuth/git/bizmatch-project-prod/bizmatch-server/pictures:/home/aknuth/git/bizmatch-project-prod/bizmatch-server/pictures
- /home/aknuth/git/annaville-sda-site/dist:/home/aknuth/git/annaville-sda-site/dist:ro # ← DAS FEHLT!
- /home/aknuth/git/bay-area-affiliates/dist/bay-area-affiliates/browser:/app - /home/aknuth/git/bay-area-affiliates/dist/bay-area-affiliates/browser:/app
- /home/aknuth/log/caddy:/var/log/caddy - /home/aknuth/log/caddy:/var/log/caddy
- /home/aknuth/git/config-email/frontend/dist:/home/aknuth/git/config-email/frontend/dist:ro
environment: environment:
- CLOUDFLARE_API_TOKEN=q1P7J3uqS96FGj_iiX2mI8y1ulTaIFrTp8tyTXhG - CLOUDFLARE_API_TOKEN=${CLOUDFLARE_API_TOKEN}
- CLOUDFLARE_EMAIL=andreas.knuth@gmail.com - CLOUDFLARE_EMAIL=${CLOUDFLARE_EMAIL}
networks: networks:
bizmatch: bizmatch:
@@ -35,6 +45,8 @@ networks:
external: true external: true
mail_network: mail_network:
external: true external: true
greenlens_net:
external: true
volumes: volumes:
caddy_data: caddy_data:

View File

@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8" ?>
<Autodiscover xmlns="http://schemas.microsoft.com/exchange/autodiscover/responseschema/2006">
<Response xmlns="http://schemas.microsoft.com/exchange/autodiscover/outlook/responseschema/2006a">
<Account>
<AccountType>email</AccountType>
<Action>settings</Action>
<Protocol>
<Type>IMAP</Type>
<Server>mail.email-srvr.com</Server>
<Port>993</Port>
<DomainRequired>off</DomainRequired>
<LoginName></LoginName>
<SPA>off</SPA>
<SSL>on</SSL>
<AuthRequired>on</AuthRequired>
</Protocol>
<Protocol>
<Type>SMTP</Type>
<Server>mail.email-srvr.com</Server>
<Port>465</Port>
<DomainRequired>off</DomainRequired>
<LoginName></LoginName>
<SPA>off</SPA>
<SSL>on</SSL>
<AuthRequired>on</AuthRequired>
</Protocol>
</Account>
</Response>
</Autodiscover>

BIN
caddy/email-setup/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

@@ -0,0 +1,122 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Email Setup</title>
<script src="https://cdnjs.cloudflare.com/ajax/libs/qrcodejs/1.0.0/qrcode.min.js"></script>
<style>
body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif; background: #f2f2f7; display: flex; justify-content: center; align-items: center; min-height: 100vh; margin: 0; padding: 20px; box-sizing: border-box; }
.card { background: white; padding: 2.5rem; border-radius: 24px; box-shadow: 0 12px 30px rgba(0,0,0,0.1); width: 100%; max-width: 420px; text-align: center; transition: all 0.3s ease; }
.logo { width: 80px; height: 80px; margin-bottom: 1.5rem; }
h1 { margin: 0 0 1rem 0; color: #1a1a1a; font-size: 1.8rem; }
p { color: #666; line-height: 1.5; margin-bottom: 2rem; }
/* Input Section */
#input-section { transition: opacity 0.3s ease; }
input { width: 100%; padding: 16px; margin-bottom: 16px; border: 2px solid #eee; border-radius: 14px; font-size: 16px; box-sizing: border-box; transition: border-color 0.2s; outline: none; }
input:focus { border-color: #007AFF; }
button { width: 100%; padding: 16px; background: #007AFF; color: white; border: none; border-radius: 14px; font-size: 18px; font-weight: 600; cursor: pointer; transition: background 0.2s, transform 0.1s; }
button:hover { background: #0062cc; }
button:active { transform: scale(0.98); }
/* QR Section (initially hidden) */
#qr-section { display: none; opacity: 0; transition: opacity 0.5s ease; }
#qrcode { margin: 2rem auto; padding: 15px; background: white; border-radius: 16px; box-shadow: 0 4px 12px rgba(0,0,0,0.08); display: inline-block; }
#qrcode img { margin: auto; } /* Centers the generated QR code */
.hint { font-size: 0.9rem; color: #888; margin-top: 1.5rem; }
.hint strong { color: #333; }
.error { color: #d32f2f; background: #fde8e8; padding: 10px; border-radius: 8px; font-size: 0.9rem; display: none; margin-bottom: 16px; }
.back-btn { background: transparent; color: #007AFF; margin-top: 1rem; font-size: 16px; }
.back-btn:hover { background: #f0f8ff; }
</style>
</head>
<body>
<div class="card">
<img src="/logo.png" alt="Logo" class="logo">
<div id="input-section">
<h1>Email Setup</h1>
<p>Enter your email address to automatically configure your iPhone or iPad.</p>
<div id="error-msg" class="error">Please enter a valid email address.</div>
<input type="email" id="email" placeholder="name@company.com" required autocomplete="email">
<button onclick="generateQR()">Generate QR Code</button>
</div>
<div id="qr-section">
<h1>Scan me!</h1>
<p>Open the <strong>Camera app</strong> on your iPhone and point it at this code.</p>
<div id="qrcode"></div>
<p class="hint">
Tap the banner that appears at the top.<br>
Click <strong>"Allow"</strong> and then go to <strong>Settings</strong> to install the profile.
</p>
<button class="back-btn" onclick="resetForm()">Back</button>
</div>
</div>
<script>
const inputSection = document.getElementById('input-section');
const qrSection = document.getElementById('qr-section');
const emailInput = document.getElementById('email');
const errorMsg = document.getElementById('error-msg');
let qrcode = null;
function generateQR() {
const email = emailInput.value.trim();
if (!email || !email.includes('@') || email.split('@')[1].length < 3) {
errorMsg.style.display = 'block';
emailInput.focus();
return;
}
errorMsg.style.display = 'none';
const domain = email.split('@')[1];
// The magic link
const targetUrl = `https://autodiscover.${domain}/apple?email=${email}`;
// Hide input, show QR
inputSection.style.display = 'none';
qrSection.style.display = 'block';
setTimeout(() => qrSection.style.opacity = '1', 50);
// Generate (or update) QR Code
if (qrcode === null) {
qrcode = new QRCode(document.getElementById("qrcode"), {
text: targetUrl,
width: 200,
height: 200,
colorDark : "#000000",
colorLight : "#ffffff",
correctLevel : QRCode.CorrectLevel.H
});
} else {
qrcode.clear();
qrcode.makeCode(targetUrl);
}
}
function resetForm() {
qrSection.style.opacity = '0';
setTimeout(() => {
qrSection.style.display = 'none';
inputSection.style.display = 'block';
emailInput.value = '';
emailInput.focus();
}, 300);
}
emailInput.addEventListener("keypress", function(event) {
if (event.key === "Enter") generateQR();
});
</script>
</body>
</html>

View File

@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PayloadContent</key>
<array>
<dict>
<key>EmailAccountDescription</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>EmailAccountName</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>EmailAccountType</key>
<string>EmailTypeIMAP</string>
<key>EmailAddress</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>IncomingMailServerAuthentication</key>
<string>EmailAuthPassword</string>
<key>IncomingMailServerHostName</key>
<string>mail.email-srvr.com</string>
<key>IncomingMailServerPortNumber</key>
<integer>993</integer>
<key>IncomingMailServerUseSSL</key>
<true/>
<key>IncomingMailServerUsername</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>OutgoingMailServerAuthentication</key>
<string>EmailAuthPassword</string>
<key>OutgoingMailServerHostName</key>
<string>mail.email-srvr.com</string>
<key>OutgoingMailServerPortNumber</key>
<integer>465</integer>
<key>OutgoingMailServerUseSSL</key>
<true/>
<key>OutgoingMailServerUsername</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>PayloadDescription</key>
<string>E-Mail Konfiguration für {{.Req.URL.Query.Get "email"}}</string>
<key>PayloadDisplayName</key>
<string>{{.Req.URL.Query.Get "email"}}</string>
<key>PayloadIdentifier</key>
<string>com.email-srvr.profile.{{.Req.URL.Query.Get "email"}}</string>
<key>PayloadType</key>
<string>com.apple.mail.managed</string>
<key>PayloadUUID</key>
<string>{{uuidv4}}</string>
<key>PayloadVersion</key>
<integer>1</integer>
</dict>
</array>
<key>PayloadDescription</key>
<string>Automatische E-Mail Einrichtung für {{.Req.URL.Query.Get "email"}}</string>
<key>PayloadDisplayName</key>
<string>E-Mail Einstellungen</string>
<key>PayloadIdentifier</key>
<string>com.email-srvr.profile.root</string>
<key>PayloadOrganization</key>
<string>IT Support</string>
<key>PayloadRemovalDisallowed</key>
<false/>
<key>PayloadType</key>
<string>Configuration</string>
<key>PayloadUUID</key>
<string>{{uuidv4}}</string>
<key>PayloadVersion</key>
<integer>1</integer>
</dict>
</plist>

97
caddy/email_autodiscover Normal file
View File

@@ -0,0 +1,97 @@
(email_settings) {
# 1. Autodiscover endpoint for Outlook (classic Exchange XML schema)
route /autodiscover/autodiscover.xml {
header Content-Type "application/xml"
# The mailbox address is injected dynamically via the {header.X-Anchormailbox} placeholder
respond `<?xml version="1.0" encoding="utf-8"?>
<Autodiscover xmlns="http://schemas.microsoft.com/exchange/autodiscover/responseschema/2006">
<Response xmlns="http://schemas.microsoft.com/exchange/autodiscover/outlook/responseschema/2006a">
<Account>
<AccountType>email</AccountType>
<Action>settings</Action>
<Protocol>
<Type>IMAP</Type>
<Server>mail.email-srvr.com</Server>
<Port>993</Port>
<DomainRequired>on</DomainRequired>
<LoginName>{header.X-Anchormailbox}</LoginName>
<SPA>off</SPA>
<SSL>on</SSL>
<AuthRequired>on</AuthRequired>
</Protocol>
<Protocol>
<Type>POP3</Type>
<Server>mail.email-srvr.com</Server>
<Port>995</Port>
<DomainRequired>on</DomainRequired>
<LoginName>{header.X-Anchormailbox}</LoginName>
<SPA>off</SPA>
<SSL>on</SSL>
<AuthRequired>on</AuthRequired>
</Protocol>
<Protocol>
<Type>SMTP</Type>
<Server>mail.email-srvr.com</Server>
<Port>465</Port>
<DomainRequired>on</DomainRequired>
<LoginName>{header.X-Anchormailbox}</LoginName>
<SPA>off</SPA>
<SSL>on</SSL>
<AuthRequired>on</AuthRequired>
</Protocol>
</Account>
</Response>
</Autodiscover>` 200
}
# 2. JSON autodiscover (modern Outlook) - unchanged
route /autodiscover/autodiscover.json {
header Content-Type "application/json"
respond `{
"Protocol": "AutodiscoverV1",
"Url": "https://autodiscover.bayarea-cc.com/autodiscover/autodiscover.xml"
}` 200
}
# 3. Thunderbird autoconfig - unchanged (%EMAILADDRESS% is substituted natively by the client)
route /mail/config-v1.1.xml {
header Content-Type "application/xml"
respond `<?xml version="1.0"?>
<clientConfig version="1.1">
<emailProvider id="email-srvr.com">
<displayName>Rackspace Email</displayName>
<incomingServer type="imap">
<hostname>mail.email-srvr.com</hostname>
<port>993</port>
<socketType>SSL</socketType>
<authentication>password-cleartext</authentication>
<username>%EMAILADDRESS%</username>
</incomingServer>
<outgoingServer type="smtp">
<hostname>mail.email-srvr.com</hostname>
<port>465</port>
<socketType>SSL</socketType>
<authentication>password-cleartext</authentication>
<username>%EMAILADDRESS%</username>
</outgoingServer>
</emailProvider>
</clientConfig>` 200
}
# NEW: Apple .mobileconfig route
# Reachable via: /apple?email=kunde@domain.de
route /apple {
# FIX: tell Caddy's templates module to process this MIME type
templates {
mime "application/x-apple-aspen-config"
}
# Set the correct MIME type on the response
header Content-Type "application/x-apple-aspen-config; charset=utf-8"
# Path to the template file inside the container
root * /etc/caddy
rewrite * /email.mobileconfig.tpl
file_server
}
}

View File

@@ -1,5 +1,5 @@
#!/bin/bash #!/bin/bash
# awsdomain.sh - Konfiguriert Cloudflare mit den Amazon SES Angaben
if [ -z "$DOMAIN_NAME" ]; then if [ -z "$DOMAIN_NAME" ]; then
echo "Fehler: DOMAIN_NAME ist nicht gesetzt." echo "Fehler: DOMAIN_NAME ist nicht gesetzt."
echo "Bitte setzen Sie die Variable mit: export DOMAIN_NAME='IhreDomain.de'" echo "Bitte setzen Sie die Variable mit: export DOMAIN_NAME='IhreDomain.de'"

View File

@@ -18,7 +18,7 @@ fi
# Konfiguration # Konfiguration
AWS_REGION=${AWS_REGION:-"us-east-2"} AWS_REGION=${AWS_REGION:-"us-east-2"}
EMAIL_PREFIX=${EMAIL_PREFIX:-"emails/"} EMAIL_PREFIX=${EMAIL_PREFIX:-""}
RULE_NAME="store-$(echo "$DOMAIN_NAME" | tr '.' '-')-to-s3" RULE_NAME="store-$(echo "$DOMAIN_NAME" | tr '.' '-')-to-s3"
echo "=== SES Konfiguration für $DOMAIN_NAME ===" echo "=== SES Konfiguration für $DOMAIN_NAME ==="
@@ -89,6 +89,62 @@ else
echo "Rule Set 'bizmatch-ruleset' ist bereits aktiv." echo "Rule Set 'bizmatch-ruleset' ist bereits aktiv."
fi fi
# ------------------------
# Lambda-Funktion mit SES verknüpfen
# ------------------------
echo "Verknüpfe Lambda-Funktion 'ses-to-sqs' mit SES..."
# Lambda ARN ermitteln
LAMBDA_ARN=$(aws lambda get-function \
--function-name ses-to-sqs \
--region ${AWS_REGION} \
--query 'Configuration.FunctionArn' \
--output text)
if [ -z "$LAMBDA_ARN" ]; then
echo "FEHLER: Lambda-Funktion 'ses-to-sqs' nicht gefunden!"
echo "Bitte zuerst Lambda-Funktion deployen."
exit 1
fi
echo "Lambda ARN: $LAMBDA_ARN"
# SES Permission für Lambda hinzufügen (falls noch nicht vorhanden)
echo "Füge SES-Berechtigung zur Lambda-Funktion hinzu..."
aws lambda add-permission \
--function-name ses-to-sqs \
--statement-id "AllowSESInvoke-${DOMAIN_NAME//./}" \
--action "lambda:InvokeFunction" \
--principal ses.amazonaws.com \
--source-account $(aws sts get-caller-identity --query Account --output text) \
--region ${AWS_REGION} 2>/dev/null || echo "Permission bereits vorhanden"
# Receipt Rule UPDATE: Lambda Action hinzufügen
echo "Aktualisiere Receipt Rule mit Lambda Action..."
aws ses update-receipt-rule --rule-set-name "bizmatch-ruleset" --rule '{
"Name": "'"${RULE_NAME}"'",
"Enabled": true,
"ScanEnabled": true,
"Actions": [
{
"S3Action": {
"BucketName": "'"${S3_BUCKET_NAME}"'",
"ObjectKeyPrefix": "'"${EMAIL_PREFIX}"'"
}
},
{
"LambdaAction": {
"FunctionArn": "'"${LAMBDA_ARN}"'",
"InvocationType": "Event"
}
}
],
"TlsPolicy": "Require",
"Recipients": ["'"${DOMAIN_NAME}"'"]
}' --region ${AWS_REGION}
echo "✅ Lambda-Funktion erfolgreich mit SES verknüpft!"
echo "SES-Konfiguration für $DOMAIN_NAME abgeschlossen." echo "SES-Konfiguration für $DOMAIN_NAME abgeschlossen."
echo echo
echo "WICHTIG: Überprüfen Sie die Ausgabe oben für DNS-Einträge, die Sie bei Ihrem DNS-Provider setzen müssen:" echo "WICHTIG: Überprüfen Sie die Ausgabe oben für DNS-Einträge, die Sie bei Ihrem DNS-Provider setzen müssen:"

View File

@@ -4,7 +4,6 @@
# Setze deine API-Schlüssel und Zone-ID als Umgebungsvariablen oder ersetze sie direkt # Setze deine API-Schlüssel und Zone-ID als Umgebungsvariablen oder ersetze sie direkt
# CF_ZONE_ID="1b7756cee93ed8ba8c05bdc3cb0a5da8" # Die Zone-ID deiner Domain bei Cloudflare # CF_ZONE_ID="1b7756cee93ed8ba8c05bdc3cb0a5da8" # Die Zone-ID deiner Domain bei Cloudflare
# DOMAIN_NAME="andreasknuth.de" # Deine Domain
AWS_REGION="us-east-2" # AWS-Region AWS_REGION="us-east-2" # AWS-Region
if [ -z "$DOMAIN_NAME" ]; then if [ -z "$DOMAIN_NAME" ]; then
echo "Fehler: DOMAIN_NAME ist nicht gesetzt." echo "Fehler: DOMAIN_NAME ist nicht gesetzt."
@@ -147,7 +146,7 @@ create_dns_record "MX" "mail.${DOMAIN_NAME}" "feedback-smtp.${AWS_REGION}.amazon
# CNAME für mail.{Domain} anlegen # CNAME für mail.{Domain} anlegen
echo "CNAME für mail.${DOMAIN_NAME} anlegen bei Cloudflare..." echo "CNAME für mail.${DOMAIN_NAME} anlegen bei Cloudflare..."
create_dns_record "CNAME" "imap.${DOMAIN_NAME}" "${DOMAIN_NAME}" "false" 1 create_dns_record "CNAME" "imap.${DOMAIN_NAME}" "${DOMAIN_NAME}" "false" 3600
# SPF-Eintrag anlegen # SPF-Eintrag anlegen
echo "SPF-Eintrag anlegen bei Cloudflare..." echo "SPF-Eintrag anlegen bei Cloudflare..."

View File

@@ -20,7 +20,7 @@ services:
volumes: volumes:
- gitea-data:/data - gitea-data:/data
#- ./gitea/gitea-ssh:/data/git/.ssh #- ./gitea/gitea-ssh:/data/git/.ssh
- /home/git/.ssh/:/data/git/.ssh #- /home/git/.ssh/:/data/git/.ssh
ports: ports:
- "3500:3500" - "3500:3500"
- "2222:22" - "2222:22"

View File

@@ -0,0 +1,387 @@
import os
import boto3
import json
import time
from email.parser import BytesParser
from email.policy import SMTP as SMTPPolicy
s3 = boto3.client('s3')
sqs = boto3.client('sqs', region_name='us-east-2')
# AWS region (informational; the sqs client above is pinned to the same region)
AWS_REGION = 'us-east-2'
# S3 metadata key/value that marks an e-mail object as fully processed
PROCESSED_KEY = 'processed'
PROCESSED_VALUE = 'true'
def domain_to_bucket(domain: str) -> str:
    """Map a mail domain to its S3 bucket name: lowercase, dots -> dashes, '-emails' suffix."""
    return '-'.join(domain.lower().split('.')) + '-emails'
def domain_to_queue_name(domain: str) -> str:
    """Map a mail domain to its SQS queue name: lowercase, dots -> dashes, '-queue' suffix."""
    return '-'.join(domain.lower().split('.')) + '-queue'
def get_queue_url_for_domain(domain: str) -> str:
    """Resolve the SQS queue URL for a mail domain.

    Raises a plain Exception (handled by the caller's 500 path) both when the
    queue does not exist and on any other lookup failure.
    """
    queue_name = domain_to_queue_name(domain)
    try:
        response = sqs.get_queue_url(QueueName=queue_name)
        queue_url = response['QueueUrl']
        print(f"✓ Found queue: {queue_name}")
        return queue_url
    except sqs.exceptions.QueueDoesNotExist:
        # Raised from inside this handler, so the generic except below
        # does not re-catch and re-wrap it.
        raise Exception(
            f"Queue does not exist: {queue_name} "
            f"(for domain: {domain.lower()})"
        )
    except Exception as e:
        raise Exception(f"Error getting queue URL for {domain.lower()}: {e}")
def is_already_processed(bucket: str, key: str) -> bool:
    """Return True if the S3 object's metadata marks this e-mail as processed.

    A missing object also returns True (skip — nothing left to do); any other
    error returns False (fail open) so the e-mail is processed/retried.
    """
    try:
        head = s3.head_object(Bucket=bucket, Key=key)
        metadata = head.get('Metadata', {}) or {}
        if metadata.get(PROCESSED_KEY) == PROCESSED_VALUE:
            processed_at = metadata.get('processed_at', 'unknown')
            print(f"✓ Already processed at {processed_at}")
            return True
        return False
    except s3.exceptions.NoSuchKey:
        # NOTE(review): head_object typically raises a generic ClientError
        # (404), not NoSuchKey — verify this branch can actually fire.
        print(f"⚠ Object {key} not found in {bucket}")
        return True
    except Exception as e:
        print(f"⚠ Error checking processed status: {e}")
        return False
def set_processing_lock(bucket: str, key: str) -> bool:
    """
    Set a processing lock in the object's S3 metadata to prevent
    duplicate processing by concurrent Lambda invocations.

    Returns: True when the lock was acquired (or locking failed open),
    False when another instance holds a fresh (< 5 min) lock.
    """
    try:
        head = s3.head_object(Bucket=bucket, Key=key)
        metadata = head.get('Metadata', {}) or {}
        # Check for an existing lock
        processing_started = metadata.get('processing_started')
        if processing_started:
            lock_age = time.time() - float(processing_started)
            if lock_age < 300:  # 5 minute lock TTL
                print(f"⚠ Processing lock active (age: {lock_age:.0f}s)")
                return False
            else:
                # A crashed instance left the lock behind — take it over.
                print(f"⚠ Stale lock detected ({lock_age:.0f}s old), overriding")
        # Take the lock by rewriting the object's metadata in place
        # (self-copy with MetadataDirective=REPLACE).
        # NOTE(review): head + copy is not atomic — two instances can still
        # both pass the check; this only narrows the race window.
        new_meta = metadata.copy()
        new_meta['processing_started'] = str(int(time.time()))
        s3.copy_object(
            Bucket=bucket,
            Key=key,
            CopySource={'Bucket': bucket, 'Key': key},
            Metadata=new_meta,
            MetadataDirective='REPLACE'
        )
        print(f"✓ Processing lock set")
        return True
    except Exception as e:
        # Fail open: a duplicate delivery is preferable to dropping the mail.
        print(f"⚠ Error setting processing lock: {e}")
        return True
def mark_as_queued(bucket: str, key: str, queue_name: str):
    """Record in the object's S3 metadata that the e-mail was enqueued.

    Also clears the processing lock set by set_processing_lock().
    Best-effort: failures are logged, never raised.
    """
    try:
        head = s3.head_object(Bucket=bucket, Key=key)
        metadata = head.get('Metadata', {}) or {}
        metadata['queued_at'] = str(int(time.time()))
        metadata['queued_to'] = queue_name
        metadata['status'] = 'queued'
        # Release the processing lock.
        metadata.pop('processing_started', None)
        s3.copy_object(
            Bucket=bucket,
            Key=key,
            CopySource={'Bucket': bucket, 'Key': key},
            Metadata=metadata,
            MetadataDirective='REPLACE'
        )
        print(f"✓ Marked as queued to {queue_name}")
    except Exception as e:
        print(f"⚠ Failed to mark as queued: {e}")
def send_to_queue(queue_url: str, bucket: str, key: str,
                  from_addr: str, recipients: list, domain: str,
                  subject: str, message_id: str):
    """
    Send an e-mail delivery job to the domain-specific SQS queue.
    ONE message carries ALL recipients for this domain.

    Returns the SQS MessageId; re-raises on send failure so the
    caller can report a 500.
    """
    queue_name = queue_url.split('/')[-1]
    message = {
        'bucket': bucket,
        'key': key,
        'from': from_addr,
        'recipients': recipients,  # list of all recipients
        'domain': domain,
        'subject': subject,
        'message_id': message_id,
        'timestamp': int(time.time())
    }
    try:
        response = sqs.send_message(
            QueueUrl=queue_url,
            MessageBody=json.dumps(message, ensure_ascii=False),
            # Attributes allow consumers/filters to inspect without parsing the body.
            MessageAttributes={
                'domain': {
                    'StringValue': domain,
                    'DataType': 'String'
                },
                'bucket': {
                    'StringValue': bucket,
                    'DataType': 'String'
                },
                'recipient_count': {
                    'StringValue': str(len(recipients)),
                    'DataType': 'Number'
                },
                'message_id': {
                    'StringValue': message_id,
                    'DataType': 'String'
                }
            }
        )
        sqs_message_id = response['MessageId']
        print(f"✓ Queued to {queue_name}: SQS MessageId={sqs_message_id}")
        print(f" Recipients: {len(recipients)} - {', '.join(recipients)}")
        # Mark as queued (also clears the processing lock).
        mark_as_queued(bucket, key, queue_name)
        return sqs_message_id
    except Exception as e:
        print(f"✗ Failed to queue message: {e}")
        raise
def lambda_handler(event, context):
    """
    Lambda handler for SES receipt events.

    One domain per event = one queue message carrying all recipients.
    Flow: parse event -> resolve domain queue -> locate S3 object ->
    duplicate check -> processing lock -> read subject -> enqueue to SQS.

    Returns an API-Gateway-style dict with statusCode/body on every path.
    """
    print(f"{'='*70}")
    print(f"Lambda invoked: {context.aws_request_id}")
    print(f"Region: {AWS_REGION}")
    print(f"{'='*70}")
    # Parse the SES event envelope
    try:
        record = event['Records'][0]
        ses = record['ses']
    except (KeyError, IndexError) as e:
        print(f"✗ Invalid event structure: {e}")
        return {
            'statusCode': 400,
            'body': json.dumps({'error': 'Invalid SES event'})
        }
    mail = ses['mail']
    receipt = ses['receipt']
    message_id = mail['messageId']
    source = mail['source']
    timestamp = mail.get('timestamp', '')
    recipients = receipt.get('recipients', [])
    # Early logging: S3 key and recipients
    print(f"\n🔑 S3 Key: {message_id}")
    print(f"👥 Recipients ({len(recipients)}): {', '.join(recipients)}")
    if not recipients:
        print(f"✗ No recipients found in event")
        return {
            'statusCode': 400,
            'body': json.dumps({
                'error': 'No recipients in event',
                'message_id': message_id
            })
        }
    # Extract the domain — assumes all recipients share one domain
    # (per-domain SES receipt rules); TODO confirm that configuration holds.
    domain = recipients[0].split('@')[1].lower()
    bucket = domain_to_bucket(domain)
    print(f"\n📧 Email Event:")
    print(f" MessageId: {message_id}")
    print(f" From: {source}")
    print(f" Domain: {domain}")
    print(f" Bucket: {bucket}")
    print(f" Timestamp: {timestamp}")
    print(f" Recipients: {len(recipients)}")
    # Resolve the SQS queue for this domain
    try:
        queue_url = get_queue_url_for_domain(domain)
        queue_name = queue_url.split('/')[-1]
        print(f" Queue: {queue_name}")
    except Exception as e:
        print(f"\n✗ ERROR: {e}")
        return {
            'statusCode': 500,
            'body': json.dumps({
                'error': 'queue_not_configured',
                'domain': domain,
                'recipients': recipients,
                'message': str(e)
            })
        }
    # Locate the stored raw e-mail in S3 (object key starts with the message id)
    try:
        print(f"\n📦 Searching S3...")
        response = s3.list_objects_v2(
            Bucket=bucket,
            Prefix=message_id,
            MaxKeys=1
        )
        if 'Contents' not in response or not response['Contents']:
            raise Exception(f"No S3 object found for message {message_id}")
        key = response['Contents'][0]['Key']
        size = response['Contents'][0]['Size']
        print(f" Found: s3://{bucket}/{key}")
        print(f" Size: {size:,} bytes ({size/1024:.1f} KB)")
    except Exception as e:
        print(f"\n✗ S3 ERROR: {e}")
        return {
            'statusCode': 404,
            'body': json.dumps({
                'error': 's3_object_not_found',
                'message_id': message_id,
                'bucket': bucket,
                'details': str(e)
            })
        }
    # Duplicate check via object metadata
    print(f"\n🔍 Checking for duplicates...")
    if is_already_processed(bucket, key):
        print(f" Already processed, skipping")
        return {
            'statusCode': 200,
            'body': json.dumps({
                'status': 'already_processed',
                'message_id': message_id,
                'recipients': recipients
            })
        }
    # Acquire the processing lock
    print(f"\n🔒 Setting processing lock...")
    if not set_processing_lock(bucket, key):
        print(f" Already being processed by another instance")
        return {
            'statusCode': 200,
            'body': json.dumps({
                'status': 'already_processing',
                'message_id': message_id,
                'recipients': recipients
            })
        }
    # Load the e-mail to extract the subject (best effort; failures keep '(unknown)')
    subject = '(unknown)'
    try:
        print(f"\n📖 Reading email for metadata...")
        obj = s3.get_object(Bucket=bucket, Key=key)
        raw_bytes = obj['Body'].read()
        # NOTE(review): parsebytes parses the full message, not only headers,
        # despite the original "headers only (faster)" intent.
        parsed = BytesParser(policy=SMTPPolicy).parsebytes(raw_bytes)
        subject = parsed.get('subject', '(no subject)')
        print(f" Subject: {subject}")
    except Exception as e:
        print(f" ⚠ Could not parse email (continuing): {e}")
    # Enqueue (ONE message with ALL recipients)
    try:
        print(f"\n📤 Queuing to {queue_name}...")
        sqs_message_id = send_to_queue(
            queue_url=queue_url,
            bucket=bucket,
            key=key,
            from_addr=source,
            recipients=recipients,  # all recipients
            domain=domain,
            subject=subject,
            message_id=message_id
        )
        print(f"\n{'='*70}")
        print(f"✅ SUCCESS - Email queued for delivery")
        print(f"{'='*70}\n")
        return {
            'statusCode': 200,
            'body': json.dumps({
                'status': 'queued',
                'message_id': message_id,
                'sqs_message_id': sqs_message_id,
                'queue': queue_name,
                'domain': domain,
                'recipients': recipients,
                'recipient_count': len(recipients),
                'subject': subject
            })
        }
    except Exception as e:
        print(f"\n{'='*70}")
        print(f"✗ FAILED TO QUEUE")
        print(f"{'='*70}")
        print(f"Error: {e}")
        return {
            'statusCode': 500,
            'body': json.dumps({
                'error': 'failed_to_queue',
                'message': str(e),
                'message_id': message_id,
                'recipients': recipients
            })
        }

View File

@@ -0,0 +1,152 @@
import os
import boto3
import smtplib
import time
import requests
from email.parser import BytesParser
from email.policy import default
from email.utils import getaddresses
s3 = boto3.client('s3')
# Mailcow SMTP relay configuration, taken from the Lambda environment.
MAILCOW_HOST = os.environ['MAILCOW_SMTP_HOST']  # required — KeyError at import time if unset
MAILCOW_PORT = int(os.environ.get('MAILCOW_SMTP_PORT', 587))  # 587: submission port, STARTTLS is used below
SMTP_USER = os.environ.get('MAILCOW_SMTP_USER')  # optional; login only happens when user AND pass are set
SMTP_PASS = os.environ.get('MAILCOW_SMTP_PASS')
MAILCOW_API_KEY = os.environ.get('MAILCOW_API_KEY')  # used by get_valid_inboxes()
def domain_to_bucket(domain):
    """Derive the S3 bucket name for a mail domain ('.' -> '-', plus '-emails' suffix)."""
    return '-'.join(domain.split('.')) + '-emails'
def bucket_to_domain(bucket):
    """Invert domain_to_bucket: strip the trailing '-emails', then '-' -> '.'.

    Fixed: the previous str.replace removed EVERY '-emails' occurrence, which
    corrupted buckets whose domain itself contains 'emails'
    (e.g. 'x-emails-y-emails' -> 'x.y' instead of 'x.emails.y').
    Only the suffix is removed now.
    """
    name = bucket[:-len('-emails')] if bucket.endswith('-emails') else bucket
    return name.replace('-', '.')
def get_valid_inboxes():
    """Fetch the set of active mailbox addresses from the Mailcow API.

    Returns a set of lowercase usernames (full e-mail addresses, presumably —
    TODO confirm against the Mailcow response schema). Raises when the API
    call fails, aborting the whole Lambda run.
    """
    url = 'https://mail.email-srvr.com/api/v1/get/mailbox/all'
    headers = {'X-API-Key': MAILCOW_API_KEY}
    try:
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        mailboxes = response.json()
        # active_int == 1 marks an enabled mailbox.
        return {mb['username'].lower() for mb in mailboxes if mb['active_int'] == 1}
    except requests.RequestException as e:
        print(f"Fehler beim Abrufen der Postfächer: {e}")
        raise Exception("Konnte gültige Postfächer nicht abrufen")
def lambda_handler(event, context):
    """Forward a raw e-mail stored in S3 to Mailcow via SMTP.

    Handles two event shapes:
      * SES receipt event: recipients come from the receipt; the S3 key is
        looked up under 'emails/<messageId>' in the domain's bucket.
      * S3 put event: bucket/key come from the event; recipients are taken
        from the To/Cc/Bcc headers and filtered to the bucket's domain.

    After the (attempted) delivery the object is marked with metadata
    processed=true so repeated invocations become no-ops.
    """
    rec = event['Records'][0]
    if 'ses' in rec:
        ses = rec['ses']
        msg_id = ses['mail']['messageId']
        recipients = ses['receipt']['recipients']
        first_recipient = recipients[0]
        domain = first_recipient.split('@')[1]
        bucket = domain_to_bucket(domain)
        prefix = f"emails/{msg_id}"
        print(f"SES-Receipt erkannt, domain={domain}, bucket={bucket}, prefix={prefix}")
        resp_list = s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
        if 'Contents' not in resp_list or not resp_list['Contents']:
            raise Exception(f"Kein Objekt unter Prefix {prefix} in Bucket {bucket} gefunden")
        key = resp_list['Contents'][0]['Key']
    elif 's3' in rec:
        s3info = rec['s3']
        bucket = s3info['bucket']['name']
        key = s3info['object']['key']
        print("S3-Put erkannt, bucket =", bucket, "key =", key)
        recipients = []
    else:
        raise Exception("Unbekannter Event-Typ")
    # Idempotency guard: skip objects that were already forwarded.
    try:
        resp = s3.head_object(Bucket=bucket, Key=key)
        if resp.get('Metadata', {}).get('processed') == 'true':
            print(f"Objekt {key} bereits verarbeitet (processed=true), überspringe Verarbeitung")
            return {
                'statusCode': 200,
                'body': f"Objekt {key} bereits verarbeitet, keine erneute Weiterleitung"
            }
    except Exception as e:
        print(f"Fehler beim Prüfen der Metadaten: {e}")
    # Load the raw message from S3.
    resp = s3.get_object(Bucket=bucket, Key=key)
    raw_bytes = resp['Body'].read()
    print(f"E-Mail geladen: {len(raw_bytes)} Bytes")
    # Parse headers for logging and (in the S3 flow) recipient extraction.
    parsed = BytesParser(policy=default).parsebytes(raw_bytes)
    subj = parsed.get('subject', '(kein Subject)')
    frm_addr = getaddresses(parsed.get_all('from', []))[0][1]
    print(f"Parsed: From={frm_addr} Subject={subj}")
    # S3 flow only: derive recipients from the message headers.
    if not recipients:
        to_addrs = [addr for _name, addr in getaddresses(parsed.get_all('to', []))]
        cc_addrs = [addr for _name, addr in getaddresses(parsed.get_all('cc', []))]
        bcc_addrs = [addr for _name, addr in getaddresses(parsed.get_all('bcc', []))]
        recipients = to_addrs + cc_addrs + bcc_addrs
        print("Empfänger aus Headern:", recipients)
        # Keep only recipients whose domain matches the bucket.
        # FIX: guard against malformed addresses without '@'
        # (previously an IndexError on split('@')[1]).
        expected_domain = bucket_to_domain(bucket)
        recipients = [
            rcpt for rcpt in recipients
            if '@' in rcpt and rcpt.lower().split('@')[1] == expected_domain
        ]
        print(f"Empfänger nach Domain-Filter ({expected_domain}): {recipients}")
    # FIX: initialize before branching — previously this name was only bound in
    # the else-branch, so an empty recipient list raised a NameError in the
    # final return statement.
    valid_recipients = []
    if not recipients:
        print("Keine Empfänger gefunden, setze Metadatum und überspringe SMTP")
    else:
        # Only forward to mailboxes that actually exist in Mailcow.
        valid_inboxes = get_valid_inboxes()
        valid_recipients = [rcpt for rcpt in recipients if rcpt.lower() in valid_inboxes]
        print(f"Gültige Empfänger: {valid_recipients}")
        if valid_recipients:
            # Hand-rolled SMTP envelope (mail/rcpt/data) to preserve the raw
            # message bytes exactly as stored.
            start = time.time()
            print("=== SMTP: Verbinde zu", MAILCOW_HOST, "Port", MAILCOW_PORT)
            with smtplib.SMTP(MAILCOW_HOST, MAILCOW_PORT, timeout=30) as smtp:
                smtp.ehlo()
                smtp.starttls()
                smtp.ehlo()
                if SMTP_USER and SMTP_PASS:
                    smtp.login(SMTP_USER, SMTP_PASS)
                print("=== SMTP: MAIL FROM", frm_addr)
                smtp.mail(frm_addr)
                for rcpt in valid_recipients:
                    print("=== SMTP: RCPT TO", rcpt)
                    smtp.rcpt(rcpt)
                smtp.data(raw_bytes)
            print(f"SMTP-Transfer in {time.time()-start:.2f}s abgeschlossen ...")
        else:
            print("Keine gültigen Postfächer für die Empfänger gefunden, setze Metadatum und überspringe SMTP")
    # Mark the object processed regardless of the delivery outcome above.
    try:
        resp = s3.head_object(Bucket=bucket, Key=key)
        current_metadata = resp.get('Metadata', {})
        new_metadata = current_metadata.copy()
        new_metadata['processed'] = 'true'
        s3.copy_object(
            Bucket=bucket,
            Key=key,
            CopySource={'Bucket': bucket, 'Key': key},
            Metadata=new_metadata,
            MetadataDirective='REPLACE'
        )
        print("Metadatum 'processed:true' hinzugefügt.")
    except Exception as e:
        print(f"Fehler beim Schreiben des Metadatums: {e}")
        raise
    return {
        'statusCode': 200,
        'body': f"E-Mail verarbeitet für {bucket}, SMTP-Weiterleitung: {bool(valid_recipients)}"
    }

View File

@@ -1,134 +0,0 @@
import time
import gzip
import json
import os
import urllib.request
import urllib.error
import urllib.parse
import logging
import boto3
import base64
from email.parser import BytesParser
from email.policy import default
from email.utils import getaddresses
logger = logging.getLogger()
logger.setLevel(logging.INFO)
API_BASE_URL = os.environ['API_BASE_URL']
API_TOKEN = os.environ['API_TOKEN']
MAX_EMAIL_SIZE = int(os.environ.get('MAX_EMAIL_SIZE', '10485760'))
s3_client = boto3.client('s3')
def mark_email_processed(bucket, key, metadata, s3_client, processor='lambda'):
    """Set the processed flag on the S3 object via a metadata self-copy.

    `metadata` is the status VALUE stored under the 'processed' key
    (e.g. 'true', 'unknownUser', 'unknownDomain') — the parameter name
    is misleading.
    NOTE(review): MetadataDirective='REPLACE' with a fresh dict discards any
    other metadata previously stored on the object.
    Best-effort: failures are logged, never raised.
    """
    try:
        s3_client.copy_object(
            Bucket=bucket,
            Key=key,
            CopySource={'Bucket': bucket, 'Key': key},
            Metadata={
                'processed': metadata,
                'processed_timestamp': str(int(time.time())),
                'processor': processor
            },
            MetadataDirective='REPLACE'
        )
        logger.info(f"Marked S3 object {bucket}/{key} as {metadata}")
    except Exception as e:
        logger.error(f"Fehler beim Markieren {bucket}/{key}: {e}")
def call_api_once(payload, domain, request_id):
    """Single-shot POST of the e-mail payload to the processing API — no retry.

    Returns True on HTTP 200, False on other status codes.
    NOTE(review): urlopen raises HTTPError for status >= 400, so the non-200
    branch here only sees unusual 2xx/3xx codes; the caller catches
    exceptions separately and treats them as failure.
    """
    url = f"{API_BASE_URL}/process/{domain}"
    data = json.dumps(payload).encode('utf-8')
    req = urllib.request.Request(url, data=data, method='POST')
    req.add_header('Authorization', f'Bearer {API_TOKEN}')
    req.add_header('Content-Type', 'application/json')
    req.add_header('X-Request-ID', request_id)
    logger.info(f"[{request_id}] OUTGOING POST {url}: "
                f"domain={domain}, key={payload['s3_key']}, bucket={payload['s3_bucket']}, "
                f"orig_size={payload['original_size']}, comp_size={payload['compressed_size']}")
    with urllib.request.urlopen(req, timeout=25) as resp:
        code = resp.getcode()
        if code == 200:
            logger.info(f"[{request_id}] API returned 200 OK")
            return True
        else:
            body = resp.read().decode('utf-8', errors='ignore')
            logger.error(f"[{request_id}] API returned {code}: {body}")
            return False
def lambda_handler(event, context):
    """Compress a stored e-mail from S3 and POST it to the processing API.

    Triggered by S3 put events. Skips already-processed objects and objects
    larger than MAX_EMAIL_SIZE, then marks the object in S3 according to the
    API outcome ('true', 'unknownUser' or 'unknownDomain').
    """
    req_id = context.aws_request_id
    rec = event['Records'][0]['s3']
    bucket = rec['bucket']['name']
    # S3 event keys arrive URL-encoded (spaces as '+', etc.).
    key = urllib.parse.unquote_plus(rec['object']['key'])
    logger.info(f"[{req_id}] Processing {bucket}/{key}")
    # Head check: idempotency and size guard
    head = s3_client.head_object(Bucket=bucket, Key=key)
    metadata = head.get('Metadata', {})
    if metadata.get('processed') == 'true':
        logger.info(f"[{req_id}] Skipping already processed object")
        return {'statusCode': 200, 'body': 'Already processed'}
    size = head['ContentLength']
    if size > MAX_EMAIL_SIZE:
        logger.warning(f"[{req_id}] Email too large: {size} bytes")
        return {'statusCode': 413}
    # Load the raw e-mail content
    body = s3_client.get_object(Bucket=bucket, Key=key)['Body'].read()
    # 1) Parse and log from/to (best effort — failures fall back to empty values)
    try:
        msg = BytesParser(policy=default).parsebytes(body)
        from_addr = getaddresses(msg.get_all('from', []))[0][1] if msg.get_all('from') else ''
        to_addrs = [addr for _n, addr in getaddresses(msg.get_all('to', []))]
        logger.info(f"[{req_id}] Parsed email: from={from_addr}, to={to_addrs}")
    except Exception as e:
        logger.error(f"[{req_id}] Fehler beim Parsen der Email: {e}")
        from_addr = ''
        to_addrs = []
    # 2) Compress and build the API payload
    compressed = gzip.compress(body)
    payload = {
        's3_bucket': bucket,
        's3_key': key,
        # bucket 'example-com-emails' -> domain 'example.com'
        'domain': bucket.replace('-', '.').rsplit('.emails',1)[0],
        'email_content': base64.b64encode(compressed).decode(),
        'compressed': True,
        'etag': head['ETag'].strip('"'),
        'request_id': req_id,
        'original_size': len(body),
        'compressed_size': len(compressed)
    }
    # 3) Single API call (no retry by design)
    try:
        success = call_api_once(payload, payload['domain'], req_id)
    except Exception as e:
        logger.error(f"[{req_id}] API-Call-Exception: {e}")
        success = False
    # 4) Mark the object according to the API outcome
    if success:
        # normal processed
        mark_email_processed(bucket, key, 'true', s3_client)
    else:
        # classify the failure only when recipients are known
        if to_addrs:
            bucket_domain = payload['domain']
            domains = [addr.split('@')[-1] for addr in to_addrs if '@' in addr]
            status = 'unknownUser' if bucket_domain in domains else 'unknownDomain'
            mark_email_processed(bucket, key, status, s3_client)
        else:
            logger.info(f"[{req_id}] Keine Empfänger, kein Markieren")
    return {'statusCode': 200, 'body': 'Done'}