This commit is contained in:
Timo Knuth
2026-02-27 15:19:24 +01:00
parent b7f8221095
commit 253c3c1c6d
134 changed files with 11188 additions and 1871 deletions

View File

@@ -0,0 +1,94 @@
import { NextResponse } from 'next/server'
import OpenAI from 'openai'
type LlmProvider = 'openai' | 'openrouter'
// Resolve which LLM backend to use.
// An explicit LLM_PROVIDER setting wins; otherwise prefer OpenRouter
// whenever its API key is configured, falling back to OpenAI.
function getProvider(): LlmProvider {
  const requested = (process.env.LLM_PROVIDER ?? '').toLowerCase()
  switch (requested) {
    case 'openrouter':
    case 'openai':
      return requested
    default:
      return process.env.OPENROUTER_API_KEY ? 'openrouter' : 'openai'
  }
}
// Build an OpenAI SDK client for the selected backend.
// For OpenRouter we also forward the optional attribution headers
// (HTTP-Referer / X-Title) when the corresponding env vars are set.
function createClient(provider: LlmProvider) {
  if (provider !== 'openrouter') {
    return new OpenAI({ apiKey: process.env.OPENAI_API_KEY || '' })
  }
  const extraHeaders: Record<string, string> = {}
  if (process.env.OPENROUTER_SITE_URL) {
    extraHeaders['HTTP-Referer'] = process.env.OPENROUTER_SITE_URL
  }
  if (process.env.OPENROUTER_APP_NAME) {
    extraHeaders['X-Title'] = process.env.OPENROUTER_APP_NAME
  }
  return new OpenAI({
    apiKey: process.env.OPENROUTER_API_KEY || '',
    baseURL: process.env.OPENROUTER_BASE_URL || 'https://openrouter.ai/api/v1',
    defaultHeaders: extraHeaders,
  })
}
// Model id for the given provider; env vars override the defaults.
function getModel(provider: LlmProvider): string {
  return provider === 'openrouter'
    ? process.env.OPENROUTER_MODEL || 'minimax/minimax-m2.5'
    : process.env.OPENAI_MODEL || 'gpt-4o-mini'
}
/**
 * POST — generate a landing-page heading and intro text for an organization.
 *
 * Body: { orgName: string, context: string } — both required.
 * Returns the model's JSON object (expected shape { title, text }).
 * 400 on a missing/malformed request body, 502 when the model reply is
 * not valid JSON, 500 on any other failure.
 */
export async function POST(req: Request) {
  try {
    // A malformed JSON body is a client error — report 400, not a generic 500.
    const body = await req.json().catch(() => null)
    const orgName = body?.orgName
    const context = body?.context
    if (!orgName || !context) {
      return NextResponse.json({ error: 'orgName and context are required' }, { status: 400 })
    }
    const provider = getProvider()
    const client = createClient(provider)
    const model = getModel(provider)
    const systemMessage = `Sie sind ein professioneller Copywriter für eine moderne deutsche Innung oder Kreishandwerkerschaft.
Erstellen Sie eine moderne, ansprechende Überschrift (Heading) und einen Einleitungstext für eine Landingpage.
WICHTIG: Geben Sie AUSSCHLIESSLICH ein valides JSON-Objekt zurück, komplett ohne Markdown-Formatierung (kein \`\`\`json ... \`\`\`), in dieser Struktur:
{
"title": "Eine moderne, ansprechende Überschrift (max. 6-8 Wörter)",
"text": "Ein überzeugender Einleitungstext, der erklärt, wofür die Organisation steht, fokussiert auf die Region und den Kontext (max. 3-4 Sätze)."
}`
    const userMessage = `Name der Organisation: ${orgName}\nZusätzliche Stichpunkte vom Benutzer:\n${context}`
    const completion = await client.chat.completions.create({
      model,
      messages: [
        { role: 'system', content: systemMessage },
        { role: 'user', content: userMessage },
      ],
      // some openrouter models ignore response_format, so doing it purely by prompt
      temperature: 0.7,
    })
    let textResponse = (completion.choices[0]?.message?.content || '').trim()
    // Strip an optional ```json / ``` fence the model may wrap around the payload.
    textResponse = textResponse.replace(/^```(?:json)?\n?/, '').replace(/\n?```$/, '').trim()
    // Surface unparseable model output as a 502 (bad upstream response)
    // instead of letting JSON.parse bubble into the generic 500 handler.
    let result: unknown
    try {
      result = JSON.parse(textResponse)
    } catch {
      return NextResponse.json({ error: 'Model returned invalid JSON' }, { status: 502 })
    }
    return NextResponse.json(result)
  } catch (error: unknown) {
    console.error('Error generating AI landing page content:', error)
    const message = error instanceof Error ? error.message : 'Failed to generate content'
    return NextResponse.json({ error: message }, { status: 500 })
  }
}

View File

@@ -0,0 +1,160 @@
import { NextResponse } from 'next/server'
import OpenAI from 'openai'
type LlmProvider = 'openai' | 'openrouter'
// Pick the configured LLM provider; with no explicit LLM_PROVIDER,
// default by which API key is available (OpenRouter preferred).
function getProvider(): LlmProvider {
  const fromEnv = (process.env.LLM_PROVIDER ?? '').toLowerCase()
  if (fromEnv === 'openrouter' || fromEnv === 'openai') {
    return fromEnv
  }
  return process.env.OPENROUTER_API_KEY ? 'openrouter' : 'openai'
}
// Construct an OpenAI SDK client for the chosen backend; OpenRouter
// gets its base URL plus optional attribution headers from the env.
function createClient(provider: LlmProvider) {
  if (provider !== 'openrouter') {
    return new OpenAI({ apiKey: process.env.OPENAI_API_KEY || '' })
  }
  const headers: Record<string, string> = {}
  const siteUrl = process.env.OPENROUTER_SITE_URL
  const appName = process.env.OPENROUTER_APP_NAME
  if (siteUrl) headers['HTTP-Referer'] = siteUrl
  if (appName) headers['X-Title'] = appName
  return new OpenAI({
    apiKey: process.env.OPENROUTER_API_KEY || '',
    baseURL: process.env.OPENROUTER_BASE_URL || 'https://openrouter.ai/api/v1',
    defaultHeaders: headers,
  })
}
// Model id per provider; env vars override the built-in defaults.
function getModel(provider: LlmProvider): string {
  if (provider === 'openrouter') {
    return process.env.OPENROUTER_MODEL ? process.env.OPENROUTER_MODEL : 'minimax/minimax-m2.5'
  }
  return process.env.OPENAI_MODEL ? process.env.OPENAI_MODEL : 'gpt-5-mini'
}
// True when an API key for the given provider is present in the env.
function hasApiKey(provider: LlmProvider): boolean {
  const key =
    provider === 'openrouter' ? process.env.OPENROUTER_API_KEY : process.env.OPENAI_API_KEY
  return Boolean(key)
}
// Run one chat completion against the given provider/model and return
// the raw text reply ('' when the model produced no content).
async function generateText({
  provider,
  model,
  systemMessage,
  prompt,
}: {
  provider: LlmProvider
  model: string
  systemMessage: string
  prompt: string
}) {
  const messages = [
    { role: 'system' as const, content: systemMessage },
    { role: 'user' as const, content: prompt },
  ]
  const completion = await createClient(provider).chat.completions.create({ model, messages })
  const reply = completion.choices[0]?.message?.content
  return reply || ''
}
/**
 * POST — generate German copy (news article, job ad, or generic text)
 * with the configured LLM, falling back to OpenAI gpt-5-mini when the
 * primary provider fails and an OPENAI_API_KEY is present.
 *
 * Body: { prompt: string; type?: 'news' | 'stelle'; format?: 'markdown' }.
 * Returns { text, provider, model, fallbackUsed } on success; 400 on a
 * missing/malformed body; 500 when no provider is configured or all fail.
 */
export async function POST(req: Request) {
  try {
    // A malformed JSON body is a client error — report 400, not a generic 500.
    const body = await req.json().catch(() => null)
    const prompt = body?.prompt
    const type = body?.type
    const format = body?.format
    if (!prompt) {
      return NextResponse.json({ error: 'Prompt is required' }, { status: 400 })
    }
    const primaryProvider = getProvider()
    const primaryModel = getModel(primaryProvider)
    let systemMessage = ''
    if (type === 'news') {
      systemMessage = `Du bist ein erfahrener Newsletter- und PR-Experte für eine Innung (Handwerksverband).
Deine Aufgabe ist es, professionelle, ansprechende und informative News-Beiträge zu schreiben.
Achte auf eine klare Struktur, eine einladende Tonalität und hohe inhaltliche Qualität.
Das gewünschte Ausgabeformat ist: ${format === 'markdown' ? 'Markdown' : 'Einfacher unformatierter Text'}.`
    } else if (type === 'stelle') {
      systemMessage = `Du bist ein erfahrener HR- und Recruiting-Experte für das Handwerk.
Deine Aufgabe ist es, attraktive und präzise Stellenanzeigen (Lehrlingsbörse / Jobbörse) zu verfassen.
Die Stellenanzeige soll Begeisterung wecken und klar die Aufgaben sowie Anforderungen kommunizieren.
Das gewünschte Ausgabeformat ist: ${format === 'markdown' ? 'Markdown' : 'Einfacher unformatierter Text'}.`
    } else {
      systemMessage = `Du bist ein hilfreicher KI-Assistent. Antworte immer auf Deutsch.`
    }
    // Ordered list of (provider, model) attempts: primary first, then fallback.
    const attempts: Array<{ provider: LlmProvider; model: string; reason: string }> = []
    if (hasApiKey(primaryProvider)) {
      attempts.push({ provider: primaryProvider, model: primaryModel, reason: 'primary' })
    }
    // Fallback requested: if primary fails, try OpenAI GPT-5 mini when OPENAI_API_KEY is present.
    if (primaryProvider !== 'openai' && hasApiKey('openai')) {
      attempts.push({ provider: 'openai', model: 'gpt-5-mini', reason: 'fallback_openai' })
    }
    if (attempts.length === 0) {
      return NextResponse.json(
        { error: 'No AI provider key configured (OPENROUTER_API_KEY or OPENAI_API_KEY).' },
        { status: 500 }
      )
    }
    let lastError: unknown = null
    for (const attempt of attempts) {
      try {
        const text = await generateText({
          provider: attempt.provider,
          model: attempt.model,
          systemMessage,
          prompt,
        })
        return NextResponse.json({
          text,
          provider: attempt.provider,
          model: attempt.model,
          fallbackUsed: attempt.reason !== 'primary',
        })
      } catch (error: unknown) {
        // Remember the failure and fall through to the next attempt.
        lastError = error
        console.error('AI attempt failed:', {
          provider: attempt.provider,
          model: attempt.model,
          message: error instanceof Error ? error.message : String(error),
        })
      }
    }
    return NextResponse.json(
      { error: lastError instanceof Error ? lastError.message : 'All AI providers failed' },
      { status: 500 }
    )
  } catch (error: unknown) {
    console.error('AI Generate Error:', error)
    const message = error instanceof Error ? error.message : 'Internal Server Error'
    return NextResponse.json({ error: message }, { status: 500 })
  }
}

View File

@@ -0,0 +1,18 @@
import { NextResponse } from 'next/server'
import { auth } from '@/lib/auth'
import { prisma } from '@innungsapp/shared'
import { headers } from 'next/headers'
// Clear the forced-password-change flag for the signed-in user.
// 401 when there is no authenticated session.
export async function POST() {
  const session = await auth.api.getSession({ headers: await headers() })
  const userId = session?.user?.id
  if (!userId) {
    return NextResponse.json({ error: 'Nicht eingeloggt' }, { status: 401 })
  }
  await prisma.user.update({
    data: { mustChangePassword: false },
    where: { id: userId },
  })
  return NextResponse.json({ success: true })
}

View File

@@ -0,0 +1,50 @@
import { NextRequest } from 'next/server'
import { auth } from '@/lib/auth'
import { prisma } from '@innungsapp/shared'
// Quote a CSV field (RFC 4180 style) when it contains the delimiter,
// a quote, or a line break; otherwise pass it through unchanged.
function csvField(value: string): string {
  return /[;"\r\n]/.test(value) ? `"${value.replace(/"/g, '""')}"` : value
}

/**
 * GET — export the attendee list of a Termin as semicolon-separated CSV
 * (UTF-8 with BOM so spreadsheet apps detect the encoding). Admin-only.
 *
 * 401 without a session, 403 without the admin role, 404 when the Termin
 * is missing from the admin's org or has no registrations.
 */
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const session = await auth.api.getSession({ headers: req.headers })
  if (!session?.user) {
    return new Response('Unauthorized', { status: 401 })
  }
  const { id } = await params
  // Verify admin role via UserRole table
  const userRole = await prisma.userRole.findFirst({
    where: { userId: session.user.id, role: 'admin' },
  })
  if (!userRole) {
    return new Response('Forbidden', { status: 403 })
  }
  // Scope the lookup to the admin's own org so ids from other orgs 404.
  const termin = await prisma.termin.findUnique({
    where: { id, orgId: userRole.orgId },
    include: { anmeldungen: { include: { member: true } } },
  })
  if (!termin) {
    return new Response('Not found', { status: 404 })
  }
  if (termin.anmeldungen.length === 0) {
    return new Response('Keine Anmeldungen vorhanden', { status: 404 })
  }
  const rows = termin.anmeldungen.map((a) => ({
    Name: a.member.name,
    Email: a.member.email,
    Betrieb: a.member.betrieb ?? '',
    Angemeldet: new Date(a.angemeldetAt).toLocaleDateString('de-DE'),
  }))
  const header = Object.keys(rows[0]).join(';')
  // Escape every field so values containing ';', quotes, or newlines
  // cannot break the column layout (previously joined unescaped).
  const csv = [
    header,
    ...rows.map((r) => Object.values(r).map((v) => csvField(String(v))).join(';')),
  ].join('\n')
  // Leading BOM so Excel opens the file as UTF-8.
  return new Response('\uFEFF' + csv, {
    headers: {
      'Content-Type': 'text/csv; charset=utf-8',
      'Content-Disposition': `attachment; filename="teilnehmer-${id}.csv"`,
    },
  })
}

View File

@@ -0,0 +1,5 @@
import { NextResponse } from 'next/server'
// Liveness probe: always responds 200 with the current server time.
export async function GET() {
  const payload = { status: 'ok', timestamp: new Date().toISOString() }
  return NextResponse.json(payload)
}

View File

@@ -0,0 +1,72 @@
import { NextRequest, NextResponse } from 'next/server'
import { prisma } from '@innungsapp/shared'
import { sendInviteEmail } from '@/lib/email'
import { auth } from '@/lib/auth'
/**
 * POST — public self-signup for an organization: create a Member record
 * and send an invite e-mail.
 *
 * Body: { name: string; email: string } — both required.
 * Deliberately returns the same success response whether or not the
 * address was already registered, to avoid account enumeration.
 */
export async function POST(req: NextRequest, { params }: { params: Promise<{ slug: string }> }) {
  const { slug } = await params
  const body = await req.json().catch(() => null)
  if (!body?.name || !body?.email) {
    return NextResponse.json({ error: 'Name und E-Mail sind erforderlich.' }, { status: 400 })
  }
  const name: string = String(body.name).trim()
  const email: string = String(body.email).trim().toLowerCase()
  // Minimal format check so obviously invalid addresses from this public
  // endpoint never reach the DB or the mailer.
  if (!/^\S+@\S+\.\S+$/.test(email)) {
    return NextResponse.json({ error: 'Ungültige E-Mail-Adresse.' }, { status: 400 })
  }
  // Fail fast (and loudly) when the invite-link base URL is missing,
  // instead of mailing out links containing "undefined".
  const apiUrl = process.env.BETTER_AUTH_URL
  if (!apiUrl) {
    console.error('BETTER_AUTH_URL is not configured')
    return NextResponse.json({ error: 'Serverkonfiguration unvollständig.' }, { status: 500 })
  }
  const org = await prisma.organization.findUnique({
    where: { slug },
    select: { id: true, name: true },
  })
  if (!org) {
    return NextResponse.json({ error: 'Organisation nicht gefunden.' }, { status: 404 })
  }
  // Check if email already registered in this org
  const existing = await prisma.member.findFirst({
    where: { orgId: org.id, email },
  })
  if (existing) {
    // Still send invite so user can log in — don't reveal whether they exist
    await sendInviteEmail({
      to: email,
      memberName: existing.name,
      orgName: org.name,
      apiUrl,
    })
    return NextResponse.json({ success: true })
  }
  // Create member record with placeholder profile fields.
  await prisma.member.create({
    data: {
      name,
      email,
      orgId: org.id,
      betrieb: '-',
      sparte: '-',
      ort: '-',
      status: 'aktiv',
    },
  })
  // Create auth user (may already exist)
  try {
    await auth.api.createUser({
      body: { name, email, role: 'user', password: undefined },
    })
  } catch {
    // User may already exist in auth system
  }
  await sendInviteEmail({
    to: email,
    memberName: name,
    orgName: org.name,
    apiUrl,
  })
  return NextResponse.json({ success: true })
}

View File

@@ -0,0 +1,82 @@
import { NextRequest, NextResponse } from 'next/server'
import { prisma } from '@innungsapp/shared'
/**
 * POST /[slug]/... — link a registered auth user to an organization:
 * ensures a Member row exists (creating one or claiming an unlinked one
 * created earlier by an admin) and grants the 'member' role.
 * Both steps are skipped when already done, so repeated calls are safe.
 *
 * Body: { email: string; name?: string } — name falls back to the local
 * part of the e-mail address.
 *
 * NOTE(review): the find-then-create/update steps are not wrapped in a
 * transaction, so concurrent calls for the same user could race — confirm
 * this is acceptable for this endpoint.
 */
export async function POST(req: NextRequest, { params }: { params: Promise<{ slug: string }> }) {
const { slug } = await params
const body = await req.json().catch(() => null)
if (!body?.email) {
return NextResponse.json({ error: 'E-Mail ist erforderlich.' }, { status: 400 })
}
// Normalize the inputs; default display name is the e-mail local part.
const email: string = String(body.email).trim().toLowerCase()
const name: string = String(body.name ?? '').trim() || email.split('@')[0]
const org = await prisma.organization.findUnique({
where: { slug },
select: { id: true },
})
if (!org) {
return NextResponse.json({ error: 'Organisation nicht gefunden.' }, { status: 404 })
}
// Look up the auth user that better-auth just created
const authUser = await prisma.user.findUnique({
where: { email },
select: { id: true },
})
if (!authUser) {
return NextResponse.json({ error: 'Benutzer nicht gefunden. Bitte zuerst registrieren.' }, { status: 400 })
}
// Idempotent: skip if member already exists (linked to this user)
const existingMember = await prisma.member.findFirst({
where: { orgId: org.id, userId: authUser.id },
})
if (!existingMember) {
// Member may exist without userId (created by admin before user registered)
const unlinkedMember = await prisma.member.findFirst({
where: { orgId: org.id, email, userId: null },
})
if (unlinkedMember) {
// Claim the pre-created member row for this user.
await prisma.member.update({
where: { id: unlinkedMember.id },
data: { userId: authUser.id },
})
} else {
// No member row at all yet: create one with placeholder profile fields.
await prisma.member.create({
data: {
name,
email,
orgId: org.id,
userId: authUser.id,
betrieb: '-',
sparte: '-',
ort: '-',
status: 'aktiv',
},
})
}
}
// Idempotent: skip if role already exists
const existingRole = await prisma.userRole.findFirst({
where: { userId: authUser.id, orgId: org.id },
})
if (!existingRole) {
await prisma.userRole.create({
data: {
userId: authUser.id,
orgId: org.id,
role: 'member',
},
})
}
return NextResponse.json({ success: true })
}

View File

@@ -3,6 +3,7 @@ import { readFile } from 'fs/promises'
import path from 'path'
const UPLOAD_DIR = process.env.UPLOAD_DIR ?? './uploads'
// Added comment to force recompile after ENOSPC
export async function GET(
req: NextRequest,