This commit is contained in:
2025-09-30 01:54:58 +02:00
parent 148cb6d283
commit 5856eda62b
16 changed files with 3340 additions and 1271 deletions

164
server/db.js Normal file
View File

@@ -0,0 +1,164 @@
// Database connection bootstrap: loads environment configuration and
// builds the pg pool config from either DATABASE_URL or discrete PG* vars.
const { Pool, Client } = require('pg')
const fs = require('fs')
const path = require('path')

// Prefer the project-root .env when present; otherwise fall back to
// dotenv's default lookup (current working directory).
const dotenvPath = path.resolve(__dirname, '..', '.env')
if (fs.existsSync(dotenvPath)) {
  require('dotenv').config({ path: dotenvPath })
} else {
  require('dotenv').config()
}

// A non-empty DATABASE_URL takes precedence over discrete PG* variables.
const hasConnectionString =
  typeof process.env.DATABASE_URL === 'string' && process.env.DATABASE_URL.trim().length > 0
if (!hasConnectionString) {
  console.warn('[db] DATABASE_URL is not set. Falling back to discrete Postgres environment variables.')
}

// Enable TLS only when explicitly requested; rejectUnauthorized=false
// accepts self-signed certificates (common with managed Postgres).
const sslConfig =
  process.env.DATABASE_SSL === 'true' || process.env.PGSSLMODE === 'require'
    ? { rejectUnauthorized: false }
    : undefined

// Discrete-variable fallback with local-development defaults.
const defaultConfig = {
  host: process.env.PGHOST || 'localhost',
  port: Number(process.env.PGPORT || 5432),
  user: process.env.PGUSER || 'postgres',
  password: process.env.PGPASSWORD || 'postgres',
  database: process.env.PGDATABASE || 'claudia_blog'
}

const poolConfig = hasConnectionString
  ? { connectionString: process.env.DATABASE_URL.trim() }
  : { ...defaultConfig }
if (sslConfig) {
  poolConfig.ssl = sslConfig
}

// Module-level state: the lazily-created Pool and the in-flight
// ensureDatabaseExists promise (de-duplicates concurrent calls).
let pool
let ensuringDatabasePromise
// Quote a Postgres identifier safely: double any embedded double quotes
// and wrap the whole name in double quotes.
function quoteIdentifier(identifier) {
  const escaped = identifier.replace(/"/g, '""')
  return '"' + escaped + '"'
}
// Determine the database name the app intends to connect to — from the
// DATABASE_URL path segment when a connection string is configured,
// otherwise from the discrete pool config. Returns null when unknown.
function resolveTargetDatabase() {
  if (!hasConnectionString) {
    return poolConfig.database || null
  }
  try {
    const parsed = new URL(process.env.DATABASE_URL.trim())
    const name = parsed.pathname ? parsed.pathname.replace(/^\//, '') : null
    return name || null
  } catch (error) {
    console.warn(`[db] Unable to parse DATABASE_URL: ${error.message}`)
    return null
  }
}
// Create the target database if it does not already exist, by connecting
// to the admin "postgres" database. Concurrent callers share one in-flight
// promise; pass force=true to run the check again after a failure.
// Failures are logged but never thrown — the caller's own connection
// attempt will surface any real problem.
async function ensureDatabaseExists(force = false) {
  if (ensuringDatabasePromise && !force) {
    return ensuringDatabasePromise
  }
  ensuringDatabasePromise = (async () => {
    const targetDb = resolveTargetDatabase()
    if (!targetDb) {
      return
    }
    let adminConfig
    if (hasConnectionString) {
      try {
        // Reuse DATABASE_URL credentials but point at the admin database.
        const adminUrl = new URL(process.env.DATABASE_URL.trim())
        adminUrl.pathname = '/postgres'
        adminConfig = { connectionString: adminUrl.toString() }
        if (sslConfig) {
          adminConfig.ssl = sslConfig
        }
      } catch (error) {
        console.warn(`[db] Unable to prepare admin connection to create database: ${error.message}`)
        return
      }
    } else {
      // Discrete config: connect to PGDEFAULTDB (default "postgres").
      const adminDatabase = process.env.PGDEFAULTDB || 'postgres'
      adminConfig = {
        ...poolConfig,
        database: adminDatabase
      }
    }
    const client = new Client(adminConfig)
    try {
      await client.connect()
      const exists = await client.query('SELECT 1 FROM pg_database WHERE datname = $1', [targetDb])
      if (exists.rowCount === 0) {
        // CREATE DATABASE cannot take bind parameters, hence quoteIdentifier.
        await client.query(`CREATE DATABASE ${quoteIdentifier(targetDb)}`)
        console.log(`[db] Created database ${targetDb}`)
      }
    } catch (error) {
      // 42P04 = duplicate_database: another process created it first; fine.
      if (error.code === '42P04') {
        return
      }
      console.warn(`[db] Could not ensure database ${targetDb}: ${error.message}`)
    } finally {
      // Best-effort close; ignore errors from an already-dead connection.
      await client.end().catch(() => {})
    }
  })()
  return ensuringDatabasePromise
}
// Build a new Pool from the module-level config, attach an error handler
// for idle clients, and run a probe query so connection failures surface
// immediately rather than on first real use.
async function initialisePool() {
  const freshPool = new Pool(poolConfig)
  freshPool.on('error', (err) => {
    console.error('[db] Unexpected error on idle client', err)
  })
  // Force a connection so we surface errors immediately
  await freshPool.query('SELECT 1')
  return freshPool
}
// Return the shared Pool, creating it lazily on first use. When the first
// connection fails because the database is missing (SQLSTATE 3D000),
// force a re-run of ensureDatabaseExists and retry exactly once.
async function getPool() {
  if (pool) {
    return pool
  }
  try {
    await ensureDatabaseExists()
    pool = await initialisePool()
    return pool
  } catch (error) {
    // 3D000 = invalid_catalog_name (database does not exist).
    if (error.code === '3D000') {
      // Database missing, retry once after forcing ensure
      await ensureDatabaseExists(true)
      pool = await initialisePool()
      return pool
    }
    throw error
  }
}
// Run a parameterized query against the shared pool (created on demand).
async function query(text, params) {
  return (await getPool()).query(text, params)
}
// Drain and dispose of the shared pool so the process can exit cleanly;
// a later getPool() call will create a fresh one.
async function closePool() {
  if (!pool) {
    return
  }
  await pool.end()
  pool = null
}
// Public API: query/getPool for normal use, closePool for shutdown,
// ensureDatabaseExists for explicit bootstrap.
module.exports = {
  query,
  getPool,
  closePool,
  ensureDatabaseExists
}

396
server/index.js Normal file
View File

@@ -0,0 +1,396 @@
// HTTP API for the blog: Express app setup, CORS, body parsing, and
// static serving of uploaded images.
const path = require('path')
const fs = require('fs')
const crypto = require('crypto')
const express = require('express')
const cors = require('cors')
const { upload } = require('./storage')
const { query, closePool } = require('./db')
const { runMigrations } = require('./migrations')

const PORT = Number(process.env.API_PORT) || 4005
// Maximum number of content sections a post may carry.
const MAX_SECTIONS = Number(process.env.BLOG_MAX_SECTIONS || 5)

const app = express()

// Comma-separated allow-list of browser origins for CORS.
const allowedOrigins = (process.env.CORS_ORIGINS || 'http://localhost:3000')
  .split(',')
  .map(origin => origin.trim())
  .filter(Boolean)
app.use(cors({
  origin: allowedOrigins,
  credentials: true
}))
app.use(express.json({ limit: '2mb' }))
app.use(express.urlencoded({ extended: true }))

// Uploaded images live under public/uploads and are served at /uploads.
const uploadsPath = path.join(__dirname, '..', 'public', 'uploads')
if (!fs.existsSync(uploadsPath)) {
  fs.mkdirSync(uploadsPath, { recursive: true })
}
app.use('/uploads', express.static(uploadsPath))

// Liveness probe.
app.get('/health', (_req, res) => {
  res.json({ status: 'ok' })
})
// Turn an arbitrary title into a URL-safe slug: lowercase, runs of
// non-alphanumerics collapsed to single hyphens, edge hyphens trimmed.
// Falls back to a random UUID when nothing slug-worthy remains.
function slugify(value) {
  const normalized = value
    .toString()
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/(^-|-$)+/g, '')
  return normalized || crypto.randomUUID()
}
// Derive a slug from the title that is unique within blog_posts,
// appending "-2", "-3", ... on collision. excludeId (optional) exempts
// one post id from the check, so an update does not collide with the
// row being updated.
async function generateUniqueSlug(title, excludeId) {
  const base = slugify(title)
  const excludeNumeric = Number(excludeId)
  const hasExclusion = Number.isInteger(excludeNumeric) && excludeNumeric > 0
  const condition = hasExclusion ? 'AND id <> $2' : ''
  let suffix = 1
  let candidate = base
  for (;;) {
    const values = hasExclusion ? [candidate, excludeNumeric] : [candidate]
    const existing = await query(
      `SELECT id FROM blog_posts WHERE slug = $1 ${condition} LIMIT 1`,
      values
    )
    if (existing.rows.length === 0) {
      return candidate
    }
    suffix += 1
    candidate = `${base}-${suffix}`
  }
}
// Multipart requests carry their JSON body in a "payload" form field;
// plain JSON requests are the payload themselves. Throws on malformed
// JSON in the payload field.
function parsePayload(body) {
  if (!body.payload) {
    return body
  }
  try {
    return JSON.parse(body.payload)
  } catch (error) {
    throw new Error('Invalid payload JSON')
  }
}
// Resolve the post's main/preview image from the request:
// explicit removal wins, then a freshly uploaded file, then a previously
// stored path, otherwise null.
// Fix: `files` may be undefined when the request was not multipart
// (multer's .fields() only populates req.files for multipart bodies),
// which previously threw a TypeError here — access it defensively.
function buildMainImage(payload, files) {
  const uploadForMain = files?.mainImage?.[0]
  if (payload.removeMainImage === true || payload.removeMainImage === 'true') {
    return null
  }
  if (uploadForMain) {
    return `/uploads/${uploadForMain.filename}`
  }
  if (payload.existingMainImage) {
    return payload.existingMainImage
  }
  return null
}
// Build the normalized sections array from the payload plus any uploaded
// section images. Caps at MAX_SECTIONS and drops sections that have
// neither text nor an image.
// Fix: `files` may be undefined for non-multipart requests (multer only
// populates req.files for multipart bodies), which previously threw a
// TypeError on files[fileKey] — access it defensively.
function buildSections(payload, files) {
  const sections = []
  const inputSections = Array.isArray(payload.sections) ? payload.sections : []
  for (let index = 0; index < MAX_SECTIONS; index += 1) {
    const sectionInput = inputSections[index] || {}
    const fileKey = `section${index}Image`
    const uploadForSection = files?.[fileKey]?.[0]
    const rawText = typeof sectionInput.text === 'string' ? sectionInput.text : ''
    const text = rawText.trim()
    // A fresh upload replaces any previously stored image path.
    const image = uploadForSection
      ? `/uploads/${uploadForSection.filename}`
      : sectionInput.existingImage || null
    if (text || image) {
      sections.push({
        id: sectionInput.id || crypto.randomUUID(),
        text: text || null,
        image
      })
    }
  }
  return sections
}
// Translate a snake_case blog_posts row into the camelCase shape the
// API returns; a null/missing sections column becomes an empty array.
function mapPostRow(row) {
  const {
    id,
    title,
    slug,
    preview_image: previewImage,
    link_url: linkUrl,
    sections,
    footer,
    is_editors_pick: isEditorsPick,
    created_at: createdAt,
    updated_at: updatedAt
  } = row
  return {
    id,
    title,
    slug,
    previewImage,
    linkUrl,
    sections: sections || [],
    footer,
    isEditorsPick,
    createdAt,
    updatedAt
  }
}
// Produce a short excerpt from the first section whose text is non-blank:
// the trimmed text itself when it fits in 220 characters, otherwise the
// first 217 characters plus "...". Empty string when no text exists.
function createExcerpt(sections) {
  let firstText
  for (const section of sections) {
    const candidate = section.text || ''
    if (candidate && candidate.trim().length > 0) {
      firstText = candidate
      break
    }
  }
  if (!firstText) {
    return ''
  }
  const trimmed = firstText.trim()
  return trimmed.length <= 220 ? trimmed : `${trimmed.slice(0, 217)}...`
}
// Enforce the cap of three editor's picks. When makePick is true, count
// the current picks — excluding targetId when it is a positive integer
// (the post being updated) — and throw if the cap is already reached.
// No-op when makePick is false.
async function ensureEditorsPickLimit(targetId, makePick) {
  if (!makePick) {
    return
  }
  const excludeTarget = Number.isInteger(targetId) && targetId > 0
  const condition = excludeTarget ? 'AND id <> $1' : ''
  const params = excludeTarget ? [targetId] : []
  const result = await query(
    `SELECT id FROM blog_posts WHERE is_editors_pick = true ${condition}`,
    params
  )
  if (result.rows.length >= 3) {
    const ids = result.rows.map(r => r.id)
    throw new Error(`Only three editor's picks allowed. Currently set: ${ids.join(', ')}`)
  }
}
// Multer field descriptors: one main image plus one image per section.
function getUploadFields() {
  const sectionFields = Array.from({ length: MAX_SECTIONS }, (_, index) => ({
    name: `section${index}Image`,
    maxCount: 1
  }))
  return [{ name: 'mainImage', maxCount: 1 }, ...sectionFields]
}
// GET /posts — all posts, newest first, each with a derived excerpt.
app.get('/posts', async (_req, res) => {
  try {
    const result = await query(
      'SELECT * FROM blog_posts ORDER BY created_at DESC'
    )
    const posts = result.rows.map(row => {
      const post = mapPostRow(row)
      post.excerpt = createExcerpt(post.sections)
      return post
    })
    res.json({ data: posts })
  } catch (error) {
    console.error('[GET /posts] error', error)
    res.status(500).json({ error: 'Failed to fetch posts' })
  }
})
// GET /posts/:id — a single post with excerpt; 404 when it does not exist.
app.get('/posts/:id', async (req, res) => {
  const { id } = req.params
  try {
    const result = await query('SELECT * FROM blog_posts WHERE id = $1', [id])
    const row = result.rows[0]
    if (!row) {
      return res.status(404).json({ error: 'Post not found' })
    }
    const post = mapPostRow(row)
    post.excerpt = createExcerpt(post.sections)
    return res.json({ data: post })
  } catch (error) {
    console.error('[GET /posts/:id] error', error)
    return res.status(500).json({ error: 'Failed to fetch post' })
  }
})
// POST /posts — create a post from a multipart (or plain JSON) request.
// Validates the title, resolves images, generates a unique slug and
// enforces the editor's-pick cap. Every failure is reported as 400 with
// the error message.
app.post('/posts', upload.fields(getUploadFields()), async (req, res) => {
  try {
    const payload = parsePayload(req.body)
    if (!payload.title || !payload.title.trim()) {
      return res.status(400).json({ error: 'Title is required' })
    }
    const mainImage = buildMainImage(payload, req.files)
    const sections = buildSections(payload, req.files)
    const slug = await generateUniqueSlug(payload.title.trim())
    const isEditorsPick = Boolean(payload.isEditorsPick)
    // Pre-insert check: fail fast before writing the row.
    await ensureEditorsPickLimit(null, isEditorsPick)
    const result = await query(
      `INSERT INTO blog_posts (title, slug, preview_image, link_url, sections, footer, is_editors_pick)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING *`,
      [
        payload.title.trim(),
        slug,
        mainImage,
        payload.linkUrl || null,
        JSON.stringify(sections),
        payload.footer || null,
        isEditorsPick
      ]
    )
    if (isEditorsPick) {
      // Post-insert re-check narrows the race window between concurrent
      // creates; on violation the flag is rolled back and the error rethrown.
      try {
        await ensureEditorsPickLimit(result.rows[0].id, true)
      } catch (limitError) {
        await query('UPDATE blog_posts SET is_editors_pick = false WHERE id = $1', [result.rows[0].id])
        throw limitError
      }
    }
    const post = mapPostRow(result.rows[0])
    post.excerpt = createExcerpt(post.sections)
    res.status(201).json({ data: post })
  } catch (error) {
    console.error('[POST /posts] error', error)
    const message = error.message || 'Failed to create post'
    res.status(400).json({ error: message })
  }
})
// PUT /posts/:id — full update of an existing post. Re-validates the
// title, regenerates the slug (excluding this post from the uniqueness
// check) and enforces the editor's-pick cap only when the flag is being
// turned on. Cap violations map to 400, other failures to 500.
app.put('/posts/:id', upload.fields(getUploadFields()), async (req, res) => {
  const { id } = req.params
  try {
    const payload = parsePayload(req.body)
    if (!payload.title || !payload.title.trim()) {
      return res.status(400).json({ error: 'Title is required' })
    }
    const existingResult = await query('SELECT * FROM blog_posts WHERE id = $1', [id])
    if (existingResult.rows.length === 0) {
      return res.status(404).json({ error: 'Post not found' })
    }
    const existingPost = mapPostRow(existingResult.rows[0])
    const mainImage = buildMainImage(payload, req.files)
    const sections = buildSections(payload, req.files)
    const nextSlug = await generateUniqueSlug(payload.title.trim(), Number(id))
    const isEditorsPick = Boolean(payload.isEditorsPick)
    // Only check the cap when turning the flag on; keeping it set is free.
    if (isEditorsPick && !existingPost.isEditorsPick) {
      await ensureEditorsPickLimit(Number(id), true)
    }
    const result = await query(
      `UPDATE blog_posts
SET title = $1,
slug = $2,
preview_image = $3,
link_url = $4,
sections = $5,
footer = $6,
is_editors_pick = $7
WHERE id = $8
RETURNING *`,
      [
        payload.title.trim(),
        nextSlug,
        mainImage,
        payload.linkUrl || null,
        JSON.stringify(sections),
        payload.footer || null,
        isEditorsPick,
        id
      ]
    )
    const post = mapPostRow(result.rows[0])
    post.excerpt = createExcerpt(post.sections)
    res.json({ data: post })
  } catch (error) {
    console.error('[PUT /posts/:id] error', error)
    // ensureEditorsPickLimit errors mention "editor": treat as client error.
    const status = error.message && error.message.includes('editor') ? 400 : 500
    res.status(status).json({ error: error.message || 'Failed to update post' })
  }
})
// DELETE /posts/:id — remove a post; 404 when it does not exist.
app.delete('/posts/:id', async (req, res) => {
  const { id } = req.params
  try {
    const existing = await query('SELECT * FROM blog_posts WHERE id = $1', [id])
    if (!existing.rows.length) {
      return res.status(404).json({ error: 'Post not found' })
    }
    await query('DELETE FROM blog_posts WHERE id = $1', [id])
    return res.json({ success: true })
  } catch (error) {
    console.error('[DELETE /posts/:id] error', error)
    return res.status(500).json({ error: 'Failed to delete post' })
  }
})
// PATCH /posts/:id/editors-pick — toggle the editor's-pick flag for one
// post. The three-pick cap is enforced only when switching the flag ON
// for a post that does not already have it; cap violations return 400,
// anything else 500.
app.patch('/posts/:id/editors-pick', async (req, res) => {
  const { id } = req.params
  const makePick = Boolean(req.body?.isEditorsPick)
  try {
    const existing = await query('SELECT * FROM blog_posts WHERE id = $1', [id])
    if (existing.rows.length === 0) {
      return res.status(404).json({ error: 'Post not found' })
    }
    if (makePick && !existing.rows[0].is_editors_pick) {
      await ensureEditorsPickLimit(Number(id), true)
    }
    const result = await query(
      'UPDATE blog_posts SET is_editors_pick = $1 WHERE id = $2 RETURNING *',
      [makePick, id]
    )
    const post = mapPostRow(result.rows[0])
    post.excerpt = createExcerpt(post.sections)
    return res.json({ data: post })
  } catch (error) {
    console.error('[PATCH /posts/:id/editors-pick] error', error)
    // Cap violations ("Only three...") are client errors; else server error.
    const status = error.message && error.message.includes('Only three') ? 400 : 500
    return res.status(status).json({ error: error.message || 'Failed to update editor pick' })
  }
})
// Run migrations, then start listening on all interfaces. On startup
// failure the pool is closed and the process exits non-zero.
async function start() {
  try {
    await runMigrations()
    app.listen(PORT, '0.0.0.0', () => {
      console.log(`[api] listening on port ${PORT}`)
    })
  } catch (error) {
    console.error('[api] failed to start', error)
    await closePool()
    process.exit(1)
  }
}

start()

46
server/migrations.js Normal file
View File

@@ -0,0 +1,46 @@
const { query } = require('./db')

// Idempotent schema bootstrap, run once at server start:
// 1. the blog_posts table (sections is a JSONB array of section objects),
// 2. a trigger function that stamps updated_at on every UPDATE,
// 3. the trigger itself, guarded so repeated runs do not recreate it.
async function runMigrations() {
  await query(`
CREATE TABLE IF NOT EXISTS blog_posts (
id SERIAL PRIMARY KEY,
title TEXT NOT NULL,
slug TEXT NOT NULL UNIQUE,
preview_image TEXT,
link_url TEXT,
sections JSONB NOT NULL DEFAULT '[]'::jsonb,
footer TEXT,
is_editors_pick BOOLEAN NOT NULL DEFAULT FALSE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
`)
  // CREATE OR REPLACE makes re-running this function definition safe.
  await query(`
CREATE OR REPLACE FUNCTION set_updated_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
`)
  // CREATE TRIGGER has no IF NOT EXISTS, hence the pg_trigger guard.
  await query(`
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_trigger
WHERE tgname = 'trg_blog_posts_updated_at'
) THEN
CREATE TRIGGER trg_blog_posts_updated_at
BEFORE UPDATE ON blog_posts
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
END IF;
END; $$;
`)
}
module.exports = { runMigrations }

42
server/storage.js Normal file
View File

@@ -0,0 +1,42 @@
// Multer storage configuration for image uploads.
const multer = require('multer')
const path = require('path')
const fs = require('fs')

// Uploads are stored on local disk under public/uploads (created on boot).
const uploadsDir = path.join(__dirname, '..', 'public', 'uploads')
if (!fs.existsSync(uploadsDir)) {
  fs.mkdirSync(uploadsDir, { recursive: true })
}

// Disk storage: keep the original extension (falling back to .bin) and
// prefix with timestamp + random number to avoid filename collisions.
const storage = multer.diskStorage({
  destination: (_req, _file, cb) => {
    cb(null, uploadsDir)
  },
  filename: (_req, file, cb) => {
    const timestamp = Date.now()
    const random = Math.round(Math.random() * 1e9)
    const ext = path.extname(file.originalname) || '.bin'
    cb(null, `${timestamp}-${random}${ext}`)
  }
})
// Accept only files whose reported MIME type is an image; anything else
// is rejected with an error that multer surfaces to the route handler.
function fileFilter(_req, file, cb) {
  const isImage = file.mimetype.startsWith('image/')
  if (!isImage) {
    cb(new Error('Only image uploads are allowed'))
    return
  }
  cb(null, true)
}
// Upload middleware: disk storage, image-only filter, 5 MB per-file cap.
const upload = multer({
  storage,
  fileFilter,
  limits: {
    fileSize: 5 * 1024 * 1024 // 5MB per file
  }
})

module.exports = {
  upload,
  uploadsDir
}