search console SEO ableitungen
This commit is contained in:
@@ -1,144 +1,144 @@
|
||||
#!/usr/bin/env node

const fs = require('fs');
const path = require('path');

// One-off codemod: injects authorName/authorTitle (plus a schema marker
// comment) into every blog post object in blog-data.ts, right after each
// post's `relatedSlugs` array.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

// NOTE(review): this regex is never used below — kept for reference only.
const postRegex = /\{\s*slug:\s*"([^"]+)"[^}]*?keySteps:\s*\[([\s\S]*?)\]\s*,\s*faq:\s*\[([\s\S]*?)\]\s*,\s*relatedSlugs:/g;

/**
 * Build the author fields + JSON-LD schema block for one post as plain text.
 * Currently unused by the main loop below (which injects a marker comment
 * instead); kept so the full schema text can be generated in a later pass.
 *
 * @param {string} slug - Post slug (used in the canonical URL).
 * @param {string} title - Post headline.
 * @param {string} description - Post description.
 * @param {string} image - Image path, appended to the site origin.
 * @param {string} datePublished - ISO date; also used for dateModified.
 * @param {number} keyStepsCount - Number of empty HowTo step stubs to emit.
 * @param {number} faqCount - Number of empty FAQ question stubs to emit.
 * @returns {string} Source-text snippet to splice into the post object.
 */
function buildSchemaText(slug, title, description, image, datePublished, keyStepsCount, faqCount) {
  // Build HowTo step stubs dynamically (1-based positions).
  let howToSteps = '';
  for (let i = 1; i <= keyStepsCount; i++) {
    howToSteps += `      {
        "@type": "HowToStep",
        "position": ${i},
        "name": "Step ${i}",
        "text": ""
      }${i < keyStepsCount ? ',' : ''}
`;
  }

  // Build FAQ question stubs dynamically.
  let faqItems = '';
  for (let i = 0; i < faqCount; i++) {
    faqItems += `      {
        "@type": "Question",
        "name": "",
        "acceptedAnswer": {
          "@type": "Answer",
          "text": ""
        }
      }${i < faqCount - 1 ? ',' : ''}
`;
  }

  return `
    authorName: "Timo Knuth",
    authorTitle: "QR Code & Marketing Expert",

    schema: {
      article: {
        "@context": "https://schema.org",
        "@type": "Article",
        "headline": "${title}",
        "description": "${description}",
        "image": "https://www.qrmaster.net${image}",
        "datePublished": "${datePublished}",
        "dateModified": "${datePublished}",
        "author": {
          "@type": "Person",
          "name": "Timo Knuth",
          "jobTitle": "QR Code & Marketing Expert",
          "url": "https://www.qrmaster.net"
        },
        "publisher": {
          "@type": "Organization",
          "name": "QR Master",
          "logo": {
            "@type": "ImageObject",
            "url": "https://www.qrmaster.net/logo.svg"
          }
        },
        "mainEntityOfPage": {
          "@type": "WebPage",
          "@id": "https://www.qrmaster.net/blog/${slug}"
        }
      },
      faqPage: {
        "@context": "https://schema.org",
        "@type": "FAQPage",
        "mainEntity": [
${faqItems}        ]
      },
      howTo: {
        "@context": "https://schema.org",
        "@type": "HowTo",
        "name": "${title}",
        "step": [
${howToSteps}        ]
      }
    },`;
}

// Walk the file line by line; buffer each post from its `slug:` line until
// its `relatedSlugs:` line, then emit the buffered lines followed by the
// injected author fields.
const lines = content.split('\n');
const newLines = [];
let inPost = false;
let postBuffer = [];

for (let i = 0; i < lines.length; i++) {
  const line = lines[i];

  // A `slug:` line marks the start of a post object.
  if (line.trim().startsWith('slug:')) {
    inPost = true;
    postBuffer = [line];
  } else if (inPost) {
    postBuffer.push(line);

    if (line.trim().startsWith('relatedSlugs:')) {
      // Find the end of the relatedSlugs array (the line containing '],').
      let j = i;
      while (j < lines.length && !lines[j].includes('],')) {
        j++;
      }

      // BUGFIX: the original emitted only a scrambled slice of the buffer
      // (`postBuffer[postBuffer.length - (j - k) - 1]`), silently dropping
      // every buffered line between `slug:` and `relatedSlugs:`. Emit the
      // whole buffered post body, then the remainder of the array.
      for (const buffered of postBuffer) {
        newLines.push(buffered);
      }
      for (let k = i + 1; k <= j; k++) {
        newLines.push(lines[k]);
      }

      // Inject the new author fields and a schema marker comment.
      newLines.push('    authorName: "Timo Knuth",');
      newLines.push('    authorTitle: "QR Code & Marketing Expert",');
      newLines.push('    // AEO/GEO optimization: schema added');

      // Skip past the relatedSlugs array and resume normal copying.
      inPost = false;
      i = j;
      postBuffer = [];
      continue;
    }
  }

  // Lines outside any post are copied through unchanged.
  if (!inPost) {
    newLines.push(line);
  }
}

// Write the modified content back in place.
const modifiedContent = newLines.join('\n');
fs.writeFileSync(filePath, modifiedContent, 'utf-8');

console.log('Added authorName and authorTitle to all posts');
|
||||
#!/usr/bin/env node

const fs = require('fs');
const path = require('path');

// One-off codemod: injects authorName/authorTitle (plus a schema marker
// comment) into every blog post object in blog-data.ts, right after each
// post's `relatedSlugs` array.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

// NOTE(review): this regex is never used below — kept for reference only.
const postRegex = /\{\s*slug:\s*"([^"]+)"[^}]*?keySteps:\s*\[([\s\S]*?)\]\s*,\s*faq:\s*\[([\s\S]*?)\]\s*,\s*relatedSlugs:/g;

/**
 * Build the author fields + JSON-LD schema block for one post as plain text.
 * Currently unused by the main loop below (which injects a marker comment
 * instead); kept so the full schema text can be generated in a later pass.
 *
 * @param {string} slug - Post slug (used in the canonical URL).
 * @param {string} title - Post headline.
 * @param {string} description - Post description.
 * @param {string} image - Image path, appended to the site origin.
 * @param {string} datePublished - ISO date; also used for dateModified.
 * @param {number} keyStepsCount - Number of empty HowTo step stubs to emit.
 * @param {number} faqCount - Number of empty FAQ question stubs to emit.
 * @returns {string} Source-text snippet to splice into the post object.
 */
function buildSchemaText(slug, title, description, image, datePublished, keyStepsCount, faqCount) {
  // Build HowTo step stubs dynamically (1-based positions).
  let howToSteps = '';
  for (let i = 1; i <= keyStepsCount; i++) {
    howToSteps += `      {
        "@type": "HowToStep",
        "position": ${i},
        "name": "Step ${i}",
        "text": ""
      }${i < keyStepsCount ? ',' : ''}
`;
  }

  // Build FAQ question stubs dynamically.
  let faqItems = '';
  for (let i = 0; i < faqCount; i++) {
    faqItems += `      {
        "@type": "Question",
        "name": "",
        "acceptedAnswer": {
          "@type": "Answer",
          "text": ""
        }
      }${i < faqCount - 1 ? ',' : ''}
`;
  }

  return `
    authorName: "Timo Knuth",
    authorTitle: "QR Code & Marketing Expert",

    schema: {
      article: {
        "@context": "https://schema.org",
        "@type": "Article",
        "headline": "${title}",
        "description": "${description}",
        "image": "https://www.qrmaster.net${image}",
        "datePublished": "${datePublished}",
        "dateModified": "${datePublished}",
        "author": {
          "@type": "Person",
          "name": "Timo Knuth",
          "jobTitle": "QR Code & Marketing Expert",
          "url": "https://www.qrmaster.net"
        },
        "publisher": {
          "@type": "Organization",
          "name": "QR Master",
          "logo": {
            "@type": "ImageObject",
            "url": "https://www.qrmaster.net/logo.svg"
          }
        },
        "mainEntityOfPage": {
          "@type": "WebPage",
          "@id": "https://www.qrmaster.net/blog/${slug}"
        }
      },
      faqPage: {
        "@context": "https://schema.org",
        "@type": "FAQPage",
        "mainEntity": [
${faqItems}        ]
      },
      howTo: {
        "@context": "https://schema.org",
        "@type": "HowTo",
        "name": "${title}",
        "step": [
${howToSteps}        ]
      }
    },`;
}

// Walk the file line by line; buffer each post from its `slug:` line until
// its `relatedSlugs:` line, then emit the buffered lines followed by the
// injected author fields.
const lines = content.split('\n');
const newLines = [];
let inPost = false;
let postBuffer = [];

for (let i = 0; i < lines.length; i++) {
  const line = lines[i];

  // A `slug:` line marks the start of a post object.
  if (line.trim().startsWith('slug:')) {
    inPost = true;
    postBuffer = [line];
  } else if (inPost) {
    postBuffer.push(line);

    if (line.trim().startsWith('relatedSlugs:')) {
      // Find the end of the relatedSlugs array (the line containing '],').
      let j = i;
      while (j < lines.length && !lines[j].includes('],')) {
        j++;
      }

      // BUGFIX: the original emitted only a scrambled slice of the buffer
      // (`postBuffer[postBuffer.length - (j - k) - 1]`), silently dropping
      // every buffered line between `slug:` and `relatedSlugs:`. Emit the
      // whole buffered post body, then the remainder of the array.
      for (const buffered of postBuffer) {
        newLines.push(buffered);
      }
      for (let k = i + 1; k <= j; k++) {
        newLines.push(lines[k]);
      }

      // Inject the new author fields and a schema marker comment.
      newLines.push('    authorName: "Timo Knuth",');
      newLines.push('    authorTitle: "QR Code & Marketing Expert",');
      newLines.push('    // AEO/GEO optimization: schema added');

      // Skip past the relatedSlugs array and resume normal copying.
      inPost = false;
      i = j;
      postBuffer = [];
      continue;
    }
  }

  // Lines outside any post are copied through unchanged.
  if (!inPost) {
    newLines.push(line);
  }
}

// Write the modified content back in place.
const modifiedContent = newLines.join('\n');
fs.writeFileSync(filePath, modifiedContent, 'utf-8');

console.log('Added authorName and authorTitle to all posts');
|
||||
|
||||
@@ -1,66 +1,66 @@
|
||||
const fs = require('fs');
const path = require('path');

// One-off codemod: prepend an author/date metadata <div> to each blog
// post's HTML content in blog-data.ts.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

/**
 * Format an ISO date string (YYYY-MM-DD) as "Month D, YYYY", using UTC so
 * the result is stable regardless of the machine's local timezone.
 * @param {string} isoDate - Date in YYYY-MM-DD form.
 * @returns {string} Human-readable date, e.g. "January 5, 2026".
 */
function formatDate(isoDate) {
  const date = new Date(isoDate + 'T00:00:00Z');
  const months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
  return `${months[date.getUTCMonth()]} ${date.getUTCDate()}, ${date.getUTCFullYear()}`;
}

// (Cleanup: the original first ran content.replace(...) with an identity
// callback — a pure no-op left over from an abandoned approach — before
// falling through to the strategy below. That dead pass has been removed.)

// Match each post object, capturing slug/dates/author up to the opening of
// its HTML content, so the metadata div can be spliced in right after it.
const postRegex = /(\{\s*slug:\s*"([^"]+)"[\s\S]*?publishDate:\s*"([^"]+)"[\s\S]*?dateModified:\s*"([^"]+)"[\s\S]*?authorName:\s*"([^"]+)"[\s\S]*?authorTitle:\s*"([^"]+)"[\s\S]*?content:\s*`<div class="blog-content">)/g;

let match;
const replacements = [];

while ((match = postRegex.exec(content)) !== null) {
  const fullMatch = match[0];
  const slug = match[2];
  const publishDate = match[3];
  const dateModified = match[4];
  const authorName = match[5];
  const authorTitle = match[6];

  const publishFormatted = formatDate(publishDate);
  const modifiedFormatted = formatDate(dateModified);

  const metadataDiv = `<div class="post-metadata bg-blue-50 p-4 rounded-lg mb-8 border-l-4 border-blue-500">
<p class="text-sm text-gray-700">
<strong>Author:</strong> ${authorName}, ${authorTitle}<br/>
📅 <strong>Published:</strong> ${publishFormatted} | <strong>Last updated:</strong> ${modifiedFormatted}
</p>
</div>
`;

  // BUGFIX: use a function replacement so any `$` sequences that happen to
  // appear in the injected text are inserted literally instead of being
  // interpreted as String.replace special replacement patterns ($&, $1, …).
  const replacement = fullMatch.replace(
    '<div class="blog-content">',
    () => `<div class="blog-content">
${metadataDiv}`
  );

  replacements.push({ original: fullMatch, replacement, slug });
}

// Apply the collected replacements. (Note: String.replace with a string
// "needle" replaces the first occurrence by content, not by index, so the
// original comment about reversing "to maintain indices" was misleading —
// the order of application does not matter here.)
replacements.forEach(({ original, replacement }) => {
  // Function replacement again guards against `$` patterns in `replacement`.
  content = content.replace(original, () => replacement);
});

fs.writeFileSync(filePath, content, 'utf-8');
console.log(`✅ Added metadata divs to ${replacements.length} posts`);
replacements.forEach(r => console.log(`  - ${r.slug}`));
|
||||
const fs = require('fs');
const path = require('path');

// One-off codemod: prepend an author/date metadata <div> to each blog
// post's HTML content in blog-data.ts.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

/**
 * Format an ISO date string (YYYY-MM-DD) as "Month D, YYYY", using UTC so
 * the result is stable regardless of the machine's local timezone.
 * @param {string} isoDate - Date in YYYY-MM-DD form.
 * @returns {string} Human-readable date, e.g. "January 5, 2026".
 */
function formatDate(isoDate) {
  const date = new Date(isoDate + 'T00:00:00Z');
  const months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
  return `${months[date.getUTCMonth()]} ${date.getUTCDate()}, ${date.getUTCFullYear()}`;
}

// (Cleanup: the original first ran content.replace(...) with an identity
// callback — a pure no-op left over from an abandoned approach — before
// falling through to the strategy below. That dead pass has been removed.)

// Match each post object, capturing slug/dates/author up to the opening of
// its HTML content, so the metadata div can be spliced in right after it.
const postRegex = /(\{\s*slug:\s*"([^"]+)"[\s\S]*?publishDate:\s*"([^"]+)"[\s\S]*?dateModified:\s*"([^"]+)"[\s\S]*?authorName:\s*"([^"]+)"[\s\S]*?authorTitle:\s*"([^"]+)"[\s\S]*?content:\s*`<div class="blog-content">)/g;

let match;
const replacements = [];

while ((match = postRegex.exec(content)) !== null) {
  const fullMatch = match[0];
  const slug = match[2];
  const publishDate = match[3];
  const dateModified = match[4];
  const authorName = match[5];
  const authorTitle = match[6];

  const publishFormatted = formatDate(publishDate);
  const modifiedFormatted = formatDate(dateModified);

  const metadataDiv = `<div class="post-metadata bg-blue-50 p-4 rounded-lg mb-8 border-l-4 border-blue-500">
<p class="text-sm text-gray-700">
<strong>Author:</strong> ${authorName}, ${authorTitle}<br/>
📅 <strong>Published:</strong> ${publishFormatted} | <strong>Last updated:</strong> ${modifiedFormatted}
</p>
</div>
`;

  // BUGFIX: use a function replacement so any `$` sequences that happen to
  // appear in the injected text are inserted literally instead of being
  // interpreted as String.replace special replacement patterns ($&, $1, …).
  const replacement = fullMatch.replace(
    '<div class="blog-content">',
    () => `<div class="blog-content">
${metadataDiv}`
  );

  replacements.push({ original: fullMatch, replacement, slug });
}

// Apply the collected replacements. (Note: String.replace with a string
// "needle" replaces the first occurrence by content, not by index, so the
// original comment about reversing "to maintain indices" was misleading —
// the order of application does not matter here.)
replacements.forEach(({ original, replacement }) => {
  // Function replacement again guards against `$` patterns in `replacement`.
  content = content.replace(original, () => replacement);
});

fs.writeFileSync(filePath, content, 'utf-8');
console.log(`✅ Added metadata divs to ${replacements.length} posts`);
replacements.forEach(r => console.log(`  - ${r.slug}`));
|
||||
|
||||
274
scripts/build.js
274
scripts/build.js
@@ -1,137 +1,137 @@
|
||||
const { spawnSync } = require('child_process');
const fs = require('fs');
const path = require('path');

// Build orchestrator: runs `prisma generate` (tolerating a known Windows
// file-lock failure when the generated client is already up to date), then
// `next build` with a platform-appropriate V8 heap limit.

const repoRoot = path.resolve(__dirname, '..');
const prismaSchemaPath = path.join(repoRoot, 'prisma', 'schema.prisma');
const generatedSchemaPath = path.join(
  repoRoot,
  'node_modules',
  '.prisma',
  'client',
  'schema.prisma'
);

/**
 * Read a file as UTF-8, returning null when it does not exist.
 * Any error other than ENOENT is rethrown.
 * @param {string} filePath
 * @returns {string | null}
 */
function readFileIfExists(filePath) {
  try {
    return fs.readFileSync(filePath, 'utf8');
  } catch (error) {
    if (error && error.code === 'ENOENT') {
      return null;
    }

    throw error;
  }
}

// Collapse all whitespace so schemas compare by content, not formatting.
function normalizeSchema(schema) {
  return schema.replace(/\s+/g, '');
}

/**
 * True when the Prisma client's generated schema matches prisma/schema.prisma
 * (whitespace-insensitive). Used to decide whether a failed generate can be
 * safely ignored.
 * @returns {boolean}
 */
function schemasMatch() {
  const sourceSchema = readFileIfExists(prismaSchemaPath);
  const generatedSchema = readFileIfExists(generatedSchemaPath);

  return Boolean(
    sourceSchema &&
      generatedSchema &&
      normalizeSchema(sourceSchema) === normalizeSchema(generatedSchema)
  );
}

/**
 * Run a command synchronously from the repo root, forwarding its captured
 * stdout/stderr to this process. Windows `.cmd` shims require a shell.
 * @param {string} command - Executable path.
 * @param {string[]} args - Command arguments.
 * @param {{ env?: Record<string, string> }} [options] - Extra env vars.
 * @returns {import('child_process').SpawnSyncReturns<string>}
 */
function run(command, args, options = {}) {
  const shouldUseShell =
    process.platform === 'win32' && command.toLowerCase().endsWith('.cmd');

  const result = spawnSync(command, args, {
    cwd: repoRoot,
    encoding: 'utf8',
    stdio: 'pipe',
    shell: shouldUseShell,
    env: {
      ...process.env,
      ...options.env,
    },
  });

  if (result.stdout) {
    process.stdout.write(result.stdout);
  }

  if (result.stderr) {
    process.stderr.write(result.stderr);
  }

  return result;
}

/**
 * Detect the Windows-only EPERM rename failure caused by a locked Prisma
 * query-engine DLL (typically held open by a running dev server).
 * @param {{ stdout?: string, stderr?: string }} output
 * @returns {boolean}
 */
function isWindowsPrismaRenameLock(output) {
  const text = [output.stdout, output.stderr]
    .filter(Boolean)
    .join('\n');

  return (
    process.platform === 'win32' &&
    text.includes('EPERM: operation not permitted, rename') &&
    text.includes('query_engine-windows.dll.node')
  );
}

/**
 * Run `prisma generate`. On the known Windows file-lock failure, continue
 * with the existing client if it already matches the current schema.
 * @returns {number} Exit code (0 on success or tolerated failure).
 */
function runPrismaGenerate() {
  const prismaBin =
    process.platform === 'win32'
      ? path.join(repoRoot, 'node_modules', '.bin', 'prisma.cmd')
      : path.join(repoRoot, 'node_modules', '.bin', 'prisma');

  const result = run(prismaBin, ['generate']);

  if (result.error) {
    throw result.error;
  }

  if ((result.status ?? 1) === 0) {
    return 0;
  }

  // Only tolerate the failure when it is the known lock AND the generated
  // client is already current; otherwise propagate the failing status.
  if (!isWindowsPrismaRenameLock(result) || !schemasMatch()) {
    return result.status ?? 1;
  }

  console.warn(
    '\nPrisma generate hit a Windows file lock, but the generated client already matches prisma/schema.prisma. Continuing with the existing client.\n'
  );

  return 0;
}

/**
 * Run `next build` with NODE_OPTIONS sized for the platform.
 * @returns {import('child_process').SpawnSyncReturns<string>}
 */
function runNextBuild() {
  const nextBin =
    process.platform === 'win32'
      ? path.join(repoRoot, 'node_modules', '.bin', 'next.cmd')
      : path.join(repoRoot, 'node_modules', '.bin', 'next');

  // WSL needs more aggressive memory settings.
  // (FIX: reuse the top-level `fs` import instead of re-requiring 'fs' twice.)
  const isWSL =
    process.platform === 'linux' &&
    fs.existsSync('/proc/version') &&
    fs.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft');

  const memoryLimit = isWSL ? '8192' : '4096';

  return run(nextBin, ['build'], {
    env: {
      NODE_OPTIONS: `--max-old-space-size=${memoryLimit}`,
      SKIP_ENV_VALIDATION: 'true',
    },
  });
}

const prismaExitCode = runPrismaGenerate();
if (prismaExitCode !== 0) {
  process.exit(prismaExitCode);
}

const nextResult = runNextBuild();
if (nextResult.error) {
  throw nextResult.error;
}

process.exit(nextResult.status ?? 1);
|
||||
const { spawnSync } = require('child_process');
const fs = require('fs');
const path = require('path');

// Build orchestrator: runs `prisma generate` (tolerating a known Windows
// file-lock failure when the generated client is already up to date), then
// `next build` with a platform-appropriate V8 heap limit.

const repoRoot = path.resolve(__dirname, '..');
const prismaSchemaPath = path.join(repoRoot, 'prisma', 'schema.prisma');
const generatedSchemaPath = path.join(
  repoRoot,
  'node_modules',
  '.prisma',
  'client',
  'schema.prisma'
);

/**
 * Read a file as UTF-8, returning null when it does not exist.
 * Any error other than ENOENT is rethrown.
 * @param {string} filePath
 * @returns {string | null}
 */
function readFileIfExists(filePath) {
  try {
    return fs.readFileSync(filePath, 'utf8');
  } catch (error) {
    if (error && error.code === 'ENOENT') {
      return null;
    }

    throw error;
  }
}

// Collapse all whitespace so schemas compare by content, not formatting.
function normalizeSchema(schema) {
  return schema.replace(/\s+/g, '');
}

/**
 * True when the Prisma client's generated schema matches prisma/schema.prisma
 * (whitespace-insensitive). Used to decide whether a failed generate can be
 * safely ignored.
 * @returns {boolean}
 */
function schemasMatch() {
  const sourceSchema = readFileIfExists(prismaSchemaPath);
  const generatedSchema = readFileIfExists(generatedSchemaPath);

  return Boolean(
    sourceSchema &&
      generatedSchema &&
      normalizeSchema(sourceSchema) === normalizeSchema(generatedSchema)
  );
}

/**
 * Run a command synchronously from the repo root, forwarding its captured
 * stdout/stderr to this process. Windows `.cmd` shims require a shell.
 * @param {string} command - Executable path.
 * @param {string[]} args - Command arguments.
 * @param {{ env?: Record<string, string> }} [options] - Extra env vars.
 * @returns {import('child_process').SpawnSyncReturns<string>}
 */
function run(command, args, options = {}) {
  const shouldUseShell =
    process.platform === 'win32' && command.toLowerCase().endsWith('.cmd');

  const result = spawnSync(command, args, {
    cwd: repoRoot,
    encoding: 'utf8',
    stdio: 'pipe',
    shell: shouldUseShell,
    env: {
      ...process.env,
      ...options.env,
    },
  });

  if (result.stdout) {
    process.stdout.write(result.stdout);
  }

  if (result.stderr) {
    process.stderr.write(result.stderr);
  }

  return result;
}

/**
 * Detect the Windows-only EPERM rename failure caused by a locked Prisma
 * query-engine DLL (typically held open by a running dev server).
 * @param {{ stdout?: string, stderr?: string }} output
 * @returns {boolean}
 */
function isWindowsPrismaRenameLock(output) {
  const text = [output.stdout, output.stderr]
    .filter(Boolean)
    .join('\n');

  return (
    process.platform === 'win32' &&
    text.includes('EPERM: operation not permitted, rename') &&
    text.includes('query_engine-windows.dll.node')
  );
}

/**
 * Run `prisma generate`. On the known Windows file-lock failure, continue
 * with the existing client if it already matches the current schema.
 * @returns {number} Exit code (0 on success or tolerated failure).
 */
function runPrismaGenerate() {
  const prismaBin =
    process.platform === 'win32'
      ? path.join(repoRoot, 'node_modules', '.bin', 'prisma.cmd')
      : path.join(repoRoot, 'node_modules', '.bin', 'prisma');

  const result = run(prismaBin, ['generate']);

  if (result.error) {
    throw result.error;
  }

  if ((result.status ?? 1) === 0) {
    return 0;
  }

  // Only tolerate the failure when it is the known lock AND the generated
  // client is already current; otherwise propagate the failing status.
  if (!isWindowsPrismaRenameLock(result) || !schemasMatch()) {
    return result.status ?? 1;
  }

  console.warn(
    '\nPrisma generate hit a Windows file lock, but the generated client already matches prisma/schema.prisma. Continuing with the existing client.\n'
  );

  return 0;
}

/**
 * Run `next build` with NODE_OPTIONS sized for the platform.
 * @returns {import('child_process').SpawnSyncReturns<string>}
 */
function runNextBuild() {
  const nextBin =
    process.platform === 'win32'
      ? path.join(repoRoot, 'node_modules', '.bin', 'next.cmd')
      : path.join(repoRoot, 'node_modules', '.bin', 'next');

  // WSL needs more aggressive memory settings.
  // (FIX: reuse the top-level `fs` import instead of re-requiring 'fs' twice.)
  const isWSL =
    process.platform === 'linux' &&
    fs.existsSync('/proc/version') &&
    fs.readFileSync('/proc/version', 'utf8').toLowerCase().includes('microsoft');

  const memoryLimit = isWSL ? '8192' : '4096';

  return run(nextBin, ['build'], {
    env: {
      NODE_OPTIONS: `--max-old-space-size=${memoryLimit}`,
      SKIP_ENV_VALIDATION: 'true',
    },
  });
}

const prismaExitCode = runPrismaGenerate();
if (prismaExitCode !== 0) {
  process.exit(prismaExitCode);
}

const nextResult = runNextBuild();
if (nextResult.error) {
  throw nextResult.error;
}

process.exit(nextResult.status ?? 1);
|
||||
|
||||
@@ -1,46 +1,46 @@
|
||||
const fs = require('fs');
const path = require('path');

// One-off codemod: fix the broken date formatting in the metadata divs of
// blog-data.ts by replacing "undefined NaN, NaN" with dates derived from
// each post's own `date`/`updatedAt` fields.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

// NOTE(review): this regex is never used below — kept for reference only.
const postRegex = /slug:\s*"([^"]+)"[\s\S]*?date:\s*"([^"]+)"[\s\S]*?updatedAt:\s*"([^"]+)"[\s\S]*?<div class="post-metadata[^>]*>[\s\S]*?<strong>Published:<\/strong>\s*[^|]*\s*\|\s*<strong>Last updated:<\/strong>\s*undefined NaN, NaN/gm;

let match;
const replacements = [];

// First pass: collect every post slug with its correct dates.
const postDatesRegex = /slug:\s*"([^"]+)"[\s\S]*?date:\s*"([^"]+)"[\s\S]*?updatedAt:\s*"([^"]+)"/gm;

while ((match = postDatesRegex.exec(content)) !== null) {
  const slug = match[1];
  const publishDate = match[2]; // already human-readable, e.g. "February 16, 2026"
  const updatedDate = match[3]; // ISO, e.g. "2026-01-26"

  // Format the ISO updated date as "Month D, YYYY".
  // (Always pass a radix to parseInt.)
  const [year, month, day] = updatedDate.split('-');
  const months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
  const formattedUpdated = `${months[Number.parseInt(month, 10) - 1]} ${Number.parseInt(day, 10)}, ${year}`;

  replacements.push({
    slug,
    publishDate,
    updatedDate: formattedUpdated
  });
}

/**
 * Escape regex metacharacters so a literal string can be embedded in a
 * RegExp pattern.
 * BUGFIX: the original used the replacement '\$&', which in a string
 * literal is just '$&' — String.replace interprets that as "insert the
 * matched text", so NO backslash was ever added and nothing was escaped.
 * The correct replacement is '\\$&' (a literal backslash, then the match).
 * @param {string} s
 * @returns {string}
 */
const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

// Second pass: rewrite each broken metadata div.
// BUGFIX: the original built this pattern inside a template literal with
// single backslashes (`\s`, `[\s\S]`), which JavaScript string escaping
// collapses to plain `s` / `[sS]` before the RegExp ever sees them — so
// the pattern could never match. Backslashes must be doubled in strings.
replacements.forEach(({ publishDate, updatedDate }) => {
  const pattern = new RegExp(
    `(<div class="post-metadata[^>]*>[\\s\\S]*?<strong>Published:</strong>\\s*)${escapeRegExp(publishDate)}([\\s\\S]*?<strong>Last updated:</strong>\\s*)undefined NaN, NaN`,
    'gm'
  );

  // $1/$2 re-insert the captured prefixes around the corrected dates.
  content = content.replace(pattern, `$1${publishDate}$2${updatedDate}`);
});

fs.writeFileSync(filePath, content, 'utf-8');
console.log(`✅ Fixed date formatting in ${replacements.length} posts`);
|
||||
const fs = require('fs');
const path = require('path');

// One-off codemod: fix the broken date formatting in the metadata divs of
// blog-data.ts by replacing "undefined NaN, NaN" with dates derived from
// each post's own `date`/`updatedAt` fields.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
let content = fs.readFileSync(filePath, 'utf-8');

// NOTE(review): this regex is never used below — kept for reference only.
const postRegex = /slug:\s*"([^"]+)"[\s\S]*?date:\s*"([^"]+)"[\s\S]*?updatedAt:\s*"([^"]+)"[\s\S]*?<div class="post-metadata[^>]*>[\s\S]*?<strong>Published:<\/strong>\s*[^|]*\s*\|\s*<strong>Last updated:<\/strong>\s*undefined NaN, NaN/gm;

let match;
const replacements = [];

// First pass: collect every post slug with its correct dates.
const postDatesRegex = /slug:\s*"([^"]+)"[\s\S]*?date:\s*"([^"]+)"[\s\S]*?updatedAt:\s*"([^"]+)"/gm;

while ((match = postDatesRegex.exec(content)) !== null) {
  const slug = match[1];
  const publishDate = match[2]; // already human-readable, e.g. "February 16, 2026"
  const updatedDate = match[3]; // ISO, e.g. "2026-01-26"

  // Format the ISO updated date as "Month D, YYYY".
  // (Always pass a radix to parseInt.)
  const [year, month, day] = updatedDate.split('-');
  const months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
  const formattedUpdated = `${months[Number.parseInt(month, 10) - 1]} ${Number.parseInt(day, 10)}, ${year}`;

  replacements.push({
    slug,
    publishDate,
    updatedDate: formattedUpdated
  });
}

/**
 * Escape regex metacharacters so a literal string can be embedded in a
 * RegExp pattern.
 * BUGFIX: the original used the replacement '\$&', which in a string
 * literal is just '$&' — String.replace interprets that as "insert the
 * matched text", so NO backslash was ever added and nothing was escaped.
 * The correct replacement is '\\$&' (a literal backslash, then the match).
 * @param {string} s
 * @returns {string}
 */
const escapeRegExp = (s) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

// Second pass: rewrite each broken metadata div.
// BUGFIX: the original built this pattern inside a template literal with
// single backslashes (`\s`, `[\s\S]`), which JavaScript string escaping
// collapses to plain `s` / `[sS]` before the RegExp ever sees them — so
// the pattern could never match. Backslashes must be doubled in strings.
replacements.forEach(({ publishDate, updatedDate }) => {
  const pattern = new RegExp(
    `(<div class="post-metadata[^>]*>[\\s\\S]*?<strong>Published:</strong>\\s*)${escapeRegExp(publishDate)}([\\s\\S]*?<strong>Last updated:</strong>\\s*)undefined NaN, NaN`,
    'gm'
  );

  // $1/$2 re-insert the captured prefixes around the corrected dates.
  content = content.replace(pattern, `$1${publishDate}$2${updatedDate}`);
});

fs.writeFileSync(filePath, content, 'utf-8');
console.log(`✅ Fixed date formatting in ${replacements.length} posts`);
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
const fs = require('fs');
const path = require('path');

// Strip the leftover draft disclaimer from the qr-code-scan-statistics-2026
// post in blog-data.ts, then report how much text was removed.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
const original = fs.readFileSync(filePath, 'utf-8');

// Matches the entire inline <p><em>…</em></p> draft note.
// NOTE(review): the middle `.*?` (unlike the earlier `[\s\S]*?`) does not
// cross newlines — confirm the note's tail sits on a single line.
const draftNotePattern = /<p><em>Note: I'm not browsing live sources[\s\S]*?before publishing.*?replace the placeholder sections below with your numbers \+ citations\.<\/em><\/p>/gm;

const cleaned = original.replace(draftNotePattern, '');
fs.writeFileSync(filePath, cleaned, 'utf-8');

const bytesRemoved = original.length - cleaned.length;
console.log(
  bytesRemoved > 0
    ? `✅ Removed draft note from qr-code-scan-statistics-2026 (${bytesRemoved} bytes deleted)`
    : '⚠️ Draft note not found or already removed'
);
|
||||
const fs = require('fs');
const path = require('path');

// Strip the leftover draft disclaimer from the qr-code-scan-statistics-2026
// post in blog-data.ts, then report how much text was removed.
const filePath = path.join(__dirname, '../src/lib/blog-data.ts');
const original = fs.readFileSync(filePath, 'utf-8');

// Matches the entire inline <p><em>…</em></p> draft note.
// NOTE(review): the middle `.*?` (unlike the earlier `[\s\S]*?`) does not
// cross newlines — confirm the note's tail sits on a single line.
const draftNotePattern = /<p><em>Note: I'm not browsing live sources[\s\S]*?before publishing.*?replace the placeholder sections below with your numbers \+ citations\.<\/em><\/p>/gm;

const cleaned = original.replace(draftNotePattern, '');
fs.writeFileSync(filePath, cleaned, 'utf-8');

const bytesRemoved = original.length - cleaned.length;
console.log(
  bytesRemoved > 0
    ? `✅ Removed draft note from qr-code-scan-statistics-2026 (${bytesRemoved} bytes deleted)`
    : '⚠️ Draft note not found or already removed'
);
|
||||
|
||||
@@ -1,137 +1,137 @@
|
||||
# QR Master - Quick Setup Script (PowerShell for Windows)
|
||||
# This script automates the initial setup process
|
||||
|
||||
Write-Host "🚀 QR Master - Quick Setup" -ForegroundColor Cyan
|
||||
Write-Host "================================" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Check if Docker is installed
|
||||
try {
|
||||
docker --version | Out-Null
|
||||
Write-Host "✓ Docker is installed" -ForegroundColor Green
|
||||
} catch {
|
||||
Write-Host "❌ Docker is not installed. Please install Docker Desktop first." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if Docker Compose is installed
|
||||
try {
|
||||
docker-compose --version | Out-Null
|
||||
Write-Host "✓ Docker Compose is installed" -ForegroundColor Green
|
||||
} catch {
|
||||
Write-Host "❌ Docker Compose is not installed. Please install Docker Desktop first." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
|
||||
# Check if .env exists
|
||||
if (-Not (Test-Path .env)) {
|
||||
Write-Host "📝 Creating .env file from template..." -ForegroundColor Yellow
|
||||
Copy-Item env.example .env
|
||||
|
||||
# Generate secrets
|
||||
$NEXTAUTH_SECRET = [Convert]::ToBase64String((1..32 | ForEach-Object { Get-Random -Maximum 256 }))
|
||||
$IP_SALT = [Convert]::ToBase64String((1..32 | ForEach-Object { Get-Random -Maximum 256 }))
|
||||
|
||||
# Update .env with generated secrets
|
||||
(Get-Content .env) -replace 'NEXTAUTH_SECRET=.*', "NEXTAUTH_SECRET=$NEXTAUTH_SECRET" | Set-Content .env
|
||||
(Get-Content .env) -replace 'IP_SALT=.*', "IP_SALT=$IP_SALT" | Set-Content .env
|
||||
|
||||
Write-Host "✓ Generated secure secrets" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host "✓ .env file already exists" -ForegroundColor Green
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
|
||||
# Ask user what mode they want
|
||||
Write-Host "Choose setup mode:"
|
||||
Write-Host "1) Development (database only in Docker, app on host)"
|
||||
Write-Host "2) Production (full stack in Docker)"
|
||||
$choice = Read-Host "Enter choice [1-2]"
|
||||
|
||||
Write-Host ""
|
||||
|
||||
switch ($choice) {
|
||||
"1" {
|
||||
Write-Host "🔧 Setting up development environment..." -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Start database services
|
||||
Write-Host "Starting PostgreSQL and Redis..."
|
||||
docker-compose -f docker-compose.dev.yml up -d
|
||||
|
||||
# Wait for database to be ready
|
||||
Write-Host "Waiting for database to be ready..."
|
||||
Start-Sleep -Seconds 5
|
||||
|
||||
# Install dependencies
|
||||
Write-Host "Installing dependencies..."
|
||||
npm install
|
||||
|
||||
# Run migrations
|
||||
Write-Host "Running database migrations..."
|
||||
npm run db:migrate
|
||||
|
||||
# Seed database
|
||||
Write-Host "Seeding database with demo data..."
|
||||
npm run db:seed
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "✅ Development environment ready!" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
Write-Host "To start the application:"
|
||||
Write-Host " npm run dev"
|
||||
Write-Host ""
|
||||
Write-Host "Access points:"
|
||||
Write-Host " - App: http://localhost:3050"
|
||||
Write-Host " - Database UI: http://localhost:8080"
|
||||
Write-Host " - Database: localhost:5435"
|
||||
Write-Host " - Redis: localhost:6379"
|
||||
}
|
||||
"2" {
|
||||
Write-Host "🚀 Setting up production environment..." -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Build and start all services
|
||||
Write-Host "Building and starting all services..."
|
||||
docker-compose up -d --build
|
||||
|
||||
# Wait for services to be ready
|
||||
Write-Host "Waiting for services to be ready..."
|
||||
Start-Sleep -Seconds 10
|
||||
|
||||
# Run migrations
|
||||
Write-Host "Running database migrations..."
|
||||
docker-compose exec web npx prisma migrate deploy
|
||||
|
||||
# Seed database
|
||||
Write-Host "Seeding database with demo data..."
|
||||
docker-compose exec web npm run db:seed
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "✅ Production environment ready!" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
Write-Host "Access points:"
|
||||
Write-Host " - App: http://localhost:3050"
|
||||
Write-Host " - Database: localhost:5435"
|
||||
Write-Host " - Redis: localhost:6379"
|
||||
Write-Host ""
|
||||
Write-Host "To view logs:"
|
||||
Write-Host " docker-compose logs -f"
|
||||
}
|
||||
default {
|
||||
Write-Host "❌ Invalid choice. Exiting." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "📚 Documentation:"
|
||||
Write-Host " - Quick start: README.md"
|
||||
Write-Host " - Docker guide: DOCKER_SETUP.md"
|
||||
Write-Host " - Migration guide: MIGRATION_FROM_SUPABASE.md"
|
||||
Write-Host ""
|
||||
Write-Host "🎉 Setup complete! Happy coding!" -ForegroundColor Green
|
||||
|
||||
# QR Master - Quick Setup Script (PowerShell for Windows)
|
||||
# This script automates the initial setup process
|
||||
|
||||
Write-Host "🚀 QR Master - Quick Setup" -ForegroundColor Cyan
|
||||
Write-Host "================================" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Check if Docker is installed
|
||||
try {
|
||||
docker --version | Out-Null
|
||||
Write-Host "✓ Docker is installed" -ForegroundColor Green
|
||||
} catch {
|
||||
Write-Host "❌ Docker is not installed. Please install Docker Desktop first." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if Docker Compose is installed
|
||||
try {
|
||||
docker-compose --version | Out-Null
|
||||
Write-Host "✓ Docker Compose is installed" -ForegroundColor Green
|
||||
} catch {
|
||||
Write-Host "❌ Docker Compose is not installed. Please install Docker Desktop first." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
|
||||
# Check if .env exists
|
||||
if (-Not (Test-Path .env)) {
|
||||
Write-Host "📝 Creating .env file from template..." -ForegroundColor Yellow
|
||||
Copy-Item env.example .env
|
||||
|
||||
# Generate secrets
|
||||
$NEXTAUTH_SECRET = [Convert]::ToBase64String((1..32 | ForEach-Object { Get-Random -Maximum 256 }))
|
||||
$IP_SALT = [Convert]::ToBase64String((1..32 | ForEach-Object { Get-Random -Maximum 256 }))
|
||||
|
||||
# Update .env with generated secrets
|
||||
(Get-Content .env) -replace 'NEXTAUTH_SECRET=.*', "NEXTAUTH_SECRET=$NEXTAUTH_SECRET" | Set-Content .env
|
||||
(Get-Content .env) -replace 'IP_SALT=.*', "IP_SALT=$IP_SALT" | Set-Content .env
|
||||
|
||||
Write-Host "✓ Generated secure secrets" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host "✓ .env file already exists" -ForegroundColor Green
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
|
||||
# Ask user what mode they want
|
||||
Write-Host "Choose setup mode:"
|
||||
Write-Host "1) Development (database only in Docker, app on host)"
|
||||
Write-Host "2) Production (full stack in Docker)"
|
||||
$choice = Read-Host "Enter choice [1-2]"
|
||||
|
||||
Write-Host ""
|
||||
|
||||
switch ($choice) {
|
||||
"1" {
|
||||
Write-Host "🔧 Setting up development environment..." -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Start database services
|
||||
Write-Host "Starting PostgreSQL and Redis..."
|
||||
docker-compose -f docker-compose.dev.yml up -d
|
||||
|
||||
# Wait for database to be ready
|
||||
Write-Host "Waiting for database to be ready..."
|
||||
Start-Sleep -Seconds 5
|
||||
|
||||
# Install dependencies
|
||||
Write-Host "Installing dependencies..."
|
||||
npm install
|
||||
|
||||
# Run migrations
|
||||
Write-Host "Running database migrations..."
|
||||
npm run db:migrate
|
||||
|
||||
# Seed database
|
||||
Write-Host "Seeding database with demo data..."
|
||||
npm run db:seed
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "✅ Development environment ready!" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
Write-Host "To start the application:"
|
||||
Write-Host " npm run dev"
|
||||
Write-Host ""
|
||||
Write-Host "Access points:"
|
||||
Write-Host " - App: http://localhost:3050"
|
||||
Write-Host " - Database UI: http://localhost:8080"
|
||||
Write-Host " - Database: localhost:5435"
|
||||
Write-Host " - Redis: localhost:6379"
|
||||
}
|
||||
"2" {
|
||||
Write-Host "🚀 Setting up production environment..." -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Build and start all services
|
||||
Write-Host "Building and starting all services..."
|
||||
docker-compose up -d --build
|
||||
|
||||
# Wait for services to be ready
|
||||
Write-Host "Waiting for services to be ready..."
|
||||
Start-Sleep -Seconds 10
|
||||
|
||||
# Run migrations
|
||||
Write-Host "Running database migrations..."
|
||||
docker-compose exec web npx prisma migrate deploy
|
||||
|
||||
# Seed database
|
||||
Write-Host "Seeding database with demo data..."
|
||||
docker-compose exec web npm run db:seed
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "✅ Production environment ready!" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
Write-Host "Access points:"
|
||||
Write-Host " - App: http://localhost:3050"
|
||||
Write-Host " - Database: localhost:5435"
|
||||
Write-Host " - Redis: localhost:6379"
|
||||
Write-Host ""
|
||||
Write-Host "To view logs:"
|
||||
Write-Host " docker-compose logs -f"
|
||||
}
|
||||
default {
|
||||
Write-Host "❌ Invalid choice. Exiting." -ForegroundColor Red
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host ""
|
||||
Write-Host "📚 Documentation:"
|
||||
Write-Host " - Quick start: README.md"
|
||||
Write-Host " - Docker guide: DOCKER_SETUP.md"
|
||||
Write-Host " - Migration guide: MIGRATION_FROM_SUPABASE.md"
|
||||
Write-Host ""
|
||||
Write-Host "🎉 Setup complete! Happy coding!" -ForegroundColor Green
|
||||
|
||||
|
||||
296
scripts/setup.sh
296
scripts/setup.sh
@@ -1,148 +1,148 @@
|
||||
#!/bin/bash
|
||||
|
||||
# QR Master - Quick Setup Script
|
||||
# This script automates the initial setup process
|
||||
|
||||
set -e
|
||||
|
||||
echo "🚀 QR Master - Quick Setup"
|
||||
echo "================================"
|
||||
echo ""
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Check if Docker is installed
|
||||
if ! command -v docker &> /dev/null; then
|
||||
echo -e "${RED}❌ Docker is not installed. Please install Docker first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if Docker Compose is installed
|
||||
if ! command -v docker-compose &> /dev/null; then
|
||||
echo -e "${RED}❌ Docker Compose is not installed. Please install Docker Compose first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}✓${NC} Docker is installed"
|
||||
echo -e "${GREEN}✓${NC} Docker Compose is installed"
|
||||
echo ""
|
||||
|
||||
# Check if .env exists
|
||||
if [ ! -f .env ]; then
|
||||
echo "📝 Creating .env file from template..."
|
||||
cp env.example .env
|
||||
|
||||
# Generate secrets
|
||||
if command -v openssl &> /dev/null; then
|
||||
NEXTAUTH_SECRET=$(openssl rand -base64 32)
|
||||
IP_SALT=$(openssl rand -base64 32)
|
||||
|
||||
# Update .env with generated secrets
|
||||
sed -i.bak "s|NEXTAUTH_SECRET=.*|NEXTAUTH_SECRET=$NEXTAUTH_SECRET|" .env
|
||||
sed -i.bak "s|IP_SALT=.*|IP_SALT=$IP_SALT|" .env
|
||||
rm .env.bak 2>/dev/null || true
|
||||
|
||||
echo -e "${GREEN}✓${NC} Generated secure secrets"
|
||||
else
|
||||
echo -e "${YELLOW}⚠${NC} OpenSSL not found. Please manually update NEXTAUTH_SECRET and IP_SALT in .env"
|
||||
fi
|
||||
else
|
||||
echo -e "${GREEN}✓${NC} .env file already exists"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# Ask user what mode they want
|
||||
echo "Choose setup mode:"
|
||||
echo "1) Development (database only in Docker, app on host)"
|
||||
echo "2) Production (full stack in Docker)"
|
||||
read -p "Enter choice [1-2]: " choice
|
||||
|
||||
echo ""
|
||||
|
||||
case $choice in
|
||||
1)
|
||||
echo "🔧 Setting up development environment..."
|
||||
echo ""
|
||||
|
||||
# Start database services
|
||||
echo "Starting PostgreSQL and Redis..."
|
||||
docker-compose -f docker-compose.dev.yml up -d
|
||||
|
||||
# Wait for database to be ready
|
||||
echo "Waiting for database to be ready..."
|
||||
sleep 5
|
||||
|
||||
# Install dependencies
|
||||
echo "Installing dependencies..."
|
||||
npm install
|
||||
|
||||
# Run migrations
|
||||
echo "Running database migrations..."
|
||||
npm run db:migrate
|
||||
|
||||
# Seed database
|
||||
echo "Seeding database with demo data..."
|
||||
npm run db:seed
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}✅ Development environment ready!${NC}"
|
||||
echo ""
|
||||
echo "To start the application:"
|
||||
echo " npm run dev"
|
||||
echo ""
|
||||
echo "Access points:"
|
||||
echo " - App: http://localhost:3050"
|
||||
echo " - Database UI: http://localhost:8080"
|
||||
echo " - Database: localhost:5435"
|
||||
echo " - Redis: localhost:6379"
|
||||
;;
|
||||
2)
|
||||
echo "🚀 Setting up production environment..."
|
||||
echo ""
|
||||
|
||||
# Build and start all services
|
||||
echo "Building and starting all services..."
|
||||
docker-compose up -d --build
|
||||
|
||||
# Wait for services to be ready
|
||||
echo "Waiting for services to be ready..."
|
||||
sleep 10
|
||||
|
||||
# Run migrations
|
||||
echo "Running database migrations..."
|
||||
docker-compose exec web npx prisma migrate deploy
|
||||
|
||||
# Seed database
|
||||
echo "Seeding database with demo data..."
|
||||
docker-compose exec web npm run db:seed
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}✅ Production environment ready!${NC}"
|
||||
echo ""
|
||||
echo "Access points:"
|
||||
echo " - App: http://localhost:3050"
|
||||
echo " - Database: localhost:5435"
|
||||
echo " - Redis: localhost:6379"
|
||||
echo ""
|
||||
echo "To view logs:"
|
||||
echo " docker-compose logs -f"
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Invalid choice. Exiting.${NC}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ""
|
||||
echo "📚 Documentation:"
|
||||
echo " - Quick start: README.md"
|
||||
echo " - Docker guide: DOCKER_SETUP.md"
|
||||
echo " - Migration guide: MIGRATION_FROM_SUPABASE.md"
|
||||
echo ""
|
||||
echo "🎉 Setup complete! Happy coding!"
|
||||
|
||||
#!/bin/bash
|
||||
|
||||
# QR Master - Quick Setup Script
|
||||
# This script automates the initial setup process
|
||||
|
||||
set -e
|
||||
|
||||
echo "🚀 QR Master - Quick Setup"
|
||||
echo "================================"
|
||||
echo ""
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Check if Docker is installed
|
||||
if ! command -v docker &> /dev/null; then
|
||||
echo -e "${RED}❌ Docker is not installed. Please install Docker first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if Docker Compose is installed
|
||||
if ! command -v docker-compose &> /dev/null; then
|
||||
echo -e "${RED}❌ Docker Compose is not installed. Please install Docker Compose first.${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo -e "${GREEN}✓${NC} Docker is installed"
|
||||
echo -e "${GREEN}✓${NC} Docker Compose is installed"
|
||||
echo ""
|
||||
|
||||
# Check if .env exists
|
||||
if [ ! -f .env ]; then
|
||||
echo "📝 Creating .env file from template..."
|
||||
cp env.example .env
|
||||
|
||||
# Generate secrets
|
||||
if command -v openssl &> /dev/null; then
|
||||
NEXTAUTH_SECRET=$(openssl rand -base64 32)
|
||||
IP_SALT=$(openssl rand -base64 32)
|
||||
|
||||
# Update .env with generated secrets
|
||||
sed -i.bak "s|NEXTAUTH_SECRET=.*|NEXTAUTH_SECRET=$NEXTAUTH_SECRET|" .env
|
||||
sed -i.bak "s|IP_SALT=.*|IP_SALT=$IP_SALT|" .env
|
||||
rm .env.bak 2>/dev/null || true
|
||||
|
||||
echo -e "${GREEN}✓${NC} Generated secure secrets"
|
||||
else
|
||||
echo -e "${YELLOW}⚠${NC} OpenSSL not found. Please manually update NEXTAUTH_SECRET and IP_SALT in .env"
|
||||
fi
|
||||
else
|
||||
echo -e "${GREEN}✓${NC} .env file already exists"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# Ask user what mode they want
|
||||
echo "Choose setup mode:"
|
||||
echo "1) Development (database only in Docker, app on host)"
|
||||
echo "2) Production (full stack in Docker)"
|
||||
read -p "Enter choice [1-2]: " choice
|
||||
|
||||
echo ""
|
||||
|
||||
case $choice in
|
||||
1)
|
||||
echo "🔧 Setting up development environment..."
|
||||
echo ""
|
||||
|
||||
# Start database services
|
||||
echo "Starting PostgreSQL and Redis..."
|
||||
docker-compose -f docker-compose.dev.yml up -d
|
||||
|
||||
# Wait for database to be ready
|
||||
echo "Waiting for database to be ready..."
|
||||
sleep 5
|
||||
|
||||
# Install dependencies
|
||||
echo "Installing dependencies..."
|
||||
npm install
|
||||
|
||||
# Run migrations
|
||||
echo "Running database migrations..."
|
||||
npm run db:migrate
|
||||
|
||||
# Seed database
|
||||
echo "Seeding database with demo data..."
|
||||
npm run db:seed
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}✅ Development environment ready!${NC}"
|
||||
echo ""
|
||||
echo "To start the application:"
|
||||
echo " npm run dev"
|
||||
echo ""
|
||||
echo "Access points:"
|
||||
echo " - App: http://localhost:3050"
|
||||
echo " - Database UI: http://localhost:8080"
|
||||
echo " - Database: localhost:5435"
|
||||
echo " - Redis: localhost:6379"
|
||||
;;
|
||||
2)
|
||||
echo "🚀 Setting up production environment..."
|
||||
echo ""
|
||||
|
||||
# Build and start all services
|
||||
echo "Building and starting all services..."
|
||||
docker-compose up -d --build
|
||||
|
||||
# Wait for services to be ready
|
||||
echo "Waiting for services to be ready..."
|
||||
sleep 10
|
||||
|
||||
# Run migrations
|
||||
echo "Running database migrations..."
|
||||
docker-compose exec web npx prisma migrate deploy
|
||||
|
||||
# Seed database
|
||||
echo "Seeding database with demo data..."
|
||||
docker-compose exec web npm run db:seed
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}✅ Production environment ready!${NC}"
|
||||
echo ""
|
||||
echo "Access points:"
|
||||
echo " - App: http://localhost:3050"
|
||||
echo " - Database: localhost:5435"
|
||||
echo " - Redis: localhost:6379"
|
||||
echo ""
|
||||
echo "To view logs:"
|
||||
echo " docker-compose logs -f"
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Invalid choice. Exiting.${NC}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ""
|
||||
echo "📚 Documentation:"
|
||||
echo " - Quick start: README.md"
|
||||
echo " - Docker guide: DOCKER_SETUP.md"
|
||||
echo " - Migration guide: MIGRATION_FROM_SUPABASE.md"
|
||||
echo ""
|
||||
echo "🎉 Setup complete! Happy coding!"
|
||||
|
||||
|
||||
Reference in New Issue
Block a user