Compare commits
2 Commits
b1c99893a6
...
c13eb331be
| Author | SHA1 | Date | |
|---|---|---|---|
| c13eb331be | |||
| 08483c7075 |
25
.env.example
25
.env.example
@@ -1,2 +1,23 @@
|
||||
REVENUECAT_WEBHOOK_SECRET = greenlens-rc-webhook-2026
|
||||
REVENUECAT_PRO_ENTITLEMENT_ID = pro
|
||||
SITE_DOMAIN=greenlenspro.com
|
||||
SITE_URL=https://greenlenspro.com
|
||||
|
||||
POSTGRES_DB=greenlns
|
||||
POSTGRES_USER=greenlns
|
||||
POSTGRES_PASSWORD=change-me
|
||||
DATABASE_URL=postgresql://greenlns:change-me@postgres:5432/greenlns
|
||||
|
||||
JWT_SECRET=change-me
|
||||
|
||||
MINIO_ACCESS_KEY=greenlns-minio
|
||||
MINIO_SECRET_KEY=change-me
|
||||
MINIO_BUCKET=plant-images
|
||||
MINIO_PUBLIC_URL=https://greenlenspro.com/storage
|
||||
|
||||
OPENAI_API_KEY=
|
||||
OPENAI_SCAN_MODEL=gpt-5-mini
|
||||
OPENAI_HEALTH_MODEL=gpt-5-mini
|
||||
|
||||
REVENUECAT_WEBHOOK_SECRET=
|
||||
REVENUECAT_PRO_ENTITLEMENT_ID=pro
|
||||
|
||||
PLANT_IMPORT_ADMIN_KEY=
|
||||
|
||||
11
.mcp.json
Normal file
11
.mcp.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"apify": {
|
||||
"type": "sse",
|
||||
"url": "https://mcp.apify.com/sse",
|
||||
"headers": {
|
||||
"Authorization": "Bearer apify_api_0D7RWI6eW1H9LETBuLY7PHNpAErxL72ua6lo"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
130
CLAUDE.md
130
CLAUDE.md
@@ -1,102 +1,92 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
This repository contains the GreenLens mobile app, the Express backend, and the self-hosted landing/deployment stack.
|
||||
|
||||
## Commands
|
||||
|
||||
### Mobile App (Expo)
|
||||
### Mobile app
|
||||
```bash
|
||||
npm install # Install dependencies
|
||||
npm run start # Start Expo dev server (offline mode)
|
||||
npm run android # Start on Android
|
||||
npm run ios # Start on iOS
|
||||
npm run test # Run Jest tests
|
||||
npm install
|
||||
npm run start
|
||||
npm run android
|
||||
npm run ios
|
||||
npm run test
|
||||
```
|
||||
|
||||
### Server (Express)
|
||||
### Backend
|
||||
```bash
|
||||
cd server
|
||||
npm install
|
||||
npm run start # Start Express server
|
||||
npm run rebuild:batches # Rebuild plant catalog from batch constants
|
||||
npm run diagnostics # Check duplicates and import audits
|
||||
npm run start
|
||||
npm run rebuild:batches
|
||||
npm run diagnostics
|
||||
```
|
||||
|
||||
### Production Builds (EAS)
|
||||
### Production iOS builds
|
||||
```bash
|
||||
npx eas-cli build:version:set -p ios # Bump iOS build number
|
||||
npx eas-cli build:version:set -p ios
|
||||
npx eas-cli build -p ios --profile production
|
||||
npx eas-cli submit -p ios --latest # Submit to TestFlight
|
||||
npx eas-cli submit -p ios --latest
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Mobile App
|
||||
Expo Router with file-based routing. Entry point is `app/_layout.tsx`.
|
||||
### Mobile app
|
||||
- Expo Router entrypoint: `app/_layout.tsx`
|
||||
- Global app state: `context/AppContext.tsx`
|
||||
- Backend HTTP client: `services/backend/backendApiClient.ts`
|
||||
- In-app fallback mock: `services/backend/mockBackendService.ts`
|
||||
|
||||
- **`app/(tabs)/`** — Tab navigation: Home (`index.tsx`), Search, Profile
|
||||
- **`app/scanner.tsx`** — Plant scan modal
|
||||
- **`app/lexicon.tsx`** — Plant encyclopedia
|
||||
- **`app/plant/`** — Plant detail screens
|
||||
- **`app/auth/`** — Login / Signup screens
|
||||
- **`app/onboarding.tsx`** — First-launch onboarding
|
||||
### Backend
|
||||
Single Express server in `server/index.js` with supporting modules in `server/lib/`:
|
||||
|
||||
Global state lives in `context/AppContext.tsx` (plants, user, billing, language).
|
||||
- `postgres.js` for PostgreSQL access
|
||||
- `sqlite.js` as a compatibility shim re-exporting the PostgreSQL layer
|
||||
- `plants.js` for plant catalog persistence and diagnostics
|
||||
- `auth.js` for JWT auth
|
||||
- `billing.js` for credits, RevenueCat, Stripe, and idempotency
|
||||
- `openai.js` for scan and health-check model calls
|
||||
- `storage.js` for MinIO/S3 object uploads
|
||||
|
||||
### Services Layer (Mobile)
|
||||
- `services/storageService.ts` — AsyncStorage persistence for user plants
|
||||
- `services/plantRecognitionService.ts` — Calls `/v1/scan` on backend
|
||||
- `services/plantDatabaseService.ts` — Local static plant data
|
||||
- `services/authService.ts` — JWT auth against backend
|
||||
- `services/backend/backendApiClient.ts` — HTTP client for all `/v1/*` calls
|
||||
- `services/backend/mockBackendService.ts` — In-app mock if `EXPO_PUBLIC_BACKEND_URL` is not set
|
||||
Primary backend environment variables:
|
||||
|
||||
### Backend (Express — `server/`)
|
||||
Single `server/index.js` with all routes. Libs in `server/lib/`:
|
||||
|
||||
- `sqlite.js` — SQLite wrapper (`openDatabase`, `run`, `get`, `all`)
|
||||
- `plants.js` — Plant catalog CRUD + semantic search
|
||||
- `auth.js` — JWT-based signup/login
|
||||
- `billing.js` — Credits, idempotency, Stripe webhooks
|
||||
- `openai.js` — Plant identification + health analysis via OpenAI
|
||||
- `storage.js` — MinIO/S3 image upload (`uploadImage`, `ensureStorageBucket`)
|
||||
|
||||
Key env vars for server:
|
||||
```
|
||||
PLANT_DB_PATH # SQLite file path (default: server/data/greenlns.sqlite)
|
||||
```bash
|
||||
DATABASE_URL
|
||||
POSTGRES_HOST
|
||||
POSTGRES_PORT
|
||||
POSTGRES_DB
|
||||
POSTGRES_USER
|
||||
POSTGRES_PASSWORD
|
||||
JWT_SECRET
|
||||
OPENAI_API_KEY
|
||||
STRIPE_SECRET_KEY
|
||||
JWT_SECRET
|
||||
MINIO_ENDPOINT / MINIO_ACCESS_KEY / MINIO_SECRET_KEY / MINIO_BUCKET / MINIO_PUBLIC_URL
|
||||
MINIO_ENDPOINT
|
||||
MINIO_ACCESS_KEY
|
||||
MINIO_SECRET_KEY
|
||||
MINIO_BUCKET
|
||||
MINIO_PUBLIC_URL
|
||||
```
|
||||
|
||||
### Landing Page (`greenlns-landing/`)
|
||||
Next.js 16 app with `output: 'standalone'` for Docker. Runs independently from the mobile app.
|
||||
### Landing and deployment
|
||||
`greenlns-landing/` is a Next.js 16 app built with `output: 'standalone'`.
|
||||
|
||||
Has its own `docker-compose.yml` that spins up:
|
||||
- Next.js app (Landing Page)
|
||||
- PostgreSQL 16 (persistent DB for the backend)
|
||||
- MinIO (persistent image storage)
|
||||
- Nginx (reverse proxy + SSL)
|
||||
The production-style stack lives in `greenlns-landing/docker-compose.yml` and includes:
|
||||
|
||||
### Infrastructure Plan
|
||||
**Current state:** Server runs on Railway with SQLite (ephemeral).
|
||||
- `caddy` for TLS and reverse proxy
|
||||
- `landing` for the Next.js app
|
||||
- `api` for the Express backend
|
||||
- `postgres` for persistent app data
|
||||
- `minio` for object storage
|
||||
|
||||
**Target state (not yet migrated):**
|
||||
- Express Server moves OFF Railway → runs on the landing page server via `docker-compose.yml`
|
||||
- PostgreSQL + MinIO replace SQLite + Railway hosting entirely
|
||||
`greenlns-landing/Caddyfile` routes:
|
||||
|
||||
**When migrating to PostgreSQL (do all of these together):**
|
||||
1. Remove `server/lib/sqlite.js` and `server/data/` entirely
|
||||
2. Remove Railway service for the Express server (no longer needed)
|
||||
3. Add `pg` package to `server/package.json`
|
||||
4. Replace all SQLite calls with `pg` and `DATABASE_URL` env var
|
||||
5. Change all SQL placeholders from `?` to `$1, $2, ...` (SQLite → PostgreSQL syntax)
|
||||
6. Add Express server as a service in `greenlens-landing/docker-compose.yml`
|
||||
7. Use `JSONB` columns in PostgreSQL for nested data (e.g. `careInfo`, `categories`) instead of serialized strings — enables fast querying and filtering directly on JSON fields
|
||||
- `/` to the landing app
|
||||
- `/api/*`, `/auth/*`, `/v1/*`, `/health`, `/plants/*` to the Express API
|
||||
- `/storage/*` to MinIO
|
||||
|
||||
### Key Patterns
|
||||
- SQL placeholders: SQLite uses `?`, PostgreSQL uses `$1, $2, ...` — important when migrating
|
||||
- Translations: `utils/translations.ts` supports `de` / `en` / `es`
|
||||
- Colors: `constants/Colors.ts` with light/dark mode tokens
|
||||
- Image URIs: App sends base64 to `/v1/upload/image`, gets back a public MinIO URL
|
||||
## Data model notes
|
||||
|
||||
- PostgreSQL is the source of truth for server persistence.
|
||||
- Nested plant metadata such as `categories` and `careInfo` uses `JSONB`.
|
||||
- Billing idempotency responses also use `JSONB`.
|
||||
- SQL placeholders use PostgreSQL syntax: `$1`, `$2`, ...
|
||||
|
||||
222
README.md
222
README.md
@@ -1,92 +1,184 @@
|
||||
# GreenLens
|
||||
|
||||
Expo app for plant scanning, care tracking, lexicon browsing, and profile settings.
|
||||
Expo app for plant scanning, care tracking, billing, and profile management, backed by an Express API.
|
||||
|
||||
## Run locally
|
||||
## App development
|
||||
|
||||
1. Install dependencies:
|
||||
- `npm install`
|
||||
2. Start Expo:
|
||||
- `npm run start`
|
||||
```bash
|
||||
npm install
|
||||
npm run start
|
||||
```
|
||||
|
||||
## iOS TestFlight (EAS)
|
||||
## Backend development
|
||||
|
||||
Use these three commands in order:
|
||||
The backend now targets PostgreSQL instead of SQLite.
|
||||
|
||||
1. Set iOS build number:
|
||||
- `npx eas-cli build:version:set -p ios`
|
||||
2. Create production iOS build:
|
||||
- `npx eas-cli build -p ios --profile production`
|
||||
3. Submit latest iOS build to TestFlight:
|
||||
- `npx eas-cli submit -p ios --latest`
|
||||
```bash
|
||||
cd server
|
||||
npm install
|
||||
npm run start
|
||||
```
|
||||
|
||||
## Lexicon SQLite maintenance
|
||||
Required backend environment:
|
||||
|
||||
The server now uses a persistent SQLite database (`server/data/greenlns.sqlite`) and supports validated rebuilds.
|
||||
- `DATABASE_URL` or `POSTGRES_HOST` + `POSTGRES_PORT` + `POSTGRES_DB` + `POSTGRES_USER` + `POSTGRES_PASSWORD`
|
||||
- `JWT_SECRET`
|
||||
|
||||
1. Install server dependencies:
|
||||
- `cd server && npm install`
|
||||
2. Run the server:
|
||||
- `npm run start`
|
||||
3. Rebuild plants from the local lexicon batch constants:
|
||||
- `npm run rebuild:batches`
|
||||
4. Check duplicates and import audits:
|
||||
- `npm run diagnostics`
|
||||
Optional integrations:
|
||||
|
||||
For protected rebuild endpoints, set `PLANT_IMPORT_ADMIN_KEY` and send `x-admin-key` in requests.
|
||||
- `OPENAI_API_KEY`
|
||||
- `REVENUECAT_WEBHOOK_SECRET`
|
||||
- `PLANT_IMPORT_ADMIN_KEY`
|
||||
- `MINIO_ENDPOINT`
|
||||
- `MINIO_ACCESS_KEY`
|
||||
- `MINIO_SECRET_KEY`
|
||||
- `MINIO_BUCKET`
|
||||
- `MINIO_PUBLIC_URL`
|
||||
|
||||
### Local plant images
|
||||
## Docker Compose
|
||||
|
||||
The lexicon now supports storing plant image paths in SQLite as local public paths instead of external URLs.
|
||||
For backend-only local infrastructure use [docker-compose.yml](/abs/path/C:/Users/a931627/Documents/apps/GreenLns/docker-compose.yml).
|
||||
|
||||
Recommended structure:
|
||||
For the production-style self-hosted stack with landing page, Caddy, API, PostgreSQL, and MinIO use [greenlns-landing/docker-compose.yml](/abs/path/C:/Users/a931627/Documents/apps/GreenLns/greenlns-landing/docker-compose.yml).
|
||||
|
||||
- Database field: `imageUri`
|
||||
- Value example: `/plants/monstera-deliciosa.webp`
|
||||
- File location on disk: `server/public/plants/monstera-deliciosa.webp`
|
||||
## Server deployment
|
||||
|
||||
Notes:
|
||||
Run the commands in this section from the repo root on your server:
|
||||
|
||||
- The Express server serves `server/public/plants` at `/plants/*`.
|
||||
- Remote `https://...` image URLs still work, so migration can be incremental.
|
||||
- Keep the database focused on metadata and store only the image path, not binary blobs.
|
||||
```bash
|
||||
cd /path/to/GreenLns
|
||||
```
|
||||
|
||||
## Billing and backend simulation
|
||||
Example:
|
||||
|
||||
The app now uses a backend API contract for paid AI features:
|
||||
```bash
|
||||
cd /var/www/GreenLns
|
||||
```
|
||||
|
||||
- Scan AI (`/v1/scan`)
|
||||
- Semantic AI search (`/v1/search/semantic`)
|
||||
- Billing summary (`/v1/billing/summary`)
|
||||
- Health check AI (`/v1/health-check`)
|
||||
### 1. Prepare environment
|
||||
|
||||
The Node server in `server/index.js` now implements these `/v1` routes directly and uses:
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
- `server/lib/openai.js` for OpenAI calls
|
||||
- `server/lib/billing.js` for credit/billing/idempotency state
|
||||
Then fill at least:
|
||||
|
||||
If `EXPO_PUBLIC_BACKEND_URL` is not set, the app uses an in-app mock backend simulation for `/v1/*` API calls.
|
||||
`EXPO_PUBLIC_PAYMENT_SERVER_URL` is used only for Stripe PaymentSheet calls (`/api/payment-sheet`).
|
||||
The in-app mock backend provides:
|
||||
- `SITE_DOMAIN`
|
||||
- `SITE_URL`
|
||||
- `POSTGRES_PASSWORD`
|
||||
- `JWT_SECRET`
|
||||
- `MINIO_SECRET_KEY`
|
||||
- optional: `OPENAI_API_KEY`, `REVENUECAT_*`
|
||||
|
||||
- Server-side style credit enforcement
|
||||
- Atomic `consumeCredit()` behavior
|
||||
- Idempotency-key handling
|
||||
- Free and Pro monthly credit buckets
|
||||
- Top-up purchase simulation
|
||||
- RevenueCat/Stripe webhook simulation
|
||||
### 2. Start the full production stack
|
||||
|
||||
This makes it possible to build UI and flow now, then replace mock endpoints with a real backend later.
|
||||
```bash
|
||||
docker compose up --build -d
|
||||
```
|
||||
|
||||
## Production integration notes
|
||||
When you run this from the repo root, Docker Compose uses [docker-compose.yml](/abs/path/C:/Users/a931627/Documents/apps/GreenLns/docker-compose.yml).
|
||||
|
||||
- Keep OpenAI keys only on the backend.
|
||||
- Use app-store billing via RevenueCat or StoreKit/Play Billing.
|
||||
- Forward entitlement updates to backend webhooks.
|
||||
- Enforce credits on backend only; app should only display UX quota.
|
||||
- Recommended backend env vars:
|
||||
- `OPENAI_API_KEY`
|
||||
- `OPENAI_SCAN_MODEL` (for example `gpt-5`)
|
||||
- `OPENAI_HEALTH_MODEL` (for example `gpt-5`)
|
||||
- `STRIPE_SECRET_KEY`
|
||||
- `STRIPE_PUBLISHABLE_KEY`
|
||||
What gets built:
|
||||
|
||||
- `landing` is built from `./greenlns-landing/Dockerfile`
|
||||
- `api` is built from `./server/Dockerfile`
|
||||
|
||||
What is not built locally, but pulled as ready-made images:
|
||||
|
||||
- `postgres` uses `postgres:16-alpine`
|
||||
- `minio` uses `minio/minio:latest`
|
||||
- `caddy` uses `caddy:2.8-alpine`
|
||||
|
||||
So yes: `docker compose up --build -d` builds the landing page container and the API container, and it starts PostgreSQL as a container. PostgreSQL is not "built" from your code, it is started from the official Postgres image.
|
||||
|
||||
This starts:
|
||||
|
||||
- `caddy`
|
||||
- `landing`
|
||||
- `api`
|
||||
- `postgres`
|
||||
- `minio`
|
||||
|
||||
### 3. Useful server commands
|
||||
|
||||
Check running containers:
|
||||
|
||||
```bash
|
||||
docker compose ps
|
||||
```
|
||||
|
||||
Follow all logs:
|
||||
|
||||
```bash
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
Follow only API logs:
|
||||
|
||||
```bash
|
||||
docker compose logs -f api
|
||||
```
|
||||
|
||||
Follow only landing logs:
|
||||
|
||||
```bash
|
||||
docker compose logs -f landing
|
||||
```
|
||||
|
||||
Restart one service:
|
||||
|
||||
```bash
|
||||
docker compose restart api
|
||||
docker compose restart landing
|
||||
```
|
||||
|
||||
Rebuild and restart after code changes:
|
||||
|
||||
```bash
|
||||
docker compose up --build -d
|
||||
```
|
||||
|
||||
Stop the stack:
|
||||
|
||||
```bash
|
||||
docker compose down
|
||||
```
|
||||
|
||||
Stop the stack and remove volumes:
|
||||
|
||||
```bash
|
||||
docker compose down -v
|
||||
```
|
||||
|
||||
### 4. Health checks after deploy
|
||||
|
||||
```bash
|
||||
curl https://greenlenspro.com/health
|
||||
curl https://greenlenspro.com/
|
||||
curl https://greenlenspro.com/sitemap.xml
|
||||
```
|
||||
|
||||
### 5. Production compose file location
|
||||
|
||||
If you want to run the same stack from inside the landing directory instead:
|
||||
|
||||
```bash
|
||||
cd greenlns-landing
|
||||
docker compose up --build -d
|
||||
```
|
||||
|
||||
In that case Docker Compose uses [greenlns-landing/docker-compose.yml](/abs/path/C:/Users/a931627/Documents/apps/GreenLns/greenlns-landing/docker-compose.yml).
|
||||
|
||||
There, too:
|
||||
|
||||
- `landing` is built from `greenlns-landing/Dockerfile`
|
||||
- `api` is built from `../server/Dockerfile`
|
||||
- `postgres`, `minio`, and `caddy` are started from official images
|
||||
|
||||
## iOS TestFlight
|
||||
|
||||
```bash
|
||||
npx eas-cli build:version:set -p ios
|
||||
npx eas-cli build -p ios --profile production
|
||||
npx eas-cli submit -p ios --latest
|
||||
```
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const { closeDatabase, openDatabase } = require('../../server/lib/sqlite');
|
||||
const { ensurePlantSchema, getPlants, rebuildPlantsCatalog } = require('../../server/lib/plants');
|
||||
|
||||
const { closeDatabase, openDatabase, run } = require('../../server/lib/sqlite');
|
||||
const { ensurePlantSchema, getPlants } = require('../../server/lib/plants');
|
||||
const describeIfDatabase = process.env.DATABASE_URL ? describe : describe.skip;
|
||||
|
||||
describe('server plant search ranking', () => {
|
||||
describeIfDatabase('server plant search ranking', () => {
|
||||
let db;
|
||||
let dbPath;
|
||||
|
||||
beforeAll(async () => {
|
||||
dbPath = path.join(os.tmpdir(), `greenlns-search-${Date.now()}.sqlite`);
|
||||
db = await openDatabase(dbPath);
|
||||
db = await openDatabase();
|
||||
await ensurePlantSchema(db);
|
||||
|
||||
const entries = [
|
||||
@@ -83,44 +79,17 @@ describe('server plant search ranking', () => {
|
||||
},
|
||||
];
|
||||
|
||||
for (const entry of entries) {
|
||||
await run(
|
||||
db,
|
||||
`INSERT INTO plants (
|
||||
id,
|
||||
name,
|
||||
botanicalName,
|
||||
imageUri,
|
||||
imageStatus,
|
||||
description,
|
||||
categories,
|
||||
careInfo,
|
||||
confidence,
|
||||
createdAt,
|
||||
updatedAt
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))`,
|
||||
[
|
||||
entry.id,
|
||||
entry.name,
|
||||
entry.botanicalName,
|
||||
entry.imageUri,
|
||||
entry.imageStatus,
|
||||
entry.description,
|
||||
JSON.stringify(entry.categories),
|
||||
JSON.stringify(entry.careInfo),
|
||||
entry.confidence,
|
||||
],
|
||||
);
|
||||
}
|
||||
await rebuildPlantsCatalog(db, entries, {
|
||||
source: 'plantsSearch.test',
|
||||
preserveExistingIds: false,
|
||||
enforceUniqueImages: true,
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (db) {
|
||||
await closeDatabase(db);
|
||||
}
|
||||
if (dbPath && fs.existsSync(dbPath)) {
|
||||
fs.unlinkSync(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
it('returns exact common name matches first', async () => {
|
||||
|
||||
@@ -196,4 +196,81 @@ describe('mockBackendService billing simulation', () => {
|
||||
|
||||
expect(second.billing.credits.topupBalance).toBe(25);
|
||||
});
|
||||
|
||||
it('ignores malformed pro entitlements coming from top-up customer info', async () => {
|
||||
const response = await mockBackendService.syncRevenueCatState({
|
||||
userId: 'test-user-rc-topup-misconfigured-entitlement',
|
||||
source: 'topup_purchase',
|
||||
customerInfo: {
|
||||
entitlements: {
|
||||
active: {
|
||||
pro: {
|
||||
productIdentifier: 'topup_small',
|
||||
expirationDate: '2026-04-30T00:00:00.000Z',
|
||||
},
|
||||
},
|
||||
},
|
||||
nonSubscriptions: {
|
||||
topup_small: [
|
||||
{
|
||||
productIdentifier: 'topup_small',
|
||||
transactionIdentifier: 'rc-topup-malformed-1',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.billing.entitlement.plan).toBe('free');
|
||||
expect(response.billing.entitlement.status).toBe('inactive');
|
||||
expect(response.billing.credits.topupBalance).toBe(25);
|
||||
expect(response.billing.credits.available).toBe(40);
|
||||
});
|
||||
|
||||
it('does not downgrade an existing pro user during a top-up sync', async () => {
|
||||
const userId = 'test-user-rc-pro-topup';
|
||||
|
||||
await mockBackendService.syncRevenueCatState({
|
||||
userId,
|
||||
source: 'subscription_purchase',
|
||||
customerInfo: {
|
||||
entitlements: {
|
||||
active: {
|
||||
pro: {
|
||||
productIdentifier: 'monthly_pro',
|
||||
expirationDate: '2026-04-30T00:00:00.000Z',
|
||||
},
|
||||
},
|
||||
},
|
||||
nonSubscriptions: {},
|
||||
},
|
||||
});
|
||||
|
||||
const response = await mockBackendService.syncRevenueCatState({
|
||||
userId,
|
||||
source: 'topup_purchase',
|
||||
customerInfo: {
|
||||
entitlements: {
|
||||
active: {
|
||||
pro: {
|
||||
productIdentifier: 'topup_small',
|
||||
expirationDate: '2026-04-30T00:00:00.000Z',
|
||||
},
|
||||
},
|
||||
},
|
||||
nonSubscriptions: {
|
||||
topup_small: [
|
||||
{
|
||||
productIdentifier: 'topup_small',
|
||||
transactionIdentifier: 'rc-topup-pro-1',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.billing.entitlement.plan).toBe('pro');
|
||||
expect(response.billing.credits.available).toBe(275);
|
||||
expect(response.billing.credits.topupBalance).toBe(25);
|
||||
});
|
||||
});
|
||||
|
||||
2
app.json
2
app.json
@@ -2,7 +2,7 @@
|
||||
"expo": {
|
||||
"name": "GreenLens",
|
||||
"slug": "greenlens",
|
||||
"version": "2.1.5",
|
||||
"version": "2.1.6",
|
||||
"orientation": "portrait",
|
||||
"icon": "./assets/icon.png",
|
||||
"userInterfaceStyle": "automatic",
|
||||
|
||||
@@ -94,7 +94,7 @@ function RootLayoutInner() {
|
||||
await Purchases.logIn(session.serverUserId);
|
||||
const customerInfo = await Purchases.getCustomerInfo();
|
||||
if (!cancelled) {
|
||||
await syncRevenueCatState(customerInfo as any);
|
||||
await syncRevenueCatState(customerInfo as any, 'app_init');
|
||||
}
|
||||
} else {
|
||||
await Purchases.logOut();
|
||||
|
||||
@@ -4,13 +4,69 @@ import { SafeAreaView } from 'react-native-safe-area-context';
|
||||
import { Ionicons } from '@expo/vector-icons';
|
||||
import { useRouter } from 'expo-router';
|
||||
import Constants from 'expo-constants';
|
||||
import Purchases, { PRODUCT_CATEGORY } from 'react-native-purchases';
|
||||
import Purchases, {
|
||||
PACKAGE_TYPE,
|
||||
PRODUCT_CATEGORY,
|
||||
PurchasesOffering,
|
||||
PurchasesPackage,
|
||||
PurchasesStoreProduct,
|
||||
} from 'react-native-purchases';
|
||||
import { useApp } from '../../context/AppContext';
|
||||
import { useColors } from '../../constants/Colors';
|
||||
import { ThemeBackdrop } from '../../components/ThemeBackdrop';
|
||||
import { Language } from '../../types';
|
||||
import { PurchaseProductId } from '../../services/backend/contracts';
|
||||
|
||||
type SubscriptionProductId = 'monthly_pro' | 'yearly_pro';
|
||||
type TopupProductId = Extract<PurchaseProductId, 'topup_small' | 'topup_medium' | 'topup_large'>;
|
||||
type SubscriptionPackages = Partial<Record<SubscriptionProductId, PurchasesPackage>>;
|
||||
type TopupProducts = Partial<Record<TopupProductId, PurchasesStoreProduct>>;
|
||||
|
||||
const isMatchingPackage = (
|
||||
pkg: PurchasesPackage,
|
||||
productId: SubscriptionProductId,
|
||||
expectedPackageType: PACKAGE_TYPE,
|
||||
) => {
|
||||
return (
|
||||
pkg.product.identifier === productId
|
||||
|| pkg.identifier === productId
|
||||
|| pkg.packageType === expectedPackageType
|
||||
);
|
||||
};
|
||||
|
||||
const resolveSubscriptionPackages = (offering: PurchasesOffering | null): SubscriptionPackages => {
|
||||
if (!offering) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const availablePackages = [
|
||||
offering.monthly,
|
||||
offering.annual,
|
||||
...offering.availablePackages,
|
||||
].filter((value): value is PurchasesPackage => Boolean(value));
|
||||
|
||||
return {
|
||||
monthly_pro: availablePackages.find((pkg) => isMatchingPackage(pkg, 'monthly_pro', PACKAGE_TYPE.MONTHLY)),
|
||||
yearly_pro: availablePackages.find((pkg) => isMatchingPackage(pkg, 'yearly_pro', PACKAGE_TYPE.ANNUAL)),
|
||||
};
|
||||
};
|
||||
|
||||
const summarizeOfferingPackages = (offering: PurchasesOffering | null) => {
|
||||
if (!offering) {
|
||||
return { identifier: null, packages: [] as Array<Record<string, string | null>> };
|
||||
}
|
||||
|
||||
return {
|
||||
identifier: offering.identifier,
|
||||
packages: offering.availablePackages.map((pkg) => ({
|
||||
identifier: pkg.identifier,
|
||||
packageType: pkg.packageType,
|
||||
productIdentifier: pkg.product.identifier,
|
||||
priceString: pkg.product.priceString,
|
||||
})),
|
||||
};
|
||||
};
|
||||
|
||||
const getBillingCopy = (language: Language) => {
|
||||
if (language === 'de') {
|
||||
return {
|
||||
@@ -161,8 +217,8 @@ export default function BillingScreen() {
|
||||
const [subModalVisible, setSubModalVisible] = useState(false);
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [storeReady, setStoreReady] = useState(isExpoGo);
|
||||
const [subscriptionPackages, setSubscriptionPackages] = useState<Partial<Record<'monthly_pro' | 'yearly_pro', any>>>({});
|
||||
const [topupProducts, setTopupProducts] = useState<Partial<Record<'topup_small' | 'topup_medium' | 'topup_large', any>>>({});
|
||||
const [subscriptionPackages, setSubscriptionPackages] = useState<SubscriptionPackages>({});
|
||||
const [topupProducts, setTopupProducts] = useState<TopupProducts>({});
|
||||
|
||||
// Cancel Flow State
|
||||
const [cancelStep, setCancelStep] = useState<'none' | 'survey' | 'offer'>('none');
|
||||
@@ -188,10 +244,12 @@ export default function BillingScreen() {
|
||||
if (cancelled) return;
|
||||
|
||||
const currentOffering = offerings.current;
|
||||
setSubscriptionPackages({
|
||||
monthly_pro: currentOffering?.monthly ?? undefined,
|
||||
yearly_pro: currentOffering?.annual ?? undefined,
|
||||
});
|
||||
const resolvedPackages = resolveSubscriptionPackages(currentOffering);
|
||||
if (!resolvedPackages.monthly_pro || !resolvedPackages.yearly_pro) {
|
||||
console.warn('[Billing] RevenueCat offering missing expected subscription packages', summarizeOfferingPackages(currentOffering));
|
||||
}
|
||||
|
||||
setSubscriptionPackages(resolvedPackages);
|
||||
|
||||
setTopupProducts({
|
||||
topup_small: topups.find((product) => product.identifier === 'topup_small'),
|
||||
@@ -244,13 +302,20 @@ export default function BillingScreen() {
|
||||
return;
|
||||
}
|
||||
const selectedPackage = productId === 'monthly_pro' ? monthlyPackage : yearlyPackage;
|
||||
const latestOffering = !selectedPackage
|
||||
? await Purchases.getOfferings().then((offerings) => offerings.current)
|
||||
: null;
|
||||
if (!selectedPackage) {
|
||||
console.warn('[Billing] Purchase blocked because subscription package was not resolved', {
|
||||
productId,
|
||||
offering: summarizeOfferingPackages(latestOffering),
|
||||
});
|
||||
throw new Error('Abo-Paket konnte nicht geladen werden. Bitte RevenueCat Offering prüfen.');
|
||||
}
|
||||
await Purchases.purchasePackage(selectedPackage);
|
||||
// Derive plan locally from RevenueCat — backend sync via webhook comes later (Step 3)
|
||||
const customerInfo = await Purchases.getCustomerInfo();
|
||||
await syncRevenueCatState(customerInfo as any);
|
||||
await syncRevenueCatState(customerInfo as any, 'subscription_purchase');
|
||||
} else {
|
||||
const selectedProduct = topupProducts[productId];
|
||||
if (!selectedProduct) {
|
||||
@@ -258,7 +323,7 @@ export default function BillingScreen() {
|
||||
}
|
||||
await Purchases.purchaseStoreProduct(selectedProduct);
|
||||
const customerInfo = await Purchases.getCustomerInfo();
|
||||
await syncRevenueCatState(customerInfo as any);
|
||||
await syncRevenueCatState(customerInfo as any, 'topup_purchase');
|
||||
}
|
||||
}
|
||||
setSubModalVisible(false);
|
||||
@@ -282,7 +347,7 @@ export default function BillingScreen() {
|
||||
try {
|
||||
if (!isExpoGo) {
|
||||
const customerInfo = await Purchases.restorePurchases();
|
||||
await syncRevenueCatState(customerInfo as any);
|
||||
await syncRevenueCatState(customerInfo as any, 'restore');
|
||||
}
|
||||
Alert.alert(copy.restorePurchases, '✓');
|
||||
} catch (e) {
|
||||
@@ -413,11 +478,11 @@ export default function BillingScreen() {
|
||||
</View>
|
||||
|
||||
<View style={[styles.legalLinksRow, { marginTop: 16 }]}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/privacy')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/privacy')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Privacy Policy</Text>
|
||||
</TouchableOpacity>
|
||||
<Text style={[styles.legalSep, { color: colors.textMuted }]}> · </Text>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/terms')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/terms')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Terms of Use</Text>
|
||||
</TouchableOpacity>
|
||||
</View>
|
||||
@@ -467,11 +532,11 @@ export default function BillingScreen() {
|
||||
))}
|
||||
</View>
|
||||
<View style={[styles.legalLinksRow, { marginTop: 12 }]}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/privacy')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/privacy')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Privacy Policy</Text>
|
||||
</TouchableOpacity>
|
||||
<Text style={[styles.legalSep, { color: colors.textMuted }]}> · </Text>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/terms')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/terms')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Terms of Use</Text>
|
||||
</TouchableOpacity>
|
||||
</View>
|
||||
@@ -526,7 +591,7 @@ export default function BillingScreen() {
|
||||
planId === 'pro' && { borderColor: colors.primary, backgroundColor: colors.primary + '10' }
|
||||
]}
|
||||
onPress={() => handlePurchase('monthly_pro')}
|
||||
disabled={isUpdating}
|
||||
disabled={isUpdating || !storeReady}
|
||||
>
|
||||
<View style={{ flex: 1 }}>
|
||||
<View style={styles.planHeaderRow}>
|
||||
@@ -582,11 +647,11 @@ export default function BillingScreen() {
|
||||
</TouchableOpacity>
|
||||
</View>
|
||||
<View style={styles.legalLinksRow}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/privacy')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/privacy')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Privacy Policy</Text>
|
||||
</TouchableOpacity>
|
||||
<Text style={[styles.legalSep, { color: colors.textMuted }]}> · </Text>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlns-landing.vercel.app/terms')}>
|
||||
<TouchableOpacity onPress={() => Linking.openURL('https://greenlenspro.com/terms')}>
|
||||
<Text style={[styles.legalLink, { color: colors.primary }]}>Terms of Use</Text>
|
||||
</TouchableOpacity>
|
||||
</View>
|
||||
|
||||
@@ -121,7 +121,7 @@ export default function DataScreen() {
|
||||
text: copy.deleteActionBtn,
|
||||
style: 'destructive',
|
||||
onPress: async () => {
|
||||
// Future implementation: call backend to wipe user data, cancel active Stripe subscriptions
|
||||
// Future implementation: call backend to wipe user data and cancel active app subscriptions
|
||||
await signOut();
|
||||
router.replace('/onboarding');
|
||||
},
|
||||
|
||||
@@ -12,7 +12,14 @@ import {
|
||||
import { ImageCacheService } from '../services/imageCacheService';
|
||||
import { getTranslation } from '../utils/translations';
|
||||
import { backendApiClient } from '../services/backend/backendApiClient';
|
||||
import { BillingSummary, PurchaseProductId, RevenueCatCustomerInfo, SimulatedWebhookEvent } from '../services/backend/contracts';
|
||||
import {
|
||||
BillingSummary,
|
||||
PurchaseProductId,
|
||||
RevenueCatCustomerInfo,
|
||||
RevenueCatEntitlementInfo,
|
||||
RevenueCatSyncSource,
|
||||
SimulatedWebhookEvent,
|
||||
} from '../services/backend/contracts';
|
||||
import { createIdempotencyKey } from '../utils/idempotency';
|
||||
import { AuthService, AuthSession } from '../services/authService';
|
||||
import { PlantsDb, SettingsDb, LexiconHistoryDb, AppMetaDb } from '../services/database';
|
||||
@@ -43,7 +50,7 @@ interface AppState {
|
||||
updatePlant: (plant: Plant) => void;
|
||||
refreshPlants: () => void;
|
||||
refreshBillingSummary: () => Promise<void>;
|
||||
syncRevenueCatState: (customerInfo: RevenueCatCustomerInfo) => Promise<BillingSummary | null>;
|
||||
syncRevenueCatState: (customerInfo: RevenueCatCustomerInfo, source?: RevenueCatSyncSource) => Promise<BillingSummary | null>;
|
||||
simulatePurchase: (productId: PurchaseProductId) => Promise<void>;
|
||||
simulateWebhookEvent: (event: SimulatedWebhookEvent, payload?: { credits?: number }) => Promise<void>;
|
||||
getLexiconSearchHistory: () => string[];
|
||||
@@ -75,6 +82,42 @@ const isAppearanceMode = (v: string): v is AppearanceMode =>
|
||||
const isColorPalette = (v: string): v is ColorPalette =>
|
||||
v === 'forest' || v === 'ocean' || v === 'sunset' || v === 'mono';
|
||||
const isLanguage = (v: string): v is Language => v === 'de' || v === 'en' || v === 'es';
|
||||
const REVENUECAT_PRO_ENTITLEMENT_ID = (process.env.EXPO_PUBLIC_REVENUECAT_PRO_ENTITLEMENT_ID || 'pro').trim() || 'pro';
|
||||
const SUPPORTED_REVENUECAT_SUBSCRIPTION_PRODUCTS = new Set<PurchaseProductId>(['monthly_pro', 'yearly_pro']);
|
||||
|
||||
const summarizeRevenueCatCustomerInfo = (customerInfo: RevenueCatCustomerInfo) => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
return {
|
||||
appUserId: customerInfo?.appUserId ?? null,
|
||||
originalAppUserId: customerInfo?.originalAppUserId ?? null,
|
||||
activeEntitlements: Object.entries(activeEntitlements).map(([id, entitlement]) => ({
|
||||
id,
|
||||
productIdentifier: entitlement?.productIdentifier ?? null,
|
||||
expirationDate: entitlement?.expirationDate || entitlement?.expiresDate || null,
|
||||
})),
|
||||
allPurchasedProductIdentifiers: customerInfo?.allPurchasedProductIdentifiers ?? [],
|
||||
nonSubscriptionTransactions: Object.values(customerInfo?.nonSubscriptions || {}).flatMap((entries) =>
|
||||
(Array.isArray(entries) ? entries : []).map((transaction) => ({
|
||||
productIdentifier: transaction?.productIdentifier ?? null,
|
||||
transactionIdentifier: transaction?.transactionIdentifier || transaction?.transactionId || null,
|
||||
}))),
|
||||
};
|
||||
};
|
||||
|
||||
const getValidProEntitlement = (customerInfo: RevenueCatCustomerInfo): RevenueCatEntitlementInfo | null => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
if (!proEntitlement) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (proEntitlement.productIdentifier && SUPPORTED_REVENUECAT_SUBSCRIPTION_PRODUCTS.has(proEntitlement.productIdentifier as PurchaseProductId)) {
|
||||
return proEntitlement;
|
||||
}
|
||||
|
||||
console.warn('[Billing] Ignoring unsupported RevenueCat pro entitlement during local sync', summarizeRevenueCatCustomerInfo(customerInfo));
|
||||
return null;
|
||||
};
|
||||
|
||||
const getDeviceLanguage = (): Language => {
|
||||
try {
|
||||
@@ -341,14 +384,25 @@ export const AppProvider: React.FC<{ children: React.ReactNode }> = ({ children
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
const applyRevenueCatCustomerInfoLocally = useCallback((customerInfo: RevenueCatCustomerInfo) => {
|
||||
const entitlementId = (process.env.EXPO_PUBLIC_REVENUECAT_PRO_ENTITLEMENT_ID || 'pro').trim() || 'pro';
|
||||
const applyRevenueCatCustomerInfoLocally = useCallback((
|
||||
customerInfo: RevenueCatCustomerInfo,
|
||||
source: RevenueCatSyncSource = 'app_init',
|
||||
) => {
|
||||
if (source === 'topup_purchase') {
|
||||
return;
|
||||
}
|
||||
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[entitlementId];
|
||||
const rawProEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
const proEntitlement = getValidProEntitlement(customerInfo);
|
||||
const isPro = Boolean(proEntitlement);
|
||||
|
||||
setBillingSummary((prev) => {
|
||||
if (!prev) return prev;
|
||||
if (!proEntitlement && rawProEntitlement) {
|
||||
return prev;
|
||||
}
|
||||
|
||||
return {
|
||||
...prev,
|
||||
entitlement: {
|
||||
@@ -362,10 +416,17 @@ export const AppProvider: React.FC<{ children: React.ReactNode }> = ({ children
|
||||
});
|
||||
}, []);
|
||||
|
||||
const syncRevenueCatState = useCallback(async (customerInfo: RevenueCatCustomerInfo) => {
|
||||
applyRevenueCatCustomerInfoLocally(customerInfo);
|
||||
const syncRevenueCatState = useCallback(async (
|
||||
customerInfo: RevenueCatCustomerInfo,
|
||||
source: RevenueCatSyncSource = 'app_init',
|
||||
) => {
|
||||
console.log('[Billing] Syncing RevenueCat customer info', {
|
||||
source,
|
||||
customerInfo: summarizeRevenueCatCustomerInfo(customerInfo),
|
||||
});
|
||||
applyRevenueCatCustomerInfoLocally(customerInfo, source);
|
||||
try {
|
||||
const response = await backendApiClient.syncRevenueCatState({ customerInfo });
|
||||
const response = await backendApiClient.syncRevenueCatState({ customerInfo, source });
|
||||
setBillingSummary(response.billing);
|
||||
return response.billing;
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,54 +1,108 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
server:
|
||||
build:
|
||||
context: ./server
|
||||
caddy:
|
||||
image: caddy:2.8-alpine
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "${PORT:-3005}:3000"
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
environment:
|
||||
SITE_DOMAIN: ${SITE_DOMAIN:-greenlenspro.com}
|
||||
volumes:
|
||||
- ./greenlns-landing/Caddyfile:/etc/caddy/Caddyfile:ro
|
||||
- caddy_data:/data
|
||||
- caddy_config:/config
|
||||
depends_on:
|
||||
landing:
|
||||
condition: service_started
|
||||
api:
|
||||
condition: service_healthy
|
||||
|
||||
landing:
|
||||
build:
|
||||
context: ./greenlns-landing
|
||||
dockerfile: Dockerfile
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 3000
|
||||
PLANT_DB_PATH: /data/greenlns.sqlite
|
||||
NEXT_PUBLIC_SITE_URL: ${SITE_URL:-https://greenlenspro.com}
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "fetch('http://127.0.0.1:3000').then((r)=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
api:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: server/Dockerfile
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 3000
|
||||
DATABASE_URL: postgresql://${POSTGRES_USER:-greenlns}:${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}@postgres:5432/${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_HOST: postgres
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_DB: ${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-greenlns}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
|
||||
MINIO_ENDPOINT: minio
|
||||
MINIO_PORT: 9000
|
||||
MINIO_USE_SSL: "false"
|
||||
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-minioadmin}
|
||||
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:-minioadmin123}
|
||||
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-greenlns-minio}
|
||||
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:?MINIO_SECRET_KEY is required}
|
||||
MINIO_BUCKET: ${MINIO_BUCKET:-plant-images}
|
||||
# Public URL for MinIO — set this to your Railway MinIO public domain
|
||||
MINIO_PUBLIC_URL: ${MINIO_PUBLIC_URL:-http://localhost:9000}
|
||||
# App secrets (set via Railway env vars)
|
||||
OPENAI_API_KEY: ${OPENAI_API_KEY}
|
||||
STRIPE_SECRET_KEY: ${STRIPE_SECRET_KEY}
|
||||
STRIPE_PUBLISHABLE_KEY: ${STRIPE_PUBLISHABLE_KEY}
|
||||
STRIPE_WEBHOOK_SECRET: ${STRIPE_WEBHOOK_SECRET}
|
||||
JWT_SECRET: ${JWT_SECRET}
|
||||
PLANT_IMPORT_ADMIN_KEY: ${PLANT_IMPORT_ADMIN_KEY}
|
||||
volumes:
|
||||
- db_data:/data
|
||||
MINIO_PUBLIC_URL: ${MINIO_PUBLIC_URL:-https://greenlenspro.com/storage}
|
||||
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
|
||||
OPENAI_SCAN_MODEL: ${OPENAI_SCAN_MODEL:-gpt-5-mini}
|
||||
OPENAI_HEALTH_MODEL: ${OPENAI_HEALTH_MODEL:-gpt-5-mini}
|
||||
REVENUECAT_WEBHOOK_SECRET: ${REVENUECAT_WEBHOOK_SECRET:-}
|
||||
REVENUECAT_PRO_ENTITLEMENT_ID: ${REVENUECAT_PRO_ENTITLEMENT_ID:-pro}
|
||||
JWT_SECRET: ${JWT_SECRET:?JWT_SECRET is required}
|
||||
PLANT_IMPORT_ADMIN_KEY: ${PLANT_IMPORT_ADMIN_KEY:-}
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
minio:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "fetch('http://127.0.0.1:3000/health').then((r)=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_DB: ${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-greenlns}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
minio:
|
||||
image: minio/minio:latest
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY:-minioadmin}
|
||||
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY:-minioadmin123}
|
||||
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY:-greenlns-minio}
|
||||
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY:?MINIO_SECRET_KEY is required}
|
||||
command: server /data
|
||||
volumes:
|
||||
- minio_data:/data
|
||||
command: server /data --console-address ":9001"
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
db_data:
|
||||
caddy_config:
|
||||
caddy_data:
|
||||
minio_data:
|
||||
postgres_data:
|
||||
|
||||
8
eas.json
8
eas.json
@@ -13,8 +13,8 @@
|
||||
"distribution": "internal",
|
||||
"env": {
|
||||
"NPM_CONFIG_LEGACY_PEER_DEPS": "true",
|
||||
"EXPO_PUBLIC_BACKEND_URL": "https://imaginative-abundance-production-f785.up.railway.app",
|
||||
"EXPO_PUBLIC_PAYMENT_SERVER_URL": "https://imaginative-abundance-production-f785.up.railway.app",
|
||||
"EXPO_PUBLIC_BACKEND_URL": "https://greenlenspro.com",
|
||||
"EXPO_PUBLIC_PAYMENT_SERVER_URL": "https://greenlenspro.com",
|
||||
"EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY": "pk_live_51SHpSLJYShvDMH3vXGaCFTgSDBZmjLUuw12rcvZFPwxfdEK1zRGG5mXFTMs6vMkgp7Udj07eZPDTNijhQn29VYpe00gzX8pBKN",
|
||||
"EXPO_PUBLIC_REVENUECAT_IOS_API_KEY": "appl_hrjmLmIUUTojZygbsisNqQqrHbX"
|
||||
}
|
||||
@@ -23,8 +23,8 @@
|
||||
"node": "22.18.0",
|
||||
"env": {
|
||||
"NPM_CONFIG_LEGACY_PEER_DEPS": "true",
|
||||
"EXPO_PUBLIC_BACKEND_URL": "https://imaginative-abundance-production-f785.up.railway.app",
|
||||
"EXPO_PUBLIC_PAYMENT_SERVER_URL": "https://imaginative-abundance-production-f785.up.railway.app",
|
||||
"EXPO_PUBLIC_BACKEND_URL": "https://greenlenspro.com",
|
||||
"EXPO_PUBLIC_PAYMENT_SERVER_URL": "https://greenlenspro.com",
|
||||
"EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY": "pk_live_51SHpSLJYShvDMH3vXGaCFTgSDBZmjLUuw12rcvZFPwxfdEK1zRGG5mXFTMs6vMkgp7Udj07eZPDTNijhQn29VYpe00gzX8pBKN",
|
||||
"EXPO_PUBLIC_REVENUECAT_IOS_API_KEY": "appl_hrjmLmIUUTojZygbsisNqQqrHbX"
|
||||
}
|
||||
|
||||
18
greenlns-landing/Caddyfile
Normal file
18
greenlns-landing/Caddyfile
Normal file
@@ -0,0 +1,18 @@
|
||||
{$SITE_DOMAIN} {
|
||||
encode zstd gzip
|
||||
|
||||
@storage path /storage /storage/*
|
||||
handle @storage {
|
||||
uri strip_prefix /storage
|
||||
reverse_proxy minio:9000
|
||||
}
|
||||
|
||||
@api path /api /api/* /auth /auth/* /v1 /v1/* /health /plants /plants/*
|
||||
handle @api {
|
||||
reverse_proxy api:3000
|
||||
}
|
||||
|
||||
handle {
|
||||
reverse_proxy landing:3000
|
||||
}
|
||||
}
|
||||
@@ -1,36 +1,38 @@
|
||||
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).
|
||||
# GreenLens Landing
|
||||
|
||||
## Getting Started
|
||||
Self-hosted Next.js landing page for GreenLens. The production stack in this directory runs:
|
||||
|
||||
First, run the development server:
|
||||
- `caddy` for TLS and reverse proxy
|
||||
- `landing` for the Next.js standalone app
|
||||
- `api` for the Express backend from `../server`
|
||||
- `postgres` for persistent app data
|
||||
- `minio` for object storage under `/storage/*`
|
||||
|
||||
## Local development
|
||||
|
||||
```bash
|
||||
npm install
|
||||
npm run dev
|
||||
# or
|
||||
yarn dev
|
||||
# or
|
||||
pnpm dev
|
||||
# or
|
||||
bun dev
|
||||
```
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
|
||||
## Production stack
|
||||
|
||||
You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
|
||||
From `greenlns-landing/docker-compose.yml`:
|
||||
|
||||
This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.
|
||||
```bash
|
||||
docker compose up --build -d
|
||||
```
|
||||
|
||||
## Learn More
|
||||
Required environment variables:
|
||||
|
||||
To learn more about Next.js, take a look at the following resources:
|
||||
- `SITE_DOMAIN`
|
||||
- `SITE_URL`
|
||||
- `POSTGRES_PASSWORD`
|
||||
- `JWT_SECRET`
|
||||
- `MINIO_SECRET_KEY`
|
||||
|
||||
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
|
||||
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
|
||||
Optional service secrets:
|
||||
|
||||
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!
|
||||
|
||||
## Deploy on Vercel
|
||||
|
||||
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
|
||||
|
||||
Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
|
||||
- `OPENAI_API_KEY`
|
||||
- `REVENUECAT_WEBHOOK_SECRET`
|
||||
- `PLANT_IMPORT_ADMIN_KEY`
|
||||
|
||||
13
greenlns-landing/app/robots.ts
Normal file
13
greenlns-landing/app/robots.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { MetadataRoute } from 'next'
|
||||
|
||||
export default function robots(): MetadataRoute.Robots {
|
||||
const baseUrl = (process.env.NEXT_PUBLIC_SITE_URL || 'https://greenlenspro.com').trim()
|
||||
|
||||
return {
|
||||
rules: {
|
||||
userAgent: '*',
|
||||
allow: '/',
|
||||
},
|
||||
sitemap: `${baseUrl}/sitemap.xml`,
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { MetadataRoute } from 'next'
|
||||
|
||||
export default function sitemap(): MetadataRoute.Sitemap {
|
||||
const baseUrl = 'https://greenlns-landing.vercel.app'
|
||||
const baseUrl = (process.env.NEXT_PUBLIC_SITE_URL || 'https://greenlenspro.com').trim()
|
||||
|
||||
return [
|
||||
{
|
||||
@@ -22,5 +22,11 @@ export default function sitemap(): MetadataRoute.Sitemap {
|
||||
changeFrequency: 'monthly',
|
||||
priority: 0.3,
|
||||
},
|
||||
{
|
||||
url: `${baseUrl}/terms`,
|
||||
lastModified: new Date(),
|
||||
changeFrequency: 'monthly',
|
||||
priority: 0.3,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
@@ -6,60 +6,24 @@ import { siteConfig } from '@/lib/site'
|
||||
const CONTENT = {
|
||||
de: {
|
||||
title: 'Nutzungsbedingungen',
|
||||
intro:
|
||||
'Durch die Nutzung von GreenLens stimmst du diesen Nutzungsbedingungen zu. Bitte lies sie sorgfaeltig durch.',
|
||||
section1: '1. Leistungen',
|
||||
text1:
|
||||
'GreenLens bietet KI-gestuetzte Pflanzenidentifikation, Gesundheitsdiagnosen und Pflegeerinnerungen. Die App kann kostenlos genutzt werden; erweiterte Funktionen erfordern ein Abonnement oder Credits.',
|
||||
section2: '2. Abonnements und In-App-Kaeufe',
|
||||
text2:
|
||||
'GreenLens Pro ist ein automatisch erneuerbares Abonnement (monatlich oder jaehrlich). Es wird ueber deinen Apple-Account abgerechnet. Die Verlaengerung erfolgt automatisch, sofern du nicht mindestens 24 Stunden vor Ablauf des Abrechnungszeitraums kuendigst. Credits sind einmalige Kaeufe und nicht uebertragbar.',
|
||||
section3: '3. Kuendigung',
|
||||
text3:
|
||||
'Du kannst dein Abonnement jederzeit in den iPhone-Einstellungen unter deinem Apple-ID-Konto kuendigen. Nach der Kuendigung behast du den Zugriff bis zum Ende des bezahlten Zeitraums.',
|
||||
section4: '4. Haftungsausschluss',
|
||||
text4:
|
||||
'GreenLens stellt Informationen auf Basis von KI-Analysen bereit. Diese ersetzen keine professionelle Beratung. Wir uebernehmen keine Haftung fuer Schaeden, die durch die Nutzung der App entstehen.',
|
||||
section5: '5. Kontakt',
|
||||
text5: 'Bei Fragen zu diesen Nutzungsbedingungen erreichst du uns per E-Mail.',
|
||||
intro: 'Diese Bedingungen regeln die Nutzung von GreenLens und der dazugehoerigen Services.',
|
||||
section1: 'GreenLens wird als digitale App und Web-Service fuer Pflanzenscans, Informationen und accountbezogene Funktionen bereitgestellt.',
|
||||
section2: 'Vor dem Livegang muessen diese Bedingungen durch rechtlich gepruefte und vollstaendige Vertragstexte ersetzt werden.',
|
||||
contactLabel: 'Kontakt',
|
||||
},
|
||||
en: {
|
||||
title: 'Terms of Use',
|
||||
intro:
|
||||
'By using GreenLens, you agree to these Terms of Use. Please read them carefully.',
|
||||
section1: '1. Services',
|
||||
text1:
|
||||
'GreenLens provides AI-powered plant identification, health diagnosis, and care reminders. The app is free to use; advanced features require a subscription or credits.',
|
||||
section2: '2. Subscriptions and In-App Purchases',
|
||||
text2:
|
||||
'GreenLens Pro is an auto-renewable subscription (monthly or yearly). Payment is charged to your Apple Account. Your subscription automatically renews unless cancelled at least 24 hours before the end of the current billing period. Credits are one-time purchases and are non-transferable.',
|
||||
section3: '3. Cancellation',
|
||||
text3:
|
||||
'You can cancel your subscription at any time in iPhone Settings under your Apple ID account. After cancellation, you retain access until the end of the paid period.',
|
||||
section4: '4. Disclaimer',
|
||||
text4:
|
||||
'GreenLens provides information based on AI analysis. This does not replace professional advice. We accept no liability for damages arising from use of the app.',
|
||||
section5: '5. Contact',
|
||||
text5: 'If you have questions about these Terms of Use, contact us by email.',
|
||||
title: 'Terms of Service',
|
||||
intro: 'These terms govern the use of GreenLens and its related services.',
|
||||
section1: 'GreenLens is provided as a digital app and web service for plant scans, information, and account-related functionality.',
|
||||
section2: 'Before launch, replace this placeholder with legally reviewed and complete terms for your business.',
|
||||
contactLabel: 'Contact',
|
||||
},
|
||||
es: {
|
||||
title: 'Terminos de Uso',
|
||||
intro:
|
||||
'Al usar GreenLens, aceptas estos Terminos de Uso. Por favor, leelos detenidamente.',
|
||||
section1: '1. Servicios',
|
||||
text1:
|
||||
'GreenLens ofrece identificacion de plantas, diagnostico de salud y recordatorios de cuidado basados en IA. La app es gratuita; las funciones avanzadas requieren una suscripcion o creditos.',
|
||||
section2: '2. Suscripciones y Compras',
|
||||
text2:
|
||||
'GreenLens Pro es una suscripcion de renovacion automatica (mensual o anual). El pago se carga a tu cuenta de Apple. La suscripcion se renueva automaticamente salvo que la canceles al menos 24 horas antes del final del periodo actual. Los creditos son compras unicas y no son transferibles.',
|
||||
section3: '3. Cancelacion',
|
||||
text3:
|
||||
'Puedes cancelar tu suscripcion en cualquier momento en los Ajustes del iPhone bajo tu cuenta de Apple ID. Tras la cancelacion, conservas el acceso hasta el final del periodo pagado.',
|
||||
section4: '4. Exencion de responsabilidad',
|
||||
text4:
|
||||
'GreenLens proporciona informacion basada en analisis de IA. Esto no reemplaza el asesoramiento profesional. No aceptamos responsabilidad por danos derivados del uso de la app.',
|
||||
section5: '5. Contacto',
|
||||
text5: 'Si tienes preguntas sobre estos Terminos de Uso, contactanos por correo electronico.',
|
||||
title: 'Terminos del Servicio',
|
||||
intro: 'Estos terminos regulan el uso de GreenLens y sus servicios relacionados.',
|
||||
section1: 'GreenLens se ofrece como app y servicio web para escaneo de plantas, informacion y funciones de cuenta.',
|
||||
section2: 'Antes del lanzamiento, sustituye este texto por terminos completos revisados legalmente.',
|
||||
contactLabel: 'Contacto',
|
||||
},
|
||||
}
|
||||
|
||||
@@ -72,18 +36,10 @@ export default function TermsPage() {
|
||||
<h1>{c.title}</h1>
|
||||
<div style={{ marginTop: '2rem', lineHeight: '1.8', opacity: 0.9 }}>
|
||||
<p>{c.intro}</p>
|
||||
<h2 style={{ marginTop: '1.5rem', fontSize: '1.25rem' }}>{c.section1}</h2>
|
||||
<p>{c.text1}</p>
|
||||
<h2 style={{ marginTop: '1.5rem', fontSize: '1.25rem' }}>{c.section2}</h2>
|
||||
<p>{c.text2}</p>
|
||||
<h2 style={{ marginTop: '1.5rem', fontSize: '1.25rem' }}>{c.section3}</h2>
|
||||
<p>{c.text3}</p>
|
||||
<h2 style={{ marginTop: '1.5rem', fontSize: '1.25rem' }}>{c.section4}</h2>
|
||||
<p>{c.text4}</p>
|
||||
<h2 style={{ marginTop: '1.5rem', fontSize: '1.25rem' }}>{c.section5}</h2>
|
||||
<p>{c.text5}</p>
|
||||
<p style={{ marginTop: '0.75rem' }}>
|
||||
<a href={`mailto:${siteConfig.legalEmail}`}>{siteConfig.legalEmail}</a>
|
||||
<p>{c.section1}</p>
|
||||
<p>{c.section2}</p>
|
||||
<p>
|
||||
<strong>{c.contactLabel}:</strong> <a href={`mailto:${siteConfig.legalEmail}`}>{siteConfig.legalEmail}</a>
|
||||
</p>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
@@ -1,66 +1,108 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
app:
|
||||
caddy:
|
||||
image: caddy:2.8-alpine
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
environment:
|
||||
SITE_DOMAIN: ${SITE_DOMAIN:-greenlenspro.com}
|
||||
volumes:
|
||||
- ./Caddyfile:/etc/caddy/Caddyfile:ro
|
||||
- caddy_data:/data
|
||||
- caddy_config:/config
|
||||
depends_on:
|
||||
landing:
|
||||
condition: service_started
|
||||
api:
|
||||
condition: service_healthy
|
||||
|
||||
landing:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 3000
|
||||
NEXT_PUBLIC_SITE_URL: ${SITE_URL:-https://greenlenspro.com}
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "fetch('http://127.0.0.1:3000').then((r)=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
api:
|
||||
build:
|
||||
context: ../server
|
||||
dockerfile: Dockerfile
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 3000
|
||||
DATABASE_URL: postgresql://${POSTGRES_USER:-greenlns}:${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}@postgres:5432/${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_HOST: postgres
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_DB: ${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-greenlns}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
|
||||
MINIO_ENDPOINT: minio
|
||||
MINIO_PORT: 9000
|
||||
MINIO_USE_SSL: "false"
|
||||
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-greenlns-minio}
|
||||
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:?MINIO_SECRET_KEY is required}
|
||||
MINIO_BUCKET: ${MINIO_BUCKET:-plant-images}
|
||||
MINIO_PUBLIC_URL: ${MINIO_PUBLIC_URL:-https://greenlenspro.com/storage}
|
||||
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
|
||||
OPENAI_SCAN_MODEL: ${OPENAI_SCAN_MODEL:-gpt-5-mini}
|
||||
OPENAI_HEALTH_MODEL: ${OPENAI_HEALTH_MODEL:-gpt-5-mini}
|
||||
REVENUECAT_WEBHOOK_SECRET: ${REVENUECAT_WEBHOOK_SECRET:-}
|
||||
REVENUECAT_PRO_ENTITLEMENT_ID: ${REVENUECAT_PRO_ENTITLEMENT_ID:-pro}
|
||||
JWT_SECRET: ${JWT_SECRET:?JWT_SECRET is required}
|
||||
PLANT_IMPORT_ADMIN_KEY: ${PLANT_IMPORT_ADMIN_KEY:-}
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
minio:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "fetch('http://127.0.0.1:3000/health').then((r)=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_DB: ${POSTGRES_DB:-GreenLens}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-GreenLens}
|
||||
POSTGRES_DB: ${POSTGRES_DB:-greenlns}
|
||||
POSTGRES_USER: ${POSTGRES_USER:-greenlns}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-GreenLens}"]
|
||||
test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
restart: unless-stopped
|
||||
# Expose to Railway/external — set firewall rules on your server!
|
||||
ports:
|
||||
- "5432:5432"
|
||||
|
||||
minio:
|
||||
image: minio/minio:latest
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY:?MINIO_ACCESS_KEY is required}
|
||||
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY:-greenlns-minio}
|
||||
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY:?MINIO_SECRET_KEY is required}
|
||||
command: server /data
|
||||
volumes:
|
||||
- minio_data:/data
|
||||
command: server /data --console-address ":9001"
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
restart: unless-stopped
|
||||
|
||||
nginx:
|
||||
image: nginx:alpine
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
volumes:
|
||||
- ./nginx/default.conf:/etc/nginx/conf.d/default.conf:ro
|
||||
- ./nginx/certs:/etc/nginx/certs:ro
|
||||
depends_on:
|
||||
- app
|
||||
- minio
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
caddy_config:
|
||||
caddy_data:
|
||||
postgres_data:
|
||||
minio_data:
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
const siteUrl = (process.env.NEXT_PUBLIC_SITE_URL || 'https://greenlenspro.com').trim()
|
||||
|
||||
export const siteConfig = {
|
||||
name: 'GreenLens',
|
||||
domain: 'https://greenlns-landing.vercel.app',
|
||||
domain: siteUrl,
|
||||
supportEmail: 'knuth.timo@gmail.com',
|
||||
legalEmail: 'knuth.timo@gmail.com',
|
||||
iosAppStoreUrl: '',
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import path from 'node:path'
|
||||
import type { NextConfig } from "next";
|
||||
|
||||
const nextConfig: NextConfig = {
|
||||
@@ -5,6 +6,9 @@ const nextConfig: NextConfig = {
|
||||
images: {
|
||||
unoptimized: true,
|
||||
},
|
||||
turbopack: {
|
||||
root: path.join(__dirname),
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
User-agent: *
|
||||
Allow: /
|
||||
|
||||
Sitemap: https://greenlns-landing.vercel.app/sitemap.xml
|
||||
@@ -2,10 +2,10 @@ FROM node:20-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --only=production
|
||||
COPY server/package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
|
||||
COPY . .
|
||||
COPY server/. .
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
|
||||
124
server/index.js
124
server/index.js
@@ -3,7 +3,6 @@ const path = require('path');
|
||||
const dotenv = require('dotenv');
|
||||
const express = require('express');
|
||||
const cors = require('cors');
|
||||
const Stripe = require('stripe');
|
||||
|
||||
const loadEnvFiles = (filePaths) => {
|
||||
const mergedFileEnv = {};
|
||||
@@ -26,7 +25,7 @@ loadEnvFiles([
|
||||
path.join(__dirname, '.env.local'),
|
||||
]);
|
||||
|
||||
const { closeDatabase, getDefaultDbPath, openDatabase, get, run } = require('./lib/sqlite');
|
||||
const { closeDatabase, getDefaultDbPath, openDatabase, get } = require('./lib/postgres');
|
||||
const { ensureAuthSchema, signUp: authSignUp, login: authLogin, issueToken, verifyJwt } = require('./lib/auth');
|
||||
const {
|
||||
PlantImportValidationError,
|
||||
@@ -58,33 +57,11 @@ const {
|
||||
isConfigured: isOpenAiConfigured,
|
||||
} = require('./lib/openai');
|
||||
const { applyCatalogGrounding, normalizeText } = require('./lib/scanGrounding');
|
||||
const { ensureStorageBucket, uploadImage, isStorageConfigured } = require('./lib/storage');
|
||||
const { ensureStorageBucketWithRetry, uploadImage, isStorageConfigured } = require('./lib/storage');
|
||||
|
||||
const app = express();
|
||||
const port = Number(process.env.PORT || 3000);
|
||||
const plantsPublicDir = path.join(__dirname, 'public', 'plants');
|
||||
const stripeSecretKey = (process.env.STRIPE_SECRET_KEY || '').trim();
|
||||
if (!stripeSecretKey) {
|
||||
console.error('STRIPE_SECRET_KEY is not set. Payment endpoints will fail.');
|
||||
}
|
||||
const stripe = new Stripe(stripeSecretKey || 'sk_test_placeholder_key_not_configured');
|
||||
|
||||
const resolveStripeModeFromKey = (key, livePrefix, testPrefix) => {
|
||||
const normalized = String(key || '').trim();
|
||||
if (normalized.startsWith(livePrefix)) return 'LIVE';
|
||||
if (normalized.startsWith(testPrefix)) return 'TEST';
|
||||
return 'MOCK';
|
||||
};
|
||||
|
||||
const getStripeSecretMode = () =>
|
||||
resolveStripeModeFromKey(process.env.STRIPE_SECRET_KEY, 'sk_live_', 'sk_test_');
|
||||
|
||||
const getStripePublishableMode = () =>
|
||||
resolveStripeModeFromKey(
|
||||
process.env.STRIPE_PUBLISHABLE_KEY || process.env.EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY,
|
||||
'pk_live_',
|
||||
'pk_test_',
|
||||
);
|
||||
|
||||
const SCAN_PRIMARY_COST = 1;
|
||||
const SCAN_REVIEW_COST = 1;
|
||||
@@ -323,35 +300,6 @@ const isAuthorizedRevenueCatWebhook = (request) => {
|
||||
return normalized === revenueCatWebhookSecret || normalized === `Bearer ${revenueCatWebhookSecret}`;
|
||||
};
|
||||
|
||||
// Webhooks must be BEFORE express.json() to preserve raw body where required.
|
||||
app.post('/api/webhook', express.raw({ type: 'application/json' }), (request, response) => {
|
||||
const signature = request.headers['stripe-signature'];
|
||||
let event;
|
||||
|
||||
try {
|
||||
event = stripe.webhooks.constructEvent(
|
||||
request.body,
|
||||
signature,
|
||||
process.env.STRIPE_WEBHOOK_SECRET,
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(`Webhook Error: ${error.message}`);
|
||||
response.status(400).send(`Webhook Error: ${error.message}`);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (event.type) {
|
||||
case 'payment_intent.succeeded':
|
||||
console.log('PaymentIntent succeeded.');
|
||||
break;
|
||||
default:
|
||||
console.log(`Unhandled event type: ${event.type}`);
|
||||
break;
|
||||
}
|
||||
|
||||
response.json({ received: true });
|
||||
});
|
||||
|
||||
app.post('/api/revenuecat/webhook', express.json({ limit: '1mb' }), async (request, response) => {
|
||||
try {
|
||||
if (!isAuthorizedRevenueCatWebhook(request)) {
|
||||
@@ -374,7 +322,6 @@ app.get('/', (_request, response) => {
|
||||
status: 'ok',
|
||||
endpoints: [
|
||||
'GET /health',
|
||||
'POST /api/payment-sheet',
|
||||
'GET /api/plants',
|
||||
'POST /api/plants/rebuild',
|
||||
'POST /auth/signup',
|
||||
@@ -392,18 +339,27 @@ app.get('/', (_request, response) => {
|
||||
});
|
||||
});
|
||||
|
||||
const getDatabaseHealthTarget = () => {
|
||||
const raw = getDefaultDbPath();
|
||||
if (!raw) return '';
|
||||
|
||||
try {
|
||||
const parsed = new URL(raw);
|
||||
const databaseName = parsed.pathname.replace(/^\//, '');
|
||||
return `${parsed.protocol}//${parsed.hostname}${parsed.port ? `:${parsed.port}` : ''}/${databaseName}`;
|
||||
} catch {
|
||||
return 'configured';
|
||||
}
|
||||
};
|
||||
|
||||
app.get('/health', (_request, response) => {
|
||||
const stripeSecret = (process.env.STRIPE_SECRET_KEY || '').trim();
|
||||
response.status(200).json({
|
||||
ok: true,
|
||||
uptimeSec: Math.round(process.uptime()),
|
||||
timestamp: new Date().toISOString(),
|
||||
openAiConfigured: isOpenAiConfigured(),
|
||||
dbReady: Boolean(db),
|
||||
dbPath: getDefaultDbPath(),
|
||||
stripeConfigured: Boolean(stripeSecret),
|
||||
stripeMode: getStripeSecretMode(),
|
||||
stripePublishableMode: getStripePublishableMode(),
|
||||
dbPath: getDatabaseHealthTarget(),
|
||||
scanModel: getScanModel(),
|
||||
healthModel: getHealthModel(),
|
||||
});
|
||||
@@ -467,42 +423,11 @@ app.post('/api/plants/rebuild', async (request, response) => {
|
||||
}
|
||||
});
|
||||
|
||||
app.post('/api/payment-sheet', async (request, response) => {
|
||||
try {
|
||||
const amount = Number(request.body?.amount || 500);
|
||||
const currency = request.body?.currency || 'usd';
|
||||
|
||||
const paymentIntent = await stripe.paymentIntents.create({
|
||||
amount,
|
||||
currency,
|
||||
automatic_payment_methods: { enabled: true },
|
||||
});
|
||||
|
||||
const customer = await stripe.customers.create();
|
||||
const ephemeralKey = await stripe.ephemeralKeys.create(
|
||||
{ customer: customer.id },
|
||||
{ apiVersion: '2023-10-16' },
|
||||
);
|
||||
|
||||
response.json({
|
||||
paymentIntent: paymentIntent.client_secret,
|
||||
ephemeralKey: ephemeralKey.secret,
|
||||
customer: customer.id,
|
||||
publishableKey: process.env.STRIPE_PUBLISHABLE_KEY || process.env.EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY || 'pk_test_mock_key',
|
||||
});
|
||||
} catch (error) {
|
||||
response.status(400).json({
|
||||
code: 'PAYMENT_SHEET_ERROR',
|
||||
message: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/v1/billing/summary', async (request, response) => {
|
||||
try {
|
||||
const userId = ensureRequestAuth(request);
|
||||
if (userId !== 'guest') {
|
||||
const userExists = await get(db, 'SELECT id FROM auth_users WHERE id = ?', [userId]);
|
||||
const userExists = await get(db, 'SELECT id FROM auth_users WHERE id = $1', [userId]);
|
||||
if (!userExists) {
|
||||
return response.status(401).json({ code: 'UNAUTHORIZED', message: 'User not found.' });
|
||||
}
|
||||
@@ -522,10 +447,11 @@ app.post('/v1/billing/sync-revenuecat', async (request, response) => {
|
||||
return response.status(400).json({ code: 'BAD_REQUEST', message: 'Guest users cannot sync RevenueCat state.' });
|
||||
}
|
||||
const customerInfo = request.body?.customerInfo;
|
||||
const source = typeof request.body?.source === 'string' ? request.body.source : undefined;
|
||||
if (!customerInfo || typeof customerInfo !== 'object' || !customerInfo.entitlements) {
|
||||
return response.status(400).json({ code: 'BAD_REQUEST', message: 'customerInfo is required.' });
|
||||
}
|
||||
const payload = await syncRevenueCatCustomerInfo(db, userId, customerInfo);
|
||||
const payload = await syncRevenueCatCustomerInfo(db, userId, customerInfo, { source });
|
||||
response.status(200).json(payload);
|
||||
} catch (error) {
|
||||
const payload = toApiErrorPayload(error);
|
||||
@@ -893,19 +819,9 @@ const start = async () => {
|
||||
await ensureAuthSchema(db);
|
||||
await seedBootstrapCatalogIfNeeded();
|
||||
if (isStorageConfigured()) {
|
||||
await ensureStorageBucket().catch((err) => console.warn('MinIO bucket setup failed:', err.message));
|
||||
await ensureStorageBucketWithRetry().catch((err) => console.warn('MinIO bucket setup failed:', err.message));
|
||||
}
|
||||
|
||||
const stripeMode = getStripeSecretMode();
|
||||
const stripePublishableMode = getStripePublishableMode();
|
||||
const maskKey = (key) => {
|
||||
const k = String(key || '').trim();
|
||||
if (k.length < 12) return k ? '(too short to mask)' : '(not set)';
|
||||
return `${k.slice(0, 7)}...${k.slice(-4)}`;
|
||||
};
|
||||
console.log(`Stripe Mode: ${stripeMode} | Secret: ${maskKey(process.env.STRIPE_SECRET_KEY)}`);
|
||||
console.log(`Stripe Publishable Mode: ${stripePublishableMode} | Key: ${maskKey(process.env.STRIPE_PUBLISHABLE_KEY || process.env.EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY)}`);
|
||||
|
||||
const server = app.listen(port, () => {
|
||||
console.log(`GreenLens server listening at http://localhost:${port}`);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const crypto = require('crypto');
|
||||
const { get, run } = require('./sqlite');
|
||||
const { get, run } = require('./postgres');
|
||||
|
||||
const JWT_SECRET = process.env.JWT_SECRET || 'greenlens-dev-secret-change-in-prod';
|
||||
const TOKEN_EXPIRY_SECONDS = 365 * 24 * 3600; // 1 year
|
||||
@@ -57,10 +57,10 @@ const ensureAuthSchema = async (db) => {
|
||||
db,
|
||||
`CREATE TABLE IF NOT EXISTS auth_users (
|
||||
id TEXT PRIMARY KEY,
|
||||
email TEXT NOT NULL UNIQUE COLLATE NOCASE,
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL DEFAULT '',
|
||||
password_hash TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
)`,
|
||||
);
|
||||
};
|
||||
@@ -69,7 +69,7 @@ const ensureAuthSchema = async (db) => {
|
||||
|
||||
const signUp = async (db, email, name, password) => {
|
||||
const normalizedEmail = email.trim().toLowerCase();
|
||||
const existing = await get(db, 'SELECT id FROM auth_users WHERE email = ?', [normalizedEmail]);
|
||||
const existing = await get(db, 'SELECT id FROM auth_users WHERE LOWER(email) = LOWER($1)', [normalizedEmail]);
|
||||
if (existing) {
|
||||
const err = new Error('Email already in use.');
|
||||
err.code = 'EMAIL_TAKEN';
|
||||
@@ -77,7 +77,7 @@ const signUp = async (db, email, name, password) => {
|
||||
throw err;
|
||||
}
|
||||
const id = `usr_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 10)}`;
|
||||
await run(db, 'INSERT INTO auth_users (id, email, name, password_hash) VALUES (?, ?, ?, ?)', [
|
||||
await run(db, 'INSERT INTO auth_users (id, email, name, password_hash) VALUES ($1, $2, $3, $4)', [
|
||||
id,
|
||||
normalizedEmail,
|
||||
name.trim(),
|
||||
@@ -88,7 +88,11 @@ const signUp = async (db, email, name, password) => {
|
||||
|
||||
const login = async (db, email, password) => {
|
||||
const normalizedEmail = email.trim().toLowerCase();
|
||||
const user = await get(db, 'SELECT id, email, name, password_hash FROM auth_users WHERE email = ?', [normalizedEmail]);
|
||||
const user = await get(
|
||||
db,
|
||||
'SELECT id, email, name, password_hash FROM auth_users WHERE LOWER(email) = LOWER($1)',
|
||||
[normalizedEmail],
|
||||
);
|
||||
if (!user) {
|
||||
const err = new Error('No account found for this email.');
|
||||
err.code = 'USER_NOT_FOUND';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const { get, run } = require('./sqlite');
|
||||
const { get, run } = require('./postgres');
|
||||
|
||||
const FREE_MONTHLY_CREDITS = 15;
|
||||
const PRO_MONTHLY_CREDITS = 250;
|
||||
@@ -51,18 +51,23 @@ const createInsufficientCreditsError = (required, available) => {
|
||||
};
|
||||
|
||||
const runInTransaction = async (db, worker) => {
|
||||
await run(db, 'BEGIN IMMEDIATE TRANSACTION');
|
||||
const client = typeof db.connect === 'function' ? await db.connect() : db;
|
||||
const release = typeof client.release === 'function' ? () => client.release() : () => {};
|
||||
|
||||
await run(client, 'BEGIN');
|
||||
try {
|
||||
const result = await worker();
|
||||
await run(db, 'COMMIT');
|
||||
const result = await worker(client);
|
||||
await run(client, 'COMMIT');
|
||||
return result;
|
||||
} catch (error) {
|
||||
try {
|
||||
await run(db, 'ROLLBACK');
|
||||
await run(client, 'ROLLBACK');
|
||||
} catch (rollbackError) {
|
||||
console.error('Failed to rollback SQLite transaction.', rollbackError);
|
||||
console.error('Failed to rollback billing transaction.', rollbackError);
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
release();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -137,27 +142,27 @@ const upsertAccount = async (db, account) => {
|
||||
await run(
|
||||
db,
|
||||
`INSERT INTO billing_accounts (
|
||||
userId,
|
||||
user_id,
|
||||
plan,
|
||||
provider,
|
||||
cycleStartedAt,
|
||||
cycleEndsAt,
|
||||
monthlyAllowance,
|
||||
usedThisCycle,
|
||||
topupBalance,
|
||||
renewsAt,
|
||||
updatedAt
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(userId) DO UPDATE SET
|
||||
plan = excluded.plan,
|
||||
provider = excluded.provider,
|
||||
cycleStartedAt = excluded.cycleStartedAt,
|
||||
cycleEndsAt = excluded.cycleEndsAt,
|
||||
monthlyAllowance = excluded.monthlyAllowance,
|
||||
usedThisCycle = excluded.usedThisCycle,
|
||||
topupBalance = excluded.topupBalance,
|
||||
renewsAt = excluded.renewsAt,
|
||||
updatedAt = excluded.updatedAt`,
|
||||
cycle_started_at,
|
||||
cycle_ends_at,
|
||||
monthly_allowance,
|
||||
used_this_cycle,
|
||||
topup_balance,
|
||||
renews_at,
|
||||
updated_at
|
||||
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||
ON CONFLICT (user_id) DO UPDATE SET
|
||||
plan = EXCLUDED.plan,
|
||||
provider = EXCLUDED.provider,
|
||||
cycle_started_at = EXCLUDED.cycle_started_at,
|
||||
cycle_ends_at = EXCLUDED.cycle_ends_at,
|
||||
monthly_allowance = EXCLUDED.monthly_allowance,
|
||||
used_this_cycle = EXCLUDED.used_this_cycle,
|
||||
topup_balance = EXCLUDED.topup_balance,
|
||||
renews_at = EXCLUDED.renews_at,
|
||||
updated_at = EXCLUDED.updated_at`,
|
||||
[
|
||||
account.userId,
|
||||
account.plan,
|
||||
@@ -177,18 +182,18 @@ const getOrCreateAccount = async (db, userId) => {
|
||||
const row = await get(
|
||||
db,
|
||||
`SELECT
|
||||
userId,
|
||||
user_id AS "userId",
|
||||
plan,
|
||||
provider,
|
||||
cycleStartedAt,
|
||||
cycleEndsAt,
|
||||
monthlyAllowance,
|
||||
usedThisCycle,
|
||||
topupBalance,
|
||||
renewsAt,
|
||||
updatedAt
|
||||
cycle_started_at AS "cycleStartedAt",
|
||||
cycle_ends_at AS "cycleEndsAt",
|
||||
monthly_allowance AS "monthlyAllowance",
|
||||
used_this_cycle AS "usedThisCycle",
|
||||
topup_balance AS "topupBalance",
|
||||
renews_at AS "renewsAt",
|
||||
updated_at AS "updatedAt"
|
||||
FROM billing_accounts
|
||||
WHERE userId = ?`,
|
||||
WHERE user_id = $1`,
|
||||
[userId],
|
||||
);
|
||||
|
||||
@@ -253,13 +258,53 @@ const asIsoDate = (value) => {
|
||||
};
|
||||
|
||||
const isSupportedTopupProduct = (productId) => {
|
||||
return typeof productId === 'string' && productId.startsWith('topup_') && typeof TOPUP_CREDITS_BY_PRODUCT[productId] === 'number';
|
||||
return typeof productId === 'string'
|
||||
&& productId.startsWith('topup_')
|
||||
&& typeof TOPUP_CREDITS_BY_PRODUCT[productId] === 'number';
|
||||
};
|
||||
|
||||
const normalizeRevenueCatTransactions = (customerInfo) => {
|
||||
const nonSubscriptions = customerInfo?.nonSubscriptions;
|
||||
if (!nonSubscriptions || typeof nonSubscriptions !== 'object') return [];
|
||||
return Object.values(nonSubscriptions).flatMap((entries) => Array.isArray(entries) ? entries : []);
|
||||
return Object.values(nonSubscriptions).flatMap((entries) => (Array.isArray(entries) ? entries : []));
|
||||
};
|
||||
|
||||
const summarizeRevenueCatCustomerInfo = (customerInfo) => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
return {
|
||||
appUserId: customerInfo?.appUserId || customerInfo?.app_user_id || null,
|
||||
originalAppUserId: customerInfo?.originalAppUserId || customerInfo?.original_app_user_id || null,
|
||||
activeEntitlements: Object.entries(activeEntitlements).map(([id, entitlement]) => ({
|
||||
id,
|
||||
productIdentifier: entitlement?.productIdentifier || null,
|
||||
expirationDate: entitlement?.expirationDate || entitlement?.expiresDate || null,
|
||||
})),
|
||||
allPurchasedProductIdentifiers: Array.isArray(customerInfo?.allPurchasedProductIdentifiers)
|
||||
? customerInfo.allPurchasedProductIdentifiers
|
||||
: [],
|
||||
nonSubscriptionTransactions: normalizeRevenueCatTransactions(customerInfo).map((transaction) => ({
|
||||
productIdentifier: transaction?.productIdentifier || null,
|
||||
transactionIdentifier: transaction?.transactionIdentifier || transaction?.transactionId || null,
|
||||
})),
|
||||
};
|
||||
};
|
||||
|
||||
const getValidProEntitlement = (customerInfo) => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
if (!proEntitlement) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (
|
||||
typeof proEntitlement?.productIdentifier === 'string'
|
||||
&& SUPPORTED_SUBSCRIPTION_PRODUCTS.has(proEntitlement.productIdentifier)
|
||||
) {
|
||||
return proEntitlement;
|
||||
}
|
||||
|
||||
console.warn('[Billing] Ignoring unsupported RevenueCat pro entitlement', summarizeRevenueCatCustomerInfo(customerInfo));
|
||||
return null;
|
||||
};
|
||||
|
||||
const applyRevenueCatEntitlementState = (account, options) => {
|
||||
@@ -279,6 +324,39 @@ const applyRevenueCatEntitlementState = (account, options) => {
|
||||
}
|
||||
};
|
||||
|
||||
const parseStoredJson = (raw) => {
|
||||
if (raw == null) return null;
|
||||
if (typeof raw === 'object') return raw;
|
||||
if (typeof raw !== 'string') return null;
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const readIdempotentValue = async (db, key) => {
|
||||
const row = await get(
|
||||
db,
|
||||
'SELECT response_json AS "responseJson" FROM billing_idempotency WHERE id = $1',
|
||||
[key],
|
||||
);
|
||||
if (!row) return null;
|
||||
return parseStoredJson(row.responseJson);
|
||||
};
|
||||
|
||||
const writeIdempotentValue = async (db, key, value) => {
|
||||
await run(
|
||||
db,
|
||||
`INSERT INTO billing_idempotency (id, response_json, created_at)
|
||||
VALUES ($1, CAST($2 AS jsonb), $3)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
response_json = EXCLUDED.response_json,
|
||||
created_at = EXCLUDED.created_at`,
|
||||
[key, JSON.stringify(value), nowIso()],
|
||||
);
|
||||
};
|
||||
|
||||
const grantRevenueCatTopupIfNeeded = async (db, account, transactionId, productId) => {
|
||||
if (!transactionId || !isSupportedTopupProduct(productId)) {
|
||||
return false;
|
||||
@@ -291,21 +369,29 @@ const grantRevenueCatTopupIfNeeded = async (db, account, transactionId, productI
|
||||
return true;
|
||||
};
|
||||
|
||||
const syncRevenueCatCustomerInfo = async (db, userId, customerInfo) => {
|
||||
return runInTransaction(db, async () => {
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
const syncRevenueCatCustomerInfo = async (db, userId, customerInfo, options = {}) => {
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
const proEntitlement = getValidProEntitlement(customerInfo);
|
||||
const source = typeof options.source === 'string' ? options.source : 'app_init';
|
||||
|
||||
console.log('[Billing] Syncing RevenueCat customer info', {
|
||||
userId,
|
||||
source,
|
||||
customerInfo: summarizeRevenueCatCustomerInfo(customerInfo),
|
||||
});
|
||||
|
||||
if (source !== 'topup_purchase') {
|
||||
applyRevenueCatEntitlementState(account, {
|
||||
active: Boolean(proEntitlement),
|
||||
renewsAt: asIsoDate(proEntitlement?.expirationDate || proEntitlement?.expiresDate),
|
||||
});
|
||||
}
|
||||
|
||||
const transactions = normalizeRevenueCatTransactions(customerInfo);
|
||||
for (const transaction of transactions) {
|
||||
await grantRevenueCatTopupIfNeeded(
|
||||
db,
|
||||
tx,
|
||||
account,
|
||||
transaction?.transactionIdentifier || transaction?.transactionId,
|
||||
transaction?.productIdentifier,
|
||||
@@ -313,7 +399,7 @@ const syncRevenueCatCustomerInfo = async (db, userId, customerInfo) => {
|
||||
}
|
||||
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
return {
|
||||
billing: buildBillingSummary(account),
|
||||
syncedAt: nowIso(),
|
||||
@@ -340,10 +426,10 @@ const shouldRevokeRevenueCatSubscription = (eventType) => {
|
||||
|
||||
const syncRevenueCatWebhookEvent = async (db, eventPayload) => {
|
||||
const appUserId = String(
|
||||
eventPayload?.app_user_id ||
|
||||
eventPayload?.appUserId ||
|
||||
eventPayload?.original_app_user_id ||
|
||||
'',
|
||||
eventPayload?.app_user_id
|
||||
|| eventPayload?.appUserId
|
||||
|| eventPayload?.original_app_user_id
|
||||
|| '',
|
||||
).trim();
|
||||
if (!appUserId) {
|
||||
const error = new Error('RevenueCat webhook is missing app_user_id.');
|
||||
@@ -352,12 +438,24 @@ const syncRevenueCatWebhookEvent = async (db, eventPayload) => {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return runInTransaction(db, async () => {
|
||||
const account = await getOrCreateAccount(db, appUserId);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const account = await getOrCreateAccount(tx, appUserId);
|
||||
const eventType = String(eventPayload?.type || '').toUpperCase();
|
||||
const productId = typeof eventPayload?.product_id === 'string' ? eventPayload.product_id : '';
|
||||
const entitlementIds = Array.isArray(eventPayload?.entitlement_ids) ? eventPayload.entitlement_ids : [];
|
||||
const affectsProEntitlement = entitlementIds.includes(REVENUECAT_PRO_ENTITLEMENT_ID) || SUPPORTED_SUBSCRIPTION_PRODUCTS.has(productId);
|
||||
const hasSubscriptionProduct = SUPPORTED_SUBSCRIPTION_PRODUCTS.has(productId);
|
||||
const hasTopupProduct = isSupportedTopupProduct(productId);
|
||||
const affectsProEntitlement = hasSubscriptionProduct
|
||||
|| (entitlementIds.includes(REVENUECAT_PRO_ENTITLEMENT_ID) && !hasTopupProduct);
|
||||
|
||||
if (entitlementIds.includes(REVENUECAT_PRO_ENTITLEMENT_ID) && hasTopupProduct) {
|
||||
console.warn('[Billing] Ignoring RevenueCat webhook entitlement for top-up product', {
|
||||
appUserId,
|
||||
eventType,
|
||||
productId,
|
||||
entitlementIds,
|
||||
});
|
||||
}
|
||||
|
||||
if (affectsProEntitlement && shouldGrantRevenueCatSubscription(eventType)) {
|
||||
applyRevenueCatEntitlementState(account, {
|
||||
@@ -373,7 +471,7 @@ const syncRevenueCatWebhookEvent = async (db, eventPayload) => {
|
||||
|
||||
if (isSupportedTopupProduct(productId)) {
|
||||
await grantRevenueCatTopupIfNeeded(
|
||||
db,
|
||||
tx,
|
||||
account,
|
||||
eventPayload?.transaction_id || eventPayload?.store_transaction_id || eventPayload?.id,
|
||||
productId,
|
||||
@@ -381,7 +479,7 @@ const syncRevenueCatWebhookEvent = async (db, eventPayload) => {
|
||||
}
|
||||
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
return {
|
||||
billing: buildBillingSummary(account),
|
||||
syncedAt: nowIso(),
|
||||
@@ -414,33 +512,6 @@ const consumeCredits = (account, cost) => {
|
||||
return cost;
|
||||
};
|
||||
|
||||
const parseStoredJson = (raw) => {
|
||||
if (!raw || typeof raw !== 'string') return null;
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const readIdempotentValue = async (db, key) => {
|
||||
const row = await get(db, 'SELECT responseJson FROM billing_idempotency WHERE id = ?', [key]);
|
||||
if (!row || typeof row.responseJson !== 'string') return null;
|
||||
return parseStoredJson(row.responseJson);
|
||||
};
|
||||
|
||||
const writeIdempotentValue = async (db, key, value) => {
|
||||
await run(
|
||||
db,
|
||||
`INSERT INTO billing_idempotency (id, responseJson, createdAt)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
responseJson = excluded.responseJson,
|
||||
createdAt = excluded.createdAt`,
|
||||
[key, JSON.stringify(value), nowIso()],
|
||||
);
|
||||
};
|
||||
|
||||
const endpointKey = (scope, userId, idempotencyKey) => {
|
||||
return `endpoint:${scope}:${userId}:${idempotencyKey}`;
|
||||
};
|
||||
@@ -450,15 +521,15 @@ const chargeKey = (scope, userId, idempotencyKey) => {
|
||||
};
|
||||
|
||||
const consumeCreditsWithIdempotency = async (db, userId, key, cost) => {
|
||||
return runInTransaction(db, async () => {
|
||||
const existing = await readIdempotentValue(db, key);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const existing = await readIdempotentValue(tx, key);
|
||||
if (existing && typeof existing.charged === 'number') return existing.charged;
|
||||
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
const charged = consumeCredits(account, cost);
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await writeIdempotentValue(db, key, { charged });
|
||||
await upsertAccount(tx, account);
|
||||
await writeIdempotentValue(tx, key, { charged });
|
||||
return charged;
|
||||
});
|
||||
};
|
||||
@@ -473,15 +544,15 @@ const getBillingSummary = async (db, userId) => {
|
||||
topupBalance: 0,
|
||||
available: 5,
|
||||
cycleStartedAt: nowIso(),
|
||||
cycleEndsAt: nowIso()
|
||||
cycleEndsAt: nowIso(),
|
||||
},
|
||||
availableProducts: AVAILABLE_PRODUCTS,
|
||||
};
|
||||
}
|
||||
return runInTransaction(db, async () => {
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
return buildBillingSummary(account);
|
||||
});
|
||||
};
|
||||
@@ -501,10 +572,10 @@ const getAccountSnapshot = async (db, userId) => {
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
}
|
||||
return runInTransaction(db, async () => {
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
return account;
|
||||
});
|
||||
};
|
||||
@@ -523,11 +594,11 @@ const simulatePurchase = async (db, userId, idempotencyKey, productId) => {
|
||||
const cached = await getEndpointResponse(db, endpointId);
|
||||
if (cached) return cached;
|
||||
|
||||
const response = await runInTransaction(db, async () => {
|
||||
const existingInsideTx = await readIdempotentValue(db, endpointId);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const existingInsideTx = await readIdempotentValue(tx, endpointId);
|
||||
if (existingInsideTx) return existingInsideTx;
|
||||
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
|
||||
if (productId === 'monthly_pro' || productId === 'yearly_pro') {
|
||||
const now = new Date();
|
||||
@@ -551,17 +622,15 @@ const simulatePurchase = async (db, userId, idempotencyKey, productId) => {
|
||||
}
|
||||
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
|
||||
const payload = {
|
||||
appliedProduct: productId,
|
||||
billing: buildBillingSummary(account),
|
||||
};
|
||||
await storeEndpointResponse(db, endpointId, payload);
|
||||
await storeEndpointResponse(tx, endpointId, payload);
|
||||
return payload;
|
||||
});
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
const simulateWebhook = async (db, userId, idempotencyKey, event, payload = {}) => {
|
||||
@@ -569,11 +638,11 @@ const simulateWebhook = async (db, userId, idempotencyKey, event, payload = {})
|
||||
const cached = await getEndpointResponse(db, endpointId);
|
||||
if (cached) return cached;
|
||||
|
||||
const response = await runInTransaction(db, async () => {
|
||||
const existingInsideTx = await readIdempotentValue(db, endpointId);
|
||||
return runInTransaction(db, async (tx) => {
|
||||
const existingInsideTx = await readIdempotentValue(tx, endpointId);
|
||||
if (existingInsideTx) return existingInsideTx;
|
||||
|
||||
const account = await getOrCreateAccount(db, userId);
|
||||
const account = await getOrCreateAccount(tx, userId);
|
||||
|
||||
if (event === 'entitlement_granted') {
|
||||
const now = new Date();
|
||||
@@ -609,33 +678,31 @@ const simulateWebhook = async (db, userId, idempotencyKey, event, payload = {})
|
||||
}
|
||||
|
||||
account.updatedAt = nowIso();
|
||||
await upsertAccount(db, account);
|
||||
await upsertAccount(tx, account);
|
||||
|
||||
const payloadResponse = {
|
||||
event,
|
||||
billing: buildBillingSummary(account),
|
||||
};
|
||||
await storeEndpointResponse(db, endpointId, payloadResponse);
|
||||
await storeEndpointResponse(tx, endpointId, payloadResponse);
|
||||
return payloadResponse;
|
||||
});
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
const ensureBillingSchema = async (db) => {
|
||||
await run(
|
||||
db,
|
||||
`CREATE TABLE IF NOT EXISTS billing_accounts (
|
||||
userId TEXT PRIMARY KEY,
|
||||
user_id TEXT PRIMARY KEY,
|
||||
plan TEXT NOT NULL DEFAULT 'free',
|
||||
provider TEXT NOT NULL DEFAULT 'revenuecat',
|
||||
cycleStartedAt TEXT NOT NULL,
|
||||
cycleEndsAt TEXT NOT NULL,
|
||||
monthlyAllowance INTEGER NOT NULL DEFAULT 15,
|
||||
usedThisCycle INTEGER NOT NULL DEFAULT 0,
|
||||
topupBalance INTEGER NOT NULL DEFAULT 0,
|
||||
renewsAt TEXT,
|
||||
updatedAt TEXT NOT NULL
|
||||
cycle_started_at TIMESTAMPTZ NOT NULL,
|
||||
cycle_ends_at TIMESTAMPTZ NOT NULL,
|
||||
monthly_allowance INTEGER NOT NULL DEFAULT 15,
|
||||
used_this_cycle INTEGER NOT NULL DEFAULT 0,
|
||||
topup_balance INTEGER NOT NULL DEFAULT 0,
|
||||
renews_at TIMESTAMPTZ,
|
||||
updated_at TIMESTAMPTZ NOT NULL
|
||||
)`,
|
||||
);
|
||||
|
||||
@@ -643,15 +710,15 @@ const ensureBillingSchema = async (db) => {
|
||||
db,
|
||||
`CREATE TABLE IF NOT EXISTS billing_idempotency (
|
||||
id TEXT PRIMARY KEY,
|
||||
responseJson TEXT NOT NULL,
|
||||
createdAt TEXT NOT NULL
|
||||
response_json JSONB NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL
|
||||
)`,
|
||||
);
|
||||
|
||||
await run(
|
||||
db,
|
||||
`CREATE INDEX IF NOT EXISTS idx_billing_idempotency_created_at
|
||||
ON billing_idempotency(createdAt DESC)`,
|
||||
ON billing_idempotency (created_at DESC)`,
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const crypto = require('crypto');
|
||||
const { all, get, run } = require('./sqlite');
|
||||
const { all, get, run } = require('./postgres');
|
||||
const { normalizeSearchText, rankHybridEntries } = require('./hybridSearch');
|
||||
|
||||
const DEFAULT_LIMIT = 60;
|
||||
@@ -201,7 +201,6 @@ const prepareEntry = (rawEntry, index, existingIdMap, preserveExistingIds) => {
|
||||
|
||||
const normalizedBotanicalKey = botanicalName ? normalizeKey(botanicalName) : '';
|
||||
const existingId = preserveExistingIds ? existingIdMap.get(normalizedBotanicalKey) : null;
|
||||
|
||||
const incomingId = typeof rawEntry?.id === 'string' ? normalizeWhitespace(rawEntry.id) : '';
|
||||
const id = incomingId || existingId || (botanicalName ? buildStablePlantId(botanicalName) : '');
|
||||
|
||||
@@ -314,95 +313,32 @@ const assertValidPreparedEntries = (entries, enforceUniqueImages) => {
|
||||
}
|
||||
};
|
||||
|
||||
const ensureColumn = async (db, tableName, columnName, definitionSql) => {
|
||||
const columns = await all(db, `PRAGMA table_info(${tableName})`);
|
||||
const hasColumn = columns.some((column) => column.name === columnName);
|
||||
if (hasColumn) return;
|
||||
await run(db, `ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${definitionSql}`);
|
||||
};
|
||||
|
||||
const ensurePlantSchema = async (db) => {
|
||||
await run(
|
||||
db,
|
||||
`CREATE TABLE IF NOT EXISTS plants (
|
||||
id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
botanicalName TEXT NOT NULL,
|
||||
imageUri TEXT NOT NULL,
|
||||
description TEXT,
|
||||
categories TEXT NOT NULL,
|
||||
careInfo TEXT NOT NULL,
|
||||
confidence REAL NOT NULL
|
||||
)`,
|
||||
);
|
||||
|
||||
await ensureColumn(db, 'plants', 'imageStatus', `TEXT NOT NULL DEFAULT 'ok'`);
|
||||
await ensureColumn(db, 'plants', 'createdAt', `TEXT`);
|
||||
await ensureColumn(db, 'plants', 'updatedAt', `TEXT`);
|
||||
|
||||
await run(
|
||||
db,
|
||||
`CREATE TABLE IF NOT EXISTS plant_import_audit (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
source TEXT NOT NULL,
|
||||
importedCount INTEGER NOT NULL DEFAULT 0,
|
||||
preservedIds INTEGER NOT NULL DEFAULT 0,
|
||||
duplicateImageCount INTEGER NOT NULL DEFAULT 0,
|
||||
status TEXT NOT NULL,
|
||||
details TEXT,
|
||||
backupTable TEXT,
|
||||
startedAt TEXT NOT NULL,
|
||||
completedAt TEXT NOT NULL
|
||||
)`,
|
||||
);
|
||||
|
||||
await run(
|
||||
db,
|
||||
`CREATE INDEX IF NOT EXISTS idx_plants_name ON plants(name COLLATE NOCASE)`,
|
||||
);
|
||||
await run(
|
||||
db,
|
||||
`CREATE INDEX IF NOT EXISTS idx_plants_botanical_name ON plants(botanicalName COLLATE NOCASE)`,
|
||||
);
|
||||
await run(
|
||||
db,
|
||||
`CREATE INDEX IF NOT EXISTS idx_plant_import_audit_started_at ON plant_import_audit(startedAt DESC)`,
|
||||
);
|
||||
|
||||
await run(
|
||||
db,
|
||||
`UPDATE plants SET imageStatus = COALESCE(NULLIF(imageStatus, ''), 'ok')`,
|
||||
);
|
||||
await run(
|
||||
db,
|
||||
`UPDATE plants SET createdAt = COALESCE(createdAt, datetime('now'))`,
|
||||
);
|
||||
await run(
|
||||
db,
|
||||
`UPDATE plants SET updatedAt = COALESCE(updatedAt, datetime('now'))`,
|
||||
);
|
||||
};
|
||||
|
||||
const parseJsonArray = (value) => {
|
||||
if (!value) return [];
|
||||
if (Array.isArray(value)) return value;
|
||||
if (typeof value === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(value);
|
||||
return Array.isArray(parsed) ? parsed : [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
const parseJsonObject = (value) => {
|
||||
if (!value) return {};
|
||||
if (typeof value === 'object') return value;
|
||||
if (typeof value === 'object' && !Array.isArray(value)) return value;
|
||||
if (typeof value === 'string') {
|
||||
try {
|
||||
const parsed = JSON.parse(value);
|
||||
return parsed && typeof parsed === 'object' ? parsed : {};
|
||||
return parsed && typeof parsed === 'object' && !Array.isArray(parsed) ? parsed : {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
return {};
|
||||
};
|
||||
|
||||
const toApiPlant = (row) => {
|
||||
@@ -422,6 +358,54 @@ const toApiPlant = (row) => {
|
||||
};
|
||||
};
|
||||
|
||||
// Creates the plant-catalog schema in Postgres. Idempotent — every
// statement is IF NOT EXISTS — so it is safe to run on each startup.
const ensurePlantSchema = async (db) => {
  // Catalog table: one row per plant species shown in the app.
  // categories / care_info are stored as JSONB with safe defaults.
  await run(
    db,
    `CREATE TABLE IF NOT EXISTS plants (
      id TEXT PRIMARY KEY,
      name TEXT NOT NULL,
      botanical_name TEXT NOT NULL,
      image_uri TEXT NOT NULL,
      image_status TEXT NOT NULL DEFAULT 'ok',
      description TEXT,
      categories JSONB NOT NULL DEFAULT '[]'::jsonb,
      care_info JSONB NOT NULL DEFAULT '{}'::jsonb,
      confidence DOUBLE PRECISION NOT NULL,
      created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
      updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
    )`,
  );

  // Audit trail: one row per catalog import/rebuild run (see
  // writeAuditRow). details is free-form JSONB.
  await run(
    db,
    `CREATE TABLE IF NOT EXISTS plant_import_audit (
      id BIGSERIAL PRIMARY KEY,
      source TEXT NOT NULL,
      imported_count INTEGER NOT NULL DEFAULT 0,
      preserved_ids INTEGER NOT NULL DEFAULT 0,
      duplicate_image_count INTEGER NOT NULL DEFAULT 0,
      status TEXT NOT NULL,
      details JSONB,
      backup_table TEXT,
      started_at TIMESTAMPTZ NOT NULL,
      completed_at TIMESTAMPTZ NOT NULL
    )`,
  );

  // Case-insensitive uniqueness via an expression index — Postgres has
  // no COLLATE NOCASE, so LOWER() replaces the old SQLite collation.
  await run(
    db,
    'CREATE UNIQUE INDEX IF NOT EXISTS idx_plants_botanical_name_unique ON plants (LOWER(botanical_name))',
  );
  // Supports the LOWER(name) ordering used by getPlants.
  await run(
    db,
    'CREATE INDEX IF NOT EXISTS idx_plants_name ON plants (LOWER(name))',
  );
  // Supports the "recent audits first" query in getPlantDiagnostics.
  await run(
    db,
    'CREATE INDEX IF NOT EXISTS idx_plant_import_audit_started_at ON plant_import_audit (started_at DESC)',
  );
};
|
||||
|
||||
const getPlants = async (db, options = {}) => {
|
||||
const query = typeof options.query === 'string' ? options.query.trim() : '';
|
||||
const category = typeof options.category === 'string' ? options.category.trim() : '';
|
||||
@@ -430,21 +414,22 @@ const getPlants = async (db, options = {}) => {
|
||||
? Math.max(1, Math.min(MAX_LIMIT, Math.round(limitRaw)))
|
||||
: DEFAULT_LIMIT;
|
||||
|
||||
let sql = `SELECT
|
||||
const rows = await all(
|
||||
db,
|
||||
`SELECT
|
||||
id,
|
||||
name,
|
||||
botanicalName,
|
||||
imageUri,
|
||||
imageStatus,
|
||||
botanical_name AS "botanicalName",
|
||||
image_uri AS "imageUri",
|
||||
image_status AS "imageStatus",
|
||||
description,
|
||||
categories,
|
||||
careInfo,
|
||||
care_info AS "careInfo",
|
||||
confidence
|
||||
FROM plants`;
|
||||
const params = [];
|
||||
sql += ' ORDER BY name COLLATE NOCASE ASC';
|
||||
FROM plants
|
||||
ORDER BY LOWER(name) ASC`,
|
||||
);
|
||||
|
||||
const rows = await all(db, sql, params);
|
||||
let results = rows.map(toApiPlant);
|
||||
|
||||
if (category) {
|
||||
@@ -463,31 +448,31 @@ const getPlantDiagnostics = async (db) => {
|
||||
const totals = await get(
|
||||
db,
|
||||
`SELECT
|
||||
COUNT(*) AS totalCount,
|
||||
SUM(CASE WHEN imageUri IS NULL OR TRIM(imageUri) = '' THEN 1 ELSE 0 END) AS missingImageCount,
|
||||
SUM(CASE WHEN COALESCE(imageStatus, 'ok') <> 'ok' THEN 1 ELSE 0 END) AS nonOkImageStatusCount
|
||||
COUNT(*) AS "totalCount",
|
||||
SUM(CASE WHEN image_uri IS NULL OR BTRIM(image_uri) = '' THEN 1 ELSE 0 END) AS "missingImageCount",
|
||||
SUM(CASE WHEN COALESCE(image_status, 'ok') <> 'ok' THEN 1 ELSE 0 END) AS "nonOkImageStatusCount"
|
||||
FROM plants`,
|
||||
);
|
||||
|
||||
const duplicateImages = await all(
|
||||
db,
|
||||
`SELECT imageUri, COUNT(*) AS count
|
||||
`SELECT image_uri AS "imageUri", COUNT(*) AS count
|
||||
FROM plants
|
||||
WHERE imageUri IS NOT NULL AND TRIM(imageUri) <> ''
|
||||
GROUP BY imageUri
|
||||
WHERE image_uri IS NOT NULL AND BTRIM(image_uri) <> ''
|
||||
GROUP BY image_uri
|
||||
HAVING COUNT(*) > 1
|
||||
ORDER BY count DESC, imageUri ASC
|
||||
ORDER BY count DESC, image_uri ASC
|
||||
LIMIT 200`,
|
||||
);
|
||||
|
||||
const duplicateBotanicalNames = await all(
|
||||
db,
|
||||
`SELECT botanicalName, COUNT(*) AS count
|
||||
`SELECT botanical_name AS "botanicalName", COUNT(*) AS count
|
||||
FROM plants
|
||||
WHERE botanicalName IS NOT NULL AND TRIM(botanicalName) <> ''
|
||||
GROUP BY LOWER(botanicalName)
|
||||
WHERE botanical_name IS NOT NULL AND BTRIM(botanical_name) <> ''
|
||||
GROUP BY LOWER(botanical_name), botanical_name
|
||||
HAVING COUNT(*) > 1
|
||||
ORDER BY count DESC, botanicalName ASC
|
||||
ORDER BY count DESC, botanical_name ASC
|
||||
LIMIT 200`,
|
||||
);
|
||||
|
||||
@@ -496,16 +481,16 @@ const getPlantDiagnostics = async (db) => {
|
||||
`SELECT
|
||||
id,
|
||||
source,
|
||||
importedCount,
|
||||
preservedIds,
|
||||
duplicateImageCount,
|
||||
imported_count AS "importedCount",
|
||||
preserved_ids AS "preservedIds",
|
||||
duplicate_image_count AS "duplicateImageCount",
|
||||
status,
|
||||
details,
|
||||
backupTable,
|
||||
startedAt,
|
||||
completedAt
|
||||
backup_table AS "backupTable",
|
||||
started_at AS "startedAt",
|
||||
completed_at AS "completedAt"
|
||||
FROM plant_import_audit
|
||||
ORDER BY startedAt DESC
|
||||
ORDER BY started_at DESC
|
||||
LIMIT 20`,
|
||||
);
|
||||
|
||||
@@ -519,7 +504,7 @@ const getPlantDiagnostics = async (db) => {
|
||||
duplicateBotanicalNames,
|
||||
recentAudits: recentAudits.map((audit) => ({
|
||||
...audit,
|
||||
details: audit.details ? parseJsonObject(audit.details) : null,
|
||||
details: parseJsonObject(audit.details),
|
||||
})),
|
||||
};
|
||||
};
|
||||
@@ -529,15 +514,15 @@ const writeAuditRow = async (db, audit) => {
|
||||
db,
|
||||
`INSERT INTO plant_import_audit (
|
||||
source,
|
||||
importedCount,
|
||||
preservedIds,
|
||||
duplicateImageCount,
|
||||
imported_count,
|
||||
preserved_ids,
|
||||
duplicate_image_count,
|
||||
status,
|
||||
details,
|
||||
backupTable,
|
||||
startedAt,
|
||||
completedAt
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
backup_table,
|
||||
started_at,
|
||||
completed_at
|
||||
) VALUES ($1, $2, $3, $4, $5, CAST($6 AS jsonb), $7, $8, $9)`,
|
||||
[
|
||||
audit.source,
|
||||
audit.importedCount,
|
||||
@@ -559,6 +544,23 @@ const sanitizeIdentifier = (value) => {
|
||||
return value;
|
||||
};
|
||||
|
||||
// Begins a transaction against either a pg Pool or a plain handle.
//
// When given a Pool, a dedicated client is checked out so every
// statement of the transaction runs on the same connection (statements
// issued on the Pool itself could be spread across connections).
//
// Returns { tx, release }: run statements against `tx`, and always call
// `release()` once the transaction is committed or rolled back.
//
// Fix: if BEGIN fails on a freshly checked-out client, the client is
// now released before rethrowing — previously it leaked from the pool.
const openTransaction = async (db) => {
  if (typeof db.connect === 'function') {
    const client = await db.connect();
    try {
      await run(client, 'BEGIN');
    } catch (error) {
      client.release(); // do not leak the pooled connection on a failed BEGIN
      throw error;
    }
    return {
      tx: client,
      release: () => client.release(),
    };
  }

  // Already a single connection (or a test double): use it directly;
  // nothing to release.
  await run(db, 'BEGIN');
  return {
    tx: db,
    release: () => {},
  };
};
|
||||
|
||||
const rebuildPlantsCatalog = async (db, rawEntries, options = {}) => {
|
||||
if (!Array.isArray(rawEntries)) {
|
||||
throw new PlantImportValidationError('Import payload must be an array of entries.', [
|
||||
@@ -573,7 +575,10 @@ const rebuildPlantsCatalog = async (db, rawEntries, options = {}) => {
|
||||
const enforceUniqueImages = options.enforceUniqueImages !== false;
|
||||
const startedAtIso = new Date().toISOString();
|
||||
|
||||
const existingRows = await all(db, 'SELECT id, botanicalName FROM plants');
|
||||
const existingRows = await all(
|
||||
db,
|
||||
'SELECT id, botanical_name AS "botanicalName" FROM plants',
|
||||
);
|
||||
const existingIdMap = parseExistingIdMap(existingRows);
|
||||
|
||||
const validationErrors = [];
|
||||
@@ -608,28 +613,29 @@ const rebuildPlantsCatalog = async (db, rawEntries, options = {}) => {
|
||||
preparedCount: preparedEntries.length,
|
||||
};
|
||||
|
||||
const { tx, release } = await openTransaction(db);
|
||||
|
||||
try {
|
||||
await run(db, 'BEGIN IMMEDIATE TRANSACTION');
|
||||
await run(db, `DROP TABLE IF EXISTS ${backupTable}`);
|
||||
await run(db, `CREATE TABLE ${backupTable} AS SELECT * FROM plants`);
|
||||
await run(db, 'DELETE FROM plants');
|
||||
await run(tx, `DROP TABLE IF EXISTS ${backupTable}`);
|
||||
await run(tx, `CREATE TABLE ${backupTable} AS SELECT * FROM plants`);
|
||||
await run(tx, 'DELETE FROM plants');
|
||||
|
||||
for (const entry of preparedEntries) {
|
||||
await run(
|
||||
db,
|
||||
tx,
|
||||
`INSERT INTO plants (
|
||||
id,
|
||||
name,
|
||||
botanicalName,
|
||||
imageUri,
|
||||
imageStatus,
|
||||
botanical_name,
|
||||
image_uri,
|
||||
image_status,
|
||||
description,
|
||||
categories,
|
||||
careInfo,
|
||||
care_info,
|
||||
confidence,
|
||||
createdAt,
|
||||
updatedAt
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
created_at,
|
||||
updated_at
|
||||
) VALUES ($1, $2, $3, $4, $5, $6, CAST($7 AS jsonb), CAST($8 AS jsonb), $9, $10, $11)`,
|
||||
[
|
||||
entry.id,
|
||||
entry.name,
|
||||
@@ -647,21 +653,27 @@ const rebuildPlantsCatalog = async (db, rawEntries, options = {}) => {
|
||||
}
|
||||
|
||||
await run(
|
||||
db,
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS idx_plants_botanical_name_unique ON plants(botanicalName)',
|
||||
tx,
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS idx_plants_botanical_name_unique ON plants (LOWER(botanical_name))',
|
||||
);
|
||||
if (enforceUniqueImages) {
|
||||
await run(
|
||||
db,
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS idx_plants_image_uri_unique ON plants(imageUri)',
|
||||
tx,
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS idx_plants_image_uri_unique ON plants (image_uri)',
|
||||
);
|
||||
} else {
|
||||
await run(db, 'DROP INDEX IF EXISTS idx_plants_image_uri_unique');
|
||||
await run(tx, 'DROP INDEX IF EXISTS idx_plants_image_uri_unique');
|
||||
}
|
||||
|
||||
await run(db, 'COMMIT');
|
||||
await run(tx, 'COMMIT');
|
||||
} catch (error) {
|
||||
await run(db, 'ROLLBACK');
|
||||
try {
|
||||
await run(tx, 'ROLLBACK');
|
||||
} catch (rollbackError) {
|
||||
console.error('Failed to rollback plant rebuild transaction.', rollbackError);
|
||||
}
|
||||
release();
|
||||
|
||||
const completedAtIso = new Date().toISOString();
|
||||
await writeAuditRow(db, {
|
||||
source,
|
||||
@@ -680,11 +692,13 @@ const rebuildPlantsCatalog = async (db, rawEntries, options = {}) => {
|
||||
throw error;
|
||||
}
|
||||
|
||||
release();
|
||||
|
||||
const duplicateImages = await all(
|
||||
db,
|
||||
`SELECT imageUri, COUNT(*) AS count
|
||||
`SELECT image_uri AS "imageUri", COUNT(*) AS count
|
||||
FROM plants
|
||||
GROUP BY imageUri
|
||||
GROUP BY image_uri
|
||||
HAVING COUNT(*) > 1`,
|
||||
);
|
||||
|
||||
|
||||
93
server/lib/postgres.js
Normal file
93
server/lib/postgres.js
Normal file
@@ -0,0 +1,93 @@
|
||||
const { Pool } = require('pg');
|
||||
|
||||
// Interprets the common truthy/falsy spellings used in env vars
// ("1"/"true"/"yes"/"on" and "0"/"false"/"no"/"off", any case, padded
// with whitespace). Non-strings and unrecognised values return
// `fallback`.
const parseBoolean = (value, fallback = false) => {
  if (typeof value !== 'string') return fallback;
  switch (value.trim().toLowerCase()) {
    case '1':
    case 'true':
    case 'yes':
    case 'on':
      return true;
    case '0':
    case 'false':
    case 'no':
    case 'off':
      return false;
    default:
      return fallback;
  }
};
|
||||
|
||||
// Assembles a postgresql:// connection string from the individual
// POSTGRES_* variables. Returns '' when POSTGRES_PASSWORD is unset so
// callers can distinguish "not configured" from a usable URL.
// User, password and database are URL-encoded to survive special chars.
const buildDatabaseUrlFromParts = () => {
  const password = process.env.POSTGRES_PASSWORD;
  if (!password) {
    return '';
  }

  const host = (process.env.POSTGRES_HOST || 'postgres').trim();
  const port = Number(process.env.POSTGRES_PORT || 5432);
  const database = (process.env.POSTGRES_DB || 'greenlns').trim();
  const user = (process.env.POSTGRES_USER || 'greenlns').trim();

  const credentials = `${encodeURIComponent(user)}:${encodeURIComponent(password)}`;
  return `postgresql://${credentials}@${host}:${port}/${encodeURIComponent(database)}`;
};

// The effective connection string: an explicit DATABASE_URL wins,
// otherwise one is assembled from the POSTGRES_* parts. May be ''.
const getDefaultDbPath = () => {
  return (process.env.DATABASE_URL || buildDatabaseUrlFromParts()).trim();
};
|
||||
|
||||
// Builds the pg.Pool configuration from the environment.
// Throws early when no connection string can be derived so the server
// fails fast at startup instead of hanging on connect.
const getPoolConfig = () => {
  const connectionString = getDefaultDbPath();
  if (!connectionString) {
    throw new Error('DATABASE_URL or POSTGRES_* environment variables are required.');
  }

  // Guard against a non-numeric or non-positive PGPOOL_MAX:
  // Number('abc') is NaN, which would previously be handed to pg as-is.
  const parsedMax = Number(process.env.PGPOOL_MAX || 10);
  const max = Number.isFinite(parsedMax) && parsedMax > 0 ? Math.floor(parsedMax) : 10;

  const sslEnabled = parseBoolean(process.env.DATABASE_SSL, false);
  return {
    connectionString,
    max,
    // rejectUnauthorized:false accepts self-signed certs (common for
    // self-hosted Postgres); transport is still encrypted.
    ssl: sslEnabled ? { rejectUnauthorized: false } : false,
  };
};
|
||||
|
||||
// Translates the codebase's legacy SQLite-flavoured SQL into Postgres:
//   - `?` positional placeholders become `$1`, `$2`, ...
//   - BEGIN IMMEDIATE TRANSACTION -> BEGIN
//   - datetime('now') -> CURRENT_TIMESTAMP
//   - COLLATE NOCASE is dropped (handled via LOWER() indexes instead)
// Non-string input is returned unchanged.
//
// Fix: placeholder rewriting is now quote-aware. The previous
// /\?/g replacement also rewrote `?` characters inside SQL string
// literals (e.g. "... SET note = 'why?' ..."), corrupting the query.
const translateSql = (sql) => {
  if (typeof sql !== 'string') return sql;

  let placeholderIndex = 0;
  let inString = false;
  let translated = '';

  for (let i = 0; i < sql.length; i += 1) {
    const ch = sql[i];
    if (inString) {
      translated += ch;
      if (ch === "'") {
        if (sql[i + 1] === "'") {
          // '' is an escaped quote inside a SQL string literal.
          translated += "'";
          i += 1;
        } else {
          inString = false;
        }
      }
    } else if (ch === "'") {
      inString = true;
      translated += ch;
    } else if (ch === '?') {
      placeholderIndex += 1;
      translated += `$${placeholderIndex}`;
    } else {
      translated += ch;
    }
  }

  return translated
    .replace(/BEGIN\s+IMMEDIATE\s+TRANSACTION/gi, 'BEGIN')
    .replace(/datetime\('now'\)/gi, 'CURRENT_TIMESTAMP')
    .replace(/\s+COLLATE\s+NOCASE/gi, '');
};
|
||||
|
||||
// Creates the pg connection pool and verifies connectivity with a
// trivial query, so a misconfigured database fails at startup rather
// than on the first real request.
const openDatabase = async () => {
  const config = getPoolConfig();
  const pool = new Pool(config);
  await pool.query('SELECT 1'); // connectivity probe
  return pool;
};
|
||||
|
||||
// Gracefully shuts down a pg pool. Tolerates null/undefined and
// handles without an end() method so shutdown paths never throw.
const closeDatabase = async (db) => {
  const canEnd = db && typeof db.end === 'function';
  if (canEnd) {
    await db.end();
  }
};
|
||||
|
||||
// Executes a statement and adapts pg's result to the sqlite3-style
// shape ({ lastId, changes }) the rest of the server still expects.
// lastId is only populated when the statement RETURNed an `id` column.
const run = async (db, sql, params = []) => {
  const result = await db.query(translateSql(sql), params);
  const rows = result.rows || [];
  const firstRow = rows.length > 0 ? rows[0] : undefined;
  return {
    lastId: firstRow ? firstRow.id ?? null : null,
    changes: result.rowCount || 0,
    rows,
  };
};
|
||||
|
||||
// Runs a query and returns the first row, or null when no rows match.
const get = async (db, sql, params = []) => {
  const { rows } = await db.query(translateSql(sql), params);
  return rows[0] || null;
};
|
||||
|
||||
// Runs a query and returns every matching row (possibly empty, never
// null/undefined).
const all = async (db, sql, params = []) => {
  const { rows } = await db.query(translateSql(sql), params);
  return rows || [];
};
|
||||
|
||||
// Public surface intentionally mirrors the old sqlite helper module
// (same names and call shapes), so sqlite.js can simply re-export this
// module and existing callers keep working.
module.exports = {
  all,
  closeDatabase,
  get,
  getDefaultDbPath,
  openDatabase,
  run,
};
|
||||
@@ -1,86 +1 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sqlite3 = require('sqlite3').verbose();
|
||||
|
||||
const getDefaultDbPath = () => {
|
||||
return process.env.PLANT_DB_PATH || path.join(__dirname, '..', 'data', 'greenlns.sqlite');
|
||||
};
|
||||
|
||||
const ensureDbDirectory = (dbPath) => {
|
||||
const directory = path.dirname(dbPath);
|
||||
fs.mkdirSync(directory, { recursive: true });
|
||||
};
|
||||
|
||||
const openDatabase = (dbPath = getDefaultDbPath()) => {
|
||||
ensureDbDirectory(dbPath);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const db = new sqlite3.Database(dbPath, (error) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve(db);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const closeDatabase = (db) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.close((error) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const run = (db, sql, params = []) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.run(sql, params, function onRun(error) {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve({
|
||||
lastId: this.lastID,
|
||||
changes: this.changes,
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const get = (db, sql, params = []) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.get(sql, params, (error, row) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve(row || null);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const all = (db, sql, params = []) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
db.all(sql, params, (error, rows) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve(rows || []);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
all,
|
||||
closeDatabase,
|
||||
get,
|
||||
getDefaultDbPath,
|
||||
openDatabase,
|
||||
run,
|
||||
};
|
||||
module.exports = require('./postgres');
|
||||
|
||||
@@ -29,6 +29,8 @@ const getClient = () => {
|
||||
});
|
||||
};
|
||||
|
||||
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
||||
|
||||
const ensureStorageBucket = async () => {
|
||||
const client = getClient();
|
||||
const exists = await client.bucketExists(MINIO_BUCKET);
|
||||
@@ -50,6 +52,28 @@ const ensureStorageBucket = async () => {
|
||||
}
|
||||
};
|
||||
|
||||
// Retries bucket setup while MinIO is still starting up (e.g. both
// containers launched together by docker-compose). Waits `delayMs`
// between attempts and rethrows the last error once the attempt budget
// is exhausted.
const ensureStorageBucketWithRetry = async (options = {}) => {
  const maxAttempts = Number(options.attempts || 5);
  const delayMs = Number(options.delayMs || 2000);

  let lastError;
  let attempt = 0;
  while (attempt < maxAttempts) {
    attempt += 1;
    try {
      await ensureStorageBucket();
      return;
    } catch (error) {
      lastError = error;
      const outOfAttempts = attempt === maxAttempts;
      if (outOfAttempts) break;
      console.warn(
        `MinIO bucket setup attempt ${attempt}/${maxAttempts} failed: ${error.message}. Retrying in ${delayMs}ms...`,
      );
      await sleep(delayMs);
    }
  }

  throw lastError;
};
|
||||
|
||||
const uploadImage = async (base64Data, contentType = 'image/jpeg') => {
|
||||
const client = getClient();
|
||||
const rawExtension = contentType.split('/')[1] || 'jpg';
|
||||
@@ -67,6 +91,7 @@ const uploadImage = async (base64Data, contentType = 'image/jpeg') => {
|
||||
|
||||
module.exports = {
|
||||
ensureStorageBucket,
|
||||
ensureStorageBucketWithRetry,
|
||||
uploadImage,
|
||||
isStorageConfigured,
|
||||
};
|
||||
|
||||
1427
server/package-lock.json
generated
1427
server/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -18,8 +18,8 @@
|
||||
"dotenv": "^17.3.1",
|
||||
"express": "^5.2.1",
|
||||
"minio": "^8.0.5",
|
||||
"pg": "^8.16.3",
|
||||
"sharp": "^0.34.5",
|
||||
"sqlite3": "^5.1.7",
|
||||
"stripe": "^20.3.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -287,7 +287,7 @@ const convertToWebp = async (inputBuffer, outputPath) => {
|
||||
const updatePlantImageUri = async (db, plantId, localImageUri) => {
|
||||
await run(
|
||||
db,
|
||||
'UPDATE plants SET imageUri = ?, imageStatus = ?, updatedAt = datetime(\'now\') WHERE id = ?',
|
||||
'UPDATE plants SET image_uri = $1, image_status = $2, updated_at = NOW() WHERE id = $3',
|
||||
[localImageUri, 'ok', plantId],
|
||||
);
|
||||
};
|
||||
@@ -404,9 +404,9 @@ const main = async () => {
|
||||
await ensurePlantSchema(db);
|
||||
const plants = await all(
|
||||
db,
|
||||
`SELECT id, name, botanicalName, imageUri
|
||||
`SELECT id, name, botanical_name AS "botanicalName", image_uri AS "imageUri"
|
||||
FROM plants
|
||||
ORDER BY name COLLATE NOCASE ASC`,
|
||||
ORDER BY LOWER(name) ASC`,
|
||||
);
|
||||
|
||||
console.log(`Preparing ${plants.length} plant images...`);
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import * as SecureStore from 'expo-secure-store';
|
||||
import { AuthDb } from './database';
|
||||
import { getConfiguredBackendRootUrl } from '../utils/backendUrl';
|
||||
|
||||
const SESSION_KEY = 'greenlens_session_v3';
|
||||
const BACKEND_URL = (
|
||||
process.env.EXPO_PUBLIC_BACKEND_URL ||
|
||||
process.env.EXPO_PUBLIC_PAYMENT_SERVER_URL ||
|
||||
''
|
||||
).trim();
|
||||
|
||||
export interface AuthSession {
|
||||
userId: number; // local SQLite id (for plants/settings queries)
|
||||
@@ -24,8 +20,9 @@ const clearStoredSession = async (): Promise<void> => {
|
||||
};
|
||||
|
||||
const authPost = async (path: string, body: object): Promise<{ userId: string; email: string; name: string; token: string }> => {
|
||||
const hasBackendUrl = Boolean(BACKEND_URL);
|
||||
const url = hasBackendUrl ? `${BACKEND_URL}${path}` : path;
|
||||
const backendUrl = getConfiguredBackendRootUrl();
|
||||
const hasBackendUrl = Boolean(backendUrl);
|
||||
const url = hasBackendUrl ? `${backendUrl}${path}` : path;
|
||||
let response: Response;
|
||||
try {
|
||||
response = await fetch(url, {
|
||||
@@ -107,9 +104,10 @@ export const AuthService = {
|
||||
async validateWithServer(): Promise<'valid' | 'invalid' | 'unreachable'> {
|
||||
const session = await this.getSession();
|
||||
if (!session) return 'invalid';
|
||||
if (!BACKEND_URL) return 'unreachable';
|
||||
const backendUrl = getConfiguredBackendRootUrl();
|
||||
if (!backendUrl) return 'unreachable';
|
||||
try {
|
||||
const response = await fetch(`${BACKEND_URL}/v1/billing/summary`, {
|
||||
const response = await fetch(`${backendUrl}/v1/billing/summary`, {
|
||||
headers: { Authorization: `Bearer ${session.token}` },
|
||||
});
|
||||
if (response.status === 401 || response.status === 403) return 'invalid';
|
||||
|
||||
@@ -5,6 +5,7 @@ import {
|
||||
HealthCheckResponse,
|
||||
PurchaseProductId,
|
||||
RevenueCatCustomerInfo,
|
||||
RevenueCatSyncSource,
|
||||
ScanPlantResponse,
|
||||
SemanticSearchResponse,
|
||||
ServiceHealthResponse,
|
||||
@@ -16,8 +17,8 @@ import {
|
||||
import { getAuthToken } from './userIdentityService';
|
||||
import { mockBackendService } from './mockBackendService';
|
||||
import { CareInfo, Language } from '../../types';
|
||||
import { getConfiguredBackendRootUrl } from '../../utils/backendUrl';
|
||||
|
||||
const BACKEND_BASE_URL = (process.env.EXPO_PUBLIC_BACKEND_URL || process.env.EXPO_PUBLIC_PAYMENT_SERVER_URL || '').trim();
|
||||
const REQUEST_TIMEOUT_MS = 15000;
|
||||
|
||||
const mapHttpStatusToErrorCode = (status: number): BackendErrorCode => {
|
||||
@@ -29,7 +30,9 @@ const mapHttpStatusToErrorCode = (status: number): BackendErrorCode => {
|
||||
};
|
||||
|
||||
const buildBackendUrl = (path: string): string => {
|
||||
const base = BACKEND_BASE_URL.replace(/\/$/, '');
|
||||
const backendBaseUrl = getConfiguredBackendRootUrl();
|
||||
if (!backendBaseUrl) return path;
|
||||
const base = backendBaseUrl.replace(/\/$/, '');
|
||||
return `${base}${path}`;
|
||||
};
|
||||
|
||||
@@ -105,7 +108,7 @@ const makeRequest = async <T,>(
|
||||
|
||||
export const backendApiClient = {
|
||||
getServiceHealth: async (): Promise<ServiceHealthResponse> => {
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return {
|
||||
ok: true,
|
||||
uptimeSec: 0,
|
||||
@@ -113,7 +116,6 @@ export const backendApiClient = {
|
||||
openAiConfigured: Boolean(process.env.EXPO_PUBLIC_OPENAI_API_KEY),
|
||||
dbReady: true,
|
||||
dbPath: 'in-app-mock-backend',
|
||||
stripeConfigured: Boolean(process.env.EXPO_PUBLIC_STRIPE_PUBLISHABLE_KEY),
|
||||
scanModel: (process.env.EXPO_PUBLIC_OPENAI_SCAN_MODEL || 'gpt-5').trim(),
|
||||
healthModel: (process.env.EXPO_PUBLIC_OPENAI_HEALTH_MODEL || process.env.EXPO_PUBLIC_OPENAI_SCAN_MODEL || 'gpt-5').trim(),
|
||||
};
|
||||
@@ -128,7 +130,7 @@ export const backendApiClient = {
|
||||
|
||||
getBillingSummary: async (): Promise<BillingSummary> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.getBillingSummary(token);
|
||||
}
|
||||
|
||||
@@ -140,12 +142,14 @@ export const backendApiClient = {
|
||||
|
||||
syncRevenueCatState: async (params: {
|
||||
customerInfo: RevenueCatCustomerInfo;
|
||||
source?: RevenueCatSyncSource;
|
||||
}): Promise<SyncRevenueCatStateResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.syncRevenueCatState({
|
||||
userId: token,
|
||||
customerInfo: params.customerInfo,
|
||||
source: params.source,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -154,6 +158,7 @@ export const backendApiClient = {
|
||||
token,
|
||||
body: {
|
||||
customerInfo: params.customerInfo,
|
||||
source: params.source,
|
||||
},
|
||||
});
|
||||
},
|
||||
@@ -164,7 +169,7 @@ export const backendApiClient = {
|
||||
language: Language;
|
||||
}): Promise<ScanPlantResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.scanPlant({
|
||||
userId: token,
|
||||
idempotencyKey: params.idempotencyKey,
|
||||
@@ -190,7 +195,7 @@ export const backendApiClient = {
|
||||
language: Language;
|
||||
}): Promise<SemanticSearchResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.semanticSearch({
|
||||
userId: token,
|
||||
idempotencyKey: params.idempotencyKey,
|
||||
@@ -222,7 +227,7 @@ export const backendApiClient = {
|
||||
};
|
||||
}): Promise<HealthCheckResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.healthCheck({
|
||||
userId: token,
|
||||
idempotencyKey: params.idempotencyKey,
|
||||
@@ -249,7 +254,7 @@ export const backendApiClient = {
|
||||
productId: PurchaseProductId;
|
||||
}): Promise<SimulatePurchaseResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.simulatePurchase({
|
||||
userId: token,
|
||||
idempotencyKey: params.idempotencyKey,
|
||||
@@ -273,7 +278,7 @@ export const backendApiClient = {
|
||||
payload?: { credits?: number };
|
||||
}): Promise<SimulateWebhookResponse> => {
|
||||
const token = await getAuthToken();
|
||||
if (!BACKEND_BASE_URL) {
|
||||
if (!getConfiguredBackendRootUrl()) {
|
||||
return mockBackendService.simulateWebhook({
|
||||
userId: token,
|
||||
idempotencyKey: params.idempotencyKey,
|
||||
|
||||
@@ -119,7 +119,6 @@ export interface ServiceHealthResponse {
|
||||
openAiConfigured: boolean;
|
||||
dbReady?: boolean;
|
||||
dbPath?: string;
|
||||
stripeConfigured?: boolean;
|
||||
scanModel?: string;
|
||||
healthModel?: string;
|
||||
}
|
||||
@@ -149,6 +148,12 @@ export interface SimulateWebhookResponse {
|
||||
billing: BillingSummary;
|
||||
}
|
||||
|
||||
/**
 * Origin of a RevenueCat customer-info sync. The mock backend only
 * updates the plan/allowance when the source is not 'topup_purchase'
 * (top-ups grant credits without touching the subscription plan).
 */
export type RevenueCatSyncSource =
  | 'app_init'
  | 'subscription_purchase'
  | 'topup_purchase'
  | 'restore';
|
||||
|
||||
export interface SyncRevenueCatStateResponse {
|
||||
billing: BillingSummary;
|
||||
syncedAt: string;
|
||||
|
||||
@@ -8,7 +8,9 @@ import {
|
||||
PlanId,
|
||||
PurchaseProductId,
|
||||
RevenueCatCustomerInfo,
|
||||
RevenueCatEntitlementInfo,
|
||||
RevenueCatNonSubscriptionTransaction,
|
||||
RevenueCatSyncSource,
|
||||
ScanPlantRequest,
|
||||
ScanPlantResponse,
|
||||
SemanticSearchRequest,
|
||||
@@ -46,11 +48,12 @@ const TOPUP_CREDITS_BY_PRODUCT: Record<PurchaseProductId, number> = {
|
||||
monthly_pro: 0,
|
||||
yearly_pro: 0,
|
||||
topup_small: 25,
|
||||
topup_medium: 75,
|
||||
topup_large: 200,
|
||||
topup_medium: 120,
|
||||
topup_large: 300,
|
||||
};
|
||||
|
||||
const REVENUECAT_PRO_ENTITLEMENT_ID = (process.env.EXPO_PUBLIC_REVENUECAT_PRO_ENTITLEMENT_ID || 'pro').trim() || 'pro';
|
||||
const SUPPORTED_REVENUECAT_SUBSCRIPTION_PRODUCTS = new Set<PurchaseProductId>(['monthly_pro', 'yearly_pro']);
|
||||
|
||||
interface MockAccountRecord {
|
||||
userId: string;
|
||||
@@ -244,6 +247,42 @@ const normalizeRevenueCatTransactions = (
|
||||
return Object.values(nonSubscriptions).flatMap((entries) => Array.isArray(entries) ? entries : []);
|
||||
};
|
||||
|
||||
const summarizeRevenueCatCustomerInfo = (customerInfo: RevenueCatCustomerInfo) => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
return {
|
||||
appUserId: customerInfo?.appUserId ?? null,
|
||||
originalAppUserId: customerInfo?.originalAppUserId ?? null,
|
||||
activeEntitlements: Object.entries(activeEntitlements).map(([id, entitlement]) => ({
|
||||
id,
|
||||
productIdentifier: entitlement?.productIdentifier ?? null,
|
||||
expirationDate: entitlement?.expirationDate || entitlement?.expiresDate || null,
|
||||
})),
|
||||
allPurchasedProductIdentifiers: customerInfo?.allPurchasedProductIdentifiers ?? [],
|
||||
nonSubscriptionTransactions: normalizeRevenueCatTransactions(customerInfo).map((transaction) => ({
|
||||
productIdentifier: transaction?.productIdentifier ?? null,
|
||||
transactionIdentifier: transaction?.transactionIdentifier || transaction?.transactionId || null,
|
||||
})),
|
||||
};
|
||||
};
|
||||
|
||||
const getValidProEntitlement = (customerInfo: RevenueCatCustomerInfo): RevenueCatEntitlementInfo | null => {
|
||||
const activeEntitlements = customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
if (!proEntitlement) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (
|
||||
proEntitlement.productIdentifier
|
||||
&& SUPPORTED_REVENUECAT_SUBSCRIPTION_PRODUCTS.has(proEntitlement.productIdentifier as PurchaseProductId)
|
||||
) {
|
||||
return proEntitlement;
|
||||
}
|
||||
|
||||
console.warn('[Billing][Mock] Ignoring unsupported RevenueCat pro entitlement', summarizeRevenueCatCustomerInfo(customerInfo));
|
||||
return null;
|
||||
};
|
||||
|
||||
const readIdempotentResponse = <T,>(store: IdempotencyStore, key: string): T | null => {
|
||||
const record = store[key];
|
||||
if (!record) return null;
|
||||
@@ -652,17 +691,25 @@ export const mockBackendService = {
|
||||
syncRevenueCatState: async (request: {
|
||||
userId: string;
|
||||
customerInfo: RevenueCatCustomerInfo;
|
||||
source?: RevenueCatSyncSource;
|
||||
}): Promise<SyncRevenueCatStateResponse> => {
|
||||
return withUserLock(request.userId, async () => {
|
||||
const stores = await loadStores();
|
||||
const account = getOrCreateAccount(stores, request.userId);
|
||||
const activeEntitlements = request.customerInfo?.entitlements?.active || {};
|
||||
const proEntitlement = activeEntitlements[REVENUECAT_PRO_ENTITLEMENT_ID];
|
||||
const proEntitlement = getValidProEntitlement(request.customerInfo);
|
||||
const source = request.source || 'app_init';
|
||||
|
||||
console.log('[Billing][Mock] Syncing RevenueCat customer info', {
|
||||
source,
|
||||
customerInfo: summarizeRevenueCatCustomerInfo(request.customerInfo),
|
||||
});
|
||||
|
||||
if (source !== 'topup_purchase') {
|
||||
account.plan = proEntitlement ? 'pro' : 'free';
|
||||
account.provider = 'revenuecat';
|
||||
account.monthlyAllowance = getMonthlyAllowanceForPlan(account.plan, account.userId);
|
||||
account.renewsAt = proEntitlement?.expirationDate || proEntitlement?.expiresDate || null;
|
||||
}
|
||||
|
||||
for (const transaction of normalizeRevenueCatTransactions(request.customerInfo)) {
|
||||
const productId = transaction.productIdentifier as PurchaseProductId | undefined;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { IdentificationResult, Language } from '../types';
|
||||
import { resolveImageUri, tryResolveImageUri } from '../utils/imageUri';
|
||||
import { getConfiguredApiBaseUrl } from '../utils/backendUrl';
|
||||
import { getConfiguredApiBaseUrl, getConfiguredBackendRootUrl } from '../utils/backendUrl';
|
||||
import { backendApiClient } from './backend/backendApiClient';
|
||||
import { BackendDatabaseEntry, isBackendApiError } from './backend/contracts';
|
||||
import { createIdempotencyKey } from '../utils/idempotency';
|
||||
@@ -26,14 +26,7 @@ export interface SemanticSearchResult {
|
||||
|
||||
const DEFAULT_SEARCH_LIMIT = 500;
|
||||
|
||||
const hasConfiguredPlantBackend = (): boolean => Boolean(
|
||||
String(
|
||||
process.env.EXPO_PUBLIC_API_URL
|
||||
|| process.env.EXPO_PUBLIC_BACKEND_URL
|
||||
|| process.env.EXPO_PUBLIC_PAYMENT_SERVER_URL
|
||||
|| '',
|
||||
).trim(),
|
||||
);
|
||||
const hasConfiguredPlantBackend = (): boolean => Boolean(getConfiguredBackendRootUrl());
|
||||
|
||||
const normalizeImageStatus = (status?: string, imageUri?: string): 'ok' | 'missing' | 'invalid' => {
|
||||
if (status === 'ok' || status === 'missing' || status === 'invalid') return status;
|
||||
|
||||
@@ -26,6 +26,19 @@ export const getConfiguredApiBaseUrl = (): string => {
|
||||
return DEFAULT_API_BASE_URL;
|
||||
};
|
||||
|
||||
export const getConfiguredBackendRootUrl = (): string => {
|
||||
const explicitApiUrl = normalizeHttpUrl(process.env.EXPO_PUBLIC_API_URL);
|
||||
if (explicitApiUrl) {
|
||||
return explicitApiUrl.endsWith('/api')
|
||||
? explicitApiUrl.slice(0, -4).replace(/\/+$/, '')
|
||||
: explicitApiUrl;
|
||||
}
|
||||
|
||||
return normalizeHttpUrl(
|
||||
process.env.EXPO_PUBLIC_BACKEND_URL || process.env.EXPO_PUBLIC_PAYMENT_SERVER_URL,
|
||||
) || '';
|
||||
};
|
||||
|
||||
export const getConfiguredAssetBaseUrl = (): string => {
|
||||
const apiBaseUrl = getConfiguredApiBaseUrl();
|
||||
|
||||
|
||||
Reference in New Issue
Block a user