Add production deployment config, Alembic migration, switch to Haiku
- Production Docker Compose with a Caddy reverse proxy, Gunicorn, and Nginx
- Multi-stage frontend build for production
- Deploy script and automated database backup script
- Initial Alembic migration creating all tables
- Switch the recommendation model from Sonnet to Haiku for cost efficiency
This commit is contained in:
19
Caddyfile
Normal file
19
Caddyfile
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# Caddy site block; the DOMAIN env var selects the site address (default:
# localhost). For a real domain Caddy obtains and renews TLS automatically.
{$DOMAIN:localhost} {
	# Frontend
	# NOTE: Caddy sorts `handle` blocks by path-matcher specificity, so the
	# /api/* handler below takes precedence over this catch-all even though
	# the catch-all appears first.
	handle {
		reverse_proxy frontend:80
	}

	# API
	handle /api/* {
		reverse_proxy backend:8000
	}

	# Security headers
	header {
		X-Content-Type-Options nosniff
		X-Frame-Options DENY
		Referrer-Policy strict-origin-when-cross-origin
		# Strip the Server header to avoid advertising the server software.
		-Server
	}
}
|
||||||
16
backend/Dockerfile.prod
Normal file
16
backend/Dockerfile.prod
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Production backend image: Gunicorn with Uvicorn workers serving the FastAPI app.
FROM python:3.12-slim

WORKDIR /app

# gcc + libpq headers are needed to compile the Postgres driver from source.
RUN apt-get update && apt-get install -y --no-install-recommends gcc libpq-dev && rm -rf /var/lib/apt/lists/*

# Install dependencies first so this layer is cached across source changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt gunicorn

COPY . .

EXPOSE 8000

# Migrations run at container start, when the database is reachable.
# (The previous build-time `RUN alembic upgrade head 2>/dev/null || true`
# could never succeed — there is no database during `docker build` — and
# its failure was silently swallowed, so it has been removed.)
CMD ["sh", "-c", "alembic upgrade head && gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000"]
|
||||||
90
backend/alembic/versions/001_initial.py
Normal file
90
backend/alembic/versions/001_initial.py
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
"""Initial schema
|
||||||
|
|
||||||
|
Revision ID: 001
|
||||||
|
Revises:
|
||||||
|
Create Date: 2026-03-30
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
revision: str = "001"
|
||||||
|
down_revision: Union[str, None] = None
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the initial schema: users, playlists, tracks, recommendations.

    NOTE: ``default=`` on ``sa.Column`` is a client-side (ORM) default and
    is NOT rendered into the CREATE TABLE DDL, so the original migration
    produced NOT NULL columns with no database-level default.  These now use
    ``server_default`` so raw inserts (psql, ETL jobs) also get sane values.
    """
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("email", sa.String(255), unique=True, index=True, nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        # Nullable: OAuth-only accounts (Spotify) have no local password.
        sa.Column("hashed_password", sa.String(255), nullable=True),
        sa.Column("is_pro", sa.Boolean(), server_default=sa.false(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("spotify_id", sa.String(255), unique=True, nullable=True),
        sa.Column("spotify_access_token", sa.Text(), nullable=True),
        sa.Column("spotify_refresh_token", sa.Text(), nullable=True),
        sa.Column("stripe_customer_id", sa.String(255), unique=True, nullable=True),
        sa.Column("stripe_subscription_id", sa.String(255), nullable=True),
    )

    op.create_table(
        "playlists",
        sa.Column("id", sa.Integer(), primary_key=True),
        # CASCADE: deleting a user removes their imported playlists.
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), index=True, nullable=False),
        sa.Column("name", sa.String(500), nullable=False),
        sa.Column("platform_source", sa.String(50), nullable=False),
        sa.Column("external_id", sa.String(255), nullable=True),
        sa.Column("track_count", sa.Integer(), server_default="0", nullable=False),
        sa.Column("taste_profile", sa.JSON(), nullable=True),
        sa.Column("imported_at", sa.DateTime(timezone=True), nullable=False),
    )

    op.create_table(
        "tracks",
        sa.Column("id", sa.Integer(), primary_key=True),
        # CASCADE: deleting a playlist removes its tracks.
        sa.Column("playlist_id", sa.Integer(), sa.ForeignKey("playlists.id", ondelete="CASCADE"), index=True, nullable=False),
        sa.Column("title", sa.String(500), nullable=False),
        sa.Column("artist", sa.String(500), nullable=False),
        sa.Column("album", sa.String(500), nullable=True),
        sa.Column("spotify_id", sa.String(255), nullable=True),
        sa.Column("isrc", sa.String(20), nullable=True),
        sa.Column("preview_url", sa.String(500), nullable=True),
        sa.Column("image_url", sa.String(500), nullable=True),
        # Audio features; all nullable because feature lookup is best-effort.
        sa.Column("tempo", sa.Float(), nullable=True),
        sa.Column("energy", sa.Float(), nullable=True),
        sa.Column("danceability", sa.Float(), nullable=True),
        sa.Column("valence", sa.Float(), nullable=True),
        sa.Column("acousticness", sa.Float(), nullable=True),
        sa.Column("instrumentalness", sa.Float(), nullable=True),
        sa.Column("genres", sa.JSON(), nullable=True),
    )

    op.create_table(
        "recommendations",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), index=True, nullable=False),
        # SET NULL: a recommendation survives deletion of its source playlist.
        sa.Column("playlist_id", sa.Integer(), sa.ForeignKey("playlists.id", ondelete="SET NULL"), nullable=True),
        sa.Column("title", sa.String(500), nullable=False),
        sa.Column("artist", sa.String(500), nullable=False),
        sa.Column("album", sa.String(500), nullable=True),
        sa.Column("spotify_id", sa.String(255), nullable=True),
        sa.Column("preview_url", sa.String(500), nullable=True),
        sa.Column("image_url", sa.String(500), nullable=True),
        sa.Column("reason", sa.Text(), nullable=False),
        sa.Column("score", sa.Float(), nullable=True),
        sa.Column("query", sa.Text(), nullable=True),
        sa.Column("saved", sa.Boolean(), server_default=sa.false(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop every table created by this migration."""
    # Reverse of creation order so foreign-key references never dangle.
    for table_name in ("recommendations", "tracks", "playlists", "users"):
        op.drop_table(table_name)
|
||||||
@@ -135,7 +135,7 @@ Return ONLY the JSON array, no other text."""
|
|||||||
# Call Claude API
|
# Call Claude API
|
||||||
client = anthropic.Anthropic(api_key=settings.ANTHROPIC_API_KEY)
|
client = anthropic.Anthropic(api_key=settings.ANTHROPIC_API_KEY)
|
||||||
message = client.messages.create(
|
message = client.messages.create(
|
||||||
model="claude-sonnet-4-20250514",
|
model="claude-haiku-4-5-20251001",
|
||||||
max_tokens=2000,
|
max_tokens=2000,
|
||||||
messages=[{"role": "user", "content": prompt}],
|
messages=[{"role": "user", "content": prompt}],
|
||||||
)
|
)
|
||||||
|
|||||||
20
backup.sh
Executable file
20
backup.sh
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
# Vynl database backup script
# Run via cron: 0 3 * * * /path/to/backup.sh
#
# pipefail is required here: with plain `set -e`, a failing pg_dump piped
# into gzip still exits 0 and silently leaves a truncated backup behind.
set -euo pipefail

BACKUP_DIR="/backups/vynl"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
KEEP_DAYS=14

mkdir -p "$BACKUP_DIR"

# Dump database from the running container.
# -T disables TTY allocation so the stream is safe to pipe.
docker compose -f docker-compose.prod.yml exec -T db \
    pg_dump -U vynl vynl | gzip > "${BACKUP_DIR}/vynl_${TIMESTAMP}.sql.gz"

# Remove backups older than KEEP_DAYS days.
find "$BACKUP_DIR" -name "vynl_*.sql.gz" -mtime +"${KEEP_DAYS}" -delete

echo "Backup complete: vynl_${TIMESTAMP}.sql.gz"
|
||||||
33
deploy.sh
Executable file
33
deploy.sh
Executable file
@@ -0,0 +1,33 @@
|
|||||||
|
#!/bin/bash
# Vynl production deployment script
# Usage: ./deploy.sh [domain]
# Example: ./deploy.sh vynl.app
set -euo pipefail

DOMAIN=${1:-localhost}

echo "=== Deploying Vynl to ${DOMAIN} ==="

# Refuse to deploy without the backend environment file.
if [ ! -f backend/.env ]; then
    echo "ERROR: backend/.env not found. Copy backend/.env.example and fill in your values."
    exit 1
fi

# Set domain for Caddy
export DOMAIN

# Extract only the password VALUE from backend/.env.
# The previous version exported the entire "POSTGRES_PASSWORD=..." line as
# the password, so the database volume was initialized with a mangled value.
DB_PASSWORD=$(grep -m1 '^POSTGRES_PASSWORD=' backend/.env | cut -d= -f2- || true)
export DB_PASSWORD=${DB_PASSWORD:-vynl}

# Build and start
docker compose -f docker-compose.prod.yml build
docker compose -f docker-compose.prod.yml up -d

echo ""
echo "=== Vynl deployed ==="
echo "URL: https://${DOMAIN}"
echo ""
echo "Useful commands:"
echo "  docker compose -f docker-compose.prod.yml logs -f   # View logs"
echo "  docker compose -f docker-compose.prod.yml down      # Stop"
echo "  docker compose -f docker-compose.prod.yml restart   # Restart"
|
||||||
51
docker-compose.prod.yml
Normal file
51
docker-compose.prod.yml
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# Production stack: Postgres + Redis + FastAPI backend + built frontend,
# all fronted by Caddy (the only service with published ports).
services:
  db:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: vynl
      # DB_PASSWORD is exported by deploy.sh before `docker compose up`.
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_DB: vynl
    volumes:
      # Named volume so data survives container recreation.
      - pgdata:/var/lib/postgresql/data
    restart: unless-stopped

  redis:
    image: redis:7-alpine
    restart: unless-stopped

  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile.prod
    env_file: ./backend/.env
    # NOTE(review): depends_on only orders startup; it does not wait for
    # Postgres to accept connections. Assumes the backend retries its
    # initial DB connection -- TODO confirm, or add a db healthcheck.
    depends_on:
      - db
      - redis
    restart: unless-stopped

  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile.prod
      args:
        # Baked into the Vite bundle at build time (empty = same-origin /api).
        VITE_API_URL: ${VITE_API_URL:-}
    restart: unless-stopped

  caddy:
    image: caddy:2-alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile
      # caddy_data holds certificates and ACME state; losing it forces
      # re-issuance of TLS certificates.
      - caddy_data:/data
      - caddy_config:/config
    depends_on:
      - backend
      - frontend
    restart: unless-stopped

volumes:
  pgdata:
  caddy_data:
  caddy_config:
|
||||||
18
frontend/Dockerfile.prod
Normal file
18
frontend/Dockerfile.prod
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Multi-stage production build: compile the Vite app, then serve the static
# bundle with nginx.
FROM node:22-alpine AS build

WORKDIR /app

# Install dependencies first so this layer is cached across source changes.
COPY package*.json ./
RUN npm ci

COPY . .

# docker-compose.prod.yml passes VITE_API_URL as a build arg, but a build
# arg is invisible to the build unless the Dockerfile declares it.  Declare
# it and export it as an env var so `npm run build` can bake it into the
# bundle; previously the arg was silently ignored.
ARG VITE_API_URL
ENV VITE_API_URL=${VITE_API_URL}
RUN npm run build

FROM nginx:alpine

COPY --from=build /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf

EXPOSE 80

CMD ["nginx", "-g", "daemon off;"]
|
||||||
14
frontend/nginx.conf
Normal file
14
frontend/nginx.conf
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# Serves the built SPA bundle. /api requests never reach this server;
# Caddy routes them to the backend container.
server {
    listen 80;
    root /usr/share/nginx/html;
    index index.html;

    # SPA fallback: unknown paths serve index.html so client-side routing works.
    location / {
        try_files $uri $uri/ /index.html;
    }

    # Vite emits content-hashed filenames under /assets/, so they are safe
    # to cache indefinitely.
    location /assets/ {
        expires 1y;
        add_header Cache-Control "public, immutable";
    }
}
|
||||||
Reference in New Issue
Block a user