diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..cdcdc3b --- /dev/null +++ b/.env.example @@ -0,0 +1,14 @@ +# Database configuration +DATABASE_URL=postgresql+asyncpg://postgres:postgres@db:5432/yuntu_kol +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_DB=yuntu_kol + +# Brand API +BRAND_API_BASE_URL=https://api.internal.intelligrow.cn + +# Frontend API URL (for production) +NEXT_PUBLIC_API_URL=http://localhost:8000/api/v1 + +# CORS configuration +CORS_ORIGINS=http://localhost:3000,http://frontend:3000 diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..72a3c83 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,30 @@ +FROM python:3.11-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements and install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY app/ ./app/ + +# Create non-root user +RUN useradd -m appuser && chown -R appuser:appuser /app +USER appuser + +# Expose port +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +# Start application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/backend/app/api/v1/export.py b/backend/app/api/v1/export.py index e65d870..15bd528 100644 --- a/backend/app/api/v1/export.py +++ b/backend/app/api/v1/export.py @@ -1,13 +1,15 @@ from datetime import datetime from typing import Literal -from fastapi import APIRouter, Query -from fastapi.responses import StreamingResponse +from fastapi import APIRouter, Query, HTTPException +from fastapi.responses import StreamingResponse, JSONResponse from io import BytesIO from app.services.export_service import generate_excel, generate_csv +from 
app.core.logging import get_logger router = APIRouter() +logger = get_logger(__name__) # 存储最近的查询结果 (简化实现, 生产环境应使用 Redis 等缓存) _cached_data: list = [] @@ -37,23 +39,40 @@ async def export_data( Returns: 文件下载响应 """ - data = get_export_data() + try: + data = get_export_data() - timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + if not data: + logger.warning("Export requested but no data available") + return JSONResponse( + status_code=400, + content={"success": False, "error": "无数据可导出,请先执行查询"} + ) - if format == "xlsx": - content = generate_excel(data) - media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" - filename = f"kol_data_{timestamp}.xlsx" - else: - content = generate_csv(data) - media_type = "text/csv; charset=utf-8" - filename = f"kol_data_{timestamp}.csv" + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + logger.info(f"Exporting {len(data)} records as {format}") - return StreamingResponse( - BytesIO(content), - media_type=media_type, - headers={ - "Content-Disposition": f'attachment; filename="{filename}"', - }, - ) + if format == "xlsx": + content = generate_excel(data) + media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + filename = f"kol_data_{timestamp}.xlsx" + else: + content = generate_csv(data) + media_type = "text/csv; charset=utf-8" + filename = f"kol_data_{timestamp}.csv" + + logger.info(f"Export successful: {filename}") + return StreamingResponse( + BytesIO(content), + media_type=media_type, + headers={ + "Content-Disposition": f'attachment; filename="{filename}"', + }, + ) + + except Exception as e: + logger.error(f"Export error: {e}") + return JSONResponse( + status_code=500, + content={"success": False, "error": "导出失败,请重试"} + ) diff --git a/backend/app/api/v1/query.py b/backend/app/api/v1/query.py index ae16c3b..9dfdebc 100644 --- a/backend/app/api/v1/query.py +++ b/backend/app/api/v1/query.py @@ -1,5 +1,6 @@ from fastapi import APIRouter, Depends from sqlalchemy.ext.asyncio 
import AsyncSession +from sqlalchemy.exc import SQLAlchemyError from app.database import get_db from app.schemas.query import QueryRequest, QueryResponse, VideoData @@ -7,8 +8,10 @@ from app.services.query_service import query_videos from app.services.calculator import calculate_metrics from app.services.brand_api import get_brand_names from app.api.v1.export import set_export_data +from app.core.logging import get_logger router = APIRouter() +logger = get_logger(__name__) @router.post("/query", response_model=QueryResponse) @@ -26,14 +29,22 @@ async def query( """ try: # 1. 查询数据库 + logger.info(f"Querying videos: type={request.type}, count={len(request.values)}") videos = await query_videos(db, request.type, request.values) if not videos: + logger.info("No videos found for query") return QueryResponse(success=True, data=[], total=0) # 2. 提取品牌ID并批量获取品牌名称 brand_ids = [v.brand_id for v in videos if v.brand_id] - brand_map = await get_brand_names(brand_ids) if brand_ids else {} + brand_map = {} + if brand_ids: + try: + brand_map = await get_brand_names(brand_ids) + except Exception as brand_err: + logger.warning(f"Failed to fetch brand names, using fallback: {brand_err}") + # 降级处理:使用 brand_id 作为名称 # 3. 
转换为响应模型并计算指标 data = [] @@ -60,7 +71,17 @@ async def query( # 缓存数据供导出使用 set_export_data([d.model_dump() for d in data]) + logger.info(f"Query successful: {len(data)} videos found") return QueryResponse(success=True, data=data, total=len(data)) + except SQLAlchemyError as db_err: + logger.error(f"Database error: {db_err}") + return QueryResponse( + success=False, + data=[], + total=0, + error="数据库连接失败,请稍后重试" + ) except Exception as e: + logger.error(f"Query error: {e}") return QueryResponse(success=False, data=[], total=0, error=str(e)) diff --git a/backend/app/core/logging.py b/backend/app/core/logging.py new file mode 100644 index 0000000..a49efa4 --- /dev/null +++ b/backend/app/core/logging.py @@ -0,0 +1,20 @@ +import logging +import sys + +def setup_logging(): + """Configure application logging.""" + logging.basicConfig( + level=logging.INFO, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + handlers=[ + logging.StreamHandler(sys.stdout), + ], + ) + + # Reduce noise from external libraries + logging.getLogger("httpx").setLevel(logging.WARNING) + logging.getLogger("sqlalchemy").setLevel(logging.WARNING) + +def get_logger(name: str) -> logging.Logger: + """Get a logger instance.""" + return logging.getLogger(name) diff --git a/backend/tests/test_error_handling.py b/backend/tests/test_error_handling.py new file mode 100644 index 0000000..d2f20a2 --- /dev/null +++ b/backend/tests/test_error_handling.py @@ -0,0 +1,163 @@ +import pytest +from unittest.mock import patch, AsyncMock, MagicMock +from httpx import AsyncClient, ASGITransport +from sqlalchemy.exc import SQLAlchemyError + +from app.main import app +from app.models import KolVideo +from app.api.v1.export import set_export_data, get_export_data + + +class TestErrorHandling: + """Tests for error handling scenarios.""" + + @pytest.fixture + async def client(self, override_get_db): + """Create test client with dependency override.""" + transport = ASGITransport(app=app) + async with 
AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac + + @pytest.fixture + async def seed_data(self, test_session, sample_video_data): + """Seed test data.""" + data = sample_video_data.copy() + data["item_id"] = "error_test_001" + data["star_id"] = "error_star_001" + video = KolVideo(**data) + test_session.add(video) + await test_session.commit() + return video + + # Query API error handling tests + + @patch("app.api.v1.query.query_videos") + @patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock) + async def test_query_database_error( + self, mock_brand, mock_query, client + ): + """Test query returns error on database failure.""" + mock_brand.return_value = {} + mock_query.side_effect = SQLAlchemyError("Database connection failed") + + response = await client.post( + "/api/v1/query", + json={"type": "star_id", "values": ["test_id"]}, + ) + assert response.status_code == 200 + data = response.json() + assert data["success"] is False + assert "数据库连接失败" in data["error"] + + @patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock) + async def test_query_brand_api_failure_fallback( + self, mock_brand, client, test_session, seed_data + ): + """Test query continues with fallback when brand API fails.""" + mock_brand.side_effect = Exception("Brand API timeout") + + response = await client.post( + "/api/v1/query", + json={"type": "star_id", "values": ["error_star_001"]}, + ) + assert response.status_code == 200 + data = response.json() + # Should still succeed, brand name falls back to brand_id + assert data["success"] is True + assert data["total"] >= 0 + + @patch("app.api.v1.query.query_videos") + @patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock) + async def test_query_generic_error( + self, mock_brand, mock_query, client + ): + """Test query returns error on unexpected exception.""" + mock_brand.return_value = {} + mock_query.side_effect = Exception("Unexpected error") + + response = await client.post( + 
"/api/v1/query", + json={"type": "star_id", "values": ["test_id"]}, + ) + assert response.status_code == 200 + data = response.json() + assert data["success"] is False + assert "Unexpected error" in data["error"] + + # Export API error handling tests + + async def test_export_no_data_error(self, client): + """Test export returns error when no data is cached.""" + # Clear cached data + set_export_data([]) + + response = await client.get("/api/v1/export?format=xlsx") + assert response.status_code == 400 + data = response.json() + assert data["success"] is False + assert "无数据可导出" in data["error"] + + async def test_export_with_data_success(self, client, sample_video_data): + """Test export succeeds when data is cached.""" + # Set cached data + set_export_data([sample_video_data]) + + response = await client.get("/api/v1/export?format=xlsx") + assert response.status_code == 200 + assert "application/vnd.openxmlformats" in response.headers["content-type"] + + async def test_export_csv_with_data_success(self, client, sample_video_data): + """Test CSV export succeeds when data is cached.""" + set_export_data([sample_video_data]) + + response = await client.get("/api/v1/export?format=csv") + assert response.status_code == 200 + assert "text/csv" in response.headers["content-type"] + + @patch("app.api.v1.export.generate_excel") + async def test_export_generation_error( + self, mock_generate, client, sample_video_data + ): + """Test export returns error when file generation fails.""" + mock_generate.side_effect = Exception("Excel generation failed") + set_export_data([sample_video_data]) + + response = await client.get("/api/v1/export?format=xlsx") + assert response.status_code == 500 + data = response.json() + assert data["success"] is False + assert "导出失败" in data["error"] + + # Input validation tests + + async def test_query_validation_empty_values(self, client): + """Test query returns 422 for empty values.""" + response = await client.post( + "/api/v1/query", + 
json={"type": "star_id", "values": []}, + ) + assert response.status_code == 422 + + async def test_query_validation_invalid_type(self, client): + """Test query returns 422 for invalid query type.""" + response = await client.post( + "/api/v1/query", + json={"type": "invalid_type", "values": ["test"]}, + ) + assert response.status_code == 422 + + async def test_query_validation_missing_type(self, client): + """Test query returns 422 for missing type field.""" + response = await client.post( + "/api/v1/query", + json={"values": ["test"]}, + ) + assert response.status_code == 422 + + async def test_query_validation_missing_values(self, client): + """Test query returns 422 for missing values field.""" + response = await client.post( + "/api/v1/query", + json={"type": "star_id"}, + ) + assert response.status_code == 422 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..9506310 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,61 @@ +version: '3.8' + +services: + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + ports: + - "3000:3000" + environment: + - NEXT_PUBLIC_API_URL=http://backend:8000/api/v1 + depends_on: + - backend + networks: + - kol-network + + backend: + build: + context: ./backend + dockerfile: Dockerfile + ports: + - "8000:8000" + environment: + - DATABASE_URL=${DATABASE_URL} + - CORS_ORIGINS=http://localhost:3000,http://frontend:3000 + - BRAND_API_BASE_URL=${BRAND_API_BASE_URL:-https://api.internal.intelligrow.cn} + depends_on: + - db + networks: + - kol-network + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + db: + image: postgres:14-alpine + ports: + - "5432:5432" + environment: + - POSTGRES_USER=${POSTGRES_USER:-postgres} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + - POSTGRES_DB=${POSTGRES_DB:-yuntu_kol} + volumes: + - postgres_data:/var/lib/postgresql/data + networks: + - kol-network + 
healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + +networks: + kol-network: + driver: bridge + +volumes: + postgres_data: diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..9b33422 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,41 @@ +FROM node:20-alpine AS builder + +WORKDIR /app + +# Install pnpm +RUN corepack enable && corepack prepare pnpm@latest --activate + +# Copy package files +COPY package.json pnpm-lock.yaml ./ + +# Install dependencies +RUN pnpm install --frozen-lockfile + +# Copy application code +COPY . . + +# Build application +RUN pnpm build + +# Production image +FROM node:20-alpine AS runner + +WORKDIR /app + +# Create non-root user +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +# Copy built files +COPY --from=builder /app/public ./public +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs + +EXPOSE 3000 + +ENV PORT 3000 +ENV HOSTNAME "0.0.0.0" + +CMD ["node", "server.js"] diff --git a/frontend/next.config.mjs b/frontend/next.config.mjs index 4678774..e25a6a2 100644 --- a/frontend/next.config.mjs +++ b/frontend/next.config.mjs @@ -1,4 +1,6 @@ /** @type {import('next').NextConfig} */ -const nextConfig = {}; +const nextConfig = { + output: 'standalone', +}; export default nextConfig;