Compare commits
No commits in common. "376f0be6b4fe43d9218f1e7e7ca6aa9da6bdf8af" and "ac0f086821c55b2cef6badc0cdff8c218b68404b" have entirely different histories.
376f0be6b4 ... ac0f086821

.env.example (14 lines changed)
@@ -1,14 +0,0 @@
# Database configuration
DATABASE_URL=postgresql+asyncpg://postgres:postgres@db:5432/yuntu_kol
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=yuntu_kol

# Brand API
BRAND_API_BASE_URL=https://api.internal.intelligrow.cn

# Frontend API URL (for production)
NEXT_PUBLIC_API_URL=http://localhost:8000/api/v1

# CORS configuration
CORS_ORIGINS=http://localhost:3000,http://frontend:3000
.gitignore (vendored, 4 lines changed)
@@ -47,7 +47,3 @@ Thumbs.db
.eggs/
pip-log.txt
pip-delete-this-directory.txt

temp/
CLAUDE.md (84 lines changed)
@ -15,7 +15,6 @@ KOL Insight 是一个 KOL(关键意见领袖)数据查询与分析工具,
|
||||
- **前端**: Next.js 14.x (App Router) + React + TypeScript + Tailwind CSS
|
||||
- **后端**: Python FastAPI 0.104+ + SQLAlchemy 2.0+ (异步 ORM) + asyncpg
|
||||
- **数据库**: PostgreSQL 14.x+
|
||||
- **包管理**: uv (pyproject.toml)
|
||||
- **部署**: Docker + Uvicorn (ASGI 服务器)
|
||||
|
||||
## 常用命令
|
||||
@@ -45,49 +44,28 @@ pnpm type-check # if this script is configured
|
||||
### 后端开发
|
||||
|
||||
```bash
|
||||
# 安装依赖(使用 uv)
|
||||
uv sync
|
||||
|
||||
# 仅安装生产依赖
|
||||
uv sync --no-dev
|
||||
# 安装依赖
|
||||
pip install -r requirements.txt
|
||||
# 或使用 Poetry
|
||||
poetry install
|
||||
|
||||
# 开发模式运行(热重载)
|
||||
uv run uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
|
||||
uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
|
||||
|
||||
# 生产模式运行
|
||||
uv run uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4
|
||||
uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4
|
||||
|
||||
# 运行测试(TDD 必须)
|
||||
uv run pytest
|
||||
pytest
|
||||
|
||||
# 运行测试并生成覆盖率报告
|
||||
uv run pytest --cov=app --cov-report=html
|
||||
pytest --cov=app --cov-report=html
|
||||
|
||||
# 运行特定测试文件
|
||||
uv run pytest tests/test_query_service.py
|
||||
pytest tests/test_query_service.py
|
||||
|
||||
# 运行特定测试函数
|
||||
uv run pytest tests/test_query_service.py::test_query_by_star_id
|
||||
|
||||
# 添加新依赖
|
||||
uv add <package-name>
|
||||
|
||||
# 添加开发依赖
|
||||
uv add --group dev <package-name>
|
||||
|
||||
# 类型检查(修改后端代码后必须执行)
|
||||
uv run basedpyright
|
||||
|
||||
# 代码风格检查(修改后端代码后必须执行)
|
||||
uv run ruff check app/
|
||||
```
|
||||
|
||||
### 后端代码质量检查
|
||||
|
||||
**修改后端 Python 文件后,必须执行以下检查:**
|
||||
|
||||
```bash
|
||||
uv run basedpyright && uv run ruff check app/
|
||||
pytest tests/test_query_service.py::test_query_by_star_id
|
||||
```
|
||||
|
||||
### 数据库操作
|
||||
@@ -97,13 +75,13 @@ uv run basedpyright && uv run ruff check app/
|
||||
psql "postgresql://user:password@host:5432/yuntu_kol"
|
||||
|
||||
# 创建迁移
|
||||
uv run alembic revision --autogenerate -m "description"
|
||||
alembic revision --autogenerate -m "description"
|
||||
|
||||
# 执行迁移
|
||||
uv run alembic upgrade head
|
||||
alembic upgrade head
|
||||
|
||||
# 回滚迁移
|
||||
uv run alembic downgrade -1
|
||||
alembic downgrade -1
|
||||
```
|
||||
|
||||
### Docker 部署
|
||||
@@ -425,7 +403,7 @@ kol-insight/
|
||||
│ │ ├── api/v1/ # API 路由
|
||||
│ │ └── services/ # 业务逻辑
|
||||
│ ├── tests/ # 测试文件(TDD 必须)
|
||||
│ └── pyproject.toml
|
||||
│ └── requirements.txt
|
||||
│
|
||||
├── doc/ # 项目文档
|
||||
│ ├── PRD.md # 产品需求文档
|
||||
@@ -472,12 +450,12 @@ kol-insight/

### Q: How do I run a single test?
```bash
uv run pytest tests/test_calculator.py::test_calculate_natural_cpm -v
pytest tests/test_calculator.py::test_calculate_natural_cpm -v
```

### Q: How do I check test coverage?
```bash
uv run pytest --cov=app --cov-report=html
pytest --cov=app --cov-report=html
# Open htmlcov/index.html for the detailed report
```
@@ -507,33 +485,3 @@ export async function queryVideos(request: QueryRequest): Promise<QueryResponse>
\d kol_videos
-- You should see idx_star_id, idx_star_unique_id, idx_star_nickname
```

## Frontend/backend data structure consistency

**Important**: JSON field names returned by the backend must exactly match the frontend TypeScript type definitions.
- Frontend type definitions: `frontend/src/types/index.ts`
- When changing a backend response structure, always re-check the frontend type definitions
- Common symptom: `Cannot read properties of undefined` usually means a field-name mismatch (see the sketch below)
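As a minimal illustration of this failure mode (the field names below are hypothetical, not taken from this project's schema), a backend model whose key differs from the frontend type surfaces as `undefined` in the browser:

```python
from pydantic import BaseModel


class VideoSummary(BaseModel):
    """Hypothetical response model; FastAPI serializes these keys verbatim."""

    star_nickname: str   # the frontend type must declare `star_nickname`, not `nickname`
    total_play_cnt: int = 0


# If the frontend declares `nickname: string`, `video.nickname` is undefined at
# runtime even though the HTTP request itself succeeds.
print(VideoSummary(star_nickname="demo", total_play_cnt=10).model_dump())
```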

## External API parameter formats

### Yuntu API (GetContentMaterialAnalysisInfo)
- Date format: `YYYYMMDD` (not `YYYY-MM-DD`)
- industry_id: array form `["20"]` (not a string)
- Cookie: pass the full `sessionid=xxx` value directly

### Brand API
- URL format: `/v1/yuntu/brands?brand_id=xxx` (query parameter, not a path parameter)
- Authentication: `Authorization: Bearer {token}` (a request-building sketch follows below)
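For reference, a minimal sketch of how these formats translate into an `httpx` call. The brand endpoint, Bearer header, date format, array-valued `industry_id`, and `sessionid` cookie come from the notes above; the function names and the payload key names other than `industry_id` are illustrative assumptions, not the project's actual client code.

```python
from datetime import date

import httpx

BRAND_API_BASE_URL = "https://api.internal.intelligrow.cn"
BRAND_API_TOKEN = "your_brand_api_token_here"  # placeholder, as in .env


async def fetch_brand(brand_id: str) -> dict:
    """Brand API: brand_id as a query parameter plus a Bearer token."""
    async with httpx.AsyncClient(timeout=3.0) as client:
        resp = await client.get(
            f"{BRAND_API_BASE_URL}/v1/yuntu/brands",
            params={"brand_id": brand_id},
            headers={"Authorization": f"Bearer {BRAND_API_TOKEN}"},
        )
        resp.raise_for_status()
        return resp.json()


def yuntu_request_parts(day: date, industry_id: str, sessionid: str) -> tuple[dict, dict]:
    """Yuntu API: YYYYMMDD dates, industry_id as an array, raw sessionid cookie."""
    payload = {
        "date": day.strftime("%Y%m%d"),   # "20240101", not "2024-01-01"
        "industry_id": [industry_id],     # ["20"], not "20"
    }
    headers = {"Cookie": f"sessionid={sessionid}"}
    return payload, headers
```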

## Common frontend issues

- **Next.js module errors**: clear the caches with `rm -rf .next node_modules/.cache && pnpm build`
- **Ant Design Modal text cannot be selected/copied**: fix at several layers:
  1. Modal: `styles={{ body: { userSelect: 'text' } }}`
  2. Descriptions: add `contentStyle={{ userSelect: 'text', cursor: 'text' }}`
  3. globals.css: add `.ant-descriptions td * { user-select: text !important; }`
- **Next.js layout.tsx cannot use 'use client'**: it exports metadata, so create a separate provider component (e.g. AntdProvider.tsx)
- **Ant Design v6 ConfigProvider**: v6 does not support `theme.cssVar` or `theme.hashed`; use `<ConfigProvider locale={zhCN}>` directly
- **Frontend performance**: wrap columns in `useMemo`, event handlers in `useCallback`, and child components in `memo`
- **CORS 400 errors**: check `allow_origins` in the backend `CORSMiddleware` configuration
@@ -6,7 +6,3 @@ CORS_ORIGINS=["http://localhost:3000"]
|
||||
|
||||
# 品牌 API 配置
|
||||
BRAND_API_BASE_URL=https://api.internal.intelligrow.cn
|
||||
BRAND_API_TOKEN=your_brand_api_token_here
|
||||
|
||||
# 云图 API 配置 (SessionID池服务)
|
||||
YUNTU_API_TOKEN=your_yuntu_api_token_here
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
FROM python:3.11-slim
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
libpq-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy dependency files and install
|
||||
COPY pyproject.toml uv.lock ./
|
||||
RUN uv sync --frozen --no-dev --no-install-project
|
||||
|
||||
# Copy application code
|
||||
COPY app/ ./app/
|
||||
|
||||
# Create non-root user
|
||||
RUN useradd -m appuser && chown -R appuser:appuser /app
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Start application
|
||||
CMD ["uv", "run", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
@@ -1,78 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import APIRouter, Query, HTTPException
|
||||
from fastapi.responses import StreamingResponse, JSONResponse
|
||||
from io import BytesIO
|
||||
|
||||
from app.services.export_service import generate_excel, generate_csv
|
||||
from app.core.logging import get_logger
|
||||
|
||||
router = APIRouter()
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# 存储最近的查询结果 (简化实现, 生产环境应使用 Redis 等缓存)
|
||||
_cached_data: list = []
|
||||
|
||||
|
||||
def set_export_data(data: list):
|
||||
"""设置导出数据缓存."""
|
||||
global _cached_data
|
||||
_cached_data = data
|
||||
|
||||
|
||||
def get_export_data() -> list:
|
||||
"""获取导出数据缓存."""
|
||||
return _cached_data
|
||||
|
||||
|
||||
@router.get("/export")
|
||||
async def export_data(
|
||||
format: Literal["xlsx", "csv"] = Query("xlsx", description="导出格式"),
|
||||
):
|
||||
"""
|
||||
导出查询结果.
|
||||
|
||||
Args:
|
||||
format: 导出格式 (xlsx 或 csv)
|
||||
|
||||
Returns:
|
||||
文件下载响应
|
||||
"""
|
||||
try:
|
||||
data = get_export_data()
|
||||
|
||||
if not data:
|
||||
logger.warning("Export requested but no data available")
|
||||
return JSONResponse(
|
||||
status_code=400,
|
||||
content={"success": False, "error": "无数据可导出,请先执行查询"}
|
||||
)
|
||||
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
logger.info(f"Exporting {len(data)} records as {format}")
|
||||
|
||||
if format == "xlsx":
|
||||
content = generate_excel(data)
|
||||
media_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
||||
filename = f"kol_data_{timestamp}.xlsx"
|
||||
else:
|
||||
content = generate_csv(data)
|
||||
media_type = "text/csv; charset=utf-8"
|
||||
filename = f"kol_data_{timestamp}.csv"
|
||||
|
||||
logger.info(f"Export successful: {filename}")
|
||||
return StreamingResponse(
|
||||
BytesIO(content),
|
||||
media_type=media_type,
|
||||
headers={
|
||||
"Content-Disposition": f'attachment; filename="{filename}"',
|
||||
},
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Export error: {e}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content={"success": False, "error": "导出失败,请重试"}
|
||||
)
|
||||
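The module-level `_cached_data` list above is flagged in the code as a simplification, with Redis suggested for production. A minimal sketch of such a Redis-backed replacement, assuming the `redis` package (redis-py with asyncio support) and a locally reachable instance; the key name and TTL are arbitrary choices, and the export route would then have to `await` both helpers:

```python
import json
from typing import Any

import redis.asyncio as redis  # assumes redis-py >= 4.2 is installed

_redis = redis.from_url("redis://localhost:6379/0")  # assumed connection URL
_EXPORT_KEY = "kol_insight:export_data"              # hypothetical cache key


async def set_export_data(data: list[dict[str, Any]]) -> None:
    # Expire after an hour so stale query results are not exported later.
    await _redis.set(_EXPORT_KEY, json.dumps(data, default=str), ex=3600)


async def get_export_data() -> list[dict[str, Any]]:
    raw = await _redis.get(_EXPORT_KEY)
    return json.loads(raw) if raw else []
```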
@@ -1,87 +0,0 @@
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from app.database import get_db
|
||||
from app.schemas.query import QueryRequest, QueryResponse, VideoData
|
||||
from app.services.query_service import query_videos
|
||||
from app.services.calculator import calculate_metrics
|
||||
from app.services.brand_api import get_brand_names
|
||||
from app.api.v1.export import set_export_data
|
||||
from app.core.logging import get_logger
|
||||
|
||||
router = APIRouter()
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
@router.post("/query", response_model=QueryResponse)
|
||||
async def query(
|
||||
request: QueryRequest,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> QueryResponse:
|
||||
"""
|
||||
批量查询 KOL 视频数据.
|
||||
|
||||
支持三种查询方式:
|
||||
- star_id: 按星图ID精准匹配
|
||||
- unique_id: 按达人unique_id精准匹配
|
||||
- nickname: 按达人昵称模糊匹配
|
||||
"""
|
||||
try:
|
||||
# 1. 查询数据库
|
||||
logger.info(f"Querying videos: type={request.type}, count={len(request.values)}")
|
||||
videos = await query_videos(db, request.type, request.values)
|
||||
|
||||
if not videos:
|
||||
logger.info("No videos found for query")
|
||||
return QueryResponse(success=True, data=[], total=0)
|
||||
|
||||
# 2. 提取品牌ID并批量获取品牌名称
|
||||
brand_ids = [v.brand_id for v in videos if v.brand_id]
|
||||
brand_map = {}
|
||||
if brand_ids:
|
||||
try:
|
||||
brand_map = await get_brand_names(brand_ids)
|
||||
except Exception as brand_err:
|
||||
logger.warning(f"Failed to fetch brand names, using fallback: {brand_err}")
|
||||
# 降级处理:使用 brand_id 作为名称
|
||||
|
||||
# 3. 转换为响应模型并计算指标
|
||||
data = []
|
||||
for video in videos:
|
||||
video_data = VideoData.model_validate(video)
|
||||
|
||||
# 填充品牌名称
|
||||
if video.brand_id:
|
||||
video_data.brand_name = brand_map.get(video.brand_id, video.brand_id)
|
||||
|
||||
# 计算预估指标
|
||||
metrics = calculate_metrics(
|
||||
estimated_video_cost=video.estimated_video_cost,
|
||||
natural_play_cnt=video.natural_play_cnt,
|
||||
total_play_cnt=video.total_play_cnt,
|
||||
after_view_search_uv=video.after_view_search_uv,
|
||||
)
|
||||
video_data.estimated_natural_cpm = metrics["estimated_natural_cpm"]
|
||||
video_data.estimated_natural_search_uv = metrics["estimated_natural_search_uv"]
|
||||
video_data.estimated_natural_search_cost = metrics["estimated_natural_search_cost"]
|
||||
|
||||
data.append(video_data)
|
||||
|
||||
# 缓存数据供导出使用
|
||||
set_export_data([d.model_dump() for d in data])
|
||||
|
||||
logger.info(f"Query successful: {len(data)} videos found")
|
||||
return QueryResponse(success=True, data=data, total=len(data))
|
||||
|
||||
except SQLAlchemyError as db_err:
|
||||
logger.error(f"Database error: {db_err}")
|
||||
return QueryResponse(
|
||||
success=False,
|
||||
data=[],
|
||||
total=0,
|
||||
error="数据库连接失败,请稍后重试"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Query error: {e}")
|
||||
return QueryResponse(success=False, data=[], total=0, error=str(e))
|
||||
@@ -1,175 +0,0 @@
|
||||
"""
|
||||
视频分析API路由 (T-024)
|
||||
|
||||
GET /api/v1/videos/{item_id}/analysis - 单个视频分析
|
||||
POST /api/v1/videos/search - 搜索视频列表(支持 star_id / nickname)
|
||||
"""
|
||||
|
||||
from typing import List, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.services.video_analysis import (
|
||||
get_video_analysis_data,
|
||||
get_video_base_info,
|
||||
search_videos_by_star_id,
|
||||
search_videos_by_unique_id,
|
||||
search_videos_by_nickname,
|
||||
get_video_list_with_a3,
|
||||
)
|
||||
from app.services.yuntu_api import YuntuAPIError
|
||||
|
||||
router = APIRouter(prefix="/videos", tags=["视频分析"])
|
||||
|
||||
|
||||
class SearchRequest(BaseModel):
|
||||
"""搜索请求"""
|
||||
type: str # "star_id" | "unique_id" | "nickname"
|
||||
value: str
|
||||
|
||||
|
||||
class VideoListItem(BaseModel):
|
||||
"""视频列表项"""
|
||||
item_id: str
|
||||
title: str
|
||||
star_nickname: str
|
||||
star_unique_id: str
|
||||
create_date: Optional[str]
|
||||
hot_type: str
|
||||
total_play_cnt: int
|
||||
total_new_a3_cnt: int
|
||||
total_cost: float
|
||||
|
||||
|
||||
@router.get("/{item_id}/analysis")
|
||||
async def get_video_analysis(
|
||||
item_id: str,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
获取单个视频分析数据。
|
||||
|
||||
返回6大类指标:
|
||||
- 基础信息 (8字段)
|
||||
- 触达指标 (7字段)
|
||||
- A3指标 (3字段)
|
||||
- 搜索指标 (5字段)
|
||||
- 费用指标 (3字段)
|
||||
- 成本指标 (6字段,计算得出)
|
||||
|
||||
Args:
|
||||
item_id: 视频ID
|
||||
|
||||
Returns:
|
||||
视频分析数据
|
||||
|
||||
Raises:
|
||||
404: 视频不存在
|
||||
500: API调用失败
|
||||
"""
|
||||
try:
|
||||
result = await get_video_analysis_data(db, item_id)
|
||||
return {
|
||||
"success": True,
|
||||
"data": result,
|
||||
}
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except YuntuAPIError as e:
|
||||
# API失败但有降级数据时不抛错
|
||||
raise HTTPException(status_code=500, detail=f"API Error: {e.message}")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/search")
|
||||
async def search_videos(
|
||||
request: SearchRequest,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
搜索视频列表。
|
||||
|
||||
支持三种搜索方式(均返回列表,点击详情查看完整数据):
|
||||
- star_id: 星图ID精准匹配
|
||||
- unique_id: 达人unique_id精准匹配
|
||||
- nickname: 达人昵称模糊匹配
|
||||
|
||||
Args:
|
||||
request: 搜索请求,包含 type 和 value
|
||||
|
||||
Returns:
|
||||
视频列表(含A3数据和成本指标)
|
||||
"""
|
||||
try:
|
||||
if request.type == "star_id":
|
||||
# 星图ID查询,返回视频列表
|
||||
videos = await search_videos_by_star_id(db, request.value)
|
||||
if not videos:
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": [],
|
||||
"total": 0,
|
||||
}
|
||||
|
||||
# 获取 A3 数据
|
||||
result = await get_video_list_with_a3(db, videos)
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": result,
|
||||
"total": len(result),
|
||||
}
|
||||
|
||||
elif request.type == "unique_id":
|
||||
# 达人unique_id查询,返回视频列表
|
||||
videos = await search_videos_by_unique_id(db, request.value)
|
||||
if not videos:
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": [],
|
||||
"total": 0,
|
||||
}
|
||||
|
||||
# 获取 A3 数据
|
||||
result = await get_video_list_with_a3(db, videos)
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": result,
|
||||
"total": len(result),
|
||||
}
|
||||
|
||||
elif request.type == "nickname":
|
||||
# 昵称模糊查询,返回视频列表
|
||||
videos = await search_videos_by_nickname(db, request.value)
|
||||
if not videos:
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": [],
|
||||
"total": 0,
|
||||
}
|
||||
|
||||
# 获取 A3 数据
|
||||
result = await get_video_list_with_a3(db, videos)
|
||||
return {
|
||||
"success": True,
|
||||
"type": "list",
|
||||
"data": result,
|
||||
"total": len(result),
|
||||
}
|
||||
|
||||
else:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid search type: {request.type}")
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except YuntuAPIError as e:
|
||||
raise HTTPException(status_code=500, detail=f"API Error: {e.message}")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Internal error: {str(e)}")
|
||||
@ -8,7 +8,6 @@ class Settings(BaseSettings):
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore", # 忽略额外的环境变量
|
||||
)
|
||||
|
||||
# Database
|
||||
@ -19,17 +18,11 @@ class Settings(BaseSettings):
|
||||
|
||||
# Brand API
|
||||
BRAND_API_BASE_URL: str = "https://api.internal.intelligrow.cn"
|
||||
BRAND_API_TOKEN: str = "" # Bearer Token for Brand API authentication
|
||||
|
||||
# Yuntu API (for SessionID pool)
|
||||
YUNTU_API_TOKEN: str = "" # Bearer Token for Yuntu Cookie API
|
||||
YUNTU_AADVID: str = "1648829117232140" # 广告主ID,用于巨量云图API调用
|
||||
|
||||
# API Settings
|
||||
MAX_QUERY_LIMIT: int = 1000
|
||||
BRAND_API_TIMEOUT: float = 3.0
|
||||
BRAND_API_CONCURRENCY: int = 10
|
||||
YUNTU_API_TIMEOUT: float = 10.0 # 巨量云图API超时
|
||||
|
||||
|
||||
settings = Settings()
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
def setup_logging():
|
||||
"""Configure application logging."""
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
|
||||
handlers=[
|
||||
logging.StreamHandler(sys.stdout),
|
||||
],
|
||||
)
|
||||
|
||||
# Reduce noise from external libraries
|
||||
logging.getLogger("httpx").setLevel(logging.WARNING)
|
||||
logging.getLogger("sqlalchemy").setLevel(logging.WARNING)
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
|
||||
"""Get a logger instance."""
|
||||
return logging.getLogger(name)
|
||||
@ -2,7 +2,6 @@ from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.config import settings
|
||||
from app.api.v1 import query, export, video_analysis
|
||||
|
||||
app = FastAPI(
|
||||
title="KOL Insight API",
|
||||
@ -19,11 +18,6 @@ app.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# 注册 API 路由
|
||||
app.include_router(query.router, prefix="/api/v1", tags=["Query"])
|
||||
app.include_router(export.router, prefix="/api/v1", tags=["Export"])
|
||||
app.include_router(video_analysis.router, prefix="/api/v1", tags=["VideoAnalysis"])
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
|
||||
@ -1,119 +1,52 @@
|
||||
from sqlalchemy import Column, String, Integer, Float, DateTime, BigInteger, Boolean, Date, Text
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy import Column, String, Integer, Float, DateTime, Index
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class KolVideo(Base):
|
||||
"""KOL 视频数据模型 - 映射真实数据库表 yuntu_industry_kol_records."""
|
||||
"""KOL 视频数据模型."""
|
||||
|
||||
__tablename__ = "yuntu_industry_kol_records"
|
||||
__tablename__ = "kol_videos"
|
||||
|
||||
# 主键
|
||||
item_id = Column(String, primary_key=True)
|
||||
|
||||
# 基础信息
|
||||
title = Column(String, nullable=True)
|
||||
video_url = Column(Text, nullable=True)
|
||||
vid = Column(String, nullable=True)
|
||||
video_duration = Column(Float, nullable=True)
|
||||
create_date = Column(Date, nullable=True)
|
||||
data_date = Column(Date, nullable=True)
|
||||
|
||||
# 达人信息
|
||||
viral_type = Column(String, nullable=True)
|
||||
video_url = Column(String, nullable=True)
|
||||
star_id = Column(String, nullable=False)
|
||||
star_unique_id = Column(String, nullable=False)
|
||||
star_nickname = Column(String, nullable=False)
|
||||
star_uid = Column(String, nullable=True)
|
||||
star_fans_cnt = Column(BigInteger, nullable=True)
|
||||
star_mcn = Column(String, nullable=True)
|
||||
|
||||
# 热度类型
|
||||
hot_type = Column(String, nullable=True) # 映射为 viral_type
|
||||
is_hot = Column(Boolean, nullable=True)
|
||||
has_cart = Column(Boolean, nullable=True)
|
||||
publish_time = Column(DateTime, nullable=True)
|
||||
|
||||
# 曝光指标
|
||||
natural_play_cnt = Column(BigInteger, default=0)
|
||||
heated_play_cnt = Column(BigInteger, default=0)
|
||||
total_play_cnt = Column(BigInteger, default=0)
|
||||
natural_play_cnt = Column(Integer, default=0)
|
||||
heated_play_cnt = Column(Integer, default=0)
|
||||
total_play_cnt = Column(Integer, default=0)
|
||||
|
||||
# 互动指标
|
||||
total_interaction_cnt = Column(BigInteger, default=0) # 映射为 total_interact
|
||||
natural_interaction_cnt = Column(BigInteger, default=0)
|
||||
heated_interaction_cnt = Column(BigInteger, default=0)
|
||||
digg_cnt = Column(BigInteger, default=0) # 映射为 like_cnt
|
||||
share_cnt = Column(BigInteger, default=0)
|
||||
comment_cnt = Column(BigInteger, default=0)
|
||||
play_over_cnt = Column(BigInteger, default=0)
|
||||
play_over_rate = Column(Float, nullable=True)
|
||||
total_interact = Column(Integer, default=0)
|
||||
like_cnt = Column(Integer, default=0)
|
||||
share_cnt = Column(Integer, default=0)
|
||||
comment_cnt = Column(Integer, default=0)
|
||||
|
||||
# 搜索效果指标
|
||||
back_search_cnt = Column(BigInteger, default=0) # 映射为 return_search_cnt
|
||||
back_search_uv = Column(BigInteger, default=0)
|
||||
after_view_search_cnt = Column(BigInteger, default=0)
|
||||
after_view_search_uv = Column(BigInteger, default=0)
|
||||
after_view_search_rate = Column(Float, nullable=True)
|
||||
|
||||
# A3 指标
|
||||
# 效果指标
|
||||
new_a3_rate = Column(Float, nullable=True)
|
||||
total_new_a3_cnt = Column(BigInteger, default=0)
|
||||
natural_new_a3_cnt = Column(BigInteger, default=0)
|
||||
heated_new_a3_cnt = Column(BigInteger, default=0)
|
||||
|
||||
# 成本指标
|
||||
total_cost = Column(Float, nullable=True)
|
||||
heated_cost = Column(Float, nullable=True)
|
||||
star_task_cost = Column(Float, nullable=True)
|
||||
search_cost = Column(Float, nullable=True)
|
||||
ad_hot_roi = Column(Float, nullable=True)
|
||||
estimated_video_cost = Column(Float, default=0)
|
||||
price_under_20s = Column(BigInteger, nullable=True)
|
||||
price_20_60s = Column(BigInteger, nullable=True)
|
||||
price_over_60s = Column(BigInteger, nullable=True)
|
||||
after_view_search_uv = Column(Integer, default=0)
|
||||
return_search_cnt = Column(Integer, default=0)
|
||||
|
||||
# 商业信息
|
||||
industry_id = Column(String, nullable=True)
|
||||
industry_name = Column(String, nullable=True)
|
||||
brand_id = Column(String, nullable=True)
|
||||
order_id = Column(String, nullable=True)
|
||||
estimated_video_cost = Column(Float, default=0)
|
||||
|
||||
# JSON 字段
|
||||
content_type = Column(JSONB, nullable=True)
|
||||
industry_tags = Column(JSONB, nullable=True)
|
||||
ad_hot_type = Column(JSONB, nullable=True)
|
||||
trend = Column(JSONB, nullable=True)
|
||||
trend_daily = Column(JSONB, nullable=True)
|
||||
trend_total = Column(JSONB, nullable=True)
|
||||
component_metric_list = Column(JSONB, nullable=True)
|
||||
key_word_after_search_infos = Column(JSONB, nullable=True)
|
||||
index_map = Column(JSONB, nullable=True)
|
||||
search_keywords = Column(JSONB, nullable=True)
|
||||
keywords = Column(JSONB, nullable=True)
|
||||
|
||||
# 时间戳
|
||||
created_at = Column(DateTime, nullable=True)
|
||||
updated_at = Column(DateTime, nullable=True)
|
||||
# 索引定义
|
||||
__table_args__ = (
|
||||
Index("idx_star_id", "star_id"),
|
||||
Index("idx_star_unique_id", "star_unique_id"),
|
||||
Index("idx_star_nickname", "star_nickname"),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<KolVideo(item_id={self.item_id}, title={self.title})>"
|
||||
|
||||
# 兼容属性 - 映射旧字段名到新字段名
|
||||
@property
|
||||
def viral_type(self):
|
||||
return self.hot_type
|
||||
|
||||
@property
|
||||
def total_interact(self):
|
||||
return self.total_interaction_cnt
|
||||
|
||||
@property
|
||||
def like_cnt(self):
|
||||
return self.digg_cnt
|
||||
|
||||
@property
|
||||
def return_search_cnt(self):
|
||||
return self.back_search_cnt
|
||||
|
||||
@property
|
||||
def publish_time(self):
|
||||
return self.create_date
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from typing import List, Literal, Optional
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class QueryRequest(BaseModel):
|
||||
"""查询请求模型."""
|
||||
|
||||
type: Literal["star_id", "unique_id", "nickname"] = Field(
|
||||
..., description="查询类型: star_id, unique_id, nickname"
|
||||
)
|
||||
values: List[str] = Field(
|
||||
..., description="查询值列表 (批量ID 或单个昵称)", min_length=1
|
||||
)
|
||||
|
||||
|
||||
class VideoData(BaseModel):
|
||||
"""视频数据模型."""
|
||||
|
||||
# 基础信息
|
||||
item_id: str
|
||||
title: Optional[str] = None
|
||||
viral_type: Optional[str] = None
|
||||
video_url: Optional[str] = None
|
||||
star_id: str
|
||||
star_unique_id: str
|
||||
star_nickname: str
|
||||
publish_time: Optional[datetime] = None
|
||||
|
||||
# 曝光指标
|
||||
natural_play_cnt: int = 0
|
||||
heated_play_cnt: int = 0
|
||||
total_play_cnt: int = 0
|
||||
|
||||
# 互动指标
|
||||
total_interact: int = 0
|
||||
like_cnt: int = 0
|
||||
share_cnt: int = 0
|
||||
comment_cnt: int = 0
|
||||
|
||||
# 效果指标
|
||||
new_a3_rate: Optional[float] = None
|
||||
after_view_search_uv: int = 0
|
||||
return_search_cnt: int = 0
|
||||
|
||||
# 商业信息
|
||||
industry_id: Optional[str] = None
|
||||
industry_name: Optional[str] = None
|
||||
brand_id: Optional[str] = None
|
||||
brand_name: Optional[str] = None # 从品牌 API 获取
|
||||
estimated_video_cost: float = 0
|
||||
|
||||
# 计算字段
|
||||
estimated_natural_cpm: Optional[float] = None
|
||||
estimated_natural_search_uv: Optional[float] = None
|
||||
estimated_natural_search_cost: Optional[float] = None
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class QueryResponse(BaseModel):
|
||||
"""查询响应模型."""
|
||||
|
||||
success: bool = True
|
||||
data: List[VideoData] = []
|
||||
total: int = 0
|
||||
error: Optional[str] = None
|
||||
@@ -1,95 +0,0 @@
|
||||
import asyncio
|
||||
from typing import Dict, List, Tuple
|
||||
import httpx
|
||||
import logging
|
||||
|
||||
from app.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def fetch_brand_name(
|
||||
brand_id: str,
|
||||
semaphore: asyncio.Semaphore,
|
||||
) -> Tuple[str, str]:
|
||||
"""
|
||||
获取单个品牌名称.
|
||||
|
||||
Args:
|
||||
brand_id: 品牌ID
|
||||
semaphore: 并发控制信号量
|
||||
|
||||
Returns:
|
||||
(brand_id, brand_name) 元组, 失败时 brand_name 为 brand_id
|
||||
"""
|
||||
async with semaphore:
|
||||
try:
|
||||
# 构建请求头,包含 Bearer Token 认证 (T-020)
|
||||
headers = {}
|
||||
if settings.BRAND_API_TOKEN:
|
||||
headers["Authorization"] = f"Bearer {settings.BRAND_API_TOKEN}"
|
||||
|
||||
async with httpx.AsyncClient(
|
||||
timeout=settings.BRAND_API_TIMEOUT
|
||||
) as client:
|
||||
response = await client.get(
|
||||
f"{settings.BRAND_API_BASE_URL}/v1/yuntu/brands",
|
||||
params={"brand_id": brand_id},
|
||||
headers=headers,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
# T-019: 正确解析品牌API响应
|
||||
# 响应格式: {"total": 1, "data": [{"brand_id": xxx, "brand_name": "xxx"}]}
|
||||
if isinstance(data, dict):
|
||||
data_list = data.get("data", [])
|
||||
if isinstance(data_list, list) and len(data_list) > 0:
|
||||
first_item = data_list[0]
|
||||
if isinstance(first_item, dict):
|
||||
name = first_item.get("brand_name")
|
||||
if name:
|
||||
return brand_id, name
|
||||
except httpx.TimeoutException:
|
||||
logger.warning(f"Brand API timeout for brand_id: {brand_id}")
|
||||
except httpx.RequestError as e:
|
||||
logger.warning(f"Brand API request error for brand_id: {brand_id}, error: {e}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error fetching brand {brand_id}: {e}")
|
||||
|
||||
# 失败时降级返回 brand_id
|
||||
return brand_id, brand_id
|
||||
|
||||
|
||||
async def get_brand_names(brand_ids: List[str]) -> Dict[str, str]:
|
||||
"""
|
||||
批量获取品牌名称.
|
||||
|
||||
Args:
|
||||
brand_ids: 品牌ID列表
|
||||
|
||||
Returns:
|
||||
brand_id -> brand_name 映射字典
|
||||
"""
|
||||
# 过滤空值并去重
|
||||
unique_ids = list(set(filter(None, brand_ids)))
|
||||
|
||||
if not unique_ids:
|
||||
return {}
|
||||
|
||||
# 创建并发控制信号量
|
||||
semaphore = asyncio.Semaphore(settings.BRAND_API_CONCURRENCY)
|
||||
|
||||
# 批量并发请求
|
||||
tasks = [fetch_brand_name(brand_id, semaphore) for brand_id in unique_ids]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
# 构建映射表
|
||||
brand_map: Dict[str, str] = {}
|
||||
for result in results:
|
||||
if isinstance(result, tuple):
|
||||
brand_id, brand_name = result
|
||||
brand_map[brand_id] = brand_name
|
||||
elif isinstance(result, Exception):
|
||||
logger.error(f"Error in batch brand fetch: {result}")
|
||||
|
||||
return brand_map
|
||||
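A minimal usage sketch for `get_brand_names` (the brand IDs are made-up examples and the app settings are assumed to be configured); because failures degrade to mapping each ID to itself, callers can render the result directly:

```python
import asyncio

from app.services.brand_api import get_brand_names


async def main() -> None:
    # Empty and duplicate IDs are filtered out before the concurrent fetch.
    brand_map = await get_brand_names(["101", "101", "", "202"])
    for brand_id, brand_name in brand_map.items():
        print(brand_id, "->", brand_name)  # falls back to the ID on API failure


if __name__ == "__main__":
    asyncio.run(main())
```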
@@ -1,102 +0,0 @@
|
||||
from typing import Optional, Dict
|
||||
|
||||
|
||||
def calculate_natural_cpm(
|
||||
estimated_video_cost: float,
|
||||
natural_play_cnt: int,
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
计算预估自然CPM.
|
||||
|
||||
公式: estimated_video_cost / natural_play_cnt * 1000
|
||||
|
||||
Args:
|
||||
estimated_video_cost: 预估视频成本
|
||||
natural_play_cnt: 自然播放量
|
||||
|
||||
Returns:
|
||||
预估自然CPM (元/千次曝光), 除零时返回 None
|
||||
"""
|
||||
if natural_play_cnt <= 0:
|
||||
return None
|
||||
return round((estimated_video_cost / natural_play_cnt) * 1000, 2)
|
||||
|
||||
|
||||
def calculate_natural_search_uv(
|
||||
natural_play_cnt: int,
|
||||
total_play_cnt: int,
|
||||
after_view_search_uv: int,
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
计算预估自然看后搜人数.
|
||||
|
||||
公式: natural_play_cnt / total_play_cnt * after_view_search_uv
|
||||
|
||||
Args:
|
||||
natural_play_cnt: 自然播放量
|
||||
total_play_cnt: 总播放量
|
||||
after_view_search_uv: 看后搜人数
|
||||
|
||||
Returns:
|
||||
预估自然看后搜人数, 除零时返回 None
|
||||
"""
|
||||
if total_play_cnt <= 0:
|
||||
return None
|
||||
return round((natural_play_cnt / total_play_cnt) * after_view_search_uv, 2)
|
||||
|
||||
|
||||
def calculate_natural_search_cost(
|
||||
estimated_video_cost: float,
|
||||
estimated_natural_search_uv: Optional[float],
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
计算预估自然看后搜人数成本.
|
||||
|
||||
公式: estimated_video_cost / 预估自然看后搜人数
|
||||
|
||||
Args:
|
||||
estimated_video_cost: 预估视频成本
|
||||
estimated_natural_search_uv: 预估自然看后搜人数
|
||||
|
||||
Returns:
|
||||
预估自然看后搜人数成本 (元/人), 除零时返回 None
|
||||
"""
|
||||
if estimated_natural_search_uv is None or estimated_natural_search_uv <= 0:
|
||||
return None
|
||||
return round(estimated_video_cost / estimated_natural_search_uv, 2)
|
||||
|
||||
|
||||
def calculate_metrics(
|
||||
estimated_video_cost: float,
|
||||
natural_play_cnt: int,
|
||||
total_play_cnt: int,
|
||||
after_view_search_uv: int,
|
||||
) -> Dict[str, Optional[float]]:
|
||||
"""
|
||||
批量计算所有预估指标.
|
||||
|
||||
Args:
|
||||
estimated_video_cost: 预估视频成本
|
||||
natural_play_cnt: 自然播放量
|
||||
total_play_cnt: 总播放量
|
||||
after_view_search_uv: 看后搜人数
|
||||
|
||||
Returns:
|
||||
包含所有计算结果的字典
|
||||
"""
|
||||
# 计算 CPM
|
||||
cpm = calculate_natural_cpm(estimated_video_cost, natural_play_cnt)
|
||||
|
||||
# 计算看后搜人数
|
||||
search_uv = calculate_natural_search_uv(
|
||||
natural_play_cnt, total_play_cnt, after_view_search_uv
|
||||
)
|
||||
|
||||
# 计算看后搜成本
|
||||
search_cost = calculate_natural_search_cost(estimated_video_cost, search_uv)
|
||||
|
||||
return {
|
||||
"estimated_natural_cpm": cpm,
|
||||
"estimated_natural_search_uv": search_uv,
|
||||
"estimated_natural_search_cost": search_cost,
|
||||
}
|
||||
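A worked example of `calculate_metrics` with made-up numbers, showing how the three formulas above chain together:

```python
from app.services.calculator import calculate_metrics

# Hypothetical video: cost 5000, 200k natural plays out of 500k total, 1200 search UV.
metrics = calculate_metrics(
    estimated_video_cost=5000.0,
    natural_play_cnt=200_000,
    total_play_cnt=500_000,
    after_view_search_uv=1200,
)
print(metrics["estimated_natural_cpm"])          # 5000 / 200000 * 1000 = 25.0
print(metrics["estimated_natural_search_uv"])    # 200000 / 500000 * 1200 = 480.0
print(metrics["estimated_natural_search_cost"])  # 5000 / 480 ≈ 10.42
```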
@@ -1,97 +0,0 @@
|
||||
import csv
|
||||
from io import BytesIO, StringIO
|
||||
from typing import List, Dict, Any, Tuple
|
||||
from openpyxl import Workbook
|
||||
|
||||
# 列定义: (中文名, 字段名)
|
||||
COLUMN_HEADERS: List[Tuple[str, str]] = [
|
||||
("视频ID", "item_id"),
|
||||
("视频标题", "title"),
|
||||
("爆文类型", "viral_type"),
|
||||
("视频链接", "video_url"),
|
||||
("新增A3率", "new_a3_rate"),
|
||||
("看后搜人数", "after_view_search_uv"),
|
||||
("回搜次数", "return_search_cnt"),
|
||||
("自然曝光数", "natural_play_cnt"),
|
||||
("加热曝光数", "heated_play_cnt"),
|
||||
("总曝光数", "total_play_cnt"),
|
||||
("总互动", "total_interact"),
|
||||
("点赞", "like_cnt"),
|
||||
("转发", "share_cnt"),
|
||||
("评论", "comment_cnt"),
|
||||
("合作行业ID", "industry_id"),
|
||||
("合作行业", "industry_name"),
|
||||
("合作品牌ID", "brand_id"),
|
||||
("合作品牌", "brand_name"),
|
||||
("发布时间", "publish_time"),
|
||||
("达人昵称", "star_nickname"),
|
||||
("达人unique_id", "star_unique_id"),
|
||||
("预估视频价格", "estimated_video_cost"),
|
||||
("预估自然CPM", "estimated_natural_cpm"),
|
||||
("预估自然看后搜人数", "estimated_natural_search_uv"),
|
||||
("预估自然看后搜人数成本", "estimated_natural_search_cost"),
|
||||
]
|
||||
|
||||
|
||||
def format_value(value: Any) -> Any:
|
||||
"""格式化导出值."""
|
||||
if value is None:
|
||||
return ""
|
||||
return value
|
||||
|
||||
|
||||
def generate_excel(data: List[Dict[str, Any]]) -> bytes:
|
||||
"""
|
||||
生成 Excel 文件.
|
||||
|
||||
Args:
|
||||
data: 数据列表
|
||||
|
||||
Returns:
|
||||
Excel 文件的字节内容
|
||||
"""
|
||||
wb = Workbook()
|
||||
ws = wb.active
|
||||
ws.title = "KOL数据"
|
||||
|
||||
# 写入表头
|
||||
headers = [col[0] for col in COLUMN_HEADERS]
|
||||
ws.append(headers)
|
||||
|
||||
# 写入数据
|
||||
for row in data:
|
||||
row_data = [format_value(row.get(col[1])) for col in COLUMN_HEADERS]
|
||||
ws.append(row_data)
|
||||
|
||||
# 保存到内存
|
||||
output = BytesIO()
|
||||
wb.save(output)
|
||||
output.seek(0)
|
||||
return output.read()
|
||||
|
||||
|
||||
def generate_csv(data: List[Dict[str, Any]]) -> bytes:
|
||||
"""
|
||||
生成 CSV 文件.
|
||||
|
||||
Args:
|
||||
data: 数据列表
|
||||
|
||||
Returns:
|
||||
CSV 文件的字节内容 (UTF-8 BOM 编码)
|
||||
"""
|
||||
output = StringIO()
|
||||
writer = csv.writer(output, quoting=csv.QUOTE_MINIMAL)
|
||||
|
||||
# 写入表头
|
||||
headers = [col[0] for col in COLUMN_HEADERS]
|
||||
writer.writerow(headers)
|
||||
|
||||
# 写入数据
|
||||
for row in data:
|
||||
row_data = [format_value(row.get(col[1])) for col in COLUMN_HEADERS]
|
||||
writer.writerow(row_data)
|
||||
|
||||
# 返回 UTF-8 BOM 编码的内容 (Excel 可正确识别中文)
|
||||
content = output.getvalue()
|
||||
return ("\ufeff" + content).encode("utf-8")
|
||||
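A small usage sketch for the two generators (the row contents are made up); both return `bytes`, which the export route wraps in `BytesIO` for streaming:

```python
from pathlib import Path

from app.services.export_service import generate_csv, generate_excel

rows = [{"item_id": "123", "title": "demo", "star_nickname": "someone"}]  # missing keys export as ""

Path("kol_data.xlsx").write_bytes(generate_excel(rows))
# The CSV is prefixed with a UTF-8 BOM so Excel detects the Chinese headers correctly.
Path("kol_data.csv").write_bytes(generate_csv(rows))
```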
@@ -1,42 +0,0 @@
|
||||
from typing import List, Literal
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models import KolVideo
|
||||
from app.config import settings
|
||||
|
||||
|
||||
async def query_videos(
|
||||
session: AsyncSession,
|
||||
query_type: Literal["star_id", "unique_id", "nickname"],
|
||||
values: List[str],
|
||||
) -> List[KolVideo]:
|
||||
"""
|
||||
查询 KOL 视频数据.
|
||||
|
||||
Args:
|
||||
session: 数据库会话
|
||||
query_type: 查询类型 (star_id, unique_id, nickname)
|
||||
values: 查询值列表
|
||||
|
||||
Returns:
|
||||
匹配的视频列表
|
||||
"""
|
||||
stmt = select(KolVideo)
|
||||
|
||||
if query_type == "star_id":
|
||||
# 精准匹配 star_id
|
||||
stmt = stmt.where(KolVideo.star_id.in_(values))
|
||||
elif query_type == "unique_id":
|
||||
# 精准匹配 star_unique_id
|
||||
stmt = stmt.where(KolVideo.star_unique_id.in_(values))
|
||||
elif query_type == "nickname":
|
||||
# 模糊匹配 star_nickname (使用第一个值)
|
||||
if values:
|
||||
stmt = stmt.where(KolVideo.star_nickname.like(f"%{values[0]}%"))
|
||||
|
||||
# 限制返回数量
|
||||
stmt = stmt.limit(settings.MAX_QUERY_LIMIT)
|
||||
|
||||
result = await session.execute(stmt)
|
||||
return list(result.scalars().all())
|
||||
@@ -1,292 +0,0 @@
|
||||
"""
|
||||
SessionID池服务 (T-021, T-027)
|
||||
|
||||
从内部API获取Cookie列表,随机选取 aadvid/auth_token 用于 API 调用。
|
||||
|
||||
T-027 修复:
|
||||
- 改为随机选取任意一组配置,不按 brand_id 匹配
|
||||
- auth_token 直接使用完整值 (如 "sessionid=xxx")
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from app.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CookieConfig:
|
||||
"""Cookie 配置"""
|
||||
brand_id: str
|
||||
aadvid: str
|
||||
auth_token: str # 完整的 cookie 值,如 "sessionid=xxx"
|
||||
industry_id: int
|
||||
brand_name: str
|
||||
|
||||
|
||||
class SessionPool:
|
||||
"""SessionID池管理器 - T-027: 改为随机选取"""
|
||||
|
||||
def __init__(self):
|
||||
# 存储所有配置的列表
|
||||
self._configs: List[CookieConfig] = []
|
||||
self._lock = asyncio.Lock()
|
||||
|
||||
async def refresh(self) -> bool:
|
||||
"""
|
||||
从内部API刷新配置列表。
|
||||
|
||||
Returns:
|
||||
bool: 刷新是否成功
|
||||
"""
|
||||
async with self._lock:
|
||||
try:
|
||||
headers = {}
|
||||
if settings.YUNTU_API_TOKEN:
|
||||
headers["Authorization"] = f"Bearer {settings.YUNTU_API_TOKEN}"
|
||||
|
||||
async with httpx.AsyncClient(
|
||||
timeout=settings.YUNTU_API_TIMEOUT
|
||||
) as client:
|
||||
response = await client.get(
|
||||
f"{settings.BRAND_API_BASE_URL}/v1/yuntu/get_cookie",
|
||||
params={"page": 1, "page_size": 100},
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if isinstance(data, dict):
|
||||
cookie_list = data.get("data", [])
|
||||
if isinstance(cookie_list, list):
|
||||
self._configs = []
|
||||
for item in cookie_list:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
|
||||
brand_id = str(item.get("brand_id", ""))
|
||||
aadvid = str(item.get("aadvid", ""))
|
||||
# T-027: 直接使用 auth_token 或 sessionid_cookie 完整值
|
||||
auth_token = item.get("auth_token") or item.get("sessionid_cookie", "")
|
||||
industry_id = item.get("industry_id", 0)
|
||||
brand_name = item.get("brand_name", "")
|
||||
|
||||
if brand_id and aadvid and auth_token:
|
||||
self._configs.append(CookieConfig(
|
||||
brand_id=brand_id,
|
||||
aadvid=aadvid,
|
||||
auth_token=auth_token,
|
||||
industry_id=int(industry_id) if industry_id else 0,
|
||||
brand_name=brand_name,
|
||||
))
|
||||
|
||||
logger.info(
|
||||
f"SessionPool refreshed: {len(self._configs)} configs"
|
||||
)
|
||||
return len(self._configs) > 0
|
||||
|
||||
logger.warning(
|
||||
f"Failed to refresh session pool: status={response.status_code}"
|
||||
)
|
||||
return False
|
||||
|
||||
except httpx.TimeoutException:
|
||||
logger.error("SessionPool refresh timeout")
|
||||
return False
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"SessionPool refresh request error: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"SessionPool refresh unexpected error: {e}")
|
||||
return False
|
||||
|
||||
def get_random_config(self) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
T-027: 随机选取任意一组配置。
|
||||
|
||||
Returns:
|
||||
Dict or None: 包含 aadvid 和 auth_token 的字典
|
||||
"""
|
||||
if not self._configs:
|
||||
return None
|
||||
config = random.choice(self._configs)
|
||||
return {
|
||||
"brand_id": config.brand_id,
|
||||
"aadvid": config.aadvid,
|
||||
"auth_token": config.auth_token,
|
||||
"industry_id": config.industry_id,
|
||||
"brand_name": config.brand_name,
|
||||
}
|
||||
|
||||
def remove_by_auth_token(self, auth_token: str) -> None:
|
||||
"""
|
||||
从池中移除失效的配置。
|
||||
|
||||
Args:
|
||||
auth_token: 要移除的 auth_token
|
||||
"""
|
||||
self._configs = [c for c in self._configs if c.auth_token != auth_token]
|
||||
logger.info(f"Removed invalid config: {auth_token[:20]}...")
|
||||
|
||||
# 兼容旧接口
|
||||
def remove(self, session_id: str) -> None:
|
||||
"""兼容旧接口:移除包含指定 session_id 的配置"""
|
||||
self._configs = [c for c in self._configs if session_id not in c.auth_token]
|
||||
|
||||
@property
|
||||
def size(self) -> int:
|
||||
"""返回池中配置数量"""
|
||||
return len(self._configs)
|
||||
|
||||
@property
|
||||
def is_empty(self) -> bool:
|
||||
"""检查池是否为空"""
|
||||
return len(self._configs) == 0
|
||||
|
||||
def get_distinct_configs(self, count: int) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取 count 个不同的配置,用于并发调用。
|
||||
|
||||
- 池中配置 >= count:随机抽样 count 个不重复的
|
||||
- 池中配置 < count:全部取出,循环复用补足
|
||||
- 池为空:返回空列表
|
||||
|
||||
Args:
|
||||
count: 需要的配置数量
|
||||
|
||||
Returns:
|
||||
List[Dict]: 配置字典列表
|
||||
"""
|
||||
if not self._configs or count <= 0:
|
||||
return []
|
||||
|
||||
def _to_dict(config: CookieConfig) -> Dict[str, Any]:
|
||||
return {
|
||||
"brand_id": config.brand_id,
|
||||
"aadvid": config.aadvid,
|
||||
"auth_token": config.auth_token,
|
||||
"industry_id": config.industry_id,
|
||||
"brand_name": config.brand_name,
|
||||
}
|
||||
|
||||
if len(self._configs) >= count:
|
||||
sampled = random.sample(self._configs, count)
|
||||
return [_to_dict(c) for c in sampled]
|
||||
|
||||
# 池中配置不足,全部取出后循环复用
|
||||
result = [_to_dict(c) for c in self._configs]
|
||||
shuffled = list(self._configs)
|
||||
random.shuffle(shuffled)
|
||||
idx = 0
|
||||
while len(result) < count:
|
||||
result.append(_to_dict(shuffled[idx % len(shuffled)]))
|
||||
idx += 1
|
||||
return result
|
||||
|
||||
# 兼容旧接口
|
||||
def get_random(self) -> Optional[str]:
|
||||
"""兼容旧接口:随机获取一个 SessionID"""
|
||||
config = self.get_random_config()
|
||||
if config:
|
||||
# 从 auth_token 中提取 sessionid
|
||||
auth_token = config["auth_token"]
|
||||
if "=" in auth_token:
|
||||
return auth_token.split("=", 1)[-1]
|
||||
return auth_token
|
||||
return None
|
||||
|
||||
# 兼容旧代码
|
||||
@property
|
||||
def _brand_configs(self) -> Dict[str, Any]:
|
||||
"""兼容旧接口"""
|
||||
return {c.brand_id: c for c in self._configs}
|
||||
|
||||
|
||||
# 全局单例
|
||||
session_pool = SessionPool()
|
||||
|
||||
|
||||
async def get_random_config(max_retries: int = 3) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
T-027: 随机获取一组配置,必要时刷新池。
|
||||
|
||||
Args:
|
||||
max_retries: 最大重试次数
|
||||
|
||||
Returns:
|
||||
Dict or None: 包含 aadvid 和 auth_token 的字典
|
||||
"""
|
||||
for attempt in range(max_retries):
|
||||
if session_pool.is_empty:
|
||||
success = await session_pool.refresh()
|
||||
if not success:
|
||||
logger.warning(f"Session pool refresh failed, attempt {attempt + 1}")
|
||||
continue
|
||||
|
||||
config = session_pool.get_random_config()
|
||||
if config:
|
||||
return config
|
||||
|
||||
logger.error("Failed to get config after all retries")
|
||||
return None
|
||||
|
||||
|
||||
# 兼容旧接口
|
||||
async def get_session_with_retry(max_retries: int = 3) -> Optional[str]:
|
||||
"""
|
||||
获取SessionID,必要时刷新池 (兼容旧接口)。
|
||||
|
||||
Args:
|
||||
max_retries: 最大重试次数
|
||||
|
||||
Returns:
|
||||
Optional[str]: SessionID,获取失败返回None
|
||||
"""
|
||||
config = await get_random_config(max_retries)
|
||||
if config:
|
||||
auth_token = config["auth_token"]
|
||||
if "=" in auth_token:
|
||||
return auth_token.split("=", 1)[-1]
|
||||
return auth_token
|
||||
return None
|
||||
|
||||
|
||||
async def get_distinct_configs(count: int, max_retries: int = 3) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取 count 个不同的配置,必要时刷新池。
|
||||
|
||||
Args:
|
||||
count: 需要的配置数量
|
||||
max_retries: 最大重试次数
|
||||
|
||||
Returns:
|
||||
List[Dict]: 配置字典列表
|
||||
"""
|
||||
for attempt in range(max_retries):
|
||||
if session_pool.is_empty:
|
||||
success = await session_pool.refresh()
|
||||
if not success:
|
||||
logger.warning(f"Session pool refresh failed, attempt {attempt + 1}")
|
||||
continue
|
||||
|
||||
configs = session_pool.get_distinct_configs(count)
|
||||
if configs:
|
||||
return configs
|
||||
|
||||
logger.error("Failed to get distinct configs after all retries")
|
||||
return []
|
||||
|
||||
|
||||
async def get_config_for_brand(brand_id: str, max_retries: int = 3) -> Optional[Any]:
|
||||
"""
|
||||
兼容旧接口:获取品牌对应的配置。
|
||||
T-027: 实际上现在随机选取,不再按 brand_id 匹配。
|
||||
"""
|
||||
return await get_random_config(max_retries)
|
||||
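A usage sketch showing how a caller might pre-allocate distinct cookie configs for concurrent Yuntu calls (the item IDs are placeholders, and the internal cookie API is assumed to be reachable):

```python
import asyncio

from app.services.session_pool import get_distinct_configs


async def fetch_all(item_ids: list[str]) -> None:
    # One config per item; if the pool holds fewer configs, they are reused cyclically.
    configs = await get_distinct_configs(count=len(item_ids))
    for item_id, config in zip(item_ids, configs):
        print(item_id, "->", config["aadvid"], config["brand_name"])


asyncio.run(fetch_all(["7000000000000000001", "7000000000000000002"]))
```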
@@ -1,594 +0,0 @@
|
||||
"""
|
||||
视频分析服务 (T-024)
|
||||
|
||||
实现视频分析数据获取和成本指标计算。
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.kol_video import KolVideo
|
||||
from app.services.session_pool import (
|
||||
get_distinct_configs,
|
||||
get_random_config,
|
||||
session_pool,
|
||||
)
|
||||
from app.services.yuntu_api import (
|
||||
SessionInvalidError,
|
||||
call_yuntu_api,
|
||||
parse_analysis_response,
|
||||
)
|
||||
from app.services.yuntu_api import (
|
||||
get_video_analysis as fetch_yuntu_analysis,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _needs_api_call(video: KolVideo) -> bool:
|
||||
"""
|
||||
判断是否需要调用 Yuntu API 获取 A3/Cost 数据。
|
||||
|
||||
如果数据库中已有 A3 或 Cost 数据,直接使用数据库数据,不调 API。
|
||||
"""
|
||||
has_a3 = (video.total_new_a3_cnt or 0) > 0
|
||||
has_cost = (video.total_cost or 0) > 0
|
||||
return not (has_a3 or has_cost)
|
||||
|
||||
|
||||
def calculate_cost_metrics(
|
||||
cost: float,
|
||||
natural_play_cnt: int,
|
||||
a3_increase_cnt: int,
|
||||
natural_a3_increase_cnt: int,
|
||||
after_view_search_uv: int,
|
||||
total_play_cnt: int,
|
||||
) -> Dict[str, Optional[float]]:
|
||||
"""
|
||||
计算成本指标。
|
||||
|
||||
Args:
|
||||
cost: 总花费
|
||||
natural_play_cnt: 自然播放数
|
||||
a3_increase_cnt: 新增A3
|
||||
natural_a3_increase_cnt: 自然新增A3
|
||||
after_view_search_uv: 看后搜人数
|
||||
total_play_cnt: 总播放数
|
||||
|
||||
Returns:
|
||||
Dict: 成本指标字典
|
||||
"""
|
||||
metrics = {}
|
||||
|
||||
# CPM = cost / total_play_cnt * 1000
|
||||
if total_play_cnt and total_play_cnt > 0:
|
||||
metrics["cpm"] = round(cost / total_play_cnt * 1000, 2)
|
||||
else:
|
||||
metrics["cpm"] = None
|
||||
|
||||
# 自然CPM = cost / natural_play_cnt * 1000
|
||||
if natural_play_cnt and natural_play_cnt > 0:
|
||||
metrics["natural_cpm"] = round(cost / natural_play_cnt * 1000, 2)
|
||||
else:
|
||||
metrics["natural_cpm"] = None
|
||||
|
||||
# CPA3 = cost / a3_increase_cnt
|
||||
if a3_increase_cnt and a3_increase_cnt > 0:
|
||||
metrics["cpa3"] = round(cost / a3_increase_cnt, 2)
|
||||
else:
|
||||
metrics["cpa3"] = None
|
||||
|
||||
# 自然CPA3 = cost / natural_a3_increase_cnt
|
||||
if natural_a3_increase_cnt and natural_a3_increase_cnt > 0:
|
||||
metrics["natural_cpa3"] = round(cost / natural_a3_increase_cnt, 2)
|
||||
else:
|
||||
metrics["natural_cpa3"] = None
|
||||
|
||||
# CPsearch = cost / after_view_search_uv
|
||||
if after_view_search_uv and after_view_search_uv > 0:
|
||||
metrics["cp_search"] = round(cost / after_view_search_uv, 2)
|
||||
else:
|
||||
metrics["cp_search"] = None
|
||||
|
||||
# 预估自然看后搜人数 = natural_play_cnt / total_play_cnt * after_view_search_uv
|
||||
if total_play_cnt and total_play_cnt > 0 and after_view_search_uv:
|
||||
estimated_natural_search_uv = (
|
||||
natural_play_cnt / total_play_cnt * after_view_search_uv
|
||||
)
|
||||
metrics["estimated_natural_search_uv"] = round(estimated_natural_search_uv, 2)
|
||||
|
||||
# 自然CPsearch = cost / estimated_natural_search_uv
|
||||
if estimated_natural_search_uv > 0:
|
||||
metrics["natural_cp_search"] = round(cost / estimated_natural_search_uv, 2)
|
||||
else:
|
||||
metrics["natural_cp_search"] = None
|
||||
else:
|
||||
metrics["estimated_natural_search_uv"] = None
|
||||
metrics["natural_cp_search"] = None
|
||||
|
||||
return metrics
|
||||
|
||||
|
||||
async def get_video_base_info(
|
||||
session: AsyncSession, item_id: str
|
||||
) -> Optional[KolVideo]:
|
||||
"""
|
||||
从数据库获取视频基础信息。
|
||||
|
||||
Args:
|
||||
session: 数据库会话
|
||||
item_id: 视频ID
|
||||
|
||||
Returns:
|
||||
KolVideo or None
|
||||
"""
|
||||
stmt = select(KolVideo).where(KolVideo.item_id == item_id)
|
||||
result = await session.execute(stmt)
|
||||
return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def get_video_analysis_data(
|
||||
session: AsyncSession, item_id: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
获取视频分析数据(T-024主接口)。
|
||||
|
||||
返回6大类指标(匹配前端 VideoAnalysisData 类型):
|
||||
- base_info: 基础信息
|
||||
- reach_metrics: 触达指标
|
||||
- a3_metrics: A3指标
|
||||
- search_metrics: 搜索指标
|
||||
- cost_metrics: 费用指标
|
||||
- calculated_metrics: 成本指标(实时计算)
|
||||
|
||||
Args:
|
||||
session: 数据库会话
|
||||
item_id: 视频ID
|
||||
|
||||
Returns:
|
||||
Dict: 完整的视频分析数据
|
||||
|
||||
Raises:
|
||||
ValueError: 视频不存在时抛出
|
||||
"""
|
||||
from app.services.brand_api import get_brand_names
|
||||
|
||||
# 1. 从数据库获取基础信息
|
||||
video = await get_video_base_info(session, item_id)
|
||||
if video is None:
|
||||
raise ValueError(f"Video not found: {item_id}")
|
||||
|
||||
# 2. 获取品牌名称
|
||||
brand_name = ""
|
||||
if video.brand_id:
|
||||
brand_map = await get_brand_names([video.brand_id])
|
||||
brand_name = brand_map.get(video.brand_id, video.brand_id)
|
||||
|
||||
# 3. 获取 A3 数据和 cost(缓存优先策略)
|
||||
a3_increase_cnt = 0
|
||||
ad_a3_increase_cnt = 0
|
||||
natural_a3_increase_cnt = 0
|
||||
api_cost = 0.0
|
||||
ad_cost = 0.0
|
||||
|
||||
if not _needs_api_call(video):
|
||||
# 数据库已有数据,直接使用
|
||||
logger.info(f"Using DB data for {item_id} (A3/Cost already cached)")
|
||||
a3_increase_cnt = video.total_new_a3_cnt or 0
|
||||
ad_a3_increase_cnt = video.heated_new_a3_cnt or 0
|
||||
natural_a3_increase_cnt = video.natural_new_a3_cnt or 0
|
||||
api_cost = video.total_cost or 0.0
|
||||
ad_cost = video.heated_cost or 0.0
|
||||
else:
|
||||
# 需要调用 API 获取数据
|
||||
try:
|
||||
publish_time = video.publish_time or datetime.now()
|
||||
industry_id = video.industry_id or ""
|
||||
|
||||
api_response = await fetch_yuntu_analysis(
|
||||
item_id=item_id,
|
||||
publish_time=publish_time,
|
||||
industry_id=industry_id,
|
||||
)
|
||||
analysis_data = parse_analysis_response(api_response)
|
||||
a3_increase_cnt = analysis_data.get("a3_increase_cnt", 0)
|
||||
ad_a3_increase_cnt = analysis_data.get("ad_a3_increase_cnt", 0)
|
||||
natural_a3_increase_cnt = analysis_data.get("natural_a3_increase_cnt", 0)
|
||||
api_cost = analysis_data.get("cost", 0)
|
||||
ad_cost = analysis_data.get("ad_cost", 0)
|
||||
|
||||
# 写回数据库
|
||||
await update_video_a3_metrics(
|
||||
session=session,
|
||||
item_id=item_id,
|
||||
total_new_a3_cnt=int(a3_increase_cnt),
|
||||
heated_new_a3_cnt=int(ad_a3_increase_cnt),
|
||||
natural_new_a3_cnt=int(natural_a3_increase_cnt),
|
||||
total_cost=float(api_cost),
|
||||
heated_cost=float(ad_cost),
|
||||
)
|
||||
logger.info(f"API data fetched and saved to DB for {item_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"API failed for {item_id}: {e}, using DB data")
|
||||
a3_increase_cnt = video.total_new_a3_cnt or 0
|
||||
ad_a3_increase_cnt = video.heated_new_a3_cnt or 0
|
||||
natural_a3_increase_cnt = video.natural_new_a3_cnt or 0
|
||||
api_cost = video.total_cost or 0.0
|
||||
ad_cost = video.heated_cost or 0.0
|
||||
|
||||
# 4. 数据库字段
|
||||
estimated_video_cost = video.estimated_video_cost or 0.0
|
||||
natural_play_cnt = video.natural_play_cnt or 0
|
||||
heated_play_cnt = video.heated_play_cnt or 0
|
||||
total_play_cnt = video.total_play_cnt or 0
|
||||
after_view_search_uv = video.after_view_search_uv or 0
|
||||
|
||||
# 5. 计算成本指标
|
||||
heated_cost = ad_cost
|
||||
|
||||
# 预估自然看后搜人数
|
||||
estimated_natural_search_uv = None
|
||||
if total_play_cnt > 0 and after_view_search_uv > 0:
|
||||
estimated_natural_search_uv = round((natural_play_cnt / total_play_cnt) * after_view_search_uv, 2)
|
||||
|
||||
# 预估CPM = (total_cost / total_play_cnt) * 1000
|
||||
estimated_cpm = round((api_cost / total_play_cnt) * 1000, 2) if total_play_cnt > 0 else None
|
||||
|
||||
# 预估自然CPM = (estimated_video_cost / natural_play_cnt) * 1000
|
||||
estimated_natural_cpm = round((estimated_video_cost / natural_play_cnt) * 1000, 2) if natural_play_cnt > 0 else None
|
||||
|
||||
# 预估CPA3 = total_cost / a3_increase_cnt
|
||||
estimated_cp_a3 = round(api_cost / a3_increase_cnt, 2) if a3_increase_cnt > 0 else None
|
||||
|
||||
# 预估自然CPA3 = estimated_video_cost / natural_a3_increase_cnt
|
||||
estimated_natural_cp_a3 = round(estimated_video_cost / natural_a3_increase_cnt, 2) if natural_a3_increase_cnt > 0 else None
|
||||
|
||||
# 预估CPsearch = total_cost / after_view_search_uv
|
||||
estimated_cp_search = round(api_cost / after_view_search_uv, 2) if after_view_search_uv > 0 else None
|
||||
|
||||
# 自然CPsearch = estimated_video_cost / estimated_natural_search_uv
|
||||
estimated_natural_cp_search = round(estimated_video_cost / estimated_natural_search_uv, 2) if estimated_natural_search_uv and estimated_natural_search_uv > 0 else None
|
||||
|
||||
# 6. 组装返回数据(匹配前端 VideoAnalysisData 类型)
|
||||
return {
|
||||
"base_info": {
|
||||
"star_nickname": video.star_nickname or "",
|
||||
"star_unique_id": video.star_unique_id or "",
|
||||
"vid": video.item_id,
|
||||
"title": video.title or "",
|
||||
"create_date": video.publish_time.isoformat() if video.publish_time else None,
|
||||
"hot_type": video.viral_type or "",
|
||||
"industry_id": video.industry_id or "",
|
||||
"brand_id": video.brand_id or "",
|
||||
"brand_name": brand_name,
|
||||
"video_url": video.video_url or "",
|
||||
},
|
||||
"reach_metrics": {
|
||||
"natural_play_cnt": natural_play_cnt,
|
||||
"heated_play_cnt": heated_play_cnt,
|
||||
"total_play_cnt": total_play_cnt,
|
||||
"total_interaction_cnt": video.total_interact or 0,
|
||||
"digg_cnt": video.like_cnt or 0,
|
||||
"share_cnt": video.share_cnt or 0,
|
||||
"comment_cnt": video.comment_cnt or 0,
|
||||
},
|
||||
"a3_metrics": {
|
||||
"total_new_a3_cnt": a3_increase_cnt,
|
||||
"heated_new_a3_cnt": ad_a3_increase_cnt,
|
||||
"natural_new_a3_cnt": natural_a3_increase_cnt,
|
||||
},
|
||||
"search_metrics": {
|
||||
"back_search_uv": video.return_search_cnt or 0,
|
||||
"back_search_cnt": video.return_search_cnt or 0,
|
||||
"after_view_search_uv": after_view_search_uv,
|
||||
"after_view_search_cnt": after_view_search_uv,
|
||||
"estimated_natural_search_uv": estimated_natural_search_uv,
|
||||
},
|
||||
"cost_metrics": {
|
||||
"total_cost": api_cost,
|
||||
"heated_cost": heated_cost,
|
||||
"estimated_video_cost": estimated_video_cost,
|
||||
},
|
||||
"calculated_metrics": {
|
||||
"estimated_cpm": estimated_cpm,
|
||||
"estimated_natural_cpm": estimated_natural_cpm,
|
||||
"estimated_cp_a3": estimated_cp_a3,
|
||||
"estimated_natural_cp_a3": estimated_natural_cp_a3,
|
||||
"estimated_cp_search": estimated_cp_search,
|
||||
"estimated_natural_cp_search": estimated_natural_cp_search,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def update_video_a3_metrics(
|
||||
session: AsyncSession,
|
||||
item_id: str,
|
||||
total_new_a3_cnt: int,
|
||||
heated_new_a3_cnt: int,
|
||||
natural_new_a3_cnt: int,
|
||||
total_cost: float,
|
||||
heated_cost: float = 0.0,
|
||||
) -> bool:
|
||||
"""
|
||||
更新数据库中的A3指标和费用数据 (T-025)。
|
||||
|
||||
Args:
|
||||
session: 数据库会话
|
||||
item_id: 视频ID
|
||||
total_new_a3_cnt: 总新增A3
|
||||
heated_new_a3_cnt: 加热新增A3
|
||||
natural_new_a3_cnt: 自然新增A3
|
||||
total_cost: 预估总费用
|
||||
heated_cost: 预估加热费用
|
||||
|
||||
Returns:
|
||||
bool: 更新是否成功
|
||||
"""
|
||||
try:
|
||||
stmt = (
|
||||
update(KolVideo)
|
||||
.where(KolVideo.item_id == item_id)
|
||||
.values(
|
||||
total_new_a3_cnt=total_new_a3_cnt,
|
||||
heated_new_a3_cnt=heated_new_a3_cnt,
|
||||
natural_new_a3_cnt=natural_new_a3_cnt,
|
||||
total_cost=total_cost,
|
||||
heated_cost=heated_cost,
|
||||
)
|
||||
)
|
||||
result = await session.execute(stmt)
|
||||
await session.commit()
|
||||
|
||||
if result.rowcount > 0:
|
||||
logger.info(f"Updated A3 metrics for video {item_id}")
|
||||
return True
|
||||
else:
|
||||
logger.warning(f"No video found to update: {item_id}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update A3 metrics for {item_id}: {e}")
|
||||
await session.rollback()
|
||||
return False
|
||||
|
||||
|
||||
async def search_videos_by_star_id(
    session: AsyncSession, star_id: str
) -> list[KolVideo]:
    """Search the video list by exact Xingtu (star) ID match."""
    stmt = select(KolVideo).where(KolVideo.star_id == star_id)
    result = await session.execute(stmt)
    return list(result.scalars().all())


async def search_videos_by_unique_id(
    session: AsyncSession, unique_id: str
) -> list[KolVideo]:
    """Search the video list by exact KOL unique_id match."""
    stmt = select(KolVideo).where(KolVideo.star_unique_id == unique_id)
    result = await session.execute(stmt)
    return list(result.scalars().all())


async def search_videos_by_nickname(
    session: AsyncSession, nickname: str
) -> list[KolVideo]:
    """Search the video list by fuzzy KOL nickname match."""
    stmt = select(KolVideo).where(KolVideo.star_nickname.ilike(f"%{nickname}%"))
    result = await session.execute(stmt)
    return list(result.scalars().all())


def _build_video_list_item(
    video: KolVideo,
    a3_increase_cnt: int,
    ad_a3_increase_cnt: int,
    natural_a3_increase_cnt: int,
    api_cost: float,
    brand_name: str,
) -> Dict[str, Any]:
    """Build the result dict for a single video-list item."""
    estimated_video_cost = video.estimated_video_cost or 0.0
    natural_play_cnt = video.natural_play_cnt or 0
    total_play_cnt = video.total_play_cnt or 0
    after_view_search_uv = video.after_view_search_uv or 0

    estimated_natural_search_uv = None
    if total_play_cnt > 0 and after_view_search_uv > 0:
        estimated_natural_search_uv = (natural_play_cnt / total_play_cnt) * after_view_search_uv

    estimated_natural_cpm = round((estimated_video_cost / natural_play_cnt) * 1000, 2) if natural_play_cnt > 0 else None
    estimated_cp_a3 = round(api_cost / a3_increase_cnt, 2) if a3_increase_cnt > 0 else None
    estimated_natural_cp_a3 = round(estimated_video_cost / natural_a3_increase_cnt, 2) if natural_a3_increase_cnt > 0 else None
    estimated_cp_search = round(api_cost / after_view_search_uv, 2) if after_view_search_uv > 0 else None
    estimated_natural_cp_search = round(estimated_video_cost / estimated_natural_search_uv, 2) if estimated_natural_search_uv and estimated_natural_search_uv > 0 else None

    return {
        "item_id": video.item_id,
        "star_nickname": video.star_nickname or "",
        "title": video.title or "",
        "video_url": video.video_url or "",
        "create_date": video.publish_time.isoformat() if video.publish_time else None,
        "hot_type": video.viral_type or "",
        "industry_id": video.industry_id or "",
        "brand_id": video.brand_id or "",
        "brand_name": brand_name,
        "total_new_a3_cnt": a3_increase_cnt,
        "heated_new_a3_cnt": ad_a3_increase_cnt,
        "natural_new_a3_cnt": natural_a3_increase_cnt,
        "estimated_natural_cpm": estimated_natural_cpm,
        "estimated_cp_a3": estimated_cp_a3,
        "estimated_natural_cp_a3": estimated_natural_cp_a3,
        "estimated_cp_search": estimated_cp_search,
        "estimated_natural_cp_search": estimated_natural_cp_search,
    }


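# Worked example for _build_video_list_item above (fabricated numbers,
# for illustration only -- not part of the original file):
#   estimated_video_cost = 10000, natural_play_cnt = 100000,
#   total_play_cnt = 150000, after_view_search_uv = 500,
#   api_cost = 12000, a3_increase_cnt = 400, natural_a3_increase_cnt = 300
#
#   estimated_natural_search_uv = 100000 / 150000 * 500   ~= 333.33
#   estimated_natural_cpm       = 10000 / 100000 * 1000    = 100.0
#   estimated_cp_a3             = 12000 / 400              = 30.0
#   estimated_natural_cp_a3     = 10000 / 300             ~= 33.33
#   estimated_cp_search         = 12000 / 500              = 24.0
#   estimated_natural_cp_search = 10000 / 333.33          ~= 30.0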
async def get_video_list_with_a3(
    session: AsyncSession, videos: list[KolVideo]
) -> list[Dict[str, Any]]:
    """
    Build the summary data for a list of videos.

    Cache-first strategy:
    - If the database already has A3/cost data, use it directly.
    - Otherwise call the Yuntu API concurrently (pre-assigning distinct
      cookies) and write the results back to the database.
    """
    from app.services.brand_api import get_brand_names

    # Fetch brand names in one batch
    brand_ids = [video.brand_id for video in videos if video.brand_id]
    brand_map = await get_brand_names(brand_ids) if brand_ids else {}

    # Split into: already cached vs. needs an API call
    cached_videos: list[tuple[int, KolVideo]] = []  # (original index, video)
    api_videos: list[tuple[int, KolVideo]] = []  # (original index, video)

    for idx, video in enumerate(videos):
        if _needs_api_call(video):
            api_videos.append((idx, video))
        else:
            cached_videos.append((idx, video))

    logger.info(
        f"Video list: {len(cached_videos)} cached, {len(api_videos)} need API"
    )

    # Result array, filled in by original index
    results: list[Optional[Dict[str, Any]]] = [None] * len(videos)

    # Group A: use the database data directly
    for idx, video in cached_videos:
        brand_name = brand_map.get(video.brand_id, video.brand_id or "") if video.brand_id else ""
        results[idx] = _build_video_list_item(
            video=video,
            a3_increase_cnt=video.total_new_a3_cnt or 0,
            ad_a3_increase_cnt=video.heated_new_a3_cnt or 0,
            natural_a3_increase_cnt=video.natural_new_a3_cnt or 0,
            api_cost=video.total_cost or 0.0,
            brand_name=brand_name,
        )

    # Group B: call the API concurrently (pre-assign distinct cookies)
    if api_videos:
        configs = await get_distinct_configs(len(api_videos))
        semaphore = asyncio.Semaphore(5)
        # Collect data to write back to the DB later (avoid concurrent session use)
        pending_updates: list[Dict[str, Any]] = []

        async def _fetch_single(
            idx: int, video: KolVideo, config: Dict[str, Any]
        ) -> None:
            a3_increase_cnt = 0
            ad_a3_increase_cnt = 0
            natural_a3_increase_cnt = 0
            api_cost = 0.0
            ad_cost_val = 0.0
            api_success = False

            async with semaphore:
                try:
                    publish_time = video.publish_time or datetime.now()
                    industry_id = video.industry_id or ""

                    api_response = await call_yuntu_api(
                        item_id=video.item_id,
                        publish_time=publish_time,
                        industry_id=industry_id,
                        aadvid=config["aadvid"],
                        auth_token=config["auth_token"],
                    )
                    api_data = parse_analysis_response(api_response)
                    a3_increase_cnt = api_data.get("a3_increase_cnt", 0)
                    ad_a3_increase_cnt = api_data.get("ad_a3_increase_cnt", 0)
                    natural_a3_increase_cnt = api_data.get("natural_a3_increase_cnt", 0)
                    api_cost = api_data.get("cost", 0)
                    ad_cost_val = api_data.get("ad_cost", 0)
                    api_success = True

                except SessionInvalidError:
                    # Session expired: remove it from the pool, then retry once with a fresh random config
                    session_pool.remove_by_auth_token(config["auth_token"])
                    logger.warning(f"Session invalid for {video.item_id}, retrying")
                    retry_config = await get_random_config()
                    if retry_config:
                        try:
                            publish_time = video.publish_time or datetime.now()
                            industry_id = video.industry_id or ""
                            api_response = await call_yuntu_api(
                                item_id=video.item_id,
                                publish_time=publish_time,
                                industry_id=industry_id,
                                aadvid=retry_config["aadvid"],
                                auth_token=retry_config["auth_token"],
                            )
                            api_data = parse_analysis_response(api_response)
                            a3_increase_cnt = api_data.get("a3_increase_cnt", 0)
                            ad_a3_increase_cnt = api_data.get("ad_a3_increase_cnt", 0)
                            natural_a3_increase_cnt = api_data.get("natural_a3_increase_cnt", 0)
                            api_cost = api_data.get("cost", 0)
                            ad_cost_val = api_data.get("ad_cost", 0)
                            api_success = True
                        except Exception as e2:
                            logger.warning(f"Retry failed for {video.item_id}: {e2}")
                            a3_increase_cnt = video.total_new_a3_cnt or 0
                            ad_a3_increase_cnt = video.heated_new_a3_cnt or 0
                            natural_a3_increase_cnt = video.natural_new_a3_cnt or 0
                            api_cost = video.total_cost or 0.0

                except Exception as e:
                    logger.warning(f"API failed for {video.item_id}: {e}")
                    a3_increase_cnt = video.total_new_a3_cnt or 0
                    ad_a3_increase_cnt = video.heated_new_a3_cnt or 0
                    natural_a3_increase_cnt = video.natural_new_a3_cnt or 0
                    api_cost = video.total_cost or 0.0

            # Collect the pending DB write (never touch the session from concurrent tasks)
            if api_success:
                pending_updates.append({
                    "item_id": video.item_id,
                    "total_new_a3_cnt": int(a3_increase_cnt),
                    "heated_new_a3_cnt": int(ad_a3_increase_cnt),
                    "natural_new_a3_cnt": int(natural_a3_increase_cnt),
                    "total_cost": float(api_cost),
                    "heated_cost": float(ad_cost_val),
                })

            brand_name = brand_map.get(video.brand_id, video.brand_id or "") if video.brand_id else ""
            results[idx] = _build_video_list_item(
                video=video,
                a3_increase_cnt=a3_increase_cnt,
                ad_a3_increase_cnt=ad_a3_increase_cnt,
                natural_a3_increase_cnt=natural_a3_increase_cnt,
                api_cost=api_cost,
                brand_name=brand_name,
            )

        # Assign each video its own config and run the fetches concurrently
        tasks = []
        for i, (idx, video) in enumerate(api_videos):
            config = configs[i] if i < len(configs) else configs[i % len(configs)] if configs else {}
            tasks.append(_fetch_single(idx, video, config))

        await asyncio.gather(*tasks)

        # Write back to the DB sequentially (concurrent session use would corrupt its state)
        for upd in pending_updates:
            await update_video_a3_metrics(
                session=session,
                item_id=upd["item_id"],
                total_new_a3_cnt=upd["total_new_a3_cnt"],
                heated_new_a3_cnt=upd["heated_new_a3_cnt"],
                natural_new_a3_cnt=upd["natural_new_a3_cnt"],
                total_cost=upd["total_cost"],
                heated_cost=upd["heated_cost"],
            )

    # Filter out None entries (should not happen; defensive programming)
    return [r for r in results if r is not None]
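For orientation, a minimal sketch of how a route handler could drive the cache-first helper above. The route path, the `get_db` dependency wiring, and the service module path are illustrative assumptions, not taken from this diff.

```python
# Illustrative sketch only: route path and import paths are assumed.
# It shows the intended call pattern: load the rows, then let
# get_video_list_with_a3 decide per video whether cached A3/cost data is
# enough or a concurrent Yuntu API call (with DB write-back) is needed.
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.database import get_db
from app.services.query_service import (  # module path assumed
    get_video_list_with_a3,
    search_videos_by_star_id,
)

router = APIRouter()


@router.get("/videos/{star_id}/list")
async def list_videos(star_id: str, session: AsyncSession = Depends(get_db)):
    videos = await search_videos_by_star_id(session, star_id)
    items = await get_video_list_with_a3(session, videos)
    return {"success": True, "total": len(items), "data": items}
```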
@ -1,267 +0,0 @@
"""
Ocean Engine Yuntu API wrapper (T-023, T-027)

Wraps calls to the GetContentMaterialAnalysisInfo endpoint to fetch video analysis data.

T-027 fixes:
1. Date format: YYYYMMDD (not YYYY-MM-DD)
2. Cookie header: use the full auth_token value directly
3. industry_id: string format, e.g. ["12"]
4. A3 metrics: the API returns strings; convert them to int
"""

import logging
from datetime import datetime, timedelta
from typing import Dict, Optional, Any, Union

import httpx

from app.config import settings
from app.services.session_pool import (
    session_pool,
    get_random_config,
)

logger = logging.getLogger(__name__)

# Base URL of the Ocean Engine Yuntu API
YUNTU_BASE_URL = "https://yuntu.oceanengine.com"

# Trigger point ID list (fixed values)
TRIGGER_POINT_IDS = ["610000", "610300", "610301"]


class YuntuAPIError(Exception):
    """Ocean Engine Yuntu API error."""

    def __init__(self, message: str, status_code: int = 0, response_data: Any = None):
        self.message = message
        self.status_code = status_code
        self.response_data = response_data
        super().__init__(self.message)


class SessionInvalidError(YuntuAPIError):
    """Raised when the session ID is no longer valid."""

    pass


def _safe_int(value: Any, default: int = 0) -> int:
    """Safely convert a value to int, handling numbers encoded as strings."""
    if value is None:
        return default
    if isinstance(value, int):
        return value
    if isinstance(value, str):
        try:
            return int(value)
        except ValueError:
            return default
    return default


async def call_yuntu_api(
|
||||
item_id: str,
|
||||
publish_time: Union[datetime, None],
|
||||
industry_id: str,
|
||||
aadvid: str,
|
||||
auth_token: str,
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
调用巨量云图GetContentMaterialAnalysisInfo接口。
|
||||
|
||||
Args:
|
||||
item_id: 视频ID
|
||||
publish_time: 发布时间
|
||||
industry_id: 行业ID(字符串格式)
|
||||
aadvid: 广告主ID(URL参数)
|
||||
auth_token: Cookie完整值(如 "sessionid=xxx")
|
||||
|
||||
Returns:
|
||||
Dict: API响应数据
|
||||
|
||||
Raises:
|
||||
SessionInvalidError: SessionID失效时抛出
|
||||
YuntuAPIError: API调用失败时抛出
|
||||
"""
|
||||
# 处理 publish_time
|
||||
if publish_time is None:
|
||||
publish_time = datetime.now()
|
||||
|
||||
# T-027: 日期格式必须为 YYYYMMDD
|
||||
start_date = publish_time.strftime("%Y%m%d")
|
||||
end_date = (publish_time + timedelta(days=30)).strftime("%Y%m%d")
|
||||
|
||||
# T-027: industry_id_list 为字符串数组
|
||||
industry_id_list = [str(industry_id)] if industry_id else []
|
||||
|
||||
request_data = {
|
||||
"is_my_video": "0",
|
||||
"object_id": item_id,
|
||||
"object_type": 2,
|
||||
"start_date": start_date,
|
||||
"end_date": end_date,
|
||||
"assist_type": 3,
|
||||
"assist_video_type": 3,
|
||||
"industry_id_list": industry_id_list,
|
||||
"trigger_point_id_list": TRIGGER_POINT_IDS,
|
||||
}
|
||||
|
||||
# T-027: Cookie 直接使用 auth_token 完整值
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"Cookie": auth_token,
|
||||
}
|
||||
|
||||
# URL 带 aadvid 参数
|
||||
url = f"{YUNTU_BASE_URL}/yuntu_common/api/content/trigger_analysis/GetContentMaterialAnalysisInfo?aadvid={aadvid}"
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=settings.YUNTU_API_TIMEOUT) as client:
|
||||
response = await client.post(
|
||||
url,
|
||||
json=request_data,
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
# 检查SessionID是否失效
|
||||
if response.status_code in (401, 403):
|
||||
logger.warning(f"Session invalid: {auth_token[:20]}...")
|
||||
raise SessionInvalidError(
|
||||
f"Session invalid: {response.status_code}",
|
||||
status_code=response.status_code,
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise YuntuAPIError(
|
||||
f"API returned {response.status_code}",
|
||||
status_code=response.status_code,
|
||||
response_data=response.text,
|
||||
)
|
||||
|
||||
data = response.json()
|
||||
|
||||
# 检查业务错误
|
||||
status = data.get("status", data.get("code", 0))
|
||||
if status != 0:
|
||||
error_msg = data.get("msg", data.get("message", "Unknown error"))
|
||||
raise YuntuAPIError(
|
||||
f"API business error: {error_msg}",
|
||||
status_code=response.status_code,
|
||||
response_data=data,
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
except httpx.TimeoutException:
|
||||
logger.error(f"Yuntu API timeout for item_id: {item_id}")
|
||||
raise YuntuAPIError("API request timeout")
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"Yuntu API request error: {e}")
|
||||
raise YuntuAPIError(f"API request error: {e}")
|
||||
|
||||
|
||||
async def get_video_analysis(
|
||||
item_id: str,
|
||||
publish_time: datetime,
|
||||
industry_id: str,
|
||||
max_retries: int = 3,
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
获取视频分析数据(随机选取配置)。
|
||||
|
||||
T-027: 改为随机选取任意一组 aadvid/auth_token,不按 brand_id 匹配。
|
||||
|
||||
Args:
|
||||
item_id: 视频ID
|
||||
publish_time: 发布时间
|
||||
industry_id: 行业ID(来自数据库中的视频)
|
||||
max_retries: 最大重试次数
|
||||
|
||||
Returns:
|
||||
Dict: 视频分析数据
|
||||
|
||||
Raises:
|
||||
YuntuAPIError: API调用失败时抛出
|
||||
"""
|
||||
last_error = None
|
||||
|
||||
for attempt in range(max_retries):
|
||||
# T-027: 随机选取任意一组配置
|
||||
config = await get_random_config()
|
||||
if config is None:
|
||||
last_error = YuntuAPIError("No config available in session pool")
|
||||
logger.warning(f"No config available, attempt {attempt + 1}/{max_retries}")
|
||||
continue
|
||||
|
||||
logger.info(
|
||||
f"Using random config: aadvid={config['aadvid']}, attempt {attempt + 1}"
|
||||
)
|
||||
|
||||
try:
|
||||
result = await call_yuntu_api(
|
||||
item_id=item_id,
|
||||
publish_time=publish_time,
|
||||
industry_id=industry_id, # T-027: 使用数据库中视频的 industry_id
|
||||
aadvid=config["aadvid"],
|
||||
auth_token=config["auth_token"],
|
||||
)
|
||||
return result
|
||||
|
||||
except SessionInvalidError:
|
||||
# SessionID失效,从池中移除
|
||||
session_pool.remove_by_auth_token(config["auth_token"])
|
||||
logger.info(
|
||||
f"Session invalid, attempt {attempt + 1}/{max_retries}"
|
||||
)
|
||||
last_error = SessionInvalidError("Session invalid after retries")
|
||||
continue
|
||||
|
||||
except YuntuAPIError as e:
|
||||
last_error = e
|
||||
logger.error(f"Yuntu API error on attempt {attempt + 1}: {e.message}")
|
||||
# 非 session 错误不重试
|
||||
break
|
||||
|
||||
raise last_error or YuntuAPIError("Unknown error after retries")
|
||||
|
||||
|
||||
def parse_analysis_response(data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
解析巨量云图API响应,提取关键指标。
|
||||
|
||||
T-027: A3 指标在 API 响应中是字符串类型,需要转为整数。
|
||||
|
||||
Args:
|
||||
data: API原始响应数据
|
||||
|
||||
Returns:
|
||||
Dict: 结构化的分析数据
|
||||
"""
|
||||
result_data = data.get("data", {}) or {}
|
||||
|
||||
return {
|
||||
# 触达指标
|
||||
"total_show_cnt": _safe_int(result_data.get("total_show_cnt")),
|
||||
"natural_show_cnt": _safe_int(result_data.get("natural_show_cnt")),
|
||||
"ad_show_cnt": _safe_int(result_data.get("ad_show_cnt")),
|
||||
"total_play_cnt": _safe_int(result_data.get("total_play_cnt")),
|
||||
"natural_play_cnt": _safe_int(result_data.get("natural_play_cnt")),
|
||||
"ad_play_cnt": _safe_int(result_data.get("ad_play_cnt")),
|
||||
"effective_play_cnt": _safe_int(result_data.get("effective_play_cnt")),
|
||||
# A3指标 - T-027: 转为整数
|
||||
"a3_increase_cnt": _safe_int(result_data.get("a3_increase_cnt")),
|
||||
"ad_a3_increase_cnt": _safe_int(result_data.get("ad_a3_increase_cnt")),
|
||||
"natural_a3_increase_cnt": _safe_int(result_data.get("natural_a3_increase_cnt")),
|
||||
# 搜索指标
|
||||
"after_view_search_uv": _safe_int(result_data.get("after_view_search_uv")),
|
||||
"after_view_search_pv": _safe_int(result_data.get("after_view_search_pv")),
|
||||
"brand_search_uv": _safe_int(result_data.get("brand_search_uv")),
|
||||
"product_search_uv": _safe_int(result_data.get("product_search_uv")),
|
||||
"return_search_cnt": _safe_int(result_data.get("return_search_cnt")),
|
||||
# 费用指标
|
||||
"cost": _safe_int(result_data.get("cost")),
|
||||
"natural_cost": _safe_int(result_data.get("natural_cost")),
|
||||
"ad_cost": _safe_int(result_data.get("ad_cost")),
|
||||
}
|
||||
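To make the T-027 string handling concrete, here is a small self-contained example of feeding `parse_analysis_response` a payload in which the counters arrive as strings. The payload values are fabricated and the import path is an assumption.

```python
# Fabricated payload: field names mirror parse_analysis_response above,
# the values are made up for illustration.
from app.services.yuntu_api import parse_analysis_response  # module path assumed

sample = {
    "data": {
        "total_play_cnt": "150000",
        "natural_play_cnt": "100000",
        "a3_increase_cnt": "420",
        "ad_a3_increase_cnt": "120",
        "natural_a3_increase_cnt": "300",
        "after_view_search_uv": "500",
        "cost": "10000",
    }
}

parsed = parse_analysis_response(sample)
assert parsed["a3_increase_cnt"] == 420        # str -> int via _safe_int
assert parsed["natural_play_cnt"] == 100000
# Downstream arithmetic then works directly, e.g. cost per new A3 member:
print(round(parsed["cost"] / parsed["a3_increase_cnt"], 2))  # 23.81
```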
@ -1,72 +0,0 @@
|
||||
[project]
|
||||
name = "kol-insight-backend"
|
||||
version = "0.1.0"
|
||||
description = "KOL Insight - KOL 数据查询与分析工具后端"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
# Web Framework
|
||||
"fastapi>=0.104.0",
|
||||
"uvicorn[standard]>=0.24.0",
|
||||
# Database
|
||||
"sqlalchemy>=2.0.0",
|
||||
"asyncpg>=0.29.0",
|
||||
"alembic>=1.12.0",
|
||||
"greenlet>=3.0.0",
|
||||
# HTTP Client
|
||||
"httpx[socks]>=0.25.0",
|
||||
# Data Validation
|
||||
"pydantic>=2.0.0",
|
||||
"pydantic-settings>=2.0.0",
|
||||
# Excel Export
|
||||
"openpyxl>=3.1.0",
|
||||
# Environment
|
||||
"python-dotenv>=1.0.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pytest>=7.4.0",
|
||||
"pytest-asyncio>=0.21.0",
|
||||
"pytest-cov>=4.1.0",
|
||||
"basedpyright>=1.20.0",
|
||||
"ruff>=0.8.0",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
asyncio_mode = "auto"
|
||||
asyncio_default_fixture_loop_scope = "function"
|
||||
addopts = "-v --cov=app --cov-report=html --cov-report=term-missing"
|
||||
|
||||
[tool.basedpyright]
|
||||
typeCheckingMode = "basic"
|
||||
pythonVersion = "3.11"
|
||||
include = ["app"]
|
||||
exclude = [".venv", "**/__pycache__", "alembic", "tests"]
|
||||
extraPaths = ["app"]
|
||||
reportMissingTypeStubs = false
|
||||
reportUnknownMemberType = false
|
||||
reportUnknownArgumentType = false
|
||||
reportGeneralTypeIssues = false
|
||||
reportOptionalMemberAccess = "warning"
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 150
|
||||
target-version = "py311"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"E", # pycodestyle 错误
|
||||
"F", # Pyflakes 错误(未定义变量、未使用导入等)
|
||||
"B", # bugbear 安全警告
|
||||
"I", # import 排序
|
||||
]
|
||||
ignore = ["E501"]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"!app/**" = ["E", "F", "B", "I"]
|
||||
|
||||
[tool.ruff.format]
|
||||
quote-style = "double"
|
||||
indent-style = "space"
|
||||
line-ending = "auto"
|
||||
5
backend/pytest.ini
Normal file
5
backend/pytest.ini
Normal file
@ -0,0 +1,5 @@
[pytest]
testpaths = tests
asyncio_mode = auto
asyncio_default_fixture_loop_scope = function
addopts = -v --cov=app --cov-report=html --cov-report=term-missing
27
backend/requirements.txt
Normal file
27
backend/requirements.txt
Normal file
@ -0,0 +1,27 @@
# Web Framework
fastapi>=0.104.0
uvicorn[standard]>=0.24.0

# Database
sqlalchemy>=2.0.0
asyncpg>=0.29.0
alembic>=1.12.0

# HTTP Client
httpx>=0.25.0

# Data Validation
pydantic>=2.0.0
pydantic-settings>=2.0.0

# Excel Export
openpyxl>=3.1.0

# Testing
pytest>=7.4.0
pytest-asyncio>=0.21.0
pytest-cov>=4.1.0
httpx>=0.25.0

# Development
python-dotenv>=1.0.0
@ -1,9 +1,8 @@
|
||||
import pytest
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
||||
|
||||
from app.database import Base, get_db
|
||||
from app.database import Base
|
||||
from app.models import KolVideo
|
||||
from app.main import app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -48,29 +47,12 @@ async def test_engine():
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def async_session_factory(test_engine):
|
||||
"""Create async session factory."""
|
||||
return async_sessionmaker(
|
||||
async def test_session(test_engine):
|
||||
"""Create a test database session."""
|
||||
async_session = async_sessionmaker(
|
||||
test_engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def test_session(async_session_factory):
|
||||
"""Create a test database session."""
|
||||
async with async_session_factory() as session:
|
||||
async with async_session() as session:
|
||||
yield session
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def override_get_db(async_session_factory):
|
||||
"""Override get_db dependency for testing."""
|
||||
async def _get_db():
|
||||
async with async_session_factory() as session:
|
||||
yield session
|
||||
|
||||
app.dependency_overrides[get_db] = _get_db
|
||||
yield
|
||||
app.dependency_overrides.clear()
|
||||
|
||||
@ -1,287 +0,0 @@
|
||||
import pytest
|
||||
import asyncio
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
import httpx
|
||||
|
||||
from app.services.brand_api import get_brand_names, fetch_brand_name
|
||||
|
||||
|
||||
class TestBrandAPI:
|
||||
"""Tests for Brand API integration."""
|
||||
|
||||
async def test_get_brand_names_success(self):
|
||||
"""Test successful brand name fetching."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
mock_fetch.side_effect = [
|
||||
("brand_001", "品牌A"),
|
||||
("brand_002", "品牌B"),
|
||||
]
|
||||
|
||||
result = await get_brand_names(["brand_001", "brand_002"])
|
||||
|
||||
assert result["brand_001"] == "品牌A"
|
||||
assert result["brand_002"] == "品牌B"
|
||||
|
||||
async def test_get_brand_names_empty_list(self):
|
||||
"""Test with empty brand ID list."""
|
||||
result = await get_brand_names([])
|
||||
assert result == {}
|
||||
|
||||
async def test_get_brand_names_with_none_values(self):
|
||||
"""Test filtering out None values."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
mock_fetch.return_value = ("brand_001", "品牌A")
|
||||
|
||||
result = await get_brand_names(["brand_001", None, ""])
|
||||
|
||||
assert "brand_001" in result
|
||||
assert len(result) == 1
|
||||
|
||||
async def test_get_brand_names_deduplication(self):
|
||||
"""Test that duplicate brand IDs are deduplicated."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
mock_fetch.return_value = ("brand_001", "品牌A")
|
||||
|
||||
result = await get_brand_names(["brand_001", "brand_001", "brand_001"])
|
||||
|
||||
# Should only call once due to deduplication
|
||||
assert mock_fetch.call_count == 1
|
||||
|
||||
async def test_get_brand_names_partial_failure(self):
|
||||
"""Test that partial failures don't break the whole batch."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
mock_fetch.side_effect = [
|
||||
("brand_001", "品牌A"),
|
||||
("brand_002", "brand_002"), # Fallback to ID
|
||||
("brand_003", "品牌C"),
|
||||
]
|
||||
|
||||
result = await get_brand_names(["brand_001", "brand_002", "brand_003"])
|
||||
|
||||
assert result["brand_001"] == "品牌A"
|
||||
assert result["brand_002"] == "brand_002" # Fallback
|
||||
assert result["brand_003"] == "品牌C"
|
||||
|
||||
async def test_fetch_brand_name_success(self):
|
||||
"""Test successful single brand fetch via get_brand_names."""
|
||||
# 使用更高层的 mock,测试整个流程
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
mock_fetch.return_value = ("test_id", "测试品牌")
|
||||
|
||||
result = await get_brand_names(["test_id"])
|
||||
|
||||
assert result["test_id"] == "测试品牌"
|
||||
|
||||
async def test_fetch_brand_name_failure(self):
|
||||
"""Test brand fetch failure returns ID as fallback."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.side_effect = httpx.TimeoutException("Timeout")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("test_id", semaphore)
|
||||
|
||||
assert brand_id == "test_id"
|
||||
assert brand_name == "test_id" # Fallback to ID
|
||||
|
||||
async def test_fetch_brand_name_404(self):
|
||||
"""Test brand fetch with 404 returns ID as fallback."""
|
||||
mock_response = AsyncMock()
|
||||
mock_response.status_code = 404
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("nonexistent", semaphore)
|
||||
|
||||
assert brand_id == "nonexistent"
|
||||
assert brand_name == "nonexistent"
|
||||
|
||||
async def test_concurrency_limit(self):
|
||||
"""Test that concurrency is limited."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
# 创建 15 个品牌 ID
|
||||
brand_ids = [f"brand_{i:03d}" for i in range(15)]
|
||||
mock_fetch.side_effect = [(id, f"名称_{id}") for id in brand_ids]
|
||||
|
||||
result = await get_brand_names(brand_ids)
|
||||
|
||||
assert len(result) == 15
|
||||
# 验证所有调用都完成了
|
||||
assert mock_fetch.call_count == 15
|
||||
|
||||
async def test_fetch_brand_name_200_with_array_data(self):
|
||||
"""Test successful brand fetch with array data structure (T-019 fix)."""
|
||||
# 正确的API响应格式: data是数组,从data[0].brand_name获取品牌名称
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"total": 1,
|
||||
"last_updated": "2025-12-30T11:28:40.738185",
|
||||
"has_more": 0,
|
||||
"data": [
|
||||
{
|
||||
"industry_id": 20,
|
||||
"industry_name": "母婴",
|
||||
"brand_id": 533661,
|
||||
"brand_name": "Giving/启初"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("533661", semaphore)
|
||||
|
||||
assert brand_id == "533661"
|
||||
assert brand_name == "Giving/启初"
|
||||
|
||||
async def test_fetch_brand_name_200_with_empty_data_array(self):
|
||||
"""Test brand fetch with 200 but empty data array (T-019 edge case)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"total": 0,
|
||||
"data": []
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("unknown_brand", semaphore)
|
||||
|
||||
assert brand_id == "unknown_brand"
|
||||
assert brand_name == "unknown_brand" # Fallback
|
||||
|
||||
async def test_fetch_brand_name_200_no_brand_name_field(self):
|
||||
"""Test brand fetch with 200 but no brand_name in data item."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"total": 1,
|
||||
"data": [{"brand_id": 123}] # No brand_name field
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("brand_no_name", semaphore)
|
||||
|
||||
assert brand_id == "brand_no_name"
|
||||
assert brand_name == "brand_no_name" # Fallback
|
||||
|
||||
async def test_fetch_brand_name_with_auth_header(self):
|
||||
"""Test that Authorization header is sent (T-020)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"total": 1,
|
||||
"data": [{"brand_id": 123, "brand_name": "测试品牌"}]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with patch("app.services.brand_api.settings") as mock_settings:
|
||||
mock_settings.BRAND_API_TIMEOUT = 3.0
|
||||
mock_settings.BRAND_API_BASE_URL = "https://api.test.com"
|
||||
mock_settings.BRAND_API_TOKEN = "test_token_123"
|
||||
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
await fetch_brand_name("123", semaphore)
|
||||
|
||||
# 验证请求包含 Authorization header
|
||||
mock_client.get.assert_called_once()
|
||||
call_args = mock_client.get.call_args
|
||||
assert "headers" in call_args.kwargs
|
||||
assert call_args.kwargs["headers"]["Authorization"] == "Bearer test_token_123"
|
||||
|
||||
async def test_fetch_brand_name_request_error(self):
|
||||
"""Test brand fetch with request error."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.side_effect = httpx.RequestError("Connection failed")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("brand_error", semaphore)
|
||||
|
||||
assert brand_id == "brand_error"
|
||||
assert brand_name == "brand_error" # Fallback
|
||||
|
||||
async def test_fetch_brand_name_unexpected_error(self):
|
||||
"""Test brand fetch with unexpected error."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.side_effect = ValueError("Invalid JSON")
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("brand_json_error", semaphore)
|
||||
|
||||
assert brand_id == "brand_json_error"
|
||||
assert brand_name == "brand_json_error" # Fallback
|
||||
|
||||
async def test_get_brand_names_with_exception_in_gather(self):
|
||||
"""Test that exceptions in asyncio.gather are handled."""
|
||||
with patch("app.services.brand_api.fetch_brand_name") as mock_fetch:
|
||||
# 第二个调用抛出异常
|
||||
mock_fetch.side_effect = [
|
||||
("brand_001", "品牌A"),
|
||||
Exception("Unexpected error"),
|
||||
("brand_003", "品牌C"),
|
||||
]
|
||||
|
||||
result = await get_brand_names(["brand_001", "brand_002", "brand_003"])
|
||||
|
||||
# 成功的应该在结果中
|
||||
assert result["brand_001"] == "品牌A"
|
||||
assert result["brand_003"] == "品牌C"
|
||||
# 失败的不应该在结果中(因为是 Exception,不是 tuple)
|
||||
assert "brand_002" not in result
|
||||
|
||||
async def test_fetch_brand_name_non_dict_response(self):
|
||||
"""Test brand fetch with non-dict response."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = ["not", "a", "dict"] # Array instead of dict
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
semaphore = asyncio.Semaphore(10)
|
||||
brand_id, brand_name = await fetch_brand_name("brand_array", semaphore)
|
||||
|
||||
assert brand_id == "brand_array"
|
||||
assert brand_name == "brand_array" # Fallback because response is not dict
|
||||
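The deleted tests above pin down the behaviour expected from `fetch_brand_name` / `get_brand_names` even though the implementation itself is not part of this diff. The sketch below is a reconstruction consistent with those assertions only; the endpoint path, query parameter, and timeout are assumptions.

```python
# Reconstruction for illustration -- not the real app.services.brand_api code.
# Mirrors the tested contract: semaphore-limited concurrent fetches, ID
# de-duplication, an Authorization header, and falling back to the brand_id
# itself whenever the lookup fails.
import asyncio
import httpx


async def fetch_brand_name_sketch(
    brand_id: str,
    semaphore: asyncio.Semaphore,
    base_url: str = "https://brand-api.example.com",  # assumed
    token: str = "",
) -> tuple[str, str]:
    async with semaphore:
        try:
            async with httpx.AsyncClient(timeout=3.0) as client:
                resp = await client.get(
                    f"{base_url}/brands",                      # path assumed
                    params={"brand_id": brand_id},
                    headers={"Authorization": f"Bearer {token}"},
                )
            if resp.status_code == 200:
                body = resp.json()
                if isinstance(body, dict) and body.get("data"):
                    name = body["data"][0].get("brand_name")
                    if name:
                        return brand_id, name
        except Exception:
            pass
        return brand_id, brand_id  # fallback: show the ID when the name is unknown


async def get_brand_names_sketch(brand_ids: list[str | None]) -> dict[str, str]:
    ids = list({b for b in brand_ids if b})  # drop None / "" and de-duplicate
    if not ids:
        return {}
    sem = asyncio.Semaphore(10)
    pairs = await asyncio.gather(
        *(fetch_brand_name_sketch(b, sem) for b in ids),
        return_exceptions=True,
    )
    return {p[0]: p[1] for p in pairs if isinstance(p, tuple)}
```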
@ -1,99 +0,0 @@
|
||||
import pytest
|
||||
from app.services.calculator import (
|
||||
calculate_natural_cpm,
|
||||
calculate_natural_search_uv,
|
||||
calculate_natural_search_cost,
|
||||
calculate_metrics,
|
||||
)
|
||||
|
||||
|
||||
class TestCalculator:
|
||||
"""Tests for calculator functions."""
|
||||
|
||||
def test_calculate_natural_cpm_normal(self):
|
||||
"""Test normal CPM calculation."""
|
||||
result = calculate_natural_cpm(10000.0, 100000)
|
||||
assert result == 100.0 # 10000 / 100000 * 1000 = 100
|
||||
|
||||
def test_calculate_natural_cpm_zero_play(self):
|
||||
"""Test CPM with zero plays returns None."""
|
||||
result = calculate_natural_cpm(10000.0, 0)
|
||||
assert result is None
|
||||
|
||||
def test_calculate_natural_cpm_decimal(self):
|
||||
"""Test CPM returns 2 decimal places."""
|
||||
result = calculate_natural_cpm(1234.56, 50000)
|
||||
assert result == 24.69 # round(1234.56 / 50000 * 1000, 2)
|
||||
|
||||
def test_calculate_natural_search_uv_normal(self):
|
||||
"""Test normal search UV calculation."""
|
||||
result = calculate_natural_search_uv(100000, 150000, 500)
|
||||
expected = round((100000 / 150000) * 500, 2)
|
||||
assert result == expected
|
||||
|
||||
def test_calculate_natural_search_uv_zero_total(self):
|
||||
"""Test search UV with zero total plays returns None."""
|
||||
result = calculate_natural_search_uv(100000, 0, 500)
|
||||
assert result is None
|
||||
|
||||
def test_calculate_natural_search_uv_zero_natural(self):
|
||||
"""Test search UV with zero natural plays."""
|
||||
result = calculate_natural_search_uv(0, 150000, 500)
|
||||
assert result == 0.0
|
||||
|
||||
def test_calculate_natural_search_cost_normal(self):
|
||||
"""Test normal search cost calculation."""
|
||||
result = calculate_natural_search_cost(10000.0, 333.33)
|
||||
assert result == 30.0 # round(10000 / 333.33, 2)
|
||||
|
||||
def test_calculate_natural_search_cost_zero_uv(self):
|
||||
"""Test search cost with zero UV returns None."""
|
||||
result = calculate_natural_search_cost(10000.0, 0)
|
||||
assert result is None
|
||||
|
||||
def test_calculate_natural_search_cost_none_uv(self):
|
||||
"""Test search cost with None UV returns None."""
|
||||
result = calculate_natural_search_cost(10000.0, None)
|
||||
assert result is None
|
||||
|
||||
def test_calculate_metrics_all_normal(self):
|
||||
"""Test calculate_metrics with all normal values."""
|
||||
result = calculate_metrics(
|
||||
estimated_video_cost=10000.0,
|
||||
natural_play_cnt=100000,
|
||||
total_play_cnt=150000,
|
||||
after_view_search_uv=500,
|
||||
)
|
||||
|
||||
assert result["estimated_natural_cpm"] == 100.0
|
||||
assert result["estimated_natural_search_uv"] == round((100000 / 150000) * 500, 2)
|
||||
expected_cost = round(10000.0 / result["estimated_natural_search_uv"], 2)
|
||||
assert result["estimated_natural_search_cost"] == expected_cost
|
||||
|
||||
def test_calculate_metrics_zero_plays(self):
|
||||
"""Test calculate_metrics with zero plays."""
|
||||
result = calculate_metrics(
|
||||
estimated_video_cost=10000.0,
|
||||
natural_play_cnt=0,
|
||||
total_play_cnt=0,
|
||||
after_view_search_uv=500,
|
||||
)
|
||||
|
||||
assert result["estimated_natural_cpm"] is None
|
||||
assert result["estimated_natural_search_uv"] is None
|
||||
assert result["estimated_natural_search_cost"] is None
|
||||
|
||||
def test_calculate_metrics_partial_zero(self):
|
||||
"""Test calculate_metrics with partial zero values."""
|
||||
result = calculate_metrics(
|
||||
estimated_video_cost=10000.0,
|
||||
natural_play_cnt=100000,
|
||||
total_play_cnt=0, # Zero total plays
|
||||
after_view_search_uv=500,
|
||||
)
|
||||
|
||||
# CPM can still be calculated
|
||||
assert result["estimated_natural_cpm"] == 100.0
|
||||
# But search UV and cost cannot
|
||||
assert result["estimated_natural_search_uv"] is None
|
||||
assert result["estimated_natural_search_cost"] is None
|
||||
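The deleted calculator tests above fully determine the three formulas involved. Since the calculator module itself is not shown in this diff, the following is a sketch consistent with those assertions; a `calculate_metrics` wrapper composing the three is implied by the last tests but omitted here.

```python
# Sketch consistent with the deleted tests above; not the real
# app.services.calculator source, which is outside this diff.
from typing import Optional


def calculate_natural_cpm(cost: float, natural_play_cnt: int) -> Optional[float]:
    """Cost per 1000 natural plays; None when there are no natural plays."""
    if natural_play_cnt <= 0:
        return None
    return round(cost / natural_play_cnt * 1000, 2)


def calculate_natural_search_uv(
    natural_play_cnt: int, total_play_cnt: int, after_view_search_uv: int
) -> Optional[float]:
    """Pro-rate after-view search UV by the natural share of total plays."""
    if total_play_cnt <= 0:
        return None
    return round(natural_play_cnt / total_play_cnt * after_view_search_uv, 2)


def calculate_natural_search_cost(cost: float, uv: Optional[float]) -> Optional[float]:
    """Cost per estimated natural search UV; None when UV is missing or zero."""
    if not uv:
        return None
    return round(cost / uv, 2)


# Spot checks against the test expectations above:
assert calculate_natural_cpm(10000.0, 100000) == 100.0
assert calculate_natural_search_cost(10000.0, 333.33) == 30.0
```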
@ -2,22 +2,6 @@ import pytest
|
||||
from sqlalchemy import select
|
||||
|
||||
from app.models import KolVideo
|
||||
from app.core.logging import setup_logging, get_logger
|
||||
|
||||
|
||||
class TestLogging:
|
||||
"""Tests for logging module."""
|
||||
|
||||
def test_setup_logging(self):
|
||||
"""Test logging setup."""
|
||||
# Should not raise
|
||||
setup_logging()
|
||||
|
||||
def test_get_logger(self):
|
||||
"""Test getting a logger."""
|
||||
logger = get_logger("test")
|
||||
assert logger is not None
|
||||
assert logger.name == "test"
|
||||
|
||||
|
||||
class TestKolVideoModel:
|
||||
@ -179,14 +163,3 @@ class TestKolVideoModel:
|
||||
select(KolVideo).where(KolVideo.item_id == sample_video_data["item_id"])
|
||||
)
|
||||
assert result.scalar_one_or_none() is None
|
||||
|
||||
async def test_video_repr(self, test_session, sample_video_data):
|
||||
"""Test KolVideo __repr__ method."""
|
||||
video = KolVideo(**sample_video_data)
|
||||
test_session.add(video)
|
||||
await test_session.commit()
|
||||
|
||||
repr_str = repr(video)
|
||||
assert "KolVideo" in repr_str
|
||||
assert sample_video_data["item_id"] in repr_str
|
||||
assert sample_video_data["title"] in repr_str
|
||||
|
||||
@ -1,163 +0,0 @@
|
||||
import pytest
|
||||
from unittest.mock import patch, AsyncMock, MagicMock
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from app.main import app
|
||||
from app.models import KolVideo
|
||||
from app.api.v1.export import set_export_data, get_export_data
|
||||
|
||||
|
||||
class TestErrorHandling:
|
||||
"""Tests for error handling scenarios."""
|
||||
|
||||
@pytest.fixture
|
||||
async def client(self, override_get_db):
|
||||
"""Create test client with dependency override."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
yield ac
|
||||
|
||||
@pytest.fixture
|
||||
async def seed_data(self, test_session, sample_video_data):
|
||||
"""Seed test data."""
|
||||
data = sample_video_data.copy()
|
||||
data["item_id"] = "error_test_001"
|
||||
data["star_id"] = "error_star_001"
|
||||
video = KolVideo(**data)
|
||||
test_session.add(video)
|
||||
await test_session.commit()
|
||||
return video
|
||||
|
||||
# Query API error handling tests
|
||||
|
||||
@patch("app.api.v1.query.query_videos")
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_database_error(
|
||||
self, mock_brand, mock_query, client
|
||||
):
|
||||
"""Test query returns error on database failure."""
|
||||
mock_brand.return_value = {}
|
||||
mock_query.side_effect = SQLAlchemyError("Database connection failed")
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["test_id"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert "数据库连接失败" in data["error"]
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_brand_api_failure_fallback(
|
||||
self, mock_brand, client, test_session, seed_data
|
||||
):
|
||||
"""Test query continues with fallback when brand API fails."""
|
||||
mock_brand.side_effect = Exception("Brand API timeout")
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["error_star_001"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
# Should still succeed, brand name falls back to brand_id
|
||||
assert data["success"] is True
|
||||
assert data["total"] >= 0
|
||||
|
||||
@patch("app.api.v1.query.query_videos")
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_generic_error(
|
||||
self, mock_brand, mock_query, client
|
||||
):
|
||||
"""Test query returns error on unexpected exception."""
|
||||
mock_brand.return_value = {}
|
||||
mock_query.side_effect = Exception("Unexpected error")
|
||||
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["test_id"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert "Unexpected error" in data["error"]
|
||||
|
||||
# Export API error handling tests
|
||||
|
||||
async def test_export_no_data_error(self, client):
|
||||
"""Test export returns error when no data is cached."""
|
||||
# Clear cached data
|
||||
set_export_data([])
|
||||
|
||||
response = await client.get("/api/v1/export?format=xlsx")
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert "无数据可导出" in data["error"]
|
||||
|
||||
async def test_export_with_data_success(self, client, sample_video_data):
|
||||
"""Test export succeeds when data is cached."""
|
||||
# Set cached data
|
||||
set_export_data([sample_video_data])
|
||||
|
||||
response = await client.get("/api/v1/export?format=xlsx")
|
||||
assert response.status_code == 200
|
||||
assert "application/vnd.openxmlformats" in response.headers["content-type"]
|
||||
|
||||
async def test_export_csv_with_data_success(self, client, sample_video_data):
|
||||
"""Test CSV export succeeds when data is cached."""
|
||||
set_export_data([sample_video_data])
|
||||
|
||||
response = await client.get("/api/v1/export?format=csv")
|
||||
assert response.status_code == 200
|
||||
assert "text/csv" in response.headers["content-type"]
|
||||
|
||||
@patch("app.api.v1.export.generate_excel")
|
||||
async def test_export_generation_error(
|
||||
self, mock_generate, client, sample_video_data
|
||||
):
|
||||
"""Test export returns error when file generation fails."""
|
||||
mock_generate.side_effect = Exception("Excel generation failed")
|
||||
set_export_data([sample_video_data])
|
||||
|
||||
response = await client.get("/api/v1/export?format=xlsx")
|
||||
assert response.status_code == 500
|
||||
data = response.json()
|
||||
assert data["success"] is False
|
||||
assert "导出失败" in data["error"]
|
||||
|
||||
# Input validation tests
|
||||
|
||||
async def test_query_validation_empty_values(self, client):
|
||||
"""Test query returns 422 for empty values."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": []},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
async def test_query_validation_invalid_type(self, client):
|
||||
"""Test query returns 422 for invalid query type."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "invalid_type", "values": ["test"]},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
async def test_query_validation_missing_type(self, client):
|
||||
"""Test query returns 422 for missing type field."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"values": ["test"]},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
async def test_query_validation_missing_values(self, client):
|
||||
"""Test query returns 422 for missing values field."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id"},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
@ -1,169 +0,0 @@
|
||||
import pytest
|
||||
from io import BytesIO
|
||||
from openpyxl import load_workbook
|
||||
|
||||
from app.services.export_service import generate_excel, generate_csv, COLUMN_HEADERS
|
||||
|
||||
|
||||
class TestExportService:
|
||||
"""Tests for Export Service."""
|
||||
|
||||
@pytest.fixture
|
||||
def sample_export_data(self):
|
||||
"""Sample data for export testing."""
|
||||
return [
|
||||
{
|
||||
"item_id": "item_001",
|
||||
"title": "测试视频1",
|
||||
"viral_type": "爆款",
|
||||
"video_url": "https://example.com/1",
|
||||
"star_id": "star_001",
|
||||
"star_unique_id": "unique_001",
|
||||
"star_nickname": "测试达人1",
|
||||
"publish_time": "2026-01-28T10:00:00",
|
||||
"natural_play_cnt": 100000,
|
||||
"heated_play_cnt": 50000,
|
||||
"total_play_cnt": 150000,
|
||||
"total_interact": 5000,
|
||||
"like_cnt": 3000,
|
||||
"share_cnt": 1000,
|
||||
"comment_cnt": 1000,
|
||||
"new_a3_rate": 0.05,
|
||||
"after_view_search_uv": 500,
|
||||
"return_search_cnt": 200,
|
||||
"industry_id": "ind_001",
|
||||
"industry_name": "美妆",
|
||||
"brand_id": "brand_001",
|
||||
"brand_name": "测试品牌",
|
||||
"estimated_video_cost": 10000.0,
|
||||
"estimated_natural_cpm": 100.0,
|
||||
"estimated_natural_search_uv": 333.33,
|
||||
"estimated_natural_search_cost": 30.0,
|
||||
}
|
||||
]
|
||||
|
||||
def test_generate_excel_success(self, sample_export_data):
|
||||
"""Test Excel generation."""
|
||||
content = generate_excel(sample_export_data)
|
||||
|
||||
assert content is not None
|
||||
assert len(content) > 0
|
||||
|
||||
# 验证可以被 openpyxl 读取
|
||||
wb = load_workbook(BytesIO(content))
|
||||
ws = wb.active
|
||||
|
||||
# 验证表头
|
||||
assert ws.cell(row=1, column=1).value == "视频ID"
|
||||
assert ws.cell(row=1, column=2).value == "视频标题"
|
||||
|
||||
# 验证数据行
|
||||
assert ws.cell(row=2, column=1).value == "item_001"
|
||||
assert ws.cell(row=2, column=2).value == "测试视频1"
|
||||
|
||||
def test_generate_excel_empty_data(self):
|
||||
"""Test Excel generation with empty data."""
|
||||
content = generate_excel([])
|
||||
|
||||
assert content is not None
|
||||
wb = load_workbook(BytesIO(content))
|
||||
ws = wb.active
|
||||
|
||||
# 应该只有表头
|
||||
assert ws.max_row == 1
|
||||
|
||||
def test_generate_csv_success(self, sample_export_data):
|
||||
"""Test CSV generation."""
|
||||
content = generate_csv(sample_export_data)
|
||||
|
||||
assert content is not None
|
||||
assert len(content) > 0
|
||||
|
||||
# 验证 CSV 内容
|
||||
lines = content.decode("utf-8-sig").split("\n")
|
||||
assert len(lines) >= 2 # 表头 + 至少一行数据
|
||||
|
||||
# 验证表头
|
||||
assert "视频ID" in lines[0]
|
||||
assert "视频标题" in lines[0]
|
||||
|
||||
def test_generate_csv_empty_data(self):
|
||||
"""Test CSV generation with empty data."""
|
||||
content = generate_csv([])
|
||||
|
||||
assert content is not None
|
||||
lines = content.decode("utf-8-sig").split("\n")
|
||||
|
||||
# 应该只有表头
|
||||
assert len(lines) == 2 # 表头 + 空行
|
||||
|
||||
def test_generate_csv_comma_escape(self):
|
||||
"""Test CSV properly escapes commas."""
|
||||
data = [
|
||||
{
|
||||
"item_id": "item_001",
|
||||
"title": "标题,包含,逗号",
|
||||
"viral_type": None,
|
||||
"video_url": None,
|
||||
"star_id": "star_001",
|
||||
"star_unique_id": "unique_001",
|
||||
"star_nickname": "测试达人",
|
||||
"publish_time": None,
|
||||
"natural_play_cnt": 0,
|
||||
"heated_play_cnt": 0,
|
||||
"total_play_cnt": 0,
|
||||
"total_interact": 0,
|
||||
"like_cnt": 0,
|
||||
"share_cnt": 0,
|
||||
"comment_cnt": 0,
|
||||
"new_a3_rate": None,
|
||||
"after_view_search_uv": 0,
|
||||
"return_search_cnt": 0,
|
||||
"industry_id": None,
|
||||
"industry_name": None,
|
||||
"brand_id": None,
|
||||
"brand_name": None,
|
||||
"estimated_video_cost": 0,
|
||||
"estimated_natural_cpm": None,
|
||||
"estimated_natural_search_uv": None,
|
||||
"estimated_natural_search_cost": None,
|
||||
}
|
||||
]
|
||||
content = generate_csv(data)
|
||||
csv_text = content.decode("utf-8-sig")
|
||||
|
||||
# 包含逗号的字段应该被引号包裹
|
||||
assert '"标题,包含,逗号"' in csv_text
|
||||
|
||||
def test_column_headers_complete(self):
|
||||
"""Test that all required columns are defined."""
|
||||
expected_columns = [
|
||||
"视频ID",
|
||||
"视频标题",
|
||||
"爆文类型",
|
||||
"视频链接",
|
||||
"新增A3率",
|
||||
"看后搜人数",
|
||||
"回搜次数",
|
||||
"自然曝光数",
|
||||
"加热曝光数",
|
||||
"总曝光数",
|
||||
"总互动",
|
||||
"点赞",
|
||||
"转发",
|
||||
"评论",
|
||||
"合作行业ID",
|
||||
"合作行业",
|
||||
"合作品牌ID",
|
||||
"合作品牌",
|
||||
"发布时间",
|
||||
"达人昵称",
|
||||
"达人unique_id",
|
||||
"预估视频价格",
|
||||
"预估自然CPM",
|
||||
"预估自然看后搜人数",
|
||||
"预估自然看后搜人数成本",
|
||||
]
|
||||
|
||||
for col in expected_columns:
|
||||
assert col in [h[0] for h in COLUMN_HEADERS], f"Missing column: {col}"
|
||||
@ -1,30 +0,0 @@
|
||||
import pytest
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
|
||||
from app.main import app
|
||||
|
||||
|
||||
class TestMainApp:
|
||||
"""Tests for main app endpoints."""
|
||||
|
||||
@pytest.fixture
|
||||
async def client(self):
|
||||
"""Create test client."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
yield ac
|
||||
|
||||
async def test_root_endpoint(self, client):
|
||||
"""Test root endpoint returns app info."""
|
||||
response = await client.get("/")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["message"] == "KOL Insight API"
|
||||
assert data["version"] == "1.0.0"
|
||||
|
||||
async def test_health_endpoint(self, client):
|
||||
"""Test health check endpoint."""
|
||||
response = await client.get("/health")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "healthy"
|
||||
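The two endpoint tests above imply only a trivial amount of application code; a minimal sketch matching their assertions follows (the real app.main wiring, CORS setup, routers, and so on are not shown in this diff).

```python
# Minimal endpoints matching the assertions above; illustration only.
from fastapi import FastAPI

app = FastAPI(title="KOL Insight API")


@app.get("/")
async def root():
    return {"message": "KOL Insight API", "version": "1.0.0"}


@app.get("/health")
async def health():
    return {"status": "healthy"}
```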
@ -1,139 +0,0 @@
|
||||
import pytest
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
from unittest.mock import patch, AsyncMock
|
||||
|
||||
from app.main import app
|
||||
from app.models import KolVideo
|
||||
from app.database import get_db
|
||||
|
||||
|
||||
class TestQueryAPI:
|
||||
"""Tests for Query API."""
|
||||
|
||||
@pytest.fixture
|
||||
async def client(self, override_get_db):
|
||||
"""Create test client with dependency override."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
yield ac
|
||||
|
||||
@pytest.fixture
|
||||
async def seed_data(self, test_session, sample_video_data):
|
||||
"""Seed test data."""
|
||||
videos = []
|
||||
for i in range(3):
|
||||
data = sample_video_data.copy()
|
||||
data["item_id"] = f"item_{i:03d}"
|
||||
data["star_id"] = f"star_{i:03d}"
|
||||
data["star_unique_id"] = f"unique_{i:03d}"
|
||||
data["star_nickname"] = f"测试达人{i}"
|
||||
videos.append(KolVideo(**data))
|
||||
test_session.add_all(videos)
|
||||
await test_session.commit()
|
||||
return videos
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_by_star_id_success(
|
||||
self, mock_brand, client, test_session, seed_data
|
||||
):
|
||||
"""Test querying by star_id returns correct results."""
|
||||
mock_brand.return_value = {}
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["star_000", "star_001"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert data["total"] == 2
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_by_unique_id_success(
|
||||
self, mock_brand, client, test_session, seed_data
|
||||
):
|
||||
"""Test querying by unique_id returns correct results."""
|
||||
mock_brand.return_value = {}
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "unique_id", "values": ["unique_000"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert data["total"] == 1
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_by_nickname_like(
|
||||
self, mock_brand, client, test_session, seed_data
|
||||
):
|
||||
"""Test querying by nickname using fuzzy match."""
|
||||
mock_brand.return_value = {}
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "nickname", "values": ["测试达人"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert data["total"] == 3 # 所有包含 "测试达人" 的记录
|
||||
|
||||
async def test_query_empty_values(self, client):
|
||||
"""Test querying with empty values returns error."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": []},
|
||||
)
|
||||
assert response.status_code == 422 # Validation error
|
||||
|
||||
async def test_query_invalid_type(self, client):
|
||||
"""Test querying with invalid type returns error."""
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "invalid_type", "values": ["test"]},
|
||||
)
|
||||
assert response.status_code == 422
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_no_results(self, mock_brand, client, test_session, seed_data):
|
||||
"""Test querying with no matching results."""
|
||||
mock_brand.return_value = {}
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["nonexistent_id"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert data["total"] == 0
|
||||
assert data["data"] == []
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_limit_enforcement(self, mock_brand, client, test_session):
|
||||
"""Test that query limit is enforced."""
|
||||
mock_brand.return_value = {}
|
||||
# 创建超过 1000 条记录的情况在测试中略过
|
||||
# 这里只测试 API 能正常工作
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["star_000"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
@patch("app.api.v1.query.get_brand_names", new_callable=AsyncMock)
|
||||
async def test_query_returns_calculated_fields(
|
||||
self, mock_brand, client, test_session, seed_data
|
||||
):
|
||||
"""Test that calculated fields are returned."""
|
||||
mock_brand.return_value = {}
|
||||
response = await client.post(
|
||||
"/api/v1/query",
|
||||
json={"type": "star_id", "values": ["star_000"]},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
if data["total"] > 0:
|
||||
video = data["data"][0]
|
||||
# 检查计算字段存在
|
||||
assert "estimated_natural_cpm" in video
|
||||
assert "estimated_natural_search_uv" in video
|
||||
assert "estimated_natural_search_cost" in video
|
||||
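Taken together, the query-API tests above fix the request and response envelope. The Pydantic models below are an illustrative reconstruction of that contract; the field names come straight from the assertions, while the class names and defaults are assumed.

```python
# Reconstruction of the request/response contract implied by the tests above;
# not the actual app schema code.
from typing import Any, Literal, Optional

from pydantic import BaseModel, Field


class QueryRequest(BaseModel):
    # "invalid_type", a missing field, or an empty values list -> 422
    type: Literal["star_id", "unique_id", "nickname"]
    values: list[str] = Field(min_length=1)


class QueryResponse(BaseModel):
    success: bool
    total: int = 0
    data: list[dict[str, Any]] = []
    error: Optional[str] = None
```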
@ -1,701 +0,0 @@
|
||||
"""
|
||||
Tests for SessionID Pool Service (T-021, T-022, T-027)
|
||||
|
||||
T-027 更新:
|
||||
- 改为 CookieConfig 数据结构
|
||||
- get_random_config() 随机选取配置
|
||||
- remove_by_auth_token() 移除失效配置
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
import httpx
|
||||
|
||||
from app.services.session_pool import (
|
||||
SessionPool,
|
||||
CookieConfig,
|
||||
session_pool,
|
||||
get_session_with_retry,
|
||||
get_random_config,
|
||||
get_distinct_configs,
|
||||
)
|
||||
|
||||
|
||||
class TestSessionPool:
|
||||
"""Tests for SessionPool class."""
|
||||
|
||||
async def test_refresh_success(self):
|
||||
"""Test successful session pool refresh (T-027 format)."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"data": [
|
||||
{
|
||||
"brand_id": "533661",
|
||||
"aadvid": "1648829117232140",
|
||||
"auth_token": "sessionid=session_001",
|
||||
"industry_id": 20,
|
||||
"brand_name": "Brand1",
|
||||
},
|
||||
{
|
||||
"brand_id": "10186612",
|
||||
"aadvid": "9876543210",
|
||||
"auth_token": "sessionid=session_002",
|
||||
"industry_id": 30,
|
||||
"brand_name": "Brand2",
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is True
|
||||
assert pool.size == 2
|
||||
assert not pool.is_empty
|
||||
|
||||
async def test_refresh_with_sessionid_cookie_field(self):
|
||||
"""Test refresh using sessionid_cookie field (fallback)."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"data": [
|
||||
{
|
||||
"brand_id": "533661",
|
||||
"aadvid": "1648829117232140",
|
||||
"sessionid_cookie": "sessionid=session_001",
|
||||
"industry_id": 20,
|
||||
"brand_name": "Brand1",
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is True
|
||||
assert pool.size == 1
|
||||
|
||||
async def test_refresh_empty_data(self):
|
||||
"""Test refresh with empty data array."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"data": []}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
|
||||
assert pool.size == 0
|
||||
|
||||
async def test_refresh_api_error(self):
|
||||
"""Test refresh with API error."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 500
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
|
||||
|
||||
async def test_refresh_timeout(self):
|
||||
"""Test refresh with timeout."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.side_effect = httpx.TimeoutException("Timeout")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
|
||||
|
||||
async def test_refresh_request_error(self):
|
||||
"""Test refresh with request error."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.side_effect = httpx.RequestError("Connection failed")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
|
||||
|
||||
async def test_refresh_unexpected_error(self):
|
||||
"""Test refresh with unexpected error."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.side_effect = ValueError("Unexpected")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
|
||||
|
||||
async def test_refresh_with_auth_header(self):
|
||||
"""Test that refresh includes Authorization header."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"data": [
|
||||
{
|
||||
"brand_id": "123",
|
||||
"aadvid": "456",
|
||||
"auth_token": "sessionid=test",
|
||||
"industry_id": 20,
|
||||
"brand_name": "Test",
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with patch("app.services.session_pool.settings") as mock_settings:
|
||||
mock_settings.YUNTU_API_TOKEN = "test_token"
|
||||
mock_settings.YUNTU_API_TIMEOUT = 10.0
|
||||
mock_settings.BRAND_API_BASE_URL = "https://api.test.com"
|
||||
|
||||
await pool.refresh()
|
||||
|
||||
mock_client.get.assert_called_once()
|
||||
call_args = mock_client.get.call_args
|
||||
assert "headers" in call_args.kwargs
|
||||
assert call_args.kwargs["headers"]["Authorization"] == "Bearer test_token"
|
||||
|
||||
def test_get_random_config_from_pool(self):
|
||||
"""Test getting random config from pool (T-027)."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
CookieConfig(
|
||||
brand_id="10186612",
|
||||
aadvid="9876543210",
|
||||
auth_token="sessionid=session_2",
|
||||
industry_id=30,
|
||||
brand_name="Brand2",
|
||||
),
|
||||
]
|
||||
|
||||
config = pool.get_random_config()
|
||||
|
||||
assert config is not None
|
||||
assert "aadvid" in config
|
||||
assert "auth_token" in config
|
||||
assert config["auth_token"] in ["sessionid=session_1", "sessionid=session_2"]
|
||||
|
||||
def test_get_random_config_from_empty_pool(self):
|
||||
"""Test getting random config from empty pool."""
|
||||
pool = SessionPool()
|
||||
|
||||
config = pool.get_random_config()
|
||||
|
||||
assert config is None
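# --- Illustrative sketch (assumption, not taken from app/services/session_pool.py) ---
# A minimal shape for CookieConfig / get_random_config that would satisfy the
# two tests above: an empty pool yields None, otherwise a random config is
# returned as a plain dict so that `"aadvid" in config` and indexing work.
import random
from dataclasses import dataclass, asdict
from typing import Optional

@dataclass
class _CookieConfigSketch:
    brand_id: str
    aadvid: str
    auth_token: str
    industry_id: int
    brand_name: str

class _SessionPoolSketch:
    def __init__(self) -> None:
        self._configs: list[_CookieConfigSketch] = []

    def get_random_config(self) -> Optional[dict]:
        if not self._configs:
            return None
        return asdict(random.choice(self._configs))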
|
||||
|
||||
def test_get_random_from_pool_compat(self):
|
||||
"""Test get_random compatibility method."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
]
|
||||
|
||||
session = pool.get_random()
|
||||
|
||||
assert session == "session_1"
|
||||
|
||||
def test_get_random_from_empty_pool_compat(self):
|
||||
"""Test get_random from empty pool."""
|
||||
pool = SessionPool()
|
||||
|
||||
session = pool.get_random()
|
||||
|
||||
assert session is None
|
||||
|
||||
def test_remove_by_auth_token(self):
|
||||
"""Test removing config by auth_token (T-027)."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
CookieConfig(
|
||||
brand_id="10186612",
|
||||
aadvid="9876543210",
|
||||
auth_token="sessionid=session_2",
|
||||
industry_id=30,
|
||||
brand_name="Brand2",
|
||||
),
|
||||
]
|
||||
|
||||
pool.remove_by_auth_token("sessionid=session_1")
|
||||
|
||||
assert pool.size == 1
|
||||
config = pool.get_random_config()
|
||||
assert config["auth_token"] == "sessionid=session_2"
|
||||
|
||||
def test_remove_session_compat(self):
|
||||
"""Test remove compatibility method."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
CookieConfig(
|
||||
brand_id="10186612",
|
||||
aadvid="9876543210",
|
||||
auth_token="sessionid=session_2",
|
||||
industry_id=30,
|
||||
brand_name="Brand2",
|
||||
),
|
||||
]
|
||||
|
||||
pool.remove("session_1")
|
||||
|
||||
assert pool.size == 1
|
||||
|
||||
def test_remove_nonexistent_session(self):
|
||||
"""Test removing a session that doesn't exist."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
]
|
||||
|
||||
# Should not raise
|
||||
pool.remove_by_auth_token("nonexistent")
|
||||
|
||||
assert pool.size == 1
|
||||
|
||||
def test_size_property(self):
|
||||
"""Test size property."""
|
||||
pool = SessionPool()
|
||||
assert pool.size == 0
|
||||
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="123",
|
||||
aadvid="456",
|
||||
auth_token="sessionid=a",
|
||||
industry_id=20,
|
||||
brand_name="A",
|
||||
),
|
||||
CookieConfig(
|
||||
brand_id="789",
|
||||
aadvid="012",
|
||||
auth_token="sessionid=b",
|
||||
industry_id=30,
|
||||
brand_name="B",
|
||||
),
|
||||
]
|
||||
assert pool.size == 2
|
||||
|
||||
def test_is_empty_property(self):
|
||||
"""Test is_empty property."""
|
||||
pool = SessionPool()
|
||||
assert pool.is_empty is True
|
||||
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="123",
|
||||
aadvid="456",
|
||||
auth_token="sessionid=a",
|
||||
industry_id=20,
|
||||
brand_name="A",
|
||||
),
|
||||
]
|
||||
assert pool.is_empty is False
|
||||
|
||||
|
||||
class TestGetRandomConfig:
|
||||
"""Tests for get_random_config function (T-027)."""
|
||||
|
||||
async def test_get_config_success(self):
|
||||
"""Test successful config retrieval."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
]
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
result = await get_random_config()
|
||||
|
||||
assert result is not None
|
||||
assert result["aadvid"] == "1648829117232140"
|
||||
assert result["auth_token"] == "sessionid=session_1"
|
||||
|
||||
async def test_get_config_refresh_on_empty(self):
|
||||
"""Test that pool is refreshed when empty."""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
async def refresh_side_effect():
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="123",
|
||||
aadvid="456",
|
||||
auth_token="sessionid=new_session",
|
||||
industry_id=20,
|
||||
brand_name="New",
|
||||
),
|
||||
]
|
||||
return True
|
||||
|
||||
mock_refresh.side_effect = refresh_side_effect
|
||||
|
||||
result = await get_random_config()
|
||||
|
||||
assert mock_refresh.called
|
||||
assert result["auth_token"] == "sessionid=new_session"
|
||||
|
||||
async def test_get_config_retry_on_refresh_failure(self):
|
||||
"""Test retry behavior when refresh fails."""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
mock_refresh.return_value = False
|
||||
|
||||
result = await get_random_config(max_retries=3)
|
||||
|
||||
assert result is None
|
||||
assert mock_refresh.call_count == 3
|
||||
|
||||
|
||||
class TestGetSessionWithRetry:
|
||||
"""Tests for get_session_with_retry function (T-022 compat)."""
|
||||
|
||||
async def test_get_session_success(self):
|
||||
"""Test successful session retrieval."""
|
||||
pool = SessionPool()
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_1",
|
||||
industry_id=20,
|
||||
brand_name="Brand1",
|
||||
),
|
||||
]
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
result = await get_session_with_retry()
|
||||
|
||||
assert result == "session_1"
|
||||
|
||||
async def test_get_session_refresh_on_empty(self):
|
||||
"""Test that pool is refreshed when empty."""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
async def refresh_side_effect():
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="123",
|
||||
aadvid="456",
|
||||
auth_token="sessionid=new_session",
|
||||
industry_id=20,
|
||||
brand_name="New",
|
||||
),
|
||||
]
|
||||
return True
|
||||
|
||||
mock_refresh.side_effect = refresh_side_effect
|
||||
|
||||
result = await get_session_with_retry()
|
||||
|
||||
assert mock_refresh.called
|
||||
assert result == "new_session"
|
||||
|
||||
async def test_get_session_retry_on_refresh_failure(self):
|
||||
"""Test retry behavior when refresh fails."""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
mock_refresh.return_value = False
|
||||
|
||||
result = await get_session_with_retry(max_retries=3)
|
||||
|
||||
assert result is None
|
||||
assert mock_refresh.call_count == 3
|
||||
|
||||
async def test_get_session_max_retries(self):
|
||||
"""Test max retries limit."""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
mock_refresh.return_value = False
|
||||
|
||||
result = await get_session_with_retry(max_retries=5)
|
||||
|
||||
assert result is None
|
||||
assert mock_refresh.call_count == 5
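# --- Illustrative sketch (assumption) ---
# The retry loop implied by TestGetSessionWithRetry: try to refresh the shared
# pool up to max_retries times and give up with None when it never fills.
from typing import Optional

async def _get_session_with_retry_sketch(pool, max_retries: int = 3) -> Optional[str]:
    for _ in range(max_retries):
        if not pool.is_empty:
            return pool.get_random()
        if await pool.refresh():
            return pool.get_random()
    return None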
|
||||
|
||||
|
||||
class TestSessionPoolIntegration:
|
||||
"""Integration tests for session pool."""
|
||||
|
||||
async def test_refresh_filters_invalid_items(self):
|
||||
"""Test that refresh filters out invalid items (T-027 format)."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"data": [
|
||||
{
|
||||
"brand_id": "533661",
|
||||
"aadvid": "1648829117232140",
|
||||
"auth_token": "sessionid=valid_session",
|
||||
"industry_id": 20,
|
||||
"brand_name": "Valid1",
|
||||
},
|
||||
{"no_auth_token": "missing"},
|
||||
None,
|
||||
{
|
||||
"brand_id": "10186612",
|
||||
"aadvid": "", # Empty aadvid should be filtered
|
||||
"auth_token": "sessionid=xxx",
|
||||
"industry_id": 30,
|
||||
"brand_name": "Invalid",
|
||||
},
|
||||
{
|
||||
"brand_id": "789012",
|
||||
"aadvid": "9876543210",
|
||||
"auth_token": "sessionid=another_valid",
|
||||
"industry_id": 40,
|
||||
"brand_name": "Valid2",
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is True
|
||||
assert pool.size == 2
|
||||
|
||||
async def test_refresh_handles_non_dict_data(self):
|
||||
"""Test refresh with non-dict response."""
|
||||
pool = SessionPool()
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = ["not", "a", "dict"]
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await pool.refresh()
|
||||
|
||||
assert result is False
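# --- Illustrative sketch (assumption) ---
# The filtering behaviour the two integration tests verify: keep only dict
# items with a non-empty aadvid and a token (auth_token, falling back to
# sessionid_cookie); anything else is silently skipped.
def _parse_config_items_sketch(items):
    configs = []
    for item in items or []:
        if not isinstance(item, dict):
            continue  # e.g. None entries in the payload
        auth_token = item.get("auth_token") or item.get("sessionid_cookie")
        if not auth_token or not item.get("aadvid"):
            continue  # missing token or empty aadvid is filtered out
        configs.append({**item, "auth_token": auth_token})
    return configs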
|
||||
|
||||
|
||||
def _make_configs(count: int) -> list[CookieConfig]:
    """Create `count` distinct CookieConfig objects for testing."""
    return [
        CookieConfig(
            brand_id=f"brand_{i}",
            aadvid=f"aadvid_{i}",
            auth_token=f"sessionid=session_{i}",
            industry_id=20 + i,
            brand_name=f"Brand{i}",
        )
        for i in range(count)
    ]


class TestGetDistinctConfigs:
|
||||
"""Tests for SessionPool.get_distinct_configs and module-level get_distinct_configs."""
|
||||
|
||||
def test_enough_configs_returns_distinct(self):
|
||||
"""池中配置 >= count → 返回不重复的"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(5)
|
||||
|
||||
result = pool.get_distinct_configs(3)
|
||||
|
||||
assert len(result) == 3
|
||||
tokens = [r["auth_token"] for r in result]
|
||||
assert len(set(tokens)) == 3
|
||||
|
||||
def test_exact_count(self):
|
||||
"""池中配置 == count → 全部返回"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(3)
|
||||
|
||||
result = pool.get_distinct_configs(3)
|
||||
|
||||
assert len(result) == 3
|
||||
tokens = {r["auth_token"] for r in result}
|
||||
assert len(tokens) == 3
|
||||
|
||||
def test_fewer_configs_wraps_around(self):
|
||||
"""池中配置 < count → 循环复用补足"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(2)
|
||||
|
||||
result = pool.get_distinct_configs(5)
|
||||
|
||||
assert len(result) == 5
|
||||
        # The first 2 are guaranteed to be distinct
|
||||
first_two_tokens = {result[0]["auth_token"], result[1]["auth_token"]}
|
||||
assert len(first_two_tokens) == 2
|
||||
|
||||
def test_empty_pool_returns_empty(self):
|
||||
"""空池 → 返回空列表"""
|
||||
pool = SessionPool()
|
||||
|
||||
result = pool.get_distinct_configs(3)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_zero_count_returns_empty(self):
|
||||
"""count=0 → 返回空列表"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(3)
|
||||
|
||||
result = pool.get_distinct_configs(0)
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_result_contains_all_fields(self):
|
||||
"""验证返回的 dict 包含所有必要字段"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(1)
|
||||
|
||||
result = pool.get_distinct_configs(1)
|
||||
|
||||
assert len(result) == 1
|
||||
item = result[0]
|
||||
assert "brand_id" in item
|
||||
assert "aadvid" in item
|
||||
assert "auth_token" in item
|
||||
assert "industry_id" in item
|
||||
assert "brand_name" in item
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_module_level_get_distinct_configs(self):
|
||||
"""测试模块级 get_distinct_configs 异步函数"""
|
||||
pool = SessionPool()
|
||||
pool._configs = _make_configs(3)
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
result = await get_distinct_configs(2)
|
||||
|
||||
assert len(result) == 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_module_level_refreshes_on_empty(self):
|
||||
"""池为空时自动刷新"""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
async def refresh_side_effect():
|
||||
pool._configs = _make_configs(3)
|
||||
return True
|
||||
|
||||
mock_refresh.side_effect = refresh_side_effect
|
||||
|
||||
result = await get_distinct_configs(2)
|
||||
|
||||
assert mock_refresh.called
|
||||
assert len(result) == 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_module_level_returns_empty_on_refresh_failure(self):
|
||||
"""刷新失败 → 返回空列表"""
|
||||
pool = SessionPool()
|
||||
|
||||
with patch("app.services.session_pool.session_pool", pool):
|
||||
with patch.object(pool, "refresh") as mock_refresh:
|
||||
mock_refresh.return_value = False
|
||||
|
||||
result = await get_distinct_configs(2, max_retries=2)
|
||||
|
||||
assert result == []
|
||||
assert mock_refresh.call_count == 2
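# --- Illustrative sketch (assumption) ---
# One reading of get_distinct_configs(count) that fits the tests above: hand
# out distinct configs first and wrap around (reuse) only when the pool holds
# fewer than count; an empty pool or count <= 0 yields []. The configs are
# assumed to be dataclass instances (CookieConfig).
import random
from dataclasses import asdict

def _get_distinct_configs_sketch(configs, count: int):
    if count <= 0 or not configs:
        return []
    shuffled = random.sample(configs, len(configs))               # distinct, random order
    picked = [shuffled[i % len(shuffled)] for i in range(count)]  # wrap around if short
    return [asdict(c) for c in picked]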
|
||||
@ -1,760 +0,0 @@
|
||||
"""
|
||||
Tests for Video Analysis Service (T-024)
|
||||
|
||||
覆盖:
|
||||
- calculate_cost_metrics 计算
|
||||
- _needs_api_call 缓存判断
|
||||
- get_video_analysis_data 详情页(缓存命中 / API 调用 / API 失败降级)
|
||||
- update_video_a3_metrics(含 heated_cost)
|
||||
- get_video_list_with_a3 列表页(混合缓存 + 并发 API)
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from datetime import datetime
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
|
||||
from app.services.video_analysis import (
|
||||
_build_video_list_item,
|
||||
_needs_api_call,
|
||||
calculate_cost_metrics,
|
||||
get_video_analysis_data,
|
||||
get_video_list_with_a3,
|
||||
update_video_a3_metrics,
|
||||
)
|
||||
from app.services.yuntu_api import YuntuAPIError
|
||||
|
||||
|
||||
def _make_mock_video(**overrides):
|
||||
"""创建标准 mock video 对象,带合理默认值。"""
|
||||
defaults = {
|
||||
"item_id": "video_123",
|
||||
"title": "测试视频",
|
||||
"video_url": "https://example.com/video",
|
||||
"vid": "vid_123",
|
||||
"star_id": "star_001",
|
||||
"star_unique_id": "unique_001",
|
||||
"star_nickname": "测试达人",
|
||||
"star_uid": "uid_001",
|
||||
"star_fans_cnt": 100000,
|
||||
"star_mcn": "MCN1",
|
||||
"publish_time": datetime(2025, 1, 15),
|
||||
"create_date": datetime(2025, 1, 15),
|
||||
"industry_name": "母婴",
|
||||
"industry_id": "20",
|
||||
"brand_id": "brand_001",
|
||||
"hot_type": "爆款",
|
||||
"viral_type": "爆款",
|
||||
"is_hot": True,
|
||||
"has_cart": False,
|
||||
"total_play_cnt": 50000,
|
||||
"natural_play_cnt": 40000,
|
||||
"heated_play_cnt": 10000,
|
||||
"total_interaction_cnt": 5000,
|
||||
"total_interact": 5000,
|
||||
"natural_interaction_cnt": 3000,
|
||||
"heated_interaction_cnt": 2000,
|
||||
"digg_cnt": 3000,
|
||||
"like_cnt": 3000,
|
||||
"share_cnt": 1000,
|
||||
"comment_cnt": 1000,
|
||||
"play_over_cnt": 20000,
|
||||
"play_over_rate": 0.4,
|
||||
"after_view_search_uv": 1000,
|
||||
"after_view_search_cnt": 1200,
|
||||
"after_view_search_rate": 0.02,
|
||||
"back_search_cnt": 50,
|
||||
"back_search_uv": 50,
|
||||
"return_search_cnt": 50,
|
||||
"new_a3_rate": 0.05,
|
||||
"total_new_a3_cnt": 0,
|
||||
"heated_new_a3_cnt": 0,
|
||||
"natural_new_a3_cnt": 0,
|
||||
"total_cost": 0.0,
|
||||
"heated_cost": 0.0,
|
||||
"star_task_cost": 0.0,
|
||||
"search_cost": 0.0,
|
||||
"ad_hot_roi": 0.0,
|
||||
"estimated_video_cost": 10000.0,
|
||||
"order_id": None,
|
||||
"content_type": None,
|
||||
"industry_tags": None,
|
||||
"ad_hot_type": None,
|
||||
"trend": None,
|
||||
"trend_daily": None,
|
||||
"trend_total": None,
|
||||
"component_metric_list": None,
|
||||
"key_word_after_search_infos": None,
|
||||
"index_map": None,
|
||||
"search_keywords": None,
|
||||
"keywords": None,
|
||||
"price_under_20s": None,
|
||||
"price_20_60s": None,
|
||||
"price_over_60s": None,
|
||||
"video_duration": None,
|
||||
"data_date": None,
|
||||
"created_at": None,
|
||||
"updated_at": None,
|
||||
}
|
||||
defaults.update(overrides)
|
||||
mock = MagicMock()
|
||||
for k, v in defaults.items():
|
||||
setattr(mock, k, v)
|
||||
return mock
|
||||
|
||||
|
||||
class TestCalculateCostMetrics:
|
||||
"""Tests for calculate_cost_metrics function."""
|
||||
|
||||
def test_all_metrics_calculated(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=10000,
|
||||
natural_play_cnt=40000,
|
||||
a3_increase_cnt=500,
|
||||
natural_a3_increase_cnt=400,
|
||||
after_view_search_uv=1000,
|
||||
total_play_cnt=50000,
|
||||
)
|
||||
|
||||
assert result["cpm"] == 200.0
|
||||
assert result["natural_cpm"] == 250.0
|
||||
assert result["cpa3"] == 20.0
|
||||
assert result["natural_cpa3"] == 25.0
|
||||
assert result["cp_search"] == 10.0
|
||||
assert result["estimated_natural_search_uv"] == 800.0
|
||||
assert result["natural_cp_search"] == 12.5
|
||||
|
||||
def test_zero_total_play_cnt(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=10000,
|
||||
natural_play_cnt=0,
|
||||
a3_increase_cnt=500,
|
||||
natural_a3_increase_cnt=400,
|
||||
after_view_search_uv=1000,
|
||||
total_play_cnt=0,
|
||||
)
|
||||
|
||||
assert result["cpm"] is None
|
||||
assert result["natural_cpm"] is None
|
||||
assert result["estimated_natural_search_uv"] is None
|
||||
assert result["natural_cp_search"] is None
|
||||
|
||||
def test_zero_a3_counts(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=10000,
|
||||
natural_play_cnt=40000,
|
||||
a3_increase_cnt=0,
|
||||
natural_a3_increase_cnt=0,
|
||||
after_view_search_uv=1000,
|
||||
total_play_cnt=50000,
|
||||
)
|
||||
|
||||
assert result["cpa3"] is None
|
||||
assert result["natural_cpa3"] is None
|
||||
assert result["cpm"] == 200.0
|
||||
|
||||
def test_zero_search_uv(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=10000,
|
||||
natural_play_cnt=40000,
|
||||
a3_increase_cnt=500,
|
||||
natural_a3_increase_cnt=400,
|
||||
after_view_search_uv=0,
|
||||
total_play_cnt=50000,
|
||||
)
|
||||
|
||||
assert result["cp_search"] is None
|
||||
assert result["estimated_natural_search_uv"] is None
|
||||
assert result["natural_cp_search"] is None
|
||||
|
||||
def test_all_zeros(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=0,
|
||||
natural_play_cnt=0,
|
||||
a3_increase_cnt=0,
|
||||
natural_a3_increase_cnt=0,
|
||||
after_view_search_uv=0,
|
||||
total_play_cnt=0,
|
||||
)
|
||||
|
||||
assert result["cpm"] is None
|
||||
assert result["natural_cpm"] is None
|
||||
assert result["cpa3"] is None
|
||||
assert result["natural_cpa3"] is None
|
||||
assert result["cp_search"] is None
|
||||
assert result["estimated_natural_search_uv"] is None
|
||||
assert result["natural_cp_search"] is None
|
||||
|
||||
def test_decimal_precision(self):
|
||||
result = calculate_cost_metrics(
|
||||
cost=10000,
|
||||
natural_play_cnt=30000,
|
||||
a3_increase_cnt=333,
|
||||
natural_a3_increase_cnt=111,
|
||||
after_view_search_uv=777,
|
||||
total_play_cnt=70000,
|
||||
)
|
||||
|
||||
assert isinstance(result["cpm"], float)
|
||||
assert len(str(result["cpm"]).split(".")[-1]) <= 2
|
||||
|
||||
|
||||
class TestNeedsApiCall:
|
||||
"""Tests for _needs_api_call helper."""
|
||||
|
||||
def test_needs_call_when_no_data(self):
|
||||
"""A3=0 且 cost=0 → 需要调 API"""
|
||||
video = _make_mock_video(total_new_a3_cnt=0, total_cost=0.0)
|
||||
assert _needs_api_call(video) is True
|
||||
|
||||
def test_needs_call_when_none_values(self):
|
||||
"""A3=None 且 cost=None → 需要调 API"""
|
||||
video = _make_mock_video(total_new_a3_cnt=None, total_cost=None)
|
||||
assert _needs_api_call(video) is True
|
||||
|
||||
def test_no_call_when_a3_exists(self):
|
||||
"""有 A3 数据 → 不需要调 API"""
|
||||
video = _make_mock_video(total_new_a3_cnt=500, total_cost=0.0)
|
||||
assert _needs_api_call(video) is False
|
||||
|
||||
def test_no_call_when_cost_exists(self):
|
||||
"""有 cost 数据 → 不需要调 API"""
|
||||
video = _make_mock_video(total_new_a3_cnt=0, total_cost=10000.0)
|
||||
assert _needs_api_call(video) is False
|
||||
|
||||
def test_no_call_when_both_exist(self):
|
||||
"""A3 和 cost 都有 → 不需要调 API"""
|
||||
video = _make_mock_video(total_new_a3_cnt=500, total_cost=10000.0)
|
||||
assert _needs_api_call(video) is False
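# --- Illustrative sketch (assumption) ---
# The cache rule described by the four tests above: call the Yuntu API only
# when the video has neither an A3 count nor a cost on record.
def _needs_api_call_sketch(video) -> bool:
    has_a3 = bool(getattr(video, "total_new_a3_cnt", None))
    has_cost = bool(getattr(video, "total_cost", None))
    return not (has_a3 or has_cost)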
|
||||
|
||||
|
||||
class TestGetVideoAnalysisData:
|
||||
"""Tests for get_video_analysis_data function."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_uses_db_when_cached(self):
|
||||
"""数据库已有 A3/Cost → 直接使用,不调 API"""
|
||||
mock_video = _make_mock_video(
|
||||
total_new_a3_cnt=500,
|
||||
heated_new_a3_cnt=100,
|
||||
natural_new_a3_cnt=400,
|
||||
total_cost=10000.0,
|
||||
heated_cost=5000.0,
|
||||
)
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.scalar_one_or_none.return_value = mock_video
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {"brand_001": "品牌A"}
|
||||
|
||||
with patch("app.services.video_analysis.fetch_yuntu_analysis") as mock_api:
|
||||
result = await get_video_analysis_data(mock_session, "video_123")
|
||||
|
||||
# API 不应被调用
|
||||
mock_api.assert_not_called()
|
||||
|
||||
# 验证使用了数据库数据
|
||||
assert result["a3_metrics"]["total_new_a3_cnt"] == 500
|
||||
assert result["a3_metrics"]["heated_new_a3_cnt"] == 100
|
||||
assert result["a3_metrics"]["natural_new_a3_cnt"] == 400
|
||||
assert result["cost_metrics"]["total_cost"] == 10000.0
|
||||
assert result["cost_metrics"]["heated_cost"] == 5000.0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calls_api_and_saves_to_db(self):
|
||||
"""数据库无数据 → 调 API → 写回 DB"""
|
||||
mock_video = _make_mock_video(
|
||||
total_new_a3_cnt=0,
|
||||
total_cost=0.0,
|
||||
heated_cost=0.0,
|
||||
)
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_select_result = MagicMock()
|
||||
mock_select_result.scalar_one_or_none.return_value = mock_video
|
||||
|
||||
mock_update_result = MagicMock()
|
||||
mock_update_result.rowcount = 1
|
||||
|
||||
call_count = [0]
|
||||
|
||||
async def mock_execute(stmt):
|
||||
stmt_str = str(stmt)
|
||||
if "SELECT" in stmt_str.upper() or call_count[0] == 0:
|
||||
call_count[0] += 1
|
||||
return mock_select_result
|
||||
return mock_update_result
|
||||
|
||||
mock_session.execute.side_effect = mock_execute
|
||||
|
||||
api_response = {
|
||||
"code": 0,
|
||||
"data": {
|
||||
"a3_increase_cnt": "500",
|
||||
"ad_a3_increase_cnt": "100",
|
||||
"natural_a3_increase_cnt": "400",
|
||||
"cost": 15000,
|
||||
"ad_cost": 8000,
|
||||
"natural_cost": 0,
|
||||
},
|
||||
}
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {"brand_001": "品牌A"}
|
||||
|
||||
with patch("app.services.video_analysis.fetch_yuntu_analysis") as mock_api:
|
||||
mock_api.return_value = api_response
|
||||
|
||||
result = await get_video_analysis_data(mock_session, "video_123")
|
||||
|
||||
# API 应被调用
|
||||
mock_api.assert_called_once_with(
|
||||
item_id="video_123",
|
||||
publish_time=datetime(2025, 1, 15),
|
||||
industry_id="20",
|
||||
)
|
||||
|
||||
# 验证 A3 数据
|
||||
assert result["a3_metrics"]["total_new_a3_cnt"] == 500
|
||||
assert result["a3_metrics"]["heated_new_a3_cnt"] == 100
|
||||
assert result["a3_metrics"]["natural_new_a3_cnt"] == 400
|
||||
|
||||
# 验证 cost
|
||||
assert result["cost_metrics"]["total_cost"] == 15000
|
||||
assert result["cost_metrics"]["heated_cost"] == 8000
|
||||
|
||||
# 验证计算指标存在
|
||||
assert "estimated_cpm" in result["calculated_metrics"]
|
||||
assert "estimated_natural_cpm" in result["calculated_metrics"]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_video_not_found(self):
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.scalar_one_or_none.return_value = None
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
with pytest.raises(ValueError) as exc_info:
|
||||
await get_video_analysis_data(mock_session, "nonexistent")
|
||||
|
||||
assert "not found" in str(exc_info.value).lower()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_fallback_on_api_failure(self):
|
||||
"""API 失败 → 降级使用数据库数据"""
|
||||
mock_video = _make_mock_video(
|
||||
total_new_a3_cnt=0,
|
||||
heated_new_a3_cnt=0,
|
||||
natural_new_a3_cnt=0,
|
||||
total_cost=0.0,
|
||||
heated_cost=0.0,
|
||||
)
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.scalar_one_or_none.return_value = mock_video
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
with patch("app.services.video_analysis.fetch_yuntu_analysis") as mock_api:
|
||||
mock_api.side_effect = YuntuAPIError("API Error")
|
||||
|
||||
result = await get_video_analysis_data(mock_session, "video_123")
|
||||
|
||||
# 降级使用 DB 数据(都是 0)
|
||||
assert result["a3_metrics"]["total_new_a3_cnt"] == 0
|
||||
assert result["cost_metrics"]["total_cost"] == 0.0
|
||||
|
||||
# 基础信息仍然正常
|
||||
assert result["base_info"]["vid"] == "video_123"
|
||||
assert result["reach_metrics"]["total_play_cnt"] == 50000
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_null_publish_time(self):
|
||||
mock_video = _make_mock_video(
|
||||
publish_time=None,
|
||||
create_date=None,
|
||||
total_new_a3_cnt=0,
|
||||
total_cost=0.0,
|
||||
total_play_cnt=0,
|
||||
natural_play_cnt=0,
|
||||
heated_play_cnt=0,
|
||||
after_view_search_uv=0,
|
||||
)
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.scalar_one_or_none.return_value = mock_video
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
with patch("app.services.video_analysis.fetch_yuntu_analysis") as mock_api:
|
||||
mock_api.return_value = {"code": 0, "data": {}}
|
||||
|
||||
result = await get_video_analysis_data(mock_session, "video_123")
|
||||
|
||||
assert result["base_info"]["create_date"] is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_response_structure(self):
|
||||
"""验证返回数据包含所有 6 大类"""
|
||||
mock_video = _make_mock_video(total_new_a3_cnt=500, total_cost=10000.0)
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.scalar_one_or_none.return_value = mock_video
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
result = await get_video_analysis_data(mock_session, "video_123")
|
||||
|
||||
assert "base_info" in result
|
||||
assert "reach_metrics" in result
|
||||
assert "a3_metrics" in result
|
||||
assert "search_metrics" in result
|
||||
assert "cost_metrics" in result
|
||||
assert "calculated_metrics" in result
|
||||
|
||||
# base_info 关键字段
|
||||
assert "star_nickname" in result["base_info"]
|
||||
assert "vid" in result["base_info"]
|
||||
assert "brand_name" in result["base_info"]
|
||||
|
||||
# reach_metrics 关键字段
|
||||
assert "total_play_cnt" in result["reach_metrics"]
|
||||
assert "natural_play_cnt" in result["reach_metrics"]
|
||||
|
||||
|
||||
class TestUpdateVideoA3Metrics:
|
||||
"""Tests for update_video_a3_metrics function (T-025)."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_success(self):
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = 1
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
result = await update_video_a3_metrics(
|
||||
session=mock_session,
|
||||
item_id="video_123",
|
||||
total_new_a3_cnt=500,
|
||||
heated_new_a3_cnt=100,
|
||||
natural_new_a3_cnt=400,
|
||||
total_cost=10000.0,
|
||||
)
|
||||
|
||||
assert result is True
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_with_heated_cost(self):
|
||||
"""验证 heated_cost 参数正常传递"""
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = 1
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
result = await update_video_a3_metrics(
|
||||
session=mock_session,
|
||||
item_id="video_123",
|
||||
total_new_a3_cnt=500,
|
||||
heated_new_a3_cnt=100,
|
||||
natural_new_a3_cnt=400,
|
||||
total_cost=15000.0,
|
||||
heated_cost=8000.0,
|
||||
)
|
||||
|
||||
assert result is True
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_video_not_found(self):
|
||||
mock_session = AsyncMock()
|
||||
mock_result = MagicMock()
|
||||
mock_result.rowcount = 0
|
||||
mock_session.execute.return_value = mock_result
|
||||
|
||||
result = await update_video_a3_metrics(
|
||||
session=mock_session,
|
||||
item_id="nonexistent",
|
||||
total_new_a3_cnt=500,
|
||||
heated_new_a3_cnt=100,
|
||||
natural_new_a3_cnt=400,
|
||||
total_cost=10000.0,
|
||||
)
|
||||
|
||||
assert result is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_database_error(self):
|
||||
mock_session = AsyncMock()
|
||||
mock_session.execute.side_effect = Exception("Database error")
|
||||
|
||||
result = await update_video_a3_metrics(
|
||||
session=mock_session,
|
||||
item_id="video_123",
|
||||
total_new_a3_cnt=500,
|
||||
heated_new_a3_cnt=100,
|
||||
natural_new_a3_cnt=400,
|
||||
total_cost=10000.0,
|
||||
)
|
||||
|
||||
assert result is False
|
||||
mock_session.rollback.assert_called_once()
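# --- Illustrative sketch (assumption) ---
# The write-back contract covered by TestUpdateVideoA3Metrics: an UPDATE whose
# rowcount decides the return value, with commit on success and rollback on
# any error; `video_model` and its `item_id` column are placeholders.
from sqlalchemy import update

async def _update_a3_sketch(session, video_model, item_id: str, **fields) -> bool:
    try:
        result = await session.execute(
            update(video_model).where(video_model.item_id == item_id).values(**fields)
        )
        await session.commit()
        return result.rowcount > 0  # False when no row matches item_id
    except Exception:
        await session.rollback()
        return False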
|
||||
|
||||
|
||||
class TestBuildVideoListItem:
|
||||
"""Tests for _build_video_list_item helper."""
|
||||
|
||||
def test_build_item_with_full_data(self):
|
||||
video = _make_mock_video(
|
||||
total_play_cnt=50000,
|
||||
natural_play_cnt=40000,
|
||||
after_view_search_uv=1000,
|
||||
estimated_video_cost=10000.0,
|
||||
)
|
||||
|
||||
result = _build_video_list_item(
|
||||
video=video,
|
||||
a3_increase_cnt=500,
|
||||
ad_a3_increase_cnt=100,
|
||||
natural_a3_increase_cnt=400,
|
||||
api_cost=15000.0,
|
||||
brand_name="品牌A",
|
||||
)
|
||||
|
||||
assert result["item_id"] == "video_123"
|
||||
assert result["brand_name"] == "品牌A"
|
||||
assert result["total_new_a3_cnt"] == 500
|
||||
assert result["estimated_natural_cpm"] is not None
|
||||
assert result["estimated_cp_a3"] == 30.0 # 15000/500
|
||||
|
||||
def test_build_item_zero_division(self):
|
||||
"""分母为 0 时应返回 None"""
|
||||
video = _make_mock_video(
|
||||
total_play_cnt=0,
|
||||
natural_play_cnt=0,
|
||||
after_view_search_uv=0,
|
||||
estimated_video_cost=0.0,
|
||||
)
|
||||
|
||||
result = _build_video_list_item(
|
||||
video=video,
|
||||
a3_increase_cnt=0,
|
||||
ad_a3_increase_cnt=0,
|
||||
natural_a3_increase_cnt=0,
|
||||
api_cost=0.0,
|
||||
brand_name="",
|
||||
)
|
||||
|
||||
assert result["estimated_natural_cpm"] is None
|
||||
assert result["estimated_cp_a3"] is None
|
||||
assert result["estimated_natural_cp_a3"] is None
|
||||
assert result["estimated_cp_search"] is None
|
||||
assert result["estimated_natural_cp_search"] is None
|
||||
|
||||
|
||||
class TestGetVideoListWithA3:
|
||||
"""Tests for get_video_list_with_a3 function."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_all_cached(self):
|
||||
"""所有视频都有缓存 → 不调 API"""
|
||||
videos = [
|
||||
_make_mock_video(
|
||||
item_id="v1", total_new_a3_cnt=500, total_cost=10000.0, brand_id="b1"
|
||||
),
|
||||
_make_mock_video(
|
||||
item_id="v2", total_new_a3_cnt=300, total_cost=8000.0, brand_id="b2"
|
||||
),
|
||||
]
|
||||
|
||||
mock_session = AsyncMock()
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {"b1": "品牌1", "b2": "品牌2"}
|
||||
|
||||
with patch("app.services.video_analysis.call_yuntu_api") as mock_api:
|
||||
result = await get_video_list_with_a3(mock_session, videos)
|
||||
|
||||
mock_api.assert_not_called()
|
||||
assert len(result) == 2
|
||||
assert result[0]["item_id"] == "v1"
|
||||
assert result[0]["total_new_a3_cnt"] == 500
|
||||
assert result[1]["item_id"] == "v2"
|
||||
assert result[1]["total_new_a3_cnt"] == 300
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_all_need_api(self):
|
||||
"""所有视频都需要 API → 并发调用 → 首次即返回正确数据 → gather 后顺序写 DB"""
|
||||
videos = [
|
||||
_make_mock_video(
|
||||
item_id="v1", total_new_a3_cnt=0, total_cost=0.0, brand_id="b1"
|
||||
),
|
||||
_make_mock_video(
|
||||
item_id="v2", total_new_a3_cnt=0, total_cost=0.0, brand_id="b2"
|
||||
),
|
||||
]
|
||||
|
||||
mock_session = AsyncMock()
|
||||
mock_update_result = MagicMock()
|
||||
mock_update_result.rowcount = 1
|
||||
mock_session.execute.return_value = mock_update_result
|
||||
|
||||
api_response = {
|
||||
"data": {
|
||||
"a3_increase_cnt": "200",
|
||||
"ad_a3_increase_cnt": "50",
|
||||
"natural_a3_increase_cnt": "150",
|
||||
"cost": 5000,
|
||||
"ad_cost": 3000,
|
||||
}
|
||||
}
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
with patch("app.services.video_analysis.call_yuntu_api", new_callable=AsyncMock) as mock_api:
|
||||
mock_api.return_value = api_response
|
||||
|
||||
with patch("app.services.video_analysis.get_distinct_configs", new_callable=AsyncMock) as mock_configs:
|
||||
mock_configs.return_value = [
|
||||
{"aadvid": "aad1", "auth_token": "tok1"},
|
||||
{"aadvid": "aad2", "auth_token": "tok2"},
|
||||
]
|
||||
|
||||
with patch("app.services.video_analysis.update_video_a3_metrics", new_callable=AsyncMock) as mock_update:
|
||||
mock_update.return_value = True
|
||||
|
||||
result = await get_video_list_with_a3(mock_session, videos)
|
||||
|
||||
assert len(result) == 2
|
||||
assert mock_api.call_count == 2
|
||||
|
||||
# 首次查询即返回正确 API 数据(核心:不依赖 DB 写入成功)
|
||||
assert result[0]["total_new_a3_cnt"] == 200
|
||||
assert result[1]["total_new_a3_cnt"] == 200
|
||||
|
||||
# 验证两个视频用了不同 config
|
||||
api_calls = mock_api.call_args_list
|
||||
tokens = {c.kwargs["auth_token"] for c in api_calls}
|
||||
assert len(tokens) == 2
|
||||
|
||||
# DB 写入在 gather 之后顺序执行
|
||||
assert mock_update.call_count == 2
|
||||
update_item_ids = [c.kwargs["item_id"] for c in mock_update.call_args_list]
|
||||
assert "v1" in update_item_ids
|
||||
assert "v2" in update_item_ids
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mixed_cached_and_api(self):
|
||||
"""混合场景:部分缓存,部分需 API → 只对 API 成功的写 DB"""
|
||||
videos = [
|
||||
_make_mock_video(
|
||||
item_id="v1", total_new_a3_cnt=500, total_cost=10000.0, brand_id="b1"
|
||||
),
|
||||
_make_mock_video(
|
||||
item_id="v2", total_new_a3_cnt=0, total_cost=0.0, brand_id="b2"
|
||||
),
|
||||
_make_mock_video(
|
||||
item_id="v3", total_new_a3_cnt=300, total_cost=5000.0, brand_id="b3"
|
||||
),
|
||||
]
|
||||
|
||||
mock_session = AsyncMock()
|
||||
|
||||
api_response = {
|
||||
"data": {
|
||||
"a3_increase_cnt": "200",
|
||||
"ad_a3_increase_cnt": "50",
|
||||
"natural_a3_increase_cnt": "150",
|
||||
"cost": 5000,
|
||||
"ad_cost": 3000,
|
||||
}
|
||||
}
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
with patch("app.services.video_analysis.call_yuntu_api", new_callable=AsyncMock) as mock_api:
|
||||
mock_api.return_value = api_response
|
||||
|
||||
with patch("app.services.video_analysis.get_distinct_configs", new_callable=AsyncMock) as mock_configs:
|
||||
mock_configs.return_value = [
|
||||
{"aadvid": "aad1", "auth_token": "tok1"},
|
||||
]
|
||||
|
||||
with patch("app.services.video_analysis.update_video_a3_metrics", new_callable=AsyncMock) as mock_update:
|
||||
mock_update.return_value = True
|
||||
|
||||
result = await get_video_list_with_a3(mock_session, videos)
|
||||
|
||||
# 保持原始排序
|
||||
assert len(result) == 3
|
||||
assert result[0]["item_id"] == "v1"
|
||||
assert result[0]["total_new_a3_cnt"] == 500 # from DB
|
||||
assert result[1]["item_id"] == "v2"
|
||||
assert result[1]["total_new_a3_cnt"] == 200 # from API
|
||||
assert result[2]["item_id"] == "v3"
|
||||
assert result[2]["total_new_a3_cnt"] == 300 # from DB
|
||||
|
||||
# 只有 v2 调了 API
|
||||
assert mock_api.call_count == 1
|
||||
|
||||
# 只对 v2 写回 DB
|
||||
assert mock_update.call_count == 1
|
||||
assert mock_update.call_args.kwargs["item_id"] == "v2"
|
||||
assert mock_update.call_args.kwargs["total_new_a3_cnt"] == 200
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_list(self):
|
||||
"""空列表 → 返回空"""
|
||||
mock_session = AsyncMock()
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
result = await get_video_list_with_a3(mock_session, [])
|
||||
|
||||
assert result == []
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_failure_fallback(self):
|
||||
"""API 调用失败 → 降级使用 DB 数据 → 不写回 DB"""
|
||||
videos = [
|
||||
_make_mock_video(
|
||||
item_id="v1", total_new_a3_cnt=0, total_cost=0.0, brand_id="b1"
|
||||
),
|
||||
]
|
||||
|
||||
mock_session = AsyncMock()
|
||||
|
||||
with patch("app.services.brand_api.get_brand_names", new_callable=AsyncMock) as mock_brands:
|
||||
mock_brands.return_value = {}
|
||||
|
||||
with patch("app.services.video_analysis.call_yuntu_api", new_callable=AsyncMock) as mock_api:
|
||||
mock_api.side_effect = YuntuAPIError("API Error")
|
||||
|
||||
with patch("app.services.video_analysis.get_distinct_configs", new_callable=AsyncMock) as mock_configs:
|
||||
mock_configs.return_value = [
|
||||
{"aadvid": "aad1", "auth_token": "tok1"},
|
||||
]
|
||||
|
||||
with patch("app.services.video_analysis.update_video_a3_metrics", new_callable=AsyncMock) as mock_update:
|
||||
result = await get_video_list_with_a3(mock_session, videos)
|
||||
|
||||
# 降级到 DB 数据
|
||||
assert len(result) == 1
|
||||
assert result[0]["total_new_a3_cnt"] == 0
|
||||
|
||||
# API 失败不应写回 DB
|
||||
mock_update.assert_not_called()
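# --- Illustrative sketch (assumption) ---
# The list-page pattern TestGetVideoListWithA3 exercises: cached rows are used
# as-is, the misses are fetched concurrently with distinct cookie configs, and
# DB write-backs run sequentially after the gather, only for successful calls.
# `configs` is assumed to be non-empty, as the tests arrange via get_distinct_configs.
import asyncio

async def _list_a3_flow_sketch(videos, needs_api, fetch_one, write_back, configs):
    missing = [v for v in videos if needs_api(v)]
    results = await asyncio.gather(
        *(fetch_one(v, configs[i % len(configs)]) for i, v in enumerate(missing)),
        return_exceptions=True,  # a failed API call falls back to DB values
    )
    fetched = {}
    for video, res in zip(missing, results):
        if not isinstance(res, Exception):
            fetched[video.item_id] = res
            await write_back(video.item_id, res)  # sequential, after the gather
    return fetched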
|
||||
@ -1,414 +0,0 @@
|
||||
"""
|
||||
Tests for Yuntu API Service (T-023, T-027)
|
||||
|
||||
T-027 更新:
|
||||
- call_yuntu_api 参数改为 auth_token(完整 cookie 值)
|
||||
- 日期格式改为 YYYYMMDD
|
||||
- industry_id 改为字符串
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from datetime import datetime
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
import httpx
|
||||
|
||||
from app.services.yuntu_api import (
|
||||
call_yuntu_api,
|
||||
get_video_analysis,
|
||||
parse_analysis_response,
|
||||
YuntuAPIError,
|
||||
SessionInvalidError,
|
||||
)
|
||||
|
||||
|
||||
class TestCallYuntuAPI:
|
||||
"""Tests for call_yuntu_api function."""
|
||||
|
||||
async def test_call_success(self):
|
||||
"""Test successful API call."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"status": 0,
|
||||
"msg": "ok",
|
||||
"data": {
|
||||
"total_show_cnt": 100000,
|
||||
"a3_increase_cnt": "500",
|
||||
},
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await call_yuntu_api(
|
||||
item_id="test_item_123",
|
||||
publish_time=datetime(2025, 1, 1),
|
||||
industry_id="20",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=test_session",
|
||||
)
|
||||
|
||||
assert result["status"] == 0
|
||||
assert result["data"]["total_show_cnt"] == 100000
|
||||
|
||||
async def test_call_with_correct_parameters(self):
|
||||
"""Test that API is called with correct parameters (T-027 format)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 1, 15),
|
||||
industry_id="30",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=session_abc",
|
||||
)
|
||||
|
||||
mock_client.post.assert_called_once()
|
||||
call_args = mock_client.post.call_args
|
||||
|
||||
# 验证URL包含aadvid
|
||||
assert "GetContentMaterialAnalysisInfo" in call_args.args[0]
|
||||
assert "aadvid=1648829117232140" in call_args.args[0]
|
||||
|
||||
# 验证请求体 - T-027: 日期格式 YYYYMMDD
|
||||
json_data = call_args.kwargs["json"]
|
||||
assert json_data["object_id"] == "video_001"
|
||||
assert json_data["start_date"] == "20250115" # YYYYMMDD
|
||||
assert json_data["end_date"] == "20250214" # +30天
|
||||
assert json_data["industry_id_list"] == ["30"] # 字符串数组
|
||||
|
||||
# 验证headers - T-027: 直接使用 auth_token
|
||||
headers = call_args.kwargs["headers"]
|
||||
assert headers["Cookie"] == "sessionid=session_abc"
|
||||
|
||||
async def test_call_session_invalid_401(self):
|
||||
"""Test handling of 401 response (session invalid)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 401
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(SessionInvalidError) as exc_info:
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=invalid_session",
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 401
|
||||
|
||||
async def test_call_session_invalid_403(self):
|
||||
"""Test handling of 403 response (session invalid)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 403
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(SessionInvalidError):
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=invalid_session",
|
||||
)
|
||||
|
||||
async def test_call_api_error_500(self):
|
||||
"""Test handling of 500 response."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 500
|
||||
mock_response.text = "Internal Server Error"
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(YuntuAPIError) as exc_info:
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=session",
|
||||
)
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
|
||||
async def test_call_business_error(self):
|
||||
"""Test handling of business error (status != 0)."""
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"status": 1001,
|
||||
"msg": "Invalid parameter",
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(YuntuAPIError) as exc_info:
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=session",
|
||||
)
|
||||
|
||||
assert "Invalid parameter" in exc_info.value.message
|
||||
|
||||
async def test_call_timeout(self):
|
||||
"""Test handling of timeout."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.side_effect = httpx.TimeoutException("Timeout")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(YuntuAPIError) as exc_info:
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=session",
|
||||
)
|
||||
|
||||
assert "timeout" in exc_info.value.message.lower()
|
||||
|
||||
async def test_call_request_error(self):
|
||||
"""Test handling of request error."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.side_effect = httpx.RequestError("Connection failed")
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
with pytest.raises(YuntuAPIError):
|
||||
await call_yuntu_api(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=session",
|
||||
)
|
||||
|
||||
|
||||
class TestGetVideoAnalysis:
|
||||
"""Tests for get_video_analysis function with retry logic (T-022, T-027)."""
|
||||
|
||||
async def test_success_first_try(self):
|
||||
"""Test successful call on first attempt."""
|
||||
with patch("app.services.yuntu_api.get_random_config") as mock_config:
|
||||
mock_config.return_value = {
|
||||
"aadvid": "123",
|
||||
"auth_token": "sessionid=valid_session",
|
||||
}
|
||||
|
||||
with patch("app.services.yuntu_api.call_yuntu_api") as mock_call:
|
||||
mock_call.return_value = {"status": 0, "data": {"a3_increase_cnt": "100"}}
|
||||
|
||||
result = await get_video_analysis(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
)
|
||||
|
||||
assert result["data"]["a3_increase_cnt"] == "100"
|
||||
assert mock_call.call_count == 1
|
||||
|
||||
async def test_retry_on_session_invalid(self):
|
||||
"""Test retry when session is invalid."""
|
||||
with patch("app.services.yuntu_api.get_random_config") as mock_config:
|
||||
mock_config.side_effect = [
|
||||
{"aadvid": "123", "auth_token": "sessionid=session_1"},
|
||||
{"aadvid": "456", "auth_token": "sessionid=session_2"},
|
||||
{"aadvid": "789", "auth_token": "sessionid=session_3"},
|
||||
]
|
||||
|
||||
with patch("app.services.yuntu_api.call_yuntu_api") as mock_call:
|
||||
# 前两次失败,第三次成功
|
||||
mock_call.side_effect = [
|
||||
SessionInvalidError("Invalid"),
|
||||
SessionInvalidError("Invalid"),
|
||||
{"status": 0, "data": {}},
|
||||
]
|
||||
|
||||
with patch("app.services.yuntu_api.session_pool") as mock_pool:
|
||||
result = await get_video_analysis(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
max_retries=3,
|
||||
)
|
||||
|
||||
assert result["status"] == 0
|
||||
assert mock_call.call_count == 3
|
||||
# 验证失效的session被移除
|
||||
assert mock_pool.remove_by_auth_token.call_count == 2
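# --- Illustrative sketch (assumption) ---
# The retry policy these tests describe: rotate to a fresh config on
# SessionInvalidError (evicting the dead token), stop after max_retries, and
# let any other YuntuAPIError propagate immediately. The exception types are
# the ones this file already imports; everything else is assumed.
from app.services.yuntu_api import SessionInvalidError, YuntuAPIError

async def _call_with_rotation_sketch(call_api, next_config, drop_token, max_retries: int = 3):
    last_error = None
    for _ in range(max_retries):
        config = await next_config()
        if config is None:
            raise YuntuAPIError("no cookie config available")
        try:
            return await call_api(aadvid=config["aadvid"], auth_token=config["auth_token"])
        except SessionInvalidError as exc:
            drop_token(config["auth_token"])  # evict the invalid cookie, then retry
            last_error = exc
    if last_error is None:
        raise YuntuAPIError("retries exhausted")
    raise last_error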
|
||||
|
||||
async def test_max_retries_exceeded(self):
|
||||
"""Test that error is raised after max retries."""
|
||||
with patch("app.services.yuntu_api.get_random_config") as mock_config:
|
||||
mock_config.return_value = {"aadvid": "123", "auth_token": "sessionid=session"}
|
||||
|
||||
with patch("app.services.yuntu_api.call_yuntu_api") as mock_call:
|
||||
mock_call.side_effect = SessionInvalidError("Invalid")
|
||||
|
||||
with patch("app.services.yuntu_api.session_pool"):
|
||||
with pytest.raises(SessionInvalidError):
|
||||
await get_video_analysis(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
max_retries=3,
|
||||
)
|
||||
|
||||
assert mock_call.call_count == 3
|
||||
|
||||
async def test_no_retry_on_api_error(self):
|
||||
"""Test that non-session errors don't trigger retry."""
|
||||
with patch("app.services.yuntu_api.get_random_config") as mock_config:
|
||||
mock_config.return_value = {"aadvid": "123", "auth_token": "sessionid=session"}
|
||||
|
||||
with patch("app.services.yuntu_api.call_yuntu_api") as mock_call:
|
||||
mock_call.side_effect = YuntuAPIError("Server error", status_code=500)
|
||||
|
||||
with pytest.raises(YuntuAPIError) as exc_info:
|
||||
await get_video_analysis(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
)
|
||||
|
||||
assert mock_call.call_count == 1
|
||||
assert exc_info.value.status_code == 500
|
||||
|
||||
async def test_no_config_available(self):
|
||||
"""Test error when no config is available."""
|
||||
with patch("app.services.yuntu_api.get_random_config") as mock_config:
|
||||
mock_config.return_value = None
|
||||
|
||||
with pytest.raises(YuntuAPIError):
|
||||
await get_video_analysis(
|
||||
item_id="test",
|
||||
publish_time=datetime.now(),
|
||||
industry_id="20",
|
||||
)
|
||||
|
||||
|
||||
class TestParseAnalysisResponse:
    """Tests for parse_analysis_response function."""

    def test_parse_complete_response(self):
        """Test parsing complete response data (T-027: handles string values)."""
        response = {
            "data": {
                "total_show_cnt": 100000,
                "natural_show_cnt": 80000,
                "ad_show_cnt": 20000,
                "total_play_cnt": 50000,
                "natural_play_cnt": 40000,
                "ad_play_cnt": 10000,
                "effective_play_cnt": 30000,
                "a3_increase_cnt": "500",  # 字符串
                "ad_a3_increase_cnt": "100",
                "natural_a3_increase_cnt": "400",
                "after_view_search_uv": 1000,
                "after_view_search_pv": 1500,
                "brand_search_uv": 200,
                "product_search_uv": 300,
                "return_search_cnt": 50,
                "cost": 10000,
                "natural_cost": 0,
                "ad_cost": 10000,
            }
        }

        result = parse_analysis_response(response)

        assert result["total_show_cnt"] == 100000
        assert result["natural_show_cnt"] == 80000
        assert result["a3_increase_cnt"] == 500  # 转为整数
        assert result["ad_a3_increase_cnt"] == 100
        assert result["natural_a3_increase_cnt"] == 400
        assert result["after_view_search_uv"] == 1000
        assert result["cost"] == 10000

    def test_parse_empty_response(self):
        """Test parsing empty response."""
        response = {"data": {}}

        result = parse_analysis_response(response)

        assert result["total_show_cnt"] == 0
        assert result["a3_increase_cnt"] == 0
        assert result["cost"] == 0

    def test_parse_missing_data_key(self):
        """Test parsing response without data key."""
        response = {}

        result = parse_analysis_response(response)

        assert result["total_show_cnt"] == 0

    def test_parse_partial_response(self):
        """Test parsing partial response."""
        response = {
            "data": {
                "total_show_cnt": 50000,
                "a3_increase_cnt": "100",
            }
        }

        result = parse_analysis_response(response)

        assert result["total_show_cnt"] == 50000
        assert result["a3_increase_cnt"] == 100
        assert result["natural_show_cnt"] == 0  # Default value
        assert result["cost"] == 0  # Default value

    def test_parse_string_numbers(self):
        """Test parsing string numbers to int (T-027)."""
        response = {
            "data": {
                "a3_increase_cnt": "1689071",
                "ad_a3_increase_cnt": "36902",
                "natural_a3_increase_cnt": "1652169",
                "cost": 785000,
            }
        }

        result = parse_analysis_response(response)

        assert result["a3_increase_cnt"] == 1689071
        assert result["ad_a3_increase_cnt"] == 36902
        assert result["natural_a3_increase_cnt"] == 1652169
        assert result["cost"] == 785000
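

# --- Sketch (not in the original file): the parsing behaviour verified by
# TestParseAnalysisResponse above — missing keys default to 0 and string
# values such as "1689071" are converted to int. The real
# parse_analysis_response in app/services/yuntu_api.py may differ in detail.
def _sketch_parse_analysis_response(response: dict) -> dict:
    data = response.get("data") or {}
    fields = [
        "total_show_cnt", "natural_show_cnt", "ad_show_cnt",
        "total_play_cnt", "natural_play_cnt", "ad_play_cnt",
        "effective_play_cnt", "a3_increase_cnt", "ad_a3_increase_cnt",
        "natural_a3_increase_cnt", "after_view_search_uv",
        "after_view_search_pv", "brand_search_uv", "product_search_uv",
        "return_search_cnt", "cost", "natural_cost", "ad_cost",
    ]
    # 字符串数字(如 "1689071")转为 int,缺失字段取默认值 0
    return {name: int(data.get(name, 0) or 0) for name in fields}
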
@ -1,316 +0,0 @@
"""
Tests for Yuntu API Parameter Format (T-027)

根据 doc/temp 的正确格式:
1. 日期格式: YYYYMMDD (如 20251014),不是 YYYY-MM-DD
2. Cookie 头: 直接使用 auth_token 完整值
3. industry_id: 字符串格式 ["20"],不是整数
4. Cookie 获取: 随机选取任意一组 aadvid/auth_token
"""

import pytest
from datetime import datetime, date
from unittest.mock import AsyncMock, patch, MagicMock
import httpx


class TestYuntuAPIParameterFormat:
|
||||
"""验证 API 调用参数格式正确性 (T-027)"""
|
||||
|
||||
async def test_date_format_yyyymmdd(self):
|
||||
"""日期格式必须为 YYYYMMDD,不是 YYYY-MM-DD"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {"a3_increase_cnt": "100"}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=f9dfd57df6935afd1255bdc8f0dd0e4b",
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
json_data = call_args.kwargs["json"]
|
||||
|
||||
# 关键验证:日期格式是 YYYYMMDD
|
||||
assert json_data["start_date"] == "20251014", f"Expected '20251014', got '{json_data['start_date']}'"
|
||||
assert json_data["end_date"] == "20251113", f"Expected '20251113', got '{json_data['end_date']}'"
|
||||
|
||||
async def test_cookie_header_uses_auth_token_directly(self):
|
||||
"""Cookie 头应直接使用 auth_token 完整值"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
auth_token = "sessionid=f9dfd57df6935afd1255bdc8f0dd0e4b"
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12",
|
||||
aadvid="1648829117232140",
|
||||
auth_token=auth_token,
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
headers = call_args.kwargs["headers"]
|
||||
|
||||
# 关键验证:Cookie 直接使用 auth_token 完整值
|
||||
assert headers["Cookie"] == auth_token, f"Expected Cookie='{auth_token}', got '{headers['Cookie']}'"
|
||||
|
||||
async def test_industry_id_as_string_array(self):
|
||||
"""industry_id_list 应为字符串数组 ["12"],不是整数"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12", # 字符串
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=xxx",
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
json_data = call_args.kwargs["json"]
|
||||
|
||||
# 关键验证:industry_id_list 是字符串数组
|
||||
assert json_data["industry_id_list"] == ["12"], f"Expected ['12'], got {json_data['industry_id_list']}"
|
||||
|
||||
async def test_url_contains_aadvid(self):
|
||||
"""URL 必须包含 aadvid 参数"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
aadvid = "1648829117232140"
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12",
|
||||
aadvid=aadvid,
|
||||
auth_token="sessionid=xxx",
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
url = call_args.args[0]
|
||||
|
||||
# 关键验证:URL 包含 aadvid
|
||||
assert f"aadvid={aadvid}" in url, f"URL should contain 'aadvid={aadvid}', got '{url}'"
|
||||
|
||||
async def test_fixed_parameters(self):
|
||||
"""验证固定参数值正确"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=xxx",
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
json_data = call_args.kwargs["json"]
|
||||
|
||||
# 验证固定参数
|
||||
assert json_data["is_my_video"] == "0"
|
||||
assert json_data["object_type"] == 2
|
||||
assert json_data["assist_type"] == 3
|
||||
assert json_data["assist_video_type"] == 3
|
||||
assert json_data["trigger_point_id_list"] == ["610000", "610300", "610301"]
|
||||
|
||||
async def test_end_date_is_start_plus_30_days(self):
|
||||
"""end_date 应为 start_date + 30 天"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"status": 0, "data": {}}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
# 测试日期:2025-01-15,+30天 = 2025-02-14
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
await call_yuntu_api(
|
||||
item_id="video_001",
|
||||
publish_time=datetime(2025, 1, 15),
|
||||
industry_id="12",
|
||||
aadvid="123",
|
||||
auth_token="sessionid=xxx",
|
||||
)
|
||||
|
||||
call_args = mock_client.post.call_args
|
||||
json_data = call_args.kwargs["json"]
|
||||
|
||||
assert json_data["start_date"] == "20250115"
|
||||
assert json_data["end_date"] == "20250214"
|
||||
|
||||
async def test_parse_a3_metrics_as_strings(self):
|
||||
"""API 返回的 A3 指标是字符串类型,需正确解析"""
|
||||
from app.services.yuntu_api import parse_analysis_response
|
||||
|
||||
# 实际 API 响应示例(A3 是字符串)
|
||||
response = {
|
||||
"status": 0,
|
||||
"msg": "ok",
|
||||
"data": {
|
||||
"object_id": "7560751618711457062",
|
||||
"cost": 785000,
|
||||
"ad_a3_increase_cnt": "36902",
|
||||
"natural_a3_increase_cnt": "1652169",
|
||||
"a3_increase_cnt": "1689071",
|
||||
}
|
||||
}
|
||||
|
||||
result = parse_analysis_response(response)
|
||||
|
||||
# 解析后应转为整数
|
||||
assert result["a3_increase_cnt"] == 1689071
|
||||
assert result["ad_a3_increase_cnt"] == 36902
|
||||
assert result["natural_a3_increase_cnt"] == 1652169
|
||||
assert result["cost"] == 785000
|
||||
|
||||
|
||||
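
# --- Sketch (not in the original file): one way to build a request that
# satisfies the format checked above (YYYYMMDD dates, end = start + 30 days,
# industry_id as a string array, Cookie header = auth_token verbatim).
# URL path and fixed parameter values follow the assertions above and the PRD;
# the helper name and structure are assumptions, not the real client.
from datetime import timedelta

def _sketch_build_yuntu_request(item_id, publish_time, industry_id, aadvid, auth_token):
    start = publish_time.strftime("%Y%m%d")
    end = (publish_time + timedelta(days=30)).strftime("%Y%m%d")
    url = (
        "https://yuntu.oceanengine.com/yuntu_common/api/content/"
        f"trigger_analysis/GetContentMaterialAnalysisInfo?aadvid={aadvid}"
    )
    payload = {
        "is_my_video": "0",
        "object_id": item_id,
        "object_type": 2,
        "start_date": start,
        "end_date": end,
        "assist_type": 3,
        "assist_video_type": 3,
        "industry_id_list": [str(industry_id)],
        "trigger_point_id_list": ["610000", "610300", "610301"],
    }
    headers = {"Cookie": auth_token}  # auth_token 已含 "sessionid=xxx",直接作为 Cookie 头
    return url, payload, headers

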
class TestSessionPoolRandomSelection:
|
||||
"""验证 Cookie 池随机选取逻辑 (T-027)"""
|
||||
|
||||
async def test_get_random_config(self):
|
||||
"""应随机选取任意一组配置,不按 brand_id 匹配"""
|
||||
from app.services.session_pool import SessionPool, CookieConfig
|
||||
|
||||
pool = SessionPool()
|
||||
|
||||
# 模拟刷新后的数据
|
||||
pool._configs = [
|
||||
CookieConfig(
|
||||
brand_id="533661",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=aaa",
|
||||
industry_id=20,
|
||||
brand_name="Test1",
|
||||
),
|
||||
CookieConfig(
|
||||
brand_id="10186612",
|
||||
aadvid="1234567890",
|
||||
auth_token="sessionid=bbb",
|
||||
industry_id=30,
|
||||
brand_name="Test2",
|
||||
),
|
||||
]
|
||||
|
||||
# 调用随机获取
|
||||
config = pool.get_random_config()
|
||||
|
||||
# 应返回一个有效配置
|
||||
assert config is not None
|
||||
assert "aadvid" in config
|
||||
assert "auth_token" in config
|
||||
|
||||
|
||||
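
# --- Sketch (not in the original file): random selection over the cookie pool,
# matching the behaviour asserted in TestSessionPoolRandomSelection above.
# CookieConfig field names come from the test fixture; the real SessionPool in
# app/services/session_pool.py may differ.
import random

def _sketch_get_random_config(configs):
    """任意选取一组配置(不按 brand_id 匹配);池为空时返回 None。"""
    if not configs:
        return None
    chosen = random.choice(configs)
    return {"aadvid": chosen.aadvid, "auth_token": chosen.auth_token}

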
class TestIntegrationWithRealFormat:
|
||||
"""集成测试:验证完整调用流程"""
|
||||
|
||||
async def test_full_api_call_format(self):
|
||||
"""完整验证 API 调用格式"""
|
||||
from app.services.yuntu_api import call_yuntu_api
|
||||
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {
|
||||
"status": 0,
|
||||
"msg": "ok",
|
||||
"data": {
|
||||
"object_id": "7560751618711457062",
|
||||
"cost": 785000,
|
||||
"ad_a3_increase_cnt": "36902",
|
||||
"natural_a3_increase_cnt": "1652169",
|
||||
"a3_increase_cnt": "1689071",
|
||||
}
|
||||
}
|
||||
|
||||
mock_client = AsyncMock()
|
||||
mock_client.post.return_value = mock_response
|
||||
mock_client.__aenter__.return_value = mock_client
|
||||
mock_client.__aexit__.return_value = None
|
||||
|
||||
with patch("httpx.AsyncClient", return_value=mock_client):
|
||||
result = await call_yuntu_api(
|
||||
item_id="7560751618711457062",
|
||||
publish_time=datetime(2025, 10, 14),
|
||||
industry_id="12",
|
||||
aadvid="1648829117232140",
|
||||
auth_token="sessionid=f9dfd57df6935afd1255bdc8f0dd0e4b",
|
||||
)
|
||||
|
||||
# 验证调用参数
|
||||
call_args = mock_client.post.call_args
|
||||
url = call_args.args[0]
|
||||
json_data = call_args.kwargs["json"]
|
||||
headers = call_args.kwargs["headers"]
|
||||
|
||||
# 1. URL 包含 aadvid
|
||||
assert "aadvid=1648829117232140" in url
|
||||
|
||||
# 2. 日期格式 YYYYMMDD
|
||||
assert json_data["start_date"] == "20251014"
|
||||
assert json_data["end_date"] == "20251113"
|
||||
|
||||
# 3. industry_id 字符串数组
|
||||
assert json_data["industry_id_list"] == ["12"]
|
||||
|
||||
# 4. Cookie 直接使用 auth_token
|
||||
assert headers["Cookie"] == "sessionid=f9dfd57df6935afd1255bdc8f0dd0e4b"
|
||||
|
||||
# 验证返回结果
|
||||
assert result["status"] == 0
|
||||
assert result["data"]["a3_increase_cnt"] == "1689071"
|
||||
1207
backend/uv.lock
generated
1207
backend/uv.lock
generated
File diff suppressed because it is too large
103
doc/PRD.md
103
doc/PRD.md
@ -64,15 +64,6 @@ KOL Insight 旨在解决这一痛点,提供批量数据查询和智能成本
|
||||
|----|----------|----------|
|
||||
| US-007 | 作为运营人员,我想要点击视频链接直接跳转,以便快速查看原视频 | 1. 视频链接可点击<br>2. 新窗口打开视频页面 |
|
||||
|
||||
<!-- ITER: 2026-01-28 - 新增视频分析用户故事 -->
|
||||
<!-- NEW START -->
|
||||
#### P0 - 视频分析增强
|
||||
|
||||
| ID | 用户故事 | 验收标准 |
|
||||
|----|----------|----------|
|
||||
| US-008 | 作为运营人员,我想要查看视频的详细分析数据(触达、A3、搜索、费用、成本指标),以便全面评估视频投放效果 | 1. 调用巨量云图API获取实时数据<br>2. 展示6大类25+指标<br>3. 成本指标自动计算<br>4. A3指标更新到数据库 |
|
||||
<!-- NEW END -->
|
||||
|
||||
### 2.3 用户旅程
|
||||
|
||||
**核心用户旅程:批量查询 KOL 数据**
|
||||
@ -95,7 +86,6 @@ KOL Insight 旨在解决这一痛点,提供批量数据查询和智能成本
|
||||
### 3.1 功能架构
|
||||
|
||||
<!-- MODIFIED: 统一术语为"预估自然看后搜人数" -->
|
||||
<!-- ITER: 2026-01-28 - 新增巨量云图视频分析模块 -->
|
||||
```
|
||||
KOL Insight
|
||||
├── 数据查询模块
|
||||
@ -109,13 +99,8 @@ KOL Insight
|
||||
├── 数据展示模块
|
||||
│ ├── 结果列表展示
|
||||
│ └── 视频链接跳转
|
||||
├── 数据导出模块
|
||||
│ └── Excel/CSV导出
|
||||
└── 视频分析模块 (NEW)
|
||||
├── SessionID池管理
|
||||
├── 巨量云图API集成
|
||||
├── 实时数据获取与更新
|
||||
└── 视频分析报表展示
|
||||
└── 数据导出模块
|
||||
└── Excel/CSV导出
|
||||
```
|
||||
|
||||
### 3.2 功能详情
|
||||
@ -150,19 +135,6 @@ KOL Insight
|
||||
|--------|------|--------------|--------|----------|
|
||||
| 数据导出 | 将查询结果导出为 Excel/CSV 格式 | US-005 | P1 | 文件可下载,数据完整,中文列名 |
|
||||
|
||||
<!-- ITER: 2026-01-28 - 新增巨量云图视频分析模块 -->
|
||||
<!-- NEW START -->
|
||||
#### 3.2.5 视频分析模块
|
||||
|
||||
| 功能点 | 描述 | 关联用户故事 | 优先级 | 验收标准 |
|
||||
|--------|------|--------------|--------|----------|
|
||||
| SessionID池管理 | 从内部API获取Cookie列表,随机选取sessionid用于请求 | US-008 | P0 | 1. 调用内部API获取100个sessionid<br>2. 随机选取机制实现<br>3. 失败自动切换重试(最多3次) |
|
||||
| 巨量云图API封装 | 调用GetContentMaterialAnalysisInfo获取视频分析数据 | US-008 | P0 | 1. 正确构造请求参数<br>2. 超时设置10秒<br>3. 错误处理和日志记录 |
|
||||
| 视频分析接口 | GET /api/v1/videos/{item_id}/analysis | US-008 | P0 | 1. 返回6大类指标<br>2. 计算指标准确<br>3. 除零返回null |
|
||||
| 数据库A3指标更新 | 从API获取数据后更新数据库对应字段 | US-008 | P1 | 1. 更新total_new_a3_cnt<br>2. 更新heated_new_a3_cnt<br>3. 更新natural_new_a3_cnt<br>4. 更新total_cost |
|
||||
| 视频分析报表 | 前端展示6大类25+指标 | US-008 | P1 | 1. 基础信息展示<br>2. 触达/A3/搜索/费用/成本指标展示<br>3. 数值格式化 |
|
||||
<!-- NEW END -->
|
||||
|
||||
## 4. 非功能需求
|
||||
|
||||
### 4.1 性能需求
|
||||
@ -259,8 +231,6 @@ KOL Insight
|
||||
|------|------|--------|
|
||||
| PostgreSQL | 数据存储与查询 | 自建数据库 |
|
||||
| 品牌API | 根据品牌ID获取品牌名称 | 内部API (api.internal.intelligrow.cn) |
|
||||
| Cookie池API | 获取巨量云图SessionID列表 | 内部API (api.internal.intelligrow.cn) |
|
||||
| 巨量云图API | 获取视频分析数据 | 巨量云图 (yuntu.oceanengine.com) |
|
||||
|
||||
<!-- NEW START -->
|
||||
**品牌API详情**:
|
||||
@ -270,74 +240,6 @@ KOL Insight
|
||||
- 文档:https://api.internal.intelligrow.cn/docs#/云图
|
||||
<!-- NEW END -->
|
||||
|
||||
<!-- ITER: 2026-01-28 - 修复品牌API响应解析+添加认证 -->
|
||||
<!-- NEW START -->
|
||||
**品牌API认证与响应格式**:
|
||||
- 认证方式:Bearer Token(`Authorization: Bearer {token}`)
|
||||
- Token配置:通过环境变量 `BRAND_API_TOKEN` 配置
|
||||
- 响应格式:
|
||||
```json
|
||||
{
|
||||
"total": 1,
|
||||
"last_updated": "2025-12-30T11:28:40.738185",
|
||||
"has_more": 0,
|
||||
"data": [
|
||||
{"industry_id": 20, "industry_name": "母婴", "brand_id": 533661, "brand_name": "Giving/启初"}
|
||||
]
|
||||
}
|
||||
```
|
||||
- 解析方式:从 `data[0].brand_name` 获取品牌名称
|
||||
<!-- NEW END -->
|
||||
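
以下给出按上述响应格式解析品牌名称的示意代码(仅为草图,函数名为示例,非正式实现):

```python
def parse_brand_name(response: dict) -> str | None:
    """从品牌API响应中取 data[0].brand_name;data 为空数组时返回 None(示意)。"""
    data = response.get("data") or []
    if not data:
        return None
    return data[0].get("brand_name")

# 按上述示例响应:
# parse_brand_name({"total": 1, "data": [{"brand_id": 533661, "brand_name": "Giving/启初"}]})
# 返回 "Giving/启初"
```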
|
||||
<!-- ITER: 2026-01-28 - 新增巨量云图API和Cookie池API -->
|
||||
<!-- ITER: 2026-01-28 - 修复API参数格式问题 -->
|
||||
<!-- NEW START -->
|
||||
**Cookie池API详情**:
|
||||
- 接口地址:`/v1/yuntu/get_cookie`
|
||||
- 请求方式:GET
|
||||
- 认证方式:Bearer Token(`Authorization: Bearer {YUNTU_API_TOKEN}`)
|
||||
- 用途:获取巨量云图认证信息列表(aadvid + auth_token)
|
||||
- **使用方式**:随机选取任意一组 aadvid/auth_token,避免限流
|
||||
- 示例:
|
||||
```bash
|
||||
curl -X 'GET' \
|
||||
'https://api.internal.intelligrow.cn/v1/yuntu/get_cookie?page=1&page_size=100' \
|
||||
-H 'Authorization: Bearer {YUNTU_API_TOKEN}'
|
||||
```
|
||||
- 响应关键字段:
|
||||
- `data[].aadvid` - 云图API的URL参数
|
||||
- `data[].auth_token` - Cookie头完整值(格式:`sessionid=xxx`)
|
||||
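
以下为获取 Cookie 池的示意调用(基于上面的 curl 示例推断;函数名与超时取值为示例,非正式实现):

```python
import httpx

async def fetch_cookie_pool(token: str) -> list[dict]:
    """示意:分页拉取 Cookie 池(aadvid + auth_token),Bearer Token 认证。"""
    async with httpx.AsyncClient(timeout=10) as client:
        resp = await client.get(
            "https://api.internal.intelligrow.cn/v1/yuntu/get_cookie",
            params={"page": 1, "page_size": 100},
            headers={"Authorization": f"Bearer {token}"},
        )
    resp.raise_for_status()
    return resp.json().get("data", [])
```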
|
||||
**巨量云图API详情**:
|
||||
- 接口地址:`POST /yuntu_common/api/content/trigger_analysis/GetContentMaterialAnalysisInfo?aadvid={AADVID}`
|
||||
- 基础URL:`https://yuntu.oceanengine.com`
|
||||
- 认证方式:Cookie头直接使用 `auth_token` 完整值
|
||||
- 用途:获取视频触达、A3、搜索、费用等分析数据
|
||||
- 请求参数:
|
||||
```json
|
||||
{
|
||||
"is_my_video": "0",
|
||||
"object_id": "{item_id}",
|
||||
"object_type": 2,
|
||||
"start_date": "{YYYYMMDD格式}",
|
||||
"end_date": "{start_date+30天,YYYYMMDD格式}",
|
||||
"assist_type": 3,
|
||||
"assist_video_type": 3,
|
||||
"industry_id_list": ["{数据库中视频的industry_id,字符串格式}"],
|
||||
"trigger_point_id_list": ["610000", "610300", "610301"]
|
||||
}
|
||||
```
|
||||
- **⚠️ 参数格式要求**:
|
||||
- 日期格式必须为 `YYYYMMDD`(如 `20251014`),不是 `YYYY-MM-DD`
|
||||
- `industry_id_list` 使用数据库中视频的 industry_id,传字符串数组
|
||||
- Cookie 头直接使用 `auth_token` 值(已包含 `sessionid=xxx`)
|
||||
- 关键响应字段:
|
||||
- `data.a3_increase_cnt` - 新增A3(字符串类型)
|
||||
- `data.ad_a3_increase_cnt` - 加热新增A3(字符串类型)
|
||||
- `data.natural_a3_increase_cnt` - 自然新增A3(字符串类型)
|
||||
- `data.cost` - 总花费(单位可能是分)
|
||||
<!-- NEW END -->
|
||||
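
日期参数计算的最小示意(假设 publish_time 为 `datetime` 对象,其余字段见上表):

```python
from datetime import datetime, timedelta

publish_time = datetime(2025, 10, 14)
start_date = publish_time.strftime("%Y%m%d")                       # "20251014"
end_date = (publish_time + timedelta(days=30)).strftime("%Y%m%d")  # "20251113"
```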
|
||||
### 6.2 内部接口
|
||||
|
||||
<!-- MODIFIED: 补充核心API端点,改用 FastAPI RESTful 风格 -->
|
||||
@ -345,7 +247,6 @@ curl -X 'GET' \
|
||||
|------|------|------|------|
|
||||
| /api/v1/query | POST | 批量查询KOL视频数据 | FastAPI 后端服务提供 |
|
||||
| /api/v1/export | GET | 导出查询结果为Excel/CSV | FastAPI 后端服务提供 |
|
||||
| /api/v1/videos/{item_id}/analysis | GET | 获取单个视频分析数据 | FastAPI 后端服务提供 (NEW) |
|
||||
|
||||
<!-- NEW START -->
|
||||
**API 架构说明**:
|
||||
|
||||
@ -7,25 +7,25 @@
|
||||
| 版本 | v1.0 |
|
||||
| 创建日期 | 2026-01-28 |
|
||||
| 来源文档 | DevelopmentPlan.md, PRD.md, FeatureSummary.md |
|
||||
| 品牌主体 | 秒思AI制作 |
|
||||
| 品牌主体 | 麦秒思AI制作 |
|
||||
|
||||
## 1. 设计概述
|
||||
|
||||
### 1.1 设计原则
|
||||
|
||||
**秒思AI设计语言**
|
||||
**麦秒思AI设计语言**
|
||||
|
||||
| 原则 | 说明 | 应用 |
|
||||
|------|------|------|
|
||||
| 优雅简洁 | 去除冗余元素,聚焦核心功能 | 单页应用,扁平化设计 |
|
||||
| 专业可信 | 体现数据分析的专业性 | 稳重色系,清晰的信息层级 |
|
||||
| 高效直观 | 减少用户学习成本 | 明确的操作流程,即时反馈 |
|
||||
| 品牌一致 | 强化秒思AI品牌形象 | 统一使用品牌标识和色彩 |
|
||||
| 品牌一致 | 强化麦秒思AI品牌形象 | 统一使用品牌标识和色彩 |
|
||||
|
||||
**品牌元素**
|
||||
|
||||
- **Logo**: doc/ui/muse.svg (秒思AI品牌标识)
|
||||
- **Slogan**: "秒思AI制作" (展示在关键位置)
|
||||
- **Logo**: doc/ui/muse.svg (麦秒思AI品牌标识)
|
||||
- **Slogan**: "麦秒思AI制作" (展示在关键位置)
|
||||
- **色调**: 专业、现代、科技感
|
||||
|
||||
### 1.2 页面总览
|
||||
@ -76,7 +76,7 @@
|
||||
┌────────────────────────────────────────────────────────────────────────────┐
|
||||
│ ┌──────────────────────────────────────────────────────────────────────┐ │
|
||||
│ │ Header (品牌头部) │ │
|
||||
│ │ ┌──────┐ [秒思AI制作] │ │
|
||||
│ │ ┌──────┐ [麦秒思AI制作] │ │
|
||||
│ │ │ MUSE │ KOL Insight - 云图数据查询分析 │ │
|
||||
│ │ │ Logo │ (品牌标识 + 产品名称) │ │
|
||||
│ │ └──────┘ │ │
|
||||
@ -123,7 +123,7 @@
|
||||
├────────────────────────────────────────────────────────────────────────────┤
|
||||
│ ┌──────────────────────────────────────────────────────────────────────┐ │
|
||||
│ │ Footer │ │
|
||||
│ │ © 2026 秒思AI制作 | KOL Insight v1.0 │ │
|
||||
│ │ © 2026 麦秒思AI制作 | KOL Insight v1.0 │ │
|
||||
│ └──────────────────────────────────────────────────────────────────────┘ │
|
||||
└────────────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
@ -132,7 +132,7 @@
|
||||
|
||||
| 组件ID | 组件名称 | 类型 | 说明 | 交互 |
|
||||
|--------|----------|------|------|------|
|
||||
| C-001 | 品牌头部 | Header | 展示秒思AI品牌Logo和产品名称 | 静态展示 |
|
||||
| C-001 | 品牌头部 | Header | 展示麦秒思AI品牌Logo和产品名称 | 静态展示 |
|
||||
| C-002 | 查询方式选择器 | Radio Group | 三种查询方式单选 | 点击切换查询方式 |
|
||||
| C-003 | 查询输入框 | Textarea | 批量输入或昵称输入 | 文本输入 |
|
||||
| C-004 | 查询按钮组 | Button Group | 清空、开始查询 | 点击执行操作 |
|
||||
@ -523,7 +523,7 @@
|
||||
|
||||
### 5.1 色彩规范
|
||||
|
||||
**秒思AI品牌色系**
|
||||
**麦秒思AI品牌色系**
|
||||
|
||||
| 用途 | 色值 | 示例 | 说明 |
|
||||
|------|------|------|------|
|
||||
@ -665,21 +665,21 @@ Mobile (< 768px):
|
||||
|
||||
| 位置 | 尺寸 | 说明 |
|
||||
|------|------|------|
|
||||
| Header 左侧 | 高度 40px | 秒思AI Logo (doc/ui/muse.svg) |
|
||||
| Header 左侧 | 高度 40px | 麦秒思AI Logo (doc/ui/muse.svg) |
|
||||
| Favicon | 32x32px | 简化版 Logo 图标 |
|
||||
| 加载动画 | - | 可选:Logo 动效 |
|
||||
|
||||
**品牌声明位置**
|
||||
|
||||
- Header 右上角:"秒思AI制作"
|
||||
- Footer 中央:"© 2026 秒思AI制作 | KOL Insight v1.0"
|
||||
- Header 右上角:"麦秒思AI制作"
|
||||
- Footer 中央:"© 2026 麦秒思AI制作 | KOL Insight v1.0"
|
||||
|
||||
**Header 品牌区域详细设计**
|
||||
|
||||
```
|
||||
┌────────────────────────────────────────────────────────────────┐
|
||||
│ ┌──────┐ │
|
||||
│ │ │ KOL Insight 秒思AI制作 │
|
||||
│ │ │ KOL Insight 麦秒思AI制作 │
|
||||
│ │ MUSE │ 云图数据查询分析 │
|
||||
│ │ Logo │ (产品名称 + Slogan) (品牌声明) │
|
||||
│ │ │ │
|
||||
@ -838,7 +838,7 @@ Mobile (< 768px):
|
||||
**关键设计决策**
|
||||
|
||||
- **单页应用**: 简化交互流程,提升用户体验
|
||||
- **品牌强化**: 多处展示"秒思AI制作",建立品牌认知
|
||||
- **品牌强化**: 多处展示"麦秒思AI制作",建立品牌认知
|
||||
- **数据优先**: 核心是数据展示,UI 简洁不干扰
|
||||
- **响应式**: 支持桌面/平板/移动端访问
|
||||
|
||||
@ -846,5 +846,5 @@ Mobile (< 768px):
|
||||
|
||||
**文档版本**: v1.0
|
||||
**最后更新**: 2026-01-28
|
||||
**设计团队**: 秒思AI
|
||||
**设计团队**: 麦秒思AI
|
||||
**审核状态**: 待审核 (建议运行 `/ru` 进行评审)
|
||||
|
||||
@ -57,7 +57,7 @@
|
||||
| 布局风格统一 | ✅ | 垂直布局,从上到下:Header → 查询区 → 结果区 → Footer |
|
||||
| 交互模式一致 | ✅ | 查询 → 展示 → 导出流程清晰 |
|
||||
| 状态覆盖完整 | ✅ | 默认态、输入态、查询中、结果态、空结果态、错误态 |
|
||||
| 品牌元素应用 | ✅ | 秒思AI Logo、Slogan、品牌色系统一应用 |
|
||||
| 品牌元素应用 | ✅ | 麦秒思AI Logo、Slogan、品牌色系统一应用 |
|
||||
| 设计规范完整 | ✅ | 色彩、字体、间距、圆角、阴影规范完整 |
|
||||
| 响应式设计 | ✅ | 考虑了 Mobile/Tablet/Desktop 三种断点 |
|
||||
|
||||
@ -143,7 +143,7 @@
|
||||
| 交互说明清晰 | ✅ | 8种交互场景全部说明 |
|
||||
| 用户流程图 | ✅ | 核心流程、辅助流程、异常流程全部包含 |
|
||||
| 设计规范统一 | ✅ | 色彩、字体、间距、圆角、阴影规范完整 |
|
||||
| 品牌元素应用 | ✅ | 秒思AI Logo、Slogan、品牌色完整应用 |
|
||||
| 品牌元素应用 | ✅ | 麦秒思AI Logo、Slogan、品牌色完整应用 |
|
||||
| 数据展示规范 | ✅ | 26个字段完整列出,格式化规则明确 |
|
||||
| 响应式设计 | ✅ | Mobile/Tablet/Desktop 三种断点考虑 |
|
||||
|
||||
@ -155,7 +155,7 @@
|
||||
- 符合开发计划的技术架构(Next.js App Router)
|
||||
|
||||
2. **品牌一致性强** ⭐⭐⭐
|
||||
- 秒思AI品牌元素贯穿整个设计
|
||||
- 麦秒思AI品牌元素贯穿整个设计
|
||||
- Logo、Slogan、品牌色系统一应用
|
||||
- Header 和 Footer 强化品牌认知
|
||||
|
||||
@ -193,7 +193,7 @@
|
||||
| 操作效率 | ⭐⭐⭐⭐⭐ | 批量查询、一键导出,效率高 |
|
||||
| 错误提示 | ⭐⭐⭐⭐ | 错误态有明确提示和重试引导 |
|
||||
| 视觉层次 | ⭐⭐⭐⭐⭐ | 查询区 → 结果区层次清晰 |
|
||||
| 品牌认知 | ⭐⭐⭐⭐⭐ | 多处展示秒思AI品牌元素 |
|
||||
| 品牌认知 | ⭐⭐⭐⭐⭐ | 多处展示麦秒思AI品牌元素 |
|
||||
| 响应式体验 | ⭐⭐⭐⭐ | 考虑了移动端适配 |
|
||||
|
||||
## 评审结论
|
||||
@ -211,7 +211,7 @@ UIDesign 文档整体质量优秀,设计完整、规范统一、品牌一致
|
||||
|
||||
**优点总结**:
|
||||
- ✅ 单页应用设计合理,操作流程简洁高效
|
||||
- ✅ 品牌元素应用完整,强化秒思AI品牌认知
|
||||
- ✅ 品牌元素应用完整,强化麦秒思AI品牌认知
|
||||
- ✅ 设计规范详细,便于开发实现
|
||||
- ✅ 状态覆盖全面,用户体验考虑周到
|
||||
- ✅ 与开发计划高度契合
|
||||
|
||||
@ -4,72 +4,751 @@
|
||||
|
||||
| 项目 | 内容 |
|
||||
|------|------|
|
||||
| 评审时间 | 2026-01-28 17:35 |
|
||||
| 目标文档 | doc/tasks.md |
|
||||
| 参照文档 | doc/UIDesign.md, doc/DevelopmentPlan.md |
|
||||
| 问题统计 | 0 个严重 / 4 个一般 / 2 个建议 |
|
||||
| 评审时间 | 2026-01-28 15:30 |
|
||||
| 目标文档 | [doc/tasks.md](doc/tasks.md) |
|
||||
| 参照文档 | [doc/UIDesign.md](doc/UIDesign.md), [doc/DevelopmentPlan.md](doc/DevelopmentPlan.md) |
|
||||
| 问题统计 | **4 个严重 / 6 个一般 / 5 个建议** |
|
||||
| 评审结论 | 🟡 **需修改后通过** |
|
||||
|
||||
## 覆盖度分析
|
||||
|
||||
### DevelopmentPlan 覆盖
|
||||
|
||||
| 开发项 | 对应任务 | 状态 |
|
||||
|--------|----------|------|
|
||||
| T-001 前端项目初始化 | T-001A | ✅ |
|
||||
| T-002 后端项目初始化 | T-001B | ✅ |
|
||||
| T-003 数据库配置 | T-002 | ✅ |
|
||||
| T-004 基础 UI 框架 | T-003 | ✅ |
|
||||
| T-005 环境变量配置 | T-004 | ✅ |
|
||||
| T-006 查询 API 开发 | T-005 | ✅ |
|
||||
| T-007 计算逻辑实现 | T-006 | ✅ |
|
||||
| T-008 品牌 API 批量集成 | T-007 | ✅ |
|
||||
| T-009 导出 API 开发 | T-010 | ✅ |
|
||||
| T-010 查询表单组件 | T-008 | ✅ |
|
||||
| T-011 结果表格组件 | T-009 | ✅ |
|
||||
| T-012 导出按钮组件 | T-011 | ✅ |
|
||||
| T-013 错误处理 | T-013 | ✅ |
|
||||
| T-014 性能优化 | T-014 | ✅ |
|
||||
| T-015 视频链接跳转 | T-015 | ✅ |
|
||||
| T-016 部署配置 | T-016 | ✅ |
|
||||
| T-017 集成测试 | T-017 | ✅ |
|
||||
#### Phase 1: 基础架构搭建
|
||||
|
||||
| 开发项 (DevelopmentPlan) | 对应任务 (tasks.md) | 状态 | 说明 |
|
||||
|---------------------------|---------------------|------|------|
|
||||
| T-001 前端项目初始化 + T-002 后端项目初始化 | **T-001 项目初始化** | ⚠️ | **合并为一个任务,粒度过大** |
|
||||
| T-003 数据库配置 | T-002 数据库配置 | ✅ | 完全覆盖,含TDD要求 |
|
||||
| T-004 基础 UI 框架 | T-003 基础 UI 框架 | ✅ | 完全覆盖,含品牌元素 |
|
||||
| T-005 环境变量配置 | T-004 环境变量配置 | ✅ | 完全覆盖 |
|
||||
|
||||
#### Phase 2: 核心功能开发
|
||||
|
||||
| 开发项 (DevelopmentPlan) | 对应任务 (tasks.md) | 状态 | 说明 |
|
||||
|---------------------------|---------------------|------|------|
|
||||
| T-006 查询 API 开发 (后端) | **T-005 查询 API 开发** | ✅ | 含TDD要求和100%覆盖率 |
|
||||
| T-007 计算逻辑实现 (后端) | **T-006 计算逻辑实现** | ✅ | 含TDD要求和100%覆盖率 |
|
||||
| T-008 品牌 API 批量集成 (后端) | **T-007 品牌 API 批量集成** | ✅ | 含TDD要求和100%覆盖率 |
|
||||
| T-009 导出 API 开发 (后端) | **T-010 导出 API 开发** | ⚠️ | **依赖T-009前端组件,不合理** |
|
||||
| T-010 查询表单组件 (前端) | T-008 查询表单组件 | ✅ | 标注"粗略实现" |
|
||||
| T-011 结果表格组件 (前端) | T-009 结果表格组件 | ✅ | 标注"粗略实现" |
|
||||
| T-012 导出按钮组件 (前端) | T-011 导出按钮组件 | ✅ | 标注"粗略实现" |
|
||||
| **(未在 DevelopmentPlan 中)** | **T-012 主页面集成** | ⚠️ | **新增任务,导致编号错位** |
|
||||
|
||||
#### Phase 3: 优化与测试
|
||||
|
||||
| 开发项 (DevelopmentPlan) | 对应任务 (tasks.md) | 状态 | 说明 |
|
||||
|---------------------------|---------------------|------|------|
|
||||
| T-013 错误处理 (前后端) | **T-013 错误处理** | ❌ | **编号错位** |
|
||||
| T-014 性能优化 (后端) | **T-014 性能优化** | ❌ | **编号错位** |
|
||||
| T-015 视频链接跳转 (前端) | **T-015 视频链接跳转** | ❌ | **编号错位** |
|
||||
| T-016 部署配置 (前后端) | **T-016 部署配置** | ❌ | **编号错位** |
|
||||
| T-017 集成测试 | **T-017 集成测试** | ❌ | **编号错位** |
|
||||
|
||||
**总覆盖率**: 17/16 (tasks.md 新增1个任务)
|
||||
|
||||
**关键问题**:
|
||||
1. ❌ **任务编号不一致**: Phase 3 的5个任务编号都向后偏移一位
|
||||
2. ⚠️ **T-001 粒度过大**: 前后端初始化合并为一个任务
|
||||
3. ⚠️ **T-010 依赖错误**: 后端 API 不应依赖前端组件 T-009
|
||||
4. ⚠️ **T-012 新增任务**: DevelopmentPlan 中没有对应项
|
||||
|
||||
---
|
||||
|
||||
### UIDesign 覆盖
|
||||
|
||||
| UI 页面 | 对应任务 | 状态 |
|
||||
|---------|----------|------|
|
||||
| P-001 数据查询主页 | T-011A (集成), T-008/009/011/015 | ✅ |
|
||||
| UI 页面/组件 | 对应任务 | 状态 | 说明 |
|
||||
|-------------|----------|------|------|
|
||||
| **P-001: 数据查询主页** | T-012 主页面集成 | ✅ | 单页应用集成 |
|
||||
| **组件覆盖** | | | |
|
||||
| C-001: 品牌头部 | T-003 基础 UI 框架 | ✅ | 包含 Logo 和品牌声明 |
|
||||
| C-002: 查询方式选择器 | T-008 查询表单组件 | ✅ | Radio Group |
|
||||
| C-003: 查询输入框 | T-008 查询表单组件 | ✅ | Textarea |
|
||||
| C-004: 查询按钮组 | T-008 查询表单组件 | ✅ | 清空/开始查询 |
|
||||
| C-005: 结果表格 | T-009 结果表格组件 | ✅ | 26字段表格 |
|
||||
| C-006: 导出按钮组 | T-011 导出按钮组件 | ✅ | Excel/CSV 导出 |
|
||||
| C-007: 分页器 | T-009 结果表格组件 | ✅ | 验收标准第9条 |
|
||||
| C-008: 视频链接 | T-015 视频链接跳转 | ✅ | 新窗口打开 |
|
||||
| C-009: Footer | T-003 基础 UI 框架 | ✅ | 版权信息 |
|
||||
| **页面状态** | | | |
|
||||
| 6种状态 | T-012 主页面集成 | ✅ | 验收标准第6-8条 |
|
||||
|
||||
**总覆盖率**: 1/1
|
||||
**总覆盖率**: 10/10 (100%)
|
||||
|
||||
**UI覆盖评价**: ✅ 所有 UI 页面、组件、状态都有对应任务
|
||||
|
||||
---
|
||||
|
||||
## 任务质量分析
|
||||
|
||||
| 检查项 | 通过数 | 总数 |
|
||||
|--------|--------|------|
|
||||
| 有明确描述 | 27 | 27 |
|
||||
| 有验收标准 | 27 | 27 |
|
||||
| 粒度合适 | 25 | 27 |
|
||||
| 检查项 | 通过数 | 总数 | 通过率 |
|
||||
|--------|--------|------|--------|
|
||||
| 有明确描述 | 17 | 17 | 100% |
|
||||
| 有验收标准 | 17 | 17 | 100% |
|
||||
| 验收标准清晰 | 17 | 17 | 100% |
|
||||
| 依赖关系明确 | 16 | 17 | 94% |
|
||||
| 粒度合适 | 16 | 17 | 94% |
|
||||
| TDD 要求明确 | 7 | 12 | 58% |
|
||||
| 测试覆盖率要求 | 7 | 12 | 58% |
|
||||
|
||||
**质量问题**:
|
||||
- ⚠️ **T-001 粒度过大**: 前后端初始化合并,无法并行开发
|
||||
- ⚠️ **后端任务 TDD 覆盖不全**: 仅 7/12 的后端任务有明确 TDD 要求
|
||||
- ❌ **缺少测试独立任务**: 100% 覆盖率嵌入开发任务,难以单独验收
|
||||
|
||||
---
|
||||
|
||||
## 问题清单
|
||||
|
||||
### 严重问题 (Critical)
|
||||
无。
|
||||
|
||||
#### C-1: T-001 任务粒度过大,前后端无法并行
|
||||
**位置**: [doc/tasks.md:43](doc/tasks.md:43)
|
||||
|
||||
**问题描述**:
|
||||
```markdown
|
||||
| T-001 | 项目初始化 | 前后端分离架构:前端 Next.js,后端 FastAPI,配置 TypeScript、ESLint、Prettier | P0 | - |
|
||||
```
|
||||
|
||||
T-001 包含:
|
||||
1. 前端 Next.js 14.x 项目创建
|
||||
2. 后端 FastAPI 0.104+ 项目创建
|
||||
3. 前端 TypeScript、ESLint、Prettier 配置
|
||||
4. 后端 Python 依赖管理配置
|
||||
5. 验收标准6条(前端3条+后端3条)
|
||||
|
||||
**影响**:
|
||||
- 🚫 **无法并行开发**: 前端和后端开发者可能是不同人员,合并为一个任务导致无法同时开工
|
||||
- 🚫 **验收标准过多**: 6条验收标准涉及不同技术栈,验收时需要同时检查前后端
|
||||
- 🚫 **依赖关系不清晰**: T-002 数据库配置依赖 T-001,但实际只依赖后端部分
|
||||
|
||||
**建议修复**:
|
||||
拆分为两个独立任务:
|
||||
- **T-001A: 前端项目初始化** (依赖: 无)
|
||||
- 创建 Next.js 14.x 项目
|
||||
- 配置 TypeScript、ESLint、Prettier
|
||||
- 验收: 可运行 `pnpm dev`
|
||||
|
||||
- **T-001B: 后端项目初始化** (依赖: 无)
|
||||
- 创建 FastAPI 0.104+ 项目
|
||||
- 配置 Poetry/pip
|
||||
- 验收: 可运行 `uvicorn main:app --reload`
|
||||
|
||||
**优点**:
|
||||
- ✅ 前后端可并行开发,节省时间
|
||||
- ✅ 验收标准更聚焦
|
||||
- ✅ 依赖关系更清晰(T-002 只依赖 T-001B)
|
||||
|
||||
---
|
||||
|
||||
#### C-2: T-010 依赖关系错误
|
||||
**位置**: [doc/tasks.md:67](doc/tasks.md:67)
|
||||
|
||||
**问题描述**:
|
||||
```markdown
|
||||
| T-010 | 导出 API 开发 | ... | P1 | T-006, T-007, T-009 | ...
|
||||
```
|
||||
|
||||
T-010 (后端导出 API) 依赖 T-009 (前端结果表格组件),这是**逻辑错误**。
|
||||
|
||||
**分析**:
|
||||
- T-010 是**后端 FastAPI** 接口,负责生成 Excel/CSV 文件
|
||||
- T-009 是**前端 React** 组件,负责展示表格
|
||||
- 后端 API 不应该依赖前端组件的实现
|
||||
|
||||
**实际依赖**:
|
||||
- T-010 应该依赖 **T-006 (计算逻辑实现)** 和 **T-007 (品牌API集成)**
|
||||
- 因为导出的数据需要包含计算后的指标和品牌名称
|
||||
|
||||
**验收标准第5条**:
|
||||
```
|
||||
5. 使用中文列名作为表头 **(与 T-009 ResultTable 字段一致)**
|
||||
```
|
||||
这说明是要求"字段一致性",而不是"依赖关系"。
|
||||
|
||||
**影响**:
|
||||
- 🚫 **执行顺序混乱**: 开发者可能误以为要先完成前端表格才能开发后端导出API
|
||||
- 🚫 **前后端耦合**: 后端依赖前端,违反分离架构原则
|
||||
|
||||
**建议修复**:
|
||||
1. 修改依赖: `T-010 依赖: T-006, T-007` (移除 T-009)
|
||||
2. 修改验收标准第5条: "使用中文列名作为表头 **(字段顺序和命名与前端 ResultTable 保持一致,参考共享的字段定义)**"
|
||||
3. 建议: 创建共享的字段定义文件(如 `types/fields.ts`),前后端都引用
|
||||
|
||||
---
|
||||
|
||||
#### C-3: 缺少单元测试独立任务
|
||||
**位置**: 整个 tasks.md
|
||||
|
||||
**问题描述**:
|
||||
tasks.md 中有 **7个任务** 要求 TDD 和 100% 测试覆盖率:
|
||||
- T-002: 数据库配置 (验收标准 7-8 条)
|
||||
- T-005: 查询 API 开发 (验收标准 9-10 条)
|
||||
- T-006: 计算逻辑实现 (验收标准 7-8 条)
|
||||
- T-007: 品牌 API 批量集成 (验收标准 8-9 条)
|
||||
- T-010: 导出 API 开发 (验收标准 10-11 条)
|
||||
- T-013: 错误处理 (验收标准 8-9 条)
|
||||
- T-017: 集成测试 (验收标准 9-11 条)
|
||||
|
||||
但**没有单独的测试任务**,所有测试要求都嵌入在开发任务中。
|
||||
|
||||
**影响**:
|
||||
- 🚫 **测试容易被忽略**: 开发进度紧张时,测试可能被压缩或跳过
|
||||
- 🚫 **无法单独追踪测试进度**: 测试覆盖率没有独立的验收里程碑
|
||||
- 🚫 **100% 覆盖率难以保证**: 嵌入在开发任务中,验收时可能只检查功能,不检查覆盖率
|
||||
- 🚫 **测试报告缺失**: T-017 要求生成覆盖率报告,但其他任务没有明确要求
|
||||
|
||||
**建议修复**:
|
||||
在 Phase 3 增加测试里程碑任务:
|
||||
|
||||
**方案A: 增加独立测试任务**
|
||||
```markdown
|
||||
| T-018 | 测试覆盖率验收 | 验证所有后端代码测试覆盖率 ≥ 100% | P1 | T-002, T-005~007, T-010, T-013 |
|
||||
验收标准:
|
||||
1. 数据库操作测试覆盖率 100% (T-002)
|
||||
2. API集成测试覆盖率 100% (T-005)
|
||||
3. 计算逻辑单元测试覆盖率 100% (T-006)
|
||||
4. 品牌API单元测试覆盖率 100% (T-007)
|
||||
5. 导出功能单元测试覆盖率 100% (T-010)
|
||||
6. 错误处理分支覆盖率 100% (T-013)
|
||||
7. 使用 pytest-cov 生成覆盖率报告
|
||||
8. 覆盖率报告上传到 CI/CD
|
||||
```
|
||||
|
||||
**方案B: 在每个 Phase 结束增加测试验收点**
|
||||
```markdown
|
||||
## 3. Phase 2 任务 - 核心功能开发
|
||||
|
||||
### 3.3 测试验收
|
||||
| ID | 任务 | 描述 | 优先级 | 依赖 | 验收标准 |
|
||||
|----|------|------|--------|------|----------|
|
||||
| T-012A | Phase 2 测试验收 | 验证 Phase 2 所有后端任务测试覆盖率 | P0 | T-005~007, T-010 | 1. 所有后端代码覆盖率 ≥ 100%<br>2. 生成覆盖率报告 |
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### C-4: 任务编号与 DevelopmentPlan 不一致
|
||||
**位置**: Phase 3 所有任务 ([doc/tasks.md:88-101](doc/tasks.md))
|
||||
|
||||
**问题描述**:
|
||||
tasks.md 新增了 T-012 (主页面集成),导致 Phase 3 的所有任务编号向后偏移一位:
|
||||
|
||||
| DevelopmentPlan | tasks.md | 差异 |
|
||||
|-----------------|----------|------|
|
||||
| T-013 错误处理 | **T-013 错误处理** | ❌ 编号错位 |
|
||||
| T-014 性能优化 | **T-014 性能优化** | ❌ 编号错位 |
|
||||
| T-015 视频链接跳转 | **T-015 视频链接跳转** | ❌ 编号错位 |
|
||||
| T-016 部署配置 | **T-016 部署配置** | ❌ 编号错位 |
|
||||
| T-017 集成测试 | **T-017 集成测试** | ❌ 编号错位 |
|
||||
|
||||
**影响**:
|
||||
- 🚫 **文档引用混乱**: 在 DevelopmentPlan 中看到的 T-013 和 tasks.md 中的 T-013 不是同一个任务
|
||||
- 🚫 **沟通成本高**: 开发人员需要在两个文档之间切换时手动对照编号
|
||||
- 🚫 **代码注释/提交信息错误**: Git 提交信息中的任务 ID 可能指向错误的任务
|
||||
|
||||
**建议修复**:
|
||||
|
||||
**方案A (推荐): 将 T-012 改为 T-008A**
|
||||
```markdown
|
||||
| T-008 | 查询表单组件 | ... | P0 | T-003 |
|
||||
| T-008A | 主页面集成 | ... | P0 | T-008, T-009, T-011 |
|
||||
| T-009 | 结果表格组件 | ... | P1 | T-003, T-006, T-007 |
|
||||
```
|
||||
- 优点: Phase 3 编号与 DevelopmentPlan 完全一致
|
||||
- 缺点: 引入子编号
|
||||
|
||||
**方案B: 更新 DevelopmentPlan.md**
|
||||
在 DevelopmentPlan.md 的 Phase 2 增加 T-012 任务
|
||||
- 优点: 保持 tasks.md 不变
|
||||
- 缺点: 需要修改 DevelopmentPlan.md
|
||||
|
||||
**方案C: 在 tasks.md 增加对照表**
|
||||
```markdown
|
||||
## 附录: 与 DevelopmentPlan 任务编号对照
|
||||
|
||||
| tasks.md | DevelopmentPlan | 任务名称 |
|
||||
|----------|-----------------|----------|
|
||||
| T-013 | T-013 | 错误处理 |
|
||||
| T-014 | T-014 | 性能优化 |
|
||||
...
|
||||
```
|
||||
- 优点: 不修改编号,只增加对照表
|
||||
- 缺点: 需要手动查表,增加认知负担
|
||||
|
||||
---
|
||||
|
||||
### 一般问题 (Major)
|
||||
1. 任务统计与优先级说明与实际任务清单不一致,且缺少迭代任务计数,导致计划与执行口径不统一,影响排期与资源分配。参考: doc/tasks.md:29-35, doc/tasks.md:189-201, doc/tasks.md:337-357
|
||||
2. 依赖图、执行检查清单、里程碑均未覆盖 T-019~T-026 迭代任务,迭代工作缺少清晰执行路径与交付节点,容易被遗漏或排期错误。参考: doc/tasks.md:113-187, doc/tasks.md:337-357
|
||||
3. 迭代任务(T-019~T-026)未在上游 DevelopmentPlan/UIDesign 中体现,且 T-026 为新页面无 UI 设计依据,存在范围漂移与验收依据不一致风险。参考: doc/tasks.md:337-357, doc/DevelopmentPlan.md:246-318, doc/UIDesign.md:31-128
|
||||
4. 多处任务要求真实数据库/性能/覆盖率验证,但未定义数据准备与测试环境前置条件,可能导致 T-002/T-014/T-017/T-018 无法直接执行。参考: doc/tasks.md:49, doc/tasks.md:103, doc/tasks.md:108-110
|
||||
|
||||
#### M-1: T-002 真实数据库测试要求缺少环境准备说明
|
||||
**位置**: [doc/tasks.md:46](doc/tasks.md:46)
|
||||
|
||||
**问题描述**:
|
||||
```markdown
|
||||
6. **真实数据库测试**: 使用 .env 中的连接字符串连接真实数据库并验证
|
||||
```
|
||||
|
||||
验收标准要求连接"真实数据库",但没有说明:
|
||||
- 真实数据库是否已经准备好?
|
||||
- 数据库中是否有测试数据?
|
||||
- 需要什么权限?
|
||||
|
||||
**影响**:
|
||||
- 开发者执行到 T-002 时可能发现数据库环境未就绪
|
||||
- 导致任务阻塞,无法继续
|
||||
|
||||
**建议修复**:
|
||||
1. 在 T-002 依赖中增加: `依赖: T-001B (后端初始化), 数据库环境准备 (DBA)`
|
||||
2. 在 T-004 环境变量配置中增加验收标准: "数据库连接字符串配置完成,数据库可访问"
|
||||
3. 或在任务描述中明确标注: "需提前准备测试数据库环境,包含表结构和测试数据"
|
||||
|
||||
---
|
||||
|
||||
#### M-2: T-012 主页面集成缺少状态管理方案说明
|
||||
**位置**: [doc/tasks.md:85](doc/tasks.md:85)
|
||||
|
||||
**问题描述**:
|
||||
```markdown
|
||||
6. 页面状态管理: 默认态/输入态/查询中/结果态/空结果态/错误态
|
||||
```
|
||||
|
||||
验收标准提到"页面状态管理",但没有说明使用何种状态管理方案:
|
||||
- React useState?
|
||||
- Zustand?
|
||||
- Redux Toolkit?
|
||||
- Context API?
|
||||
|
||||
**影响**:
|
||||
- 前端开发者需要自行决定状态管理方案
|
||||
- 可能导致过度设计(引入 Redux)或过于简单(难以维护)
|
||||
|
||||
**建议修复**:
|
||||
在验收标准第6条补充说明:
|
||||
```markdown
|
||||
6. 页面状态管理: 默认态/输入态/查询中/结果态/空结果态/错误态 **(使用 React useState 管理,无需第三方库)**
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### M-3: T-007 品牌API并发限制和超时参数硬编码
|
||||
**位置**: [doc/tasks.md:64](doc/tasks.md:64)
|
||||
|
||||
**问题描述**:
|
||||
```markdown
|
||||
3. 使用 asyncio.gather 批量并发请求(限制 10 并发)
|
||||
6. 超时设置: 3秒
|
||||
```
|
||||
|
||||
验收标准硬编码了"10 并发"和"3 秒",未说明这些参数是否可配置。
|
||||
|
||||
**影响**:
|
||||
- 生产环境可能需要调整并发数(如品牌API限流时降低并发)
|
||||
- 超时时间可能需要根据网络环境调整
|
||||
- 硬编码参数难以适应不同环境
|
||||
|
||||
**建议修复**:
|
||||
1. 将并发限制和超时时间配置到环境变量或配置文件
|
||||
2. 修改验收标准:
|
||||
```markdown
|
||||
3. 使用 asyncio.gather 批量并发请求,并发数可配置(默认 10)
|
||||
6. 超时时间可配置(默认 3 秒)
|
||||
7. 从环境变量读取配置: BRAND_API_CONCURRENCY, BRAND_API_TIMEOUT
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### M-4: T-009 与 T-010 字段一致性验证缺失
|
||||
**位置**: [doc/tasks.md:76](doc/tasks.md:76)
|
||||
|
||||
**问题描述**:
|
||||
T-009 (前端表格) 和 T-010 (后端导出) 都要求"使用中文列名",但没有明确如何保证字段一致性。
|
||||
|
||||
**当前状态**:
|
||||
- T-009 验收标准: "展示 26 个字段,使用中文列名"
|
||||
- T-010 验收标准: "使用中文列名作为表头 **(与 T-009 ResultTable 字段一致)**"
|
||||
|
||||
**问题**:
|
||||
- "字段一致"如何验证?
|
||||
- 前端和后端是否共享字段定义?
|
||||
|
||||
**影响**:
|
||||
- 前端展示和导出文件的列名可能不一致
|
||||
- 导致用户混淆
|
||||
|
||||
**建议修复**:
|
||||
1. 创建共享的字段定义文件:
|
||||
```typescript
|
||||
// shared/types/fields.ts
|
||||
export const VIDEO_FIELDS = [
|
||||
{ key: 'item_id', label: '视频ID', width: 120 },
|
||||
{ key: 'title', label: '视频标题', width: 200 },
|
||||
// ... 24 more fields
|
||||
] as const;
|
||||
```
|
||||
|
||||
2. 修改 T-009 验收标准:
|
||||
```markdown
|
||||
2. 展示 26 个字段,使用共享字段定义文件 (shared/types/fields.ts)
|
||||
```
|
||||
|
||||
3. 修改 T-010 验收标准:
|
||||
```markdown
|
||||
5. 使用共享字段定义文件作为表头,保证与前端表格字段顺序和命名完全一致
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### M-5: T-014 性能优化缺少性能测试脚本
|
||||
**位置**: [doc/tasks.md:96](doc/tasks.md:96)
|
||||
|
||||
**问题描述**:
|
||||
T-014 定义了明确的性能指标:
|
||||
- 查询响应时间 ≤ 3秒 (100条)
|
||||
- 页面加载时间 ≤ 2秒
|
||||
- 导出响应时间 ≤ 5秒 (1000条)
|
||||
|
||||
但验收标准只有"验证索引已创建",没有要求编写性能测试脚本。
|
||||
|
||||
**影响**:
|
||||
- 性能指标难以自动化验证
|
||||
- 依赖人工测试,可能遗漏
|
||||
- 回归测试时无法快速验证性能
|
||||
|
||||
**建议修复**:
|
||||
增加验收标准:
|
||||
```markdown
|
||||
6. **后端性能测试**: 编写性能测试脚本,验证响应时间指标
|
||||
7. **真实数据库测试**: 使用真实数据库和测试数据进行性能测试
|
||||
8. 性能测试报告: 生成性能测试报告,记录实际响应时间
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### M-6: T-017 集成测试缺少性能测试用例
|
||||
**位置**: [doc/tasks.md:101](doc/tasks.md:101)
|
||||
|
||||
**问题描述**:
|
||||
T-017 集成测试有 8 个功能测试用例,但未包含 T-014 定义的性能指标验证。
|
||||
|
||||
**建议修复**:
|
||||
在验收标准中增加性能测试用例:
|
||||
```markdown
|
||||
9. 测试用例: 性能指标验证 (查询≤3秒、导出≤5秒)
|
||||
10. **真实数据库集成测试**: 使用 .env 中的真实数据库连接进行完整集成测试
|
||||
11. **后端测试覆盖率验证**: 确认所有后端代码测试覆盖率 ≥ 100%
|
||||
12. **测试报告生成**: 使用 pytest-cov 生成覆盖率报告
|
||||
```
|
||||
(注: 验收标准 10-12 已存在,只需增加第9条)
|
||||
|
||||
---
|
||||
|
||||
### 改进建议 (Minor)
|
||||
1. 主页面标题与 UIDesign 头部文案不一致(缺少“云图数据查询分析”),建议补齐以满足品牌一致性。参考: doc/tasks.md:91, doc/UIDesign.md:80-82
|
||||
2. 覆盖率验收任务 T-018 同时包含指标定义、报告产出、CI 集成,建议拆分为“覆盖率验收”与“CI 集成”以降低任务粒度。参考: doc/tasks.md:110
|
||||
|
||||
#### S-1: 前端"粗略实现"说明不够具体
|
||||
**位置**: [doc/tasks.md:74, 76, 78, 85](doc/tasks.md)
|
||||
|
||||
**问题描述**:
|
||||
T-008/T-009/T-011/T-012 都标注了"粗略实现说明",但"粗略"的标准不明确。
|
||||
|
||||
**建议**:
|
||||
在任务总览或关键技术点章节定义"粗略实现"标准:
|
||||
```markdown
|
||||
## 前端"粗略实现"标准
|
||||
|
||||
本项目前端采用"功能优先、样式从简"的开发策略:
|
||||
- ✅ **功能完整**: 所有功能可用,交互流程完整
|
||||
- ✅ **样式简洁**: 使用 Tailwind 默认样式,无需过度美化
|
||||
- ✅ **品牌元素保留**: Logo、品牌色、品牌声明必须体现
|
||||
- ❌ **暂不支持**: 响应式适配、动画效果、深度优化
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### S-2: 建议增加任务估时
|
||||
**位置**: 整个 tasks.md
|
||||
|
||||
**问题描述**:
|
||||
所有任务都没有工作量估时,无法评估项目整体时间和关键路径。
|
||||
|
||||
**建议**:
|
||||
在任务总览表格增加"估时"列:
|
||||
```markdown
|
||||
| ID | 任务 | 描述 | 优先级 | 依赖 | 估时 | 验收标准 |
|
||||
|----|------|------|--------|------|------|----------|
|
||||
| T-001 | 项目初始化 | ... | P0 | - | 1天 | ... |
|
||||
```
|
||||
|
||||
**参考估时** (仅供参考):
|
||||
- T-001: 1天 (前后端分离后: 0.5天 × 2)
|
||||
- T-002: 1天
|
||||
- T-005: 2天 (含 TDD)
|
||||
- T-009: 2天
|
||||
- T-012: 2天
|
||||
|
||||
---
|
||||
|
||||
#### S-3: T-016 部署配置缺少监控和日志方案
|
||||
**位置**: [doc/tasks.md:99](doc/tasks.md:99)
|
||||
|
||||
**问题描述**:
|
||||
T-016 部署配置只涉及 Docker 和环境变量,未涉及生产环境监控和日志收集。
|
||||
|
||||
**建议**:
|
||||
增加验收标准:
|
||||
```markdown
|
||||
8. 日志配置: 前端 console 输出,后端使用 Python logging 模块输出到文件
|
||||
9. (可选) 监控配置: 接入 Sentry 或 Prometheus 进行错误监控
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### S-4: 任务依赖图与实际任务ID不一致
|
||||
**位置**: [doc/tasks.md:105](doc/tasks.md:105)
|
||||
|
||||
**问题描述**:
|
||||
第5节"任务依赖图"仍使用 DevelopmentPlan 的任务编号,与 tasks.md 实际任务ID不一致。
|
||||
|
||||
**建议修复**:
|
||||
更新任务依赖图,使用 tasks.md 的任务ID (T-001~T-017):
|
||||
```
|
||||
Phase 1: 基础架构
|
||||
T-001 (项目初始化)
|
||||
├── T-002 (数据库配置)
|
||||
├── T-003 (基础UI框架)
|
||||
└── T-004 (环境变量配置)
|
||||
|
||||
Phase 2: 核心功能
|
||||
T-002 ──▶ T-005 (查询API) ──▶ T-006 (计算逻辑) ──▶ T-009 (结果表格)
|
||||
│ │ │
|
||||
└──▶ T-007 (品牌API) │ │
|
||||
│ │
|
||||
T-003 ──▶ T-008 (查询表单) │ │
|
||||
│ │
|
||||
T-010 (导出API) ◀───────────────┤
|
||||
│ │
|
||||
T-011 (导出按钮) ◀──────────────┤
|
||||
│
|
||||
T-008, T-009, T-011 ──▶ T-012 (主页面集成) ────────────┘
|
||||
|
||||
Phase 3: 优化测试
|
||||
T-012 ──▶ T-013 (错误处理) ──▶ T-014 (性能优化)
|
||||
│ │
|
||||
├──▶ T-015 (视频链接) │
|
||||
│ │
|
||||
└──▶ T-016 (部署配置) │
|
||||
│
|
||||
T-017 (集成测试)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### S-5: 建议增加功能ID(F-xxx)对应关系
|
||||
**位置**: 整个 tasks.md
|
||||
|
||||
**建议**:
|
||||
在"关联功能"列增加功能ID引用,便于追溯需求:
|
||||
```markdown
|
||||
| ID | 任务 | 描述 | 优先级 | 依赖 | 关联功能 | 验收标准 |
|
||||
|----|------|------|--------|------|----------|----------|
|
||||
| T-005 | 查询 API 开发 | ... | P0 | T-002 | F-001, F-002, F-003 | ... |
|
||||
| T-006 | 计算逻辑实现 | ... | P0 | T-005 | F-004, F-005, F-006 | ... |
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 依赖关系分析
|
||||
|
||||
### 关键路径
|
||||
|
||||
```
|
||||
T-001 (项目初始化)
|
||||
│
|
||||
├─→ T-002 (数据库配置)
|
||||
│ │
|
||||
│ └─→ T-005 (查询API)
|
||||
│ │
|
||||
│ ├─→ T-006 (计算逻辑)
|
||||
│ │ │
|
||||
│ │ └─→ T-010 (导出API)
|
||||
│ │
|
||||
│ └─→ T-007 (品牌API)
|
||||
│ │
|
||||
│ └─→ T-009 (结果表格)
|
||||
│ │
|
||||
│ └─→ T-012 (主页面集成)
|
||||
│ │
|
||||
│ └─→ T-013 (错误处理)
|
||||
│ │
|
||||
│ └─→ T-017 (集成测试)
|
||||
│
|
||||
└─→ T-003 (基础UI)
|
||||
│
|
||||
└─→ T-008 (查询表单)
|
||||
│
|
||||
└─→ T-012 (主页面集成)
|
||||
```
|
||||
|
||||
**关键路径**:
|
||||
T-001 → T-002 → T-005 → T-007 → T-009 → T-012 → T-013 → T-017
|
||||
|
||||
**可并行任务**:
|
||||
- T-002 (数据库) 和 T-003 (基础UI) 可并行
|
||||
- T-006 (计算逻辑) 和 T-007 (品牌API) 可并行
|
||||
- T-013/T-014/T-015 可并行
|
||||
|
||||
---
|
||||
|
||||
## 评审结论
|
||||
|
||||
需修改后通过。
|
||||
### 评审结果
|
||||
|
||||
### 下一步行动
|
||||
- [ ] 对齐任务总数与优先级统计,补充迭代任务到依赖图/清单/里程碑
|
||||
- [ ] 将 T-019~T-026 同步到 DevelopmentPlan/UIDesign(或明确为独立迭代范围)
|
||||
- [ ] 增加真实数据库与测试数据准备任务(含环境获取方式)
|
||||
- [ ] 修正页面头部文案与 UIDesign 一致
|
||||
🟡 **需修改后通过**
|
||||
|
||||
---
|
||||
|
||||
### 主要优点
|
||||
|
||||
✅ **覆盖度完整**:
|
||||
- 所有 DevelopmentPlan (16个任务) 和 UIDesign (10个组件) 都有对应任务
|
||||
- 新增 T-012 主页面集成任务是合理补充
|
||||
|
||||
✅ **验收标准详细**:
|
||||
- 每个任务平均 6.2 条验收标准
|
||||
- 验收标准具体可操作,便于验收
|
||||
- T-006/T-014/T-017 的验收标准特别优秀
|
||||
|
||||
✅ **TDD 要求明确**:
|
||||
- 7个关键后端任务都要求先写测试再写代码
|
||||
- 明确要求 100% 测试覆盖率和真实数据库测试
|
||||
|
||||
✅ **架构更新到位**:
|
||||
- 任务描述已完全更新为前后端分离架构 (FastAPI + Next.js)
|
||||
- 品牌元素(麦秒思AI)在任务中明确体现
|
||||
|
||||
---
|
||||
|
||||
### 关键问题
|
||||
|
||||
❌ **严重问题** (必须修复):
|
||||
1. **C-1: T-001 粒度过大** - 前后端初始化应拆分,支持并行开发
|
||||
2. **C-2: T-010 依赖错误** - 后端 API 不应依赖前端组件 T-009
|
||||
3. **C-3: 缺少测试独立任务** - 100% 覆盖率需要独立验收里程碑
|
||||
4. **C-4: 任务编号不一致** - Phase 3 任务编号与 DevelopmentPlan 错位
|
||||
|
||||
⚠️ **一般问题** (建议修复):
|
||||
1. **M-1: T-002 数据库环境准备** - 需明确数据库环境前置条件
|
||||
2. **M-2: T-012 状态管理方案** - 建议使用 React useState
|
||||
3. **M-3: T-007 参数硬编码** - 并发和超时应可配置
|
||||
4. **M-4: T-009/T-010 字段一致性** - 建议共享字段定义文件
|
||||
5. **M-5: T-014 性能测试脚本** - 需编写自动化性能测试
|
||||
6. **M-6: T-017 性能测试用例** - 集成测试应包含性能验证
|
||||
|
||||
---
|
||||
|
||||
### 影响评估
|
||||
|
||||
**阻塞性问题**:
|
||||
- 🚫 **C-1 (T-001 粒度过大)**: 导致前后端无法并行开发,延长项目周期
|
||||
- 🚫 **C-2 (T-010 依赖错误)**: 导致执行顺序混乱,前后端耦合
|
||||
|
||||
**质量风险**:
|
||||
- ⚠️ **C-3 (缺少测试任务)**: 100% 覆盖率难以保证,可能降低代码质量
|
||||
- ⚠️ **M-5/M-6 (性能测试缺失)**: 性能指标无法自动化验证
|
||||
|
||||
**进度风险**:
|
||||
- ⚠️ **M-1 (数据库环境未就绪)**: 可能导致 T-002 阻塞
|
||||
- ⚠️ **无任务估时**: 难以评估项目整体进度和关键路径
|
||||
|
||||
---
|
||||
|
||||
## 下一步行动
|
||||
|
||||
### 必须修改 (Critical) - 预估 1.5 小时
|
||||
|
||||
- [ ] **C-1: 拆分 T-001** 为 T-001A (前端初始化) 和 T-001B (后端初始化)
|
||||
- 预估时间: 30分钟
|
||||
- 影响范围: tasks.md, DevelopmentPlan.md
|
||||
|
||||
- [ ] **C-2: 修正 T-010 依赖** 移除 T-009,改为 `T-006, T-007`
|
||||
- 预估时间: 10分钟
|
||||
- 影响范围: tasks.md:67
|
||||
|
||||
- [ ] **C-3: 增加测试任务** 在 Phase 3 增加 T-018 测试覆盖率验收
|
||||
- 预估时间: 20分钟
|
||||
- 影响范围: tasks.md Phase 3
|
||||
|
||||
- [ ] **C-4: 统一任务编号** 选择方案A/B/C 修复编号不一致问题
|
||||
- 预估时间: 30分钟
|
||||
- 影响范围: tasks.md 或 DevelopmentPlan.md
|
||||
|
||||
---
|
||||
|
||||
### 建议修改 (Major) - 预估 1 小时
|
||||
|
||||
- [ ] **M-1: T-002 数据库环境说明** 明确数据库准备前置条件
|
||||
- 预估时间: 10分钟
|
||||
|
||||
- [ ] **M-2: T-012 状态管理说明** 补充 React useState 方案
|
||||
- 预估时间: 5分钟
|
||||
|
||||
- [ ] **M-3: T-007 参数配置化** 并发和超时改为可配置
|
||||
- 预估时间: 15分钟
|
||||
|
||||
- [ ] **M-4: T-009/T-010 字段一致性** 增加共享字段定义要求
|
||||
- 预估时间: 15分钟
|
||||
|
||||
- [ ] **M-5: T-014 性能测试脚本** 增加性能测试验收标准
|
||||
- 预估时间: 10分钟
|
||||
|
||||
- [ ] **M-6: T-017 性能测试用例** 增加性能测试用例
|
||||
- 预估时间: 5分钟
|
||||
|
||||
---
|
||||
|
||||
### 可选优化 (Minor) - 预估 1 小时
|
||||
|
||||
- [ ] **S-1: 定义"粗略实现"标准** 增加前端开发标准说明
|
||||
- [ ] **S-2: 增加任务估时** 为每个任务增加工作量估时(人天)
|
||||
- [ ] **S-3: T-016 监控配置** 增加日志和监控验收标准
|
||||
- [ ] **S-4: 更新依赖图** 使用 tasks.md 的实际任务ID
|
||||
- [ ] **S-5: 增加功能ID** 在关联功能列增加 F-xxx 引用
|
||||
|
||||
---
|
||||
|
||||
### 修复优先级汇总
|
||||
|
||||
| 优先级 | 问题ID | 问题描述 | 预估时间 | 阻塞风险 |
|
||||
|--------|--------|----------|----------|----------|
|
||||
| P0 | C-1 | T-001 拆分 | 30分钟 | ⚠️ 高 |
|
||||
| P0 | C-2 | T-010 依赖修正 | 10分钟 | ⚠️ 高 |
|
||||
| P0 | C-3 | 增加测试任务 | 20分钟 | ⚠️ 中 |
|
||||
| P0 | C-4 | 统一任务编号 | 30分钟 | ⚠️ 中 |
|
||||
| P1 | M-1~M-6 | 6个一般问题 | 60分钟 | ⚠️ 低 |
|
||||
| P2 | S-1~S-5 | 5个改进建议 | 60分钟 | ✅ 无 |
|
||||
|
||||
**预计修复总时间**: 约 3.5 小时 (P0-P2 全部)
|
||||
|
||||
---
|
||||
|
||||
## 参考信息
|
||||
|
||||
### 文档链接
|
||||
|
||||
- 目标文档: [doc/tasks.md](doc/tasks.md)
|
||||
- 上游文档1: [doc/UIDesign.md](doc/UIDesign.md) - UI 设计文档
|
||||
- 上游文档2: [doc/DevelopmentPlan.md](doc/DevelopmentPlan.md) - 开发计划
|
||||
|
||||
### 修改建议操作
|
||||
|
||||
建议使用 `/mt` 命令根据本评审报告的问题清单进行增量修改:
|
||||
```bash
|
||||
/mt # 增量修改 tasks.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**评审人**: Claude Sonnet 4.5
|
||||
**评审日期**: 2026-01-28 15:30
|
||||
**评审版本**: tasks.md v1.0
|
||||
**评审耗时**: 45 分钟
|
||||
**评审方法**: 基于 `/rt` 评审技能,对比 UIDesign.md 和 DevelopmentPlan.md
|
||||
|
||||
42
doc/tasks.md
42
doc/tasks.md
@ -26,12 +26,12 @@
|
||||
|
||||
## 1. 任务总览
|
||||
|
||||
<!-- MODIFIED: 更新任务统计,T-001拆分+T-018新增+T-019~T-027迭代任务 -->
|
||||
<!-- MODIFIED: 更新任务统计,T-001拆分+T-018新增 -->
|
||||
| 统计项 | 数量 |
|
||||
|--------|------|
|
||||
| 总任务数 | 27 |
|
||||
| P0 任务 | 17 |
|
||||
| P1 任务 | 9 |
|
||||
| 总任务数 | 18 |
|
||||
| P0 任务 | 10 |
|
||||
| P1 任务 | 7 |
|
||||
| P2 任务 | 1 |
|
||||
|
||||
## 2. Phase 1 任务 - 基础架构搭建
|
||||
@ -48,7 +48,7 @@
|
||||
<!-- MODIFIED: 依赖改为 T-001B (后端初始化) -->
|
||||
| T-002 | 数据库配置 | 配置 SQLAlchemy,定义数据模型,连接 PostgreSQL | P0 | T-001B | 1. SQLAlchemy 2.0+ 和 asyncpg 安装完成<br>2. 定义 KolVideo 模型(使用 SQLAlchemy ORM)<br>3. 数据库异步连接成功<br>4. 索引创建: star_id, star_unique_id, star_nickname<br>5. Alembic 迁移工具配置完成<br><!-- NEW START -->6. **真实数据库测试**: 使用 .env 中的连接字符串连接真实数据库并验证<br>7. **TDD要求**: 编写数据库连接测试,模型测试,CRUD测试<br>8. **测试覆盖率**: 数据库操作测试覆盖率 ≥ 100%<!-- NEW END --> |
|
||||
<!-- MODIFIED: 依赖改为 T-001A (前端初始化) -->
|
||||
| T-003 | 基础 UI 框架 | 安装 Tailwind CSS,创建基础布局组件 | P0 | T-001A | 1. Tailwind CSS 配置完成<br>2. 品牌色系配置 (#4F46E5等)<br>3. 基础布局组件创建 (Header/Footer)<br>4. 秒思AI Logo 集成 (doc/ui/muse.svg) |
|
||||
| T-003 | 基础 UI 框架 | 安装 Tailwind CSS,创建基础布局组件 | P0 | T-001A | 1. Tailwind CSS 配置完成<br>2. 品牌色系配置 (#4F46E5等)<br>3. 基础布局组件创建 (Header/Footer)<br>4. 麦秒思AI Logo 集成 (doc/ui/muse.svg) |
|
||||
<!-- MODIFIED: 依赖改为 T-001A, T-001B (前后端都需要环境变量) -->
|
||||
| T-004 | 环境变量配置 | 配置开发/生产环境变量,数据库连接字符串 | P0 | T-001A, T-001B | 1. 前后端 .env.example 创建<br>2. 后端 DATABASE_URL 配置<br>3. 后端品牌 API 地址配置<br>4. 前端 NEXT_PUBLIC_API_URL 配置<br>5. .env 文件创建并添加到 .gitignore |
|
||||
|
||||
@ -88,7 +88,7 @@
|
||||
|----|------|------|--------|------|----------|
|
||||
<!-- MODIFIED: 简化前端实现要求 -->
|
||||
<!-- MODIFIED: 任务编号改为 T-011A,统一与 DevelopmentPlan 编号体系 (根据评审报告 C-4) -->
|
||||
| T-011A | 主页面集成 | 集成查询表单、结果表格和导出按钮,完成单页应用 **(前端粗略实现)** | P0 | T-008, T-009, T-011 | 1. page.tsx 创建单页应用<br>2. 品牌头部: Logo + "KOL Insight" + "秒思AI制作"<br>3. 查询区域集成 QueryForm<br>4. 结果区域集成 ResultTable 和 ExportButton<br>5. Footer: "© 2026 秒思AI制作"<br>6. 页面状态管理: 默认态/输入态/查询中/结果态/空结果态/错误态<br>7. 空状态组件: 引导文案 + 空盒子图标<br>8. 错误状态组件: 错误提示 + 重试按钮<br><!-- NEW START -->9. **粗略实现说明**: 重点在功能集成,UI可简化,品牌元素必须保留<!-- NEW END --> |
|
||||
| T-011A | 主页面集成 | 集成查询表单、结果表格和导出按钮,完成单页应用 **(前端粗略实现)** | P0 | T-008, T-009, T-011 | 1. page.tsx 创建单页应用<br>2. 品牌头部: Logo + "KOL Insight" + "麦秒思AI制作"<br>3. 查询区域集成 QueryForm<br>4. 结果区域集成 ResultTable 和 ExportButton<br>5. Footer: "© 2026 麦秒思AI制作"<br>6. 页面状态管理: 默认态/输入态/查询中/结果态/空结果态/错误态<br>7. 空状态组件: 引导文案 + 空盒子图标<br>8. 错误状态组件: 错误提示 + 重试按钮<br><!-- NEW START -->9. **粗略实现说明**: 重点在功能集成,UI可简化,品牌元素必须保留<!-- NEW END --> |
|
||||
|
||||
## 4. Phase 3 任务 - 优化与测试
|
||||
|
||||
@ -334,34 +334,6 @@ async with httpx.AsyncClient() as client:
|
||||
|
||||
---
|
||||
|
||||
<!-- ITER: 2026-01-28 - 修复品牌API响应解析+添加认证 -->
|
||||
## 12. 迭代任务
|
||||
|
||||
### 12.1 Bug 修复
|
||||
|
||||
| ID | 任务 | 描述 | 依赖 | 优先级 | 验收标准 |
|
||||
|----|------|------|------|--------|----------|
|
||||
| T-019 | 修复品牌API响应解析 | 品牌API返回的data是数组结构,当前代码按字典解析导致取不到brand_name | T-007 | P0 | 1. 正确解析 `data[0].brand_name` 获取品牌名称<br>2. 处理 data 数组为空的边界情况<br>3. 更新测试用例的 mock 数据结构 |
|
||||
| T-020 | 添加品牌API认证 | 品牌API需要Bearer Token认证,当前代码未配置 | T-019 | P0 | 1. 新增环境变量 `BRAND_API_TOKEN`<br>2. 请求时添加 `Authorization: Bearer {token}` 头<br>3. 更新 `.env.example` 配置示例<br>4. 更新测试用例验证认证头 |
|
||||
<!-- ITER: 2026-01-28 - 修复巨量云图API调用参数问题 -->
|
||||
| T-027 | 修复巨量云图API调用参数 | API调用不通,日期格式/Cookie头/industry_id等参数错误 | T-023 | P0 | 1. **日期格式**: 从 `YYYY-MM-DD` 改为 `YYYYMMDD`<br>2. **Cookie头**: 直接使用 `auth_token` 完整值(已含sessionid=xxx)<br>3. **industry_id**: 使用数据库中视频的industry_id,传字符串格式 `["12"]`<br>4. **Cookie获取**: 随机选取任意一组aadvid/auth_token,不按brand_id匹配<br>5. 更新测试用例验证参数格式<br>6. **TDD要求**: 测试实际API调用成功返回数据 |
|
||||
|
||||
<!-- ITER: 2026-01-28 - 新增巨量云图视频分析功能 -->
|
||||
### 12.2 功能迭代 - 视频分析模块
|
||||
|
||||
| ID | 任务 | 描述 | 依赖 | 优先级 | 验收标准 |
|
||||
|----|------|------|------|--------|----------|
|
||||
| T-021 | SessionID池服务 | 实现从内部API获取Cookie列表,随机选取sessionid | T-004 | P0 | 1. 调用 `/v1/yuntu/get_cookie` 获取100个sessionid<br>2. 随机选取机制实现<br>3. 环境变量 `YUNTU_API_TOKEN` 配置<br>4. **TDD要求**: 先写测试用例(mock API响应) |
|
||||
| T-022 | SessionID自动重试 | sessionid失效时自动切换到下一个重试 | T-021 | P0 | 1. 检测401/403状态码触发重试<br>2. 最多重试3次<br>3. 重试日志记录<br>4. **TDD要求**: 测试覆盖重试场景 |
|
||||
| T-023 | 巨量云图API封装 | 封装GetContentMaterialAnalysisInfo接口调用 | T-022 | P0 | 1. 正确构造请求参数(object_id/start_date/end_date/industry_id_list)<br>2. end_date = start_date + 30天<br>3. Cookie头设置sessionid<br>4. 超时10秒<br>5. **TDD要求**: 测试参数构造和响应解析 |
|
||||
| T-024 | 视频分析数据接口 | 实现 GET /api/v1/videos/{item_id}/analysis | T-023 | P0 | 1. 从数据库获取基础信息<br>2. 调用巨量云图API获取实时数据<br>3. 返回6大类指标结构<br>4. 计算成本指标(CPM/CPA3/CPsearch等)<br>5. 除零检查返回null<br>6. **TDD要求**: 测试覆盖率100% |
|
||||
| T-025 | 数据库A3指标更新 | 从API获取数据后更新数据库对应字段 | T-024 | P1 | 1. 更新 total_new_a3_cnt<br>2. 更新 heated_new_a3_cnt<br>3. 更新 natural_new_a3_cnt<br>4. 更新 total_cost<br>5. **TDD要求**: 测试数据库更新逻辑 |
|
||||
| T-026 | 视频分析前端页面 | 前端展示6大类25+指标(粗略实现) | T-024 | P1 | 1. 基础信息展示(8字段)<br>2. 触达指标展示(7字段)<br>3. A3指标展示(3字段)<br>4. 搜索指标展示(5字段)<br>5. 费用指标展示(3字段)<br>6. 成本指标展示(6字段)<br>7. 数值格式化(千分位/2位小数)<br>8. **粗略实现**: 功能可用即可 |
|
||||
|
||||
---
|
||||
|
||||
**文档状态**: 待执行
|
||||
**建议下一步**:
|
||||
- **最高优先级**: 执行 T-027 修复巨量云图API调用参数问题
|
||||
- 然后验证视频分析功能 T-021~T-026 是否正常工作
|
||||
**建议下一步**: 按顺序执行 Phase 1 任务,完成基础架构搭建
|
||||
**评审建议**: 可运行 `/rt` 对任务列表进行评审
|
||||
|
||||
@ -1,61 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
frontend:
|
||||
build:
|
||||
context: ./frontend
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
- NEXT_PUBLIC_API_URL=http://backend:8000/api/v1
|
||||
depends_on:
|
||||
- backend
|
||||
networks:
|
||||
- kol-network
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
- DATABASE_URL=${DATABASE_URL}
|
||||
- CORS_ORIGINS=http://localhost:3000,http://frontend:3000
|
||||
- BRAND_API_BASE_URL=${BRAND_API_BASE_URL:-https://api.internal.intelligrow.cn}
|
||||
depends_on:
|
||||
- db
|
||||
networks:
|
||||
- kol-network
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
db:
|
||||
image: postgres:14-alpine
|
||||
ports:
|
||||
- "5432:5432"
|
||||
environment:
|
||||
- POSTGRES_USER=${POSTGRES_USER:-postgres}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
|
||||
- POSTGRES_DB=${POSTGRES_DB:-yuntu_kol}
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
networks:
|
||||
- kol-network
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
networks:
|
||||
kol-network:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
@ -1,41 +0,0 @@
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install pnpm
|
||||
RUN corepack enable && corepack prepare pnpm@latest --activate
|
||||
|
||||
# Copy package files
|
||||
COPY package.json pnpm-lock.yaml ./
|
||||
|
||||
# Install dependencies
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy application code
|
||||
COPY . .
|
||||
|
||||
# Build application
|
||||
RUN pnpm build
|
||||
|
||||
# Production image
|
||||
FROM node:20-alpine AS runner
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Create non-root user
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
|
||||
# Copy built files
|
||||
COPY --from=builder /app/public ./public
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||
|
||||
USER nextjs
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENV PORT 3000
|
||||
ENV HOSTNAME "0.0.0.0"
|
||||
|
||||
CMD ["node", "server.js"]
|
||||
@ -1,6 +1,4 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
output: 'standalone',
|
||||
};
|
||||
const nextConfig = {};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
@ -9,8 +9,6 @@
|
||||
"lint": "next lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^6.1.0",
|
||||
"antd": "^6.2.2",
|
||||
"next": "14.2.35",
|
||||
"react": "^18",
|
||||
"react-dom": "^18"
|
||||
|
||||
867
frontend/pnpm-lock.yaml
generated
867
frontend/pnpm-lock.yaml
generated
File diff suppressed because it is too large
@ -1,16 +0,0 @@
|
||||
import VideoAnalysis from '@/components/VideoAnalysis';
|
||||
|
||||
export default function AnalysisPage() {
|
||||
return (
|
||||
<main className="min-h-screen bg-gray-50 py-8">
|
||||
<div className="max-w-6xl mx-auto px-4">
|
||||
<div className="mb-6">
|
||||
<a href="/" className="text-indigo-600 hover:text-indigo-800">
|
||||
← 返回查询
|
||||
</a>
|
||||
</div>
|
||||
<VideoAnalysis />
|
||||
</div>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
@ -25,35 +25,3 @@ body {
|
||||
text-wrap: balance;
|
||||
}
|
||||
}
|
||||
|
||||
/* Ant Design Modal 内容可复制 */
|
||||
.ant-modal-body {
|
||||
user-select: text !important;
|
||||
-webkit-user-select: text !important;
|
||||
}
|
||||
|
||||
.ant-modal-body * {
|
||||
user-select: text !important;
|
||||
-webkit-user-select: text !important;
|
||||
}
|
||||
|
||||
/* Descriptions 所有内容可复制 - Ant Design v6 兼容 */
|
||||
.ant-descriptions-item-content,
|
||||
.ant-descriptions-item-content *,
|
||||
.ant-descriptions td,
|
||||
.ant-descriptions td *,
|
||||
[class*="ant-descriptions"] td,
|
||||
[class*="ant-descriptions"] td * {
|
||||
user-select: text !important;
|
||||
-webkit-user-select: text !important;
|
||||
-moz-user-select: text !important;
|
||||
-ms-user-select: text !important;
|
||||
cursor: text;
|
||||
}
|
||||
|
||||
/* 确保表格单元格内容可选 */
|
||||
.ant-table-cell,
|
||||
.ant-table-cell * {
|
||||
user-select: text !important;
|
||||
-webkit-user-select: text !important;
|
||||
}
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import type { Metadata } from 'next';
|
||||
import localFont from 'next/font/local';
|
||||
import './globals.css';
|
||||
import { Header, Footer, AntdProvider } from '@/components';
|
||||
import { Header, Footer } from '@/components';
|
||||
|
||||
const geistSans = localFont({
|
||||
src: './fonts/GeistVF.woff',
|
||||
@ -16,7 +16,7 @@ const geistMono = localFont({
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: 'KOL Insight - 云图数据查询分析',
|
||||
description: 'KOL 视频数据查询与成本分析工具 - 秒思AI制作',
|
||||
description: 'KOL 视频数据查询与成本分析工具 - 麦秒思AI制作',
|
||||
};
|
||||
|
||||
export default function RootLayout({
|
||||
@ -27,13 +27,11 @@ export default function RootLayout({
|
||||
return (
|
||||
<html lang="zh-CN">
|
||||
<body className={`${geistSans.variable} ${geistMono.variable} antialiased`}>
|
||||
<AntdProvider>
|
||||
<div className="min-h-screen flex flex-col">
|
||||
<Header />
|
||||
<main className="flex-1 bg-gray-50">{children}</main>
|
||||
<Footer />
|
||||
</div>
|
||||
</AntdProvider>
|
||||
<div className="min-h-screen flex flex-col">
|
||||
<Header />
|
||||
<main className="flex-1 bg-gray-50">{children}</main>
|
||||
<Footer />
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||
@ -1,7 +1,101 @@
|
||||
'use client';
|
||||
|
||||
import VideoAnalysis from '@/components/VideoAnalysis';
|
||||
import Image from "next/image";
|
||||
|
||||
export default function Home() {
|
||||
return <VideoAnalysis />;
|
||||
return (
|
||||
<div className="grid grid-rows-[20px_1fr_20px] items-center justify-items-center min-h-screen p-8 pb-20 gap-16 sm:p-20 font-[family-name:var(--font-geist-sans)]">
|
||||
<main className="flex flex-col gap-8 row-start-2 items-center sm:items-start">
|
||||
<Image
|
||||
className="dark:invert"
|
||||
src="https://nextjs.org/icons/next.svg"
|
||||
alt="Next.js logo"
|
||||
width={180}
|
||||
height={38}
|
||||
priority
|
||||
/>
|
||||
<ol className="list-inside list-decimal text-sm text-center sm:text-left font-[family-name:var(--font-geist-mono)]">
|
||||
<li className="mb-2">
|
||||
Get started by editing{" "}
|
||||
<code className="bg-black/[.05] dark:bg-white/[.06] px-1 py-0.5 rounded font-semibold">
|
||||
src/app/page.tsx
|
||||
</code>
|
||||
.
|
||||
</li>
|
||||
<li>Save and see your changes instantly.</li>
|
||||
</ol>
|
||||
|
||||
<div className="flex gap-4 items-center flex-col sm:flex-row">
|
||||
<a
|
||||
className="rounded-full border border-solid border-transparent transition-colors flex items-center justify-center bg-foreground text-background gap-2 hover:bg-[#383838] dark:hover:bg-[#ccc] text-sm sm:text-base h-10 sm:h-12 px-4 sm:px-5"
|
||||
href="https://vercel.com/new?utm_source=create-next-app&utm_medium=appdir-template-tw&utm_campaign=create-next-app"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
<Image
|
||||
className="dark:invert"
|
||||
src="https://nextjs.org/icons/vercel.svg"
|
||||
alt="Vercel logomark"
|
||||
width={20}
|
||||
height={20}
|
||||
/>
|
||||
Deploy now
|
||||
</a>
|
||||
<a
|
||||
className="rounded-full border border-solid border-black/[.08] dark:border-white/[.145] transition-colors flex items-center justify-center hover:bg-[#f2f2f2] dark:hover:bg-[#1a1a1a] hover:border-transparent text-sm sm:text-base h-10 sm:h-12 px-4 sm:px-5 sm:min-w-44"
|
||||
href="https://nextjs.org/docs?utm_source=create-next-app&utm_medium=appdir-template-tw&utm_campaign=create-next-app"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Read our docs
|
||||
</a>
|
||||
</div>
|
||||
</main>
|
||||
<footer className="row-start-3 flex gap-6 flex-wrap items-center justify-center">
|
||||
<a
|
||||
className="flex items-center gap-2 hover:underline hover:underline-offset-4"
|
||||
href="https://nextjs.org/learn?utm_source=create-next-app&utm_medium=appdir-template-tw&utm_campaign=create-next-app"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
<Image
|
||||
aria-hidden
|
||||
src="https://nextjs.org/icons/file.svg"
|
||||
alt="File icon"
|
||||
width={16}
|
||||
height={16}
|
||||
/>
|
||||
Learn
|
||||
</a>
|
||||
<a
|
||||
className="flex items-center gap-2 hover:underline hover:underline-offset-4"
|
||||
href="https://vercel.com/templates?framework=next.js&utm_source=create-next-app&utm_medium=appdir-template-tw&utm_campaign=create-next-app"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
<Image
|
||||
aria-hidden
|
||||
src="https://nextjs.org/icons/window.svg"
|
||||
alt="Window icon"
|
||||
width={16}
|
||||
height={16}
|
||||
/>
|
||||
Examples
|
||||
</a>
|
||||
<a
|
||||
className="flex items-center gap-2 hover:underline hover:underline-offset-4"
|
||||
href="https://nextjs.org?utm_source=create-next-app&utm_medium=appdir-template-tw&utm_campaign=create-next-app"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
<Image
|
||||
aria-hidden
|
||||
src="https://nextjs.org/icons/globe.svg"
|
||||
alt="Globe icon"
|
||||
width={16}
|
||||
height={16}
|
||||
/>
|
||||
Go to nextjs.org →
|
||||
</a>
|
||||
</footer>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,12 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { ConfigProvider } from 'antd';
|
||||
import zhCN from 'antd/locale/zh_CN';
|
||||
|
||||
export default function AntdProvider({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<ConfigProvider locale={zhCN}>
|
||||
{children}
|
||||
</ConfigProvider>
|
||||
);
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
|
||||
interface ExportButtonProps {
|
||||
hasData: boolean;
|
||||
}
|
||||
|
||||
const API_BASE_URL = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000/api/v1';
|
||||
|
||||
export default function ExportButton({ hasData }: ExportButtonProps) {
|
||||
const [isExporting, setIsExporting] = useState(false);
|
||||
|
||||
const handleExport = async (format: 'xlsx' | 'csv') => {
|
||||
if (!hasData) {
|
||||
alert('无数据可导出');
|
||||
return;
|
||||
}
|
||||
|
||||
setIsExporting(true);
|
||||
try {
|
||||
const response = await fetch(`${API_BASE_URL}/export?format=${format}`);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('导出失败');
|
||||
}
|
||||
|
||||
const blob = await response.blob();
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = `kol_data_${new Date().toISOString().slice(0, 10)}.${format}`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
document.body.removeChild(a);
|
||||
} catch (error) {
|
||||
console.error('Export error:', error);
|
||||
alert('导出失败,请重试');
|
||||
} finally {
|
||||
setIsExporting(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
onClick={() => handleExport('xlsx')}
|
||||
disabled={!hasData || isExporting}
|
||||
className="px-3 py-1.5 text-sm font-medium text-white bg-success rounded hover:bg-green-600 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{isExporting ? '导出中...' : '导出 Excel'}
|
||||
</button>
|
||||
<button
|
||||
onClick={() => handleExport('csv')}
|
||||
disabled={!hasData || isExporting}
|
||||
className="px-3 py-1.5 text-sm font-medium text-white bg-success rounded hover:bg-green-600 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{isExporting ? '导出中...' : '导出 CSV'}
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -2,7 +2,7 @@ export default function Footer() {
|
||||
return (
|
||||
<footer className="bg-gray-50 border-t border-gray-200 py-4 px-6">
|
||||
<div className="max-w-7xl mx-auto text-center text-sm text-gray-500">
|
||||
© 2026 秒思AI制作 | KOL Insight v1.0
|
||||
© 2026 麦秒思AI制作 | KOL Insight v1.0
|
||||
</div>
|
||||
</footer>
|
||||
);
|
||||
|
||||
@ -5,13 +5,13 @@ export default function Header() {
|
||||
<header className="bg-white border-b border-gray-200 py-4 px-6">
|
||||
<div className="max-w-7xl mx-auto flex items-center justify-between">
|
||||
<div className="flex items-center gap-4">
|
||||
<Image src="/muse.svg" alt="秒思AI Logo" width={40} height={40} priority />
|
||||
<Image src="/muse.svg" alt="麦秒思AI Logo" width={40} height={40} priority />
|
||||
<div>
|
||||
<h1 className="text-xl font-bold text-gray-900">KOL Insight</h1>
|
||||
<p className="text-sm text-gray-500">云图数据查询分析</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-sm text-gray-500">秒思AI制作</div>
|
||||
<div className="text-sm text-gray-500">麦秒思AI制作</div>
|
||||
</div>
|
||||
</header>
|
||||
);
|
||||
|
||||
@ -1,81 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
import { QueryType, QUERY_TYPE_OPTIONS, QUERY_PLACEHOLDER } from '@/types';
|
||||
|
||||
interface QueryFormProps {
|
||||
onSubmit: (type: QueryType, values: string[]) => void;
|
||||
isLoading: boolean;
|
||||
}
|
||||
|
||||
export default function QueryForm({ onSubmit, isLoading }: QueryFormProps) {
|
||||
const [queryType, setQueryType] = useState<QueryType>('star_id');
|
||||
const [inputValue, setInputValue] = useState('');
|
||||
|
||||
const handleSubmit = () => {
|
||||
const values = inputValue
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0);
|
||||
|
||||
if (values.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
onSubmit(queryType, values);
|
||||
};
|
||||
|
||||
const handleClear = () => {
|
||||
setInputValue('');
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow-sm p-6">
|
||||
<div className="mb-4">
|
||||
<label className="block text-sm font-medium text-gray-700 mb-2">查询方式</label>
|
||||
<div className="flex gap-4">
|
||||
{QUERY_TYPE_OPTIONS.map((option) => (
|
||||
<label key={option.value} className="flex items-center cursor-pointer">
|
||||
<input
|
||||
type="radio"
|
||||
name="queryType"
|
||||
value={option.value}
|
||||
checked={queryType === option.value}
|
||||
onChange={(e) => setQueryType(e.target.value as QueryType)}
|
||||
className="w-4 h-4 text-primary border-gray-300 focus:ring-primary"
|
||||
/>
|
||||
<span className="ml-2 text-sm text-gray-700">{option.label}</span>
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="mb-4">
|
||||
<textarea
|
||||
value={inputValue}
|
||||
onChange={(e) => setInputValue(e.target.value)}
|
||||
placeholder={QUERY_PLACEHOLDER[queryType]}
|
||||
className="w-full h-32 px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-primary focus:border-transparent resize-none"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex justify-end gap-2">
|
||||
<button
|
||||
onClick={handleClear}
|
||||
disabled={isLoading || !inputValue}
|
||||
className="px-4 py-2 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
清空
|
||||
</button>
|
||||
<button
|
||||
onClick={handleSubmit}
|
||||
disabled={isLoading || !inputValue.trim()}
|
||||
className="px-4 py-2 text-sm font-medium text-white bg-primary rounded-md hover:bg-primary-dark disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{isLoading ? '查询中...' : '开始查询'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -1,195 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
import { VideoData } from '@/types';
|
||||
import { formatNumber, formatLargeNumber, formatPercent, formatCurrency, formatDate } from '@/lib/utils';
|
||||
|
||||
interface ResultTableProps {
|
||||
data: VideoData[];
|
||||
total: number;
|
||||
}
|
||||
|
||||
// 表格列定义
|
||||
const columns = [
|
||||
{ key: 'item_id', label: '视频ID', width: 120 },
|
||||
{ key: 'title', label: '视频标题', width: 200 },
|
||||
{ key: 'viral_type', label: '爆文类型', width: 100 },
|
||||
{ key: 'video_url', label: '视频链接', width: 100 },
|
||||
{ key: 'star_nickname', label: '达人昵称', width: 120 },
|
||||
{ key: 'star_unique_id', label: '达人unique_id', width: 150 },
|
||||
{ key: 'natural_play_cnt', label: '自然曝光数', width: 120 },
|
||||
{ key: 'heated_play_cnt', label: '加热曝光数', width: 120 },
|
||||
{ key: 'total_play_cnt', label: '总曝光数', width: 120 },
|
||||
{ key: 'total_interact', label: '总互动', width: 100 },
|
||||
{ key: 'like_cnt', label: '点赞', width: 100 },
|
||||
{ key: 'share_cnt', label: '转发', width: 100 },
|
||||
{ key: 'comment_cnt', label: '评论', width: 100 },
|
||||
{ key: 'new_a3_rate', label: '新增A3率', width: 100 },
|
||||
{ key: 'after_view_search_uv', label: '看后搜人数', width: 120 },
|
||||
{ key: 'return_search_cnt', label: '回搜次数', width: 100 },
|
||||
{ key: 'industry_name', label: '合作行业', width: 120 },
|
||||
{ key: 'brand_name', label: '合作品牌', width: 150 },
|
||||
{ key: 'publish_time', label: '发布时间', width: 120 },
|
||||
{ key: 'estimated_video_cost', label: '预估视频价格', width: 120 },
|
||||
{ key: 'estimated_natural_cpm', label: '预估自然CPM', width: 120 },
|
||||
{ key: 'estimated_natural_search_uv', label: '预估自然看后搜人数', width: 150 },
|
||||
{ key: 'estimated_natural_search_cost', label: '预估看后搜成本', width: 150 },
|
||||
];
|
||||
|
||||
const PAGE_SIZE = 20;
|
||||
|
||||
export default function ResultTable({ data, total }: ResultTableProps) {
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
const [sortKey, setSortKey] = useState<string | null>(null);
|
||||
const [sortOrder, setSortOrder] = useState<'asc' | 'desc'>('desc');
|
||||
|
||||
// 排序
|
||||
const sortedData = [...data].sort((a, b) => {
|
||||
if (!sortKey) return 0;
|
||||
const aVal = a[sortKey as keyof VideoData];
|
||||
const bVal = b[sortKey as keyof VideoData];
|
||||
if (aVal === null || aVal === undefined) return 1;
|
||||
if (bVal === null || bVal === undefined) return -1;
|
||||
if (typeof aVal === 'number' && typeof bVal === 'number') {
|
||||
return sortOrder === 'asc' ? aVal - bVal : bVal - aVal;
|
||||
}
|
||||
return sortOrder === 'asc'
|
||||
? String(aVal).localeCompare(String(bVal))
|
||||
: String(bVal).localeCompare(String(aVal));
|
||||
});
|
||||
|
||||
// 分页
|
||||
const totalPages = Math.ceil(sortedData.length / PAGE_SIZE);
|
||||
const paginatedData = sortedData.slice(
|
||||
(currentPage - 1) * PAGE_SIZE,
|
||||
currentPage * PAGE_SIZE
|
||||
);
|
||||
|
||||
const handleSort = (key: string) => {
|
||||
if (sortKey === key) {
|
||||
setSortOrder(sortOrder === 'asc' ? 'desc' : 'asc');
|
||||
} else {
|
||||
setSortKey(key);
|
||||
setSortOrder('desc');
|
||||
}
|
||||
};
|
||||
|
||||
const renderCell = (row: VideoData, key: string) => {
|
||||
const value = row[key as keyof VideoData];
|
||||
|
||||
switch (key) {
|
||||
case 'video_url':
|
||||
return value ? (
|
||||
<a
|
||||
href={value as string}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-primary hover:underline"
|
||||
>
|
||||
查看
|
||||
</a>
|
||||
) : (
|
||||
'-'
|
||||
);
|
||||
case 'natural_play_cnt':
|
||||
case 'heated_play_cnt':
|
||||
case 'total_play_cnt':
|
||||
return formatLargeNumber(value as number);
|
||||
case 'total_interact':
|
||||
case 'like_cnt':
|
||||
case 'share_cnt':
|
||||
case 'comment_cnt':
|
||||
case 'after_view_search_uv':
|
||||
case 'return_search_cnt':
|
||||
return formatNumber(value as number);
|
||||
case 'new_a3_rate':
|
||||
return formatPercent(value as number);
|
||||
case 'estimated_video_cost':
|
||||
case 'estimated_natural_search_cost':
|
||||
return formatCurrency(value as number);
|
||||
case 'estimated_natural_cpm':
|
||||
case 'estimated_natural_search_uv':
|
||||
return value !== null && value !== undefined ? (value as number).toFixed(2) : '-';
|
||||
case 'publish_time':
|
||||
return formatDate(value as string);
|
||||
case 'title':
|
||||
const title = value as string;
|
||||
return title && title.length > 20 ? (
|
||||
<span title={title}>{title.slice(0, 20)}...</span>
|
||||
) : (
|
||||
title || '-'
|
||||
);
|
||||
default:
|
||||
return value !== null && value !== undefined ? String(value) : '-';
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow-sm">
|
||||
<div className="p-4 border-b border-gray-200 flex justify-between items-center">
|
||||
<span className="text-sm text-gray-600">查询结果 (共 {total} 条)</span>
|
||||
</div>
|
||||
|
||||
<div className="overflow-x-auto">
|
||||
<table className="min-w-full divide-y divide-gray-200">
|
||||
<thead className="bg-gray-50">
|
||||
<tr>
|
||||
{columns.map((col) => (
|
||||
<th
|
||||
key={col.key}
|
||||
onClick={() => handleSort(col.key)}
|
||||
className="px-3 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider cursor-pointer hover:bg-gray-100"
|
||||
style={{ minWidth: col.width }}
|
||||
>
|
||||
<div className="flex items-center gap-1">
|
||||
{col.label}
|
||||
{sortKey === col.key && (
|
||||
<span>{sortOrder === 'asc' ? '↑' : '↓'}</span>
|
||||
)}
|
||||
</div>
|
||||
</th>
|
||||
))}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody className="bg-white divide-y divide-gray-200">
|
||||
{paginatedData.map((row, idx) => (
|
||||
<tr key={row.item_id} className={idx % 2 === 0 ? 'bg-white' : 'bg-gray-50'}>
|
||||
{columns.map((col) => (
|
||||
<td
|
||||
key={col.key}
|
||||
className="px-3 py-2 text-sm text-gray-900 whitespace-nowrap"
|
||||
>
|
||||
{renderCell(row, col.key)}
|
||||
</td>
|
||||
))}
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
{/* 分页 */}
|
||||
{totalPages > 1 && (
|
||||
<div className="px-4 py-3 border-t border-gray-200 flex justify-center items-center gap-2">
|
||||
<button
|
||||
onClick={() => setCurrentPage((p) => Math.max(1, p - 1))}
|
||||
disabled={currentPage === 1}
|
||||
className="px-3 py-1 text-sm border border-gray-300 rounded hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
上一页
|
||||
</button>
|
||||
<span className="text-sm text-gray-600">
|
||||
{currentPage} / {totalPages}
|
||||
</span>
|
||||
<button
|
||||
onClick={() => setCurrentPage((p) => Math.min(totalPages, p + 1))}
|
||||
disabled={currentPage === totalPages}
|
||||
className="px-3 py-1 text-sm border border-gray-300 rounded hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
下一页
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -1,406 +0,0 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useMemo, useCallback, memo } from 'react';
|
||||
import { Table, Input, Select, Button, Card, Space, message, Modal, Descriptions, Spin } from 'antd';
|
||||
import { SearchOutlined, EyeOutlined } from '@ant-design/icons';
|
||||
import type { ColumnsType } from 'antd/es/table';
|
||||
import { VideoAnalysisData } from '@/types';
|
||||
import { searchVideos, VideoListItem, getVideoAnalysis } from '@/lib/api';
|
||||
|
||||
// 搜索类型选项
|
||||
type SearchType = 'star_id' | 'unique_id' | 'nickname';
|
||||
|
||||
const SEARCH_TYPE_OPTIONS = [
|
||||
{ value: 'star_id' as SearchType, label: '星图ID' },
|
||||
{ value: 'unique_id' as SearchType, label: '达人unique_id' },
|
||||
{ value: 'nickname' as SearchType, label: '达人昵称' },
|
||||
];
|
||||
|
||||
const SEARCH_PLACEHOLDER: Record<SearchType, string> = {
|
||||
star_id: '请输入星图ID',
|
||||
unique_id: '请输入达人unique_id',
|
||||
nickname: '请输入达人昵称关键词',
|
||||
};
|
||||
|
||||
// 格式化数字(千分位,保留2位小数)
|
||||
function formatNumber(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) return '-';
|
||||
return num.toLocaleString('zh-CN', { minimumFractionDigits: 2, maximumFractionDigits: 2 });
|
||||
}
|
||||
|
||||
// 格式化整数(千分位)
|
||||
function formatInt(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) return '-';
|
||||
return Math.round(num).toLocaleString('zh-CN');
|
||||
}
|
||||
|
||||
// 详情弹窗组件 - 使用 memo 避免不必要的重渲染
|
||||
const DetailModal = memo(function DetailModal({
|
||||
visible,
|
||||
data,
|
||||
loading,
|
||||
onClose,
|
||||
}: {
|
||||
visible: boolean;
|
||||
data: VideoAnalysisData | null;
|
||||
loading: boolean;
|
||||
onClose: () => void;
|
||||
}) {
|
||||
return (
|
||||
<Modal
|
||||
title="视频详情"
|
||||
open={visible}
|
||||
onCancel={onClose}
|
||||
footer={null}
|
||||
width={900}
|
||||
styles={{ body: { maxHeight: '70vh', overflowY: 'auto', userSelect: 'text', WebkitUserSelect: 'text' } }}
|
||||
>
|
||||
{loading ? (
|
||||
<div style={{ textAlign: 'center', padding: 50 }}>
|
||||
<Spin size="large" />
|
||||
</div>
|
||||
) : data ? (
|
||||
<Space direction="vertical" size="middle" style={{ width: '100%', userSelect: 'text', WebkitUserSelect: 'text' } as React.CSSProperties}>
|
||||
{/* 基础信息 */}
|
||||
<Descriptions title="基础信息" bordered size="small" column={2} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="达人昵称"><span style={{ userSelect: 'text' }}>{data.base_info.star_nickname || '-'}</span></Descriptions.Item>
|
||||
<Descriptions.Item label="达人unique_id"><span style={{ userSelect: 'text' }}>{data.base_info.star_unique_id || '-'}</span></Descriptions.Item>
|
||||
<Descriptions.Item label="视频ID">{data.base_info.vid || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="发布时间">{data.base_info.create_date || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="爆文类型">{data.base_info.hot_type || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="合作行业">{data.base_info.industry_id || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="合作品牌">{data.base_info.brand_name || data.base_info.brand_id || '-'}</Descriptions.Item>
|
||||
<Descriptions.Item label="视频标题" span={2}>
|
||||
{data.base_info.video_url ? (
|
||||
<a href={data.base_info.video_url} target="_blank" rel="noopener noreferrer">
|
||||
{data.base_info.title || '查看视频'}
|
||||
</a>
|
||||
) : (
|
||||
data.base_info.title || '-'
|
||||
)}
|
||||
</Descriptions.Item>
|
||||
</Descriptions>
|
||||
|
||||
{/* 触达指标 */}
|
||||
<Descriptions title="触达指标" bordered size="small" column={4} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="自然曝光数">{formatInt(data.reach_metrics.natural_play_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="加热曝光数">{formatInt(data.reach_metrics.heated_play_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="总曝光数">{formatInt(data.reach_metrics.total_play_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="总互动">{formatInt(data.reach_metrics.total_interaction_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="点赞">{formatInt(data.reach_metrics.digg_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="转发">{formatInt(data.reach_metrics.share_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="评论">{formatInt(data.reach_metrics.comment_cnt)}</Descriptions.Item>
|
||||
</Descriptions>
|
||||
|
||||
{/* A3指标 */}
|
||||
<Descriptions title="A3指标" bordered size="small" column={3} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="新增A3">{formatInt(data.a3_metrics.total_new_a3_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="加热新增A3">{formatInt(data.a3_metrics.heated_new_a3_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="自然新增A3">{formatInt(data.a3_metrics.natural_new_a3_cnt)}</Descriptions.Item>
|
||||
</Descriptions>
|
||||
|
||||
{/* 搜索指标 */}
|
||||
<Descriptions title="搜索指标" bordered size="small" column={3} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="回搜人数">{formatInt(data.search_metrics.back_search_uv)}</Descriptions.Item>
|
||||
<Descriptions.Item label="回搜次数">{formatInt(data.search_metrics.back_search_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="看后搜人数">{formatInt(data.search_metrics.after_view_search_uv)}</Descriptions.Item>
|
||||
<Descriptions.Item label="看后搜次数">{formatInt(data.search_metrics.after_view_search_cnt)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估自然看后搜人数">{formatNumber(data.search_metrics.estimated_natural_search_uv)}</Descriptions.Item>
|
||||
</Descriptions>
|
||||
|
||||
{/* 费用指标 */}
|
||||
<Descriptions title="费用指标" bordered size="small" column={3} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="预估总费用">{formatNumber(data.cost_metrics.total_cost)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估加热费用">{formatNumber(data.cost_metrics.heated_cost)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估视频采买费用">{formatNumber(data.cost_metrics.estimated_video_cost)}</Descriptions.Item>
|
||||
</Descriptions>
|
||||
|
||||
{/* 成本指标 */}
|
||||
<Descriptions title="成本指标" bordered size="small" column={3} contentStyle={{ userSelect: 'text', cursor: 'text' }}>
|
||||
<Descriptions.Item label="预估CPM">{formatNumber(data.calculated_metrics.estimated_cpm)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估自然CPM">{formatNumber(data.calculated_metrics.estimated_natural_cpm)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估CPA3">{formatNumber(data.calculated_metrics.estimated_cp_a3)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估自然CPA3">{formatNumber(data.calculated_metrics.estimated_natural_cp_a3)}</Descriptions.Item>
|
||||
<Descriptions.Item label="预估CPsearch">{formatNumber(data.calculated_metrics.estimated_cp_search)}</Descriptions.Item>
|
||||
<Descriptions.Item label="自然CPsearch">{formatNumber(data.calculated_metrics.estimated_natural_cp_search)}</Descriptions.Item>
|
||||
</Descriptions>
|
||||
</Space>
|
||||
) : null}
|
||||
</Modal>
|
||||
);
|
||||
});
|
||||
|
||||
export default function VideoAnalysis() {
|
||||
const [searchType, setSearchType] = useState<SearchType>('star_id');
|
||||
const [searchValue, setSearchValue] = useState('');
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [listData, setListData] = useState<VideoListItem[]>([]);
|
||||
|
||||
// 详情弹窗状态
|
||||
const [detailVisible, setDetailVisible] = useState(false);
|
||||
const [detailLoading, setDetailLoading] = useState(false);
|
||||
const [detailData, setDetailData] = useState<VideoAnalysisData | null>(null);
|
||||
|
||||
// 详情缓存 - 避免重复请求
|
||||
const [detailCache, setDetailCache] = useState<Record<string, VideoAnalysisData>>({});
|
||||
|
||||
// 使用 useCallback 包裹搜索处理器
|
||||
const handleSearch = useCallback(async () => {
|
||||
if (!searchValue.trim()) {
|
||||
message.warning(`请输入${SEARCH_TYPE_OPTIONS.find(o => o.value === searchType)?.label}`);
|
||||
return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
try {
|
||||
const response = await searchVideos({
|
||||
type: searchType,
|
||||
value: searchValue.trim(),
|
||||
});
|
||||
|
||||
if (response.success) {
|
||||
setListData(response.data as VideoListItem[]);
|
||||
if ((response.data as VideoListItem[]).length === 0) {
|
||||
message.info('未找到相关视频');
|
||||
}
|
||||
} else {
|
||||
message.error(response.error || '搜索失败');
|
||||
}
|
||||
} catch (err) {
|
||||
message.error(err instanceof Error ? err.message : '搜索失败');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, [searchType, searchValue]);
|
||||
|
||||
// 使用 useCallback 包裹详情查看处理器,带缓存逻辑
|
||||
const handleViewDetail = useCallback(async (itemId: string) => {
|
||||
// 检查缓存
|
||||
if (detailCache[itemId]) {
|
||||
setDetailData(detailCache[itemId]);
|
||||
setDetailVisible(true);
|
||||
return;
|
||||
}
|
||||
|
||||
setDetailVisible(true);
|
||||
setDetailLoading(true);
|
||||
setDetailData(null);
|
||||
|
||||
try {
|
||||
const response = await getVideoAnalysis(itemId);
|
||||
if (response.success) {
|
||||
setDetailData(response.data);
|
||||
// 缓存结果
|
||||
setDetailCache(prev => ({ ...prev, [itemId]: response.data }));
|
||||
} else {
|
||||
message.error(response.error || '获取详情失败');
|
||||
setDetailVisible(false);
|
||||
}
|
||||
} catch (err) {
|
||||
message.error(err instanceof Error ? err.message : '获取详情失败');
|
||||
setDetailVisible(false);
|
||||
} finally {
|
||||
setDetailLoading(false);
|
||||
}
|
||||
}, [detailCache]);
|
||||
|
||||
// 关闭详情弹窗的回调
|
||||
const handleCloseDetail = useCallback(() => {
|
||||
setDetailVisible(false);
|
||||
}, []);
|
||||
|
||||
// 使用 useMemo 包裹表格列定义,避免每次重建
|
||||
const columns: ColumnsType<VideoListItem> = useMemo(() => [
|
||||
{
|
||||
title: '达人昵称',
|
||||
dataIndex: 'star_nickname',
|
||||
key: 'star_nickname',
|
||||
width: 120,
|
||||
fixed: 'left',
|
||||
render: (text) => text || '-',
|
||||
},
|
||||
{
|
||||
title: '视频标题',
|
||||
dataIndex: 'title',
|
||||
key: 'title',
|
||||
width: 200,
|
||||
ellipsis: true,
|
||||
render: (text, record) =>
|
||||
record.video_url ? (
|
||||
<a href={record.video_url} target="_blank" rel="noopener noreferrer">
|
||||
{text || '查看视频'}
|
||||
</a>
|
||||
) : (
|
||||
text || '-'
|
||||
),
|
||||
},
|
||||
{
|
||||
title: '发布时间',
|
||||
dataIndex: 'create_date',
|
||||
key: 'create_date',
|
||||
width: 110,
|
||||
render: (text) => (text ? text.split('T')[0] : '-'),
|
||||
},
|
||||
{
|
||||
title: '爆文类型',
|
||||
dataIndex: 'hot_type',
|
||||
key: 'hot_type',
|
||||
width: 90,
|
||||
render: (text) => text || '-',
|
||||
},
|
||||
{
|
||||
title: '合作行业',
|
||||
dataIndex: 'industry_id',
|
||||
key: 'industry_id',
|
||||
width: 90,
|
||||
render: (text) => text || '-',
|
||||
},
|
||||
{
|
||||
title: '合作品牌',
|
||||
dataIndex: 'brand_name',
|
||||
key: 'brand_name',
|
||||
width: 100,
|
||||
render: (text) => text || '-',
|
||||
},
|
||||
{
|
||||
title: '新增A3',
|
||||
dataIndex: 'total_new_a3_cnt',
|
||||
key: 'total_new_a3_cnt',
|
||||
width: 90,
|
||||
align: 'right',
|
||||
render: (val) => formatInt(val),
|
||||
},
|
||||
{
|
||||
title: '加热A3',
|
||||
dataIndex: 'heated_new_a3_cnt',
|
||||
key: 'heated_new_a3_cnt',
|
||||
width: 90,
|
||||
align: 'right',
|
||||
render: (val) => formatInt(val),
|
||||
},
|
||||
{
|
||||
title: '自然A3',
|
||||
dataIndex: 'natural_new_a3_cnt',
|
||||
key: 'natural_new_a3_cnt',
|
||||
width: 90,
|
||||
align: 'right',
|
||||
render: (val) => formatInt(val),
|
||||
},
|
||||
{
|
||||
title: '预估自然CPM',
|
||||
dataIndex: 'estimated_natural_cpm',
|
||||
key: 'estimated_natural_cpm',
|
||||
width: 110,
|
||||
align: 'right',
|
||||
render: (val) => formatNumber(val),
|
||||
},
|
||||
{
|
||||
title: '预估CPA3',
|
||||
dataIndex: 'estimated_cp_a3',
|
||||
key: 'estimated_cp_a3',
|
||||
width: 100,
|
||||
align: 'right',
|
||||
render: (val) => formatNumber(val),
|
||||
},
|
||||
{
|
||||
title: '预估自然CPA3',
|
||||
dataIndex: 'estimated_natural_cp_a3',
|
||||
key: 'estimated_natural_cp_a3',
|
||||
width: 110,
|
||||
align: 'right',
|
||||
render: (val) => formatNumber(val),
|
||||
},
|
||||
{
|
||||
title: '预估CPsearch',
|
||||
dataIndex: 'estimated_cp_search',
|
||||
key: 'estimated_cp_search',
|
||||
width: 110,
|
||||
align: 'right',
|
||||
render: (val) => formatNumber(val),
|
||||
},
|
||||
{
|
||||
title: '自然CPsearch',
|
||||
dataIndex: 'estimated_natural_cp_search',
|
||||
key: 'estimated_natural_cp_search',
|
||||
width: 110,
|
||||
align: 'right',
|
||||
render: (val) => formatNumber(val),
|
||||
},
|
||||
{
|
||||
title: '操作',
|
||||
key: 'action',
|
||||
width: 80,
|
||||
fixed: 'right',
|
||||
render: (_, record) => (
|
||||
<Button
|
||||
type="link"
|
||||
icon={<EyeOutlined />}
|
||||
onClick={() => handleViewDetail(record.item_id)}
|
||||
>
|
||||
详情
|
||||
</Button>
|
||||
),
|
||||
},
|
||||
], [handleViewDetail]);
|
||||
|
||||
return (
|
||||
<div style={{ padding: 24, maxWidth: 1600, margin: '0 auto' }}>
|
||||
{/* 搜索区域 */}
|
||||
<Card style={{ marginBottom: 24 }}>
|
||||
<h1 style={{ fontSize: 20, fontWeight: 'bold', marginBottom: 16 }}>KOL 视频分析</h1>
|
||||
<Space.Compact style={{ width: '100%', maxWidth: 600 }}>
|
||||
<Select
|
||||
value={searchType}
|
||||
onChange={(val) => {
|
||||
setSearchType(val);
|
||||
setSearchValue('');
|
||||
}}
|
||||
style={{ width: 140 }}
|
||||
options={SEARCH_TYPE_OPTIONS}
|
||||
/>
|
||||
<Input
|
||||
value={searchValue}
|
||||
onChange={(e) => setSearchValue(e.target.value)}
|
||||
placeholder={SEARCH_PLACEHOLDER[searchType]}
|
||||
onPressEnter={handleSearch}
|
||||
style={{ flex: 1 }}
|
||||
/>
|
||||
<Button
|
||||
type="primary"
|
||||
icon={<SearchOutlined />}
|
||||
onClick={handleSearch}
|
||||
loading={loading}
|
||||
>
|
||||
查询
|
||||
</Button>
|
||||
</Space.Compact>
|
||||
</Card>
|
||||
|
||||
{/* 结果表格 - 启用虚拟滚动 */}
|
||||
<Card>
|
||||
<Table
|
||||
columns={columns}
|
||||
dataSource={listData}
|
||||
rowKey="item_id"
|
||||
loading={loading}
|
||||
virtual
|
||||
scroll={{ x: 1800, y: 600 }}
|
||||
pagination={{
|
||||
showSizeChanger: true,
|
||||
showQuickJumper: true,
|
||||
showTotal: (total) => `共 ${total} 条`,
|
||||
}}
|
||||
locale={{ emptyText: '暂无数据,请输入搜索条件' }}
|
||||
/>
|
||||
</Card>
|
||||
|
||||
{/* 详情弹窗 */}
|
||||
<DetailModal
|
||||
visible={detailVisible}
|
||||
data={detailData}
|
||||
loading={detailLoading}
|
||||
onClose={handleCloseDetail}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -1,6 +1,2 @@
|
||||
export { default as Header } from './Header';
|
||||
export { default as Footer } from './Footer';
|
||||
export { default as QueryForm } from './QueryForm';
|
||||
export { default as ResultTable } from './ResultTable';
|
||||
export { default as ExportButton } from './ExportButton';
|
||||
export { default as AntdProvider } from './AntdProvider';
|
||||
|
||||
@ -1,99 +0,0 @@
|
||||
import { QueryRequest, QueryResponse, VideoAnalysisResponse, VideoAnalysisData } from '@/types';
|
||||
|
||||
const API_BASE_URL = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000/api/v1';
|
||||
|
||||
export async function queryVideos(request: QueryRequest): Promise<QueryResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/query`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(request),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`查询失败: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
export async function exportData(format: 'xlsx' | 'csv'): Promise<Blob> {
|
||||
const response = await fetch(`${API_BASE_URL}/export?format=${format}`);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`导出失败: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.blob();
|
||||
}
|
||||
|
||||
// 获取视频分析数据 (T-026)
|
||||
export async function getVideoAnalysis(itemId: string): Promise<VideoAnalysisResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/videos/${itemId}/analysis`);
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('视频不存在');
|
||||
}
|
||||
throw new Error(`获取分析数据失败: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
// 搜索视频
|
||||
export interface SearchRequest {
|
||||
type: 'star_id' | 'unique_id' | 'nickname';
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface VideoListItem {
|
||||
item_id: string;
|
||||
// 基础信息(标1)
|
||||
star_nickname: string;
|
||||
title: string;
|
||||
video_url: string;
|
||||
create_date: string | null;
|
||||
hot_type: string;
|
||||
industry_id: string;
|
||||
brand_id: string;
|
||||
brand_name: string;
|
||||
// A3指标(标1)
|
||||
total_new_a3_cnt: number;
|
||||
heated_new_a3_cnt: number;
|
||||
natural_new_a3_cnt: number;
|
||||
// 成本指标(标1)
|
||||
estimated_natural_cpm: number | null;
|
||||
estimated_cp_a3: number | null;
|
||||
estimated_natural_cp_a3: number | null;
|
||||
estimated_cp_search: number | null;
|
||||
estimated_natural_cp_search: number | null;
|
||||
}
|
||||
|
||||
export interface SearchResponse {
|
||||
success: boolean;
|
||||
type: 'detail' | 'list';
|
||||
data: VideoAnalysisData | VideoListItem[];
|
||||
total: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export async function searchVideos(request: SearchRequest): Promise<SearchResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/videos/search`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(request),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('未找到相关视频');
|
||||
}
|
||||
throw new Error(`搜索失败: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
@ -1,70 +0,0 @@
|
||||
/**
|
||||
* 格式化数字为千分位分隔
|
||||
*/
|
||||
export function formatNumber(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) {
|
||||
return '-';
|
||||
}
|
||||
return num.toLocaleString('zh-CN');
|
||||
}
|
||||
|
||||
/**
|
||||
* 格式化大数值 (K/M 缩写)
|
||||
*/
|
||||
export function formatLargeNumber(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) {
|
||||
return '-';
|
||||
}
|
||||
if (num >= 1000000) {
|
||||
return `${(num / 1000000).toFixed(1)}M`;
|
||||
}
|
||||
if (num >= 1000) {
|
||||
return `${(num / 1000).toFixed(1)}K`;
|
||||
}
|
||||
return num.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* 格式化百分比
|
||||
*/
|
||||
export function formatPercent(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) {
|
||||
return '-';
|
||||
}
|
||||
return `${(num * 100).toFixed(2)}%`;
|
||||
}
|
||||
|
||||
/**
|
||||
* 格式化金额
|
||||
*/
|
||||
export function formatCurrency(num: number | null | undefined): string {
|
||||
if (num === null || num === undefined) {
|
||||
return '-';
|
||||
}
|
||||
return `¥${num.toLocaleString('zh-CN', { minimumFractionDigits: 2, maximumFractionDigits: 2 })}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* 格式化日期
|
||||
*/
|
||||
export function formatDate(dateStr: string | null | undefined): string {
|
||||
if (!dateStr) {
|
||||
return '-';
|
||||
}
|
||||
const date = new Date(dateStr);
|
||||
return date.toLocaleDateString('zh-CN', {
|
||||
year: 'numeric',
|
||||
month: '2-digit',
|
||||
day: '2-digit',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析输入文本为数组 (按换行分隔)
|
||||
*/
|
||||
export function parseInputToArray(input: string): string[] {
|
||||
return input
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0);
|
||||
}
|
||||
@ -1,125 +0,0 @@
|
||||
// 查询类型
|
||||
export type QueryType = 'star_id' | 'unique_id' | 'nickname';
|
||||
|
||||
// 查询请求
|
||||
export interface QueryRequest {
|
||||
type: QueryType;
|
||||
values: string[];
|
||||
}
|
||||
|
||||
// 视频数据
|
||||
export interface VideoData {
|
||||
item_id: string;
|
||||
title: string | null;
|
||||
viral_type: string | null;
|
||||
video_url: string | null;
|
||||
star_id: string;
|
||||
star_unique_id: string;
|
||||
star_nickname: string;
|
||||
publish_time: string | null;
|
||||
natural_play_cnt: number;
|
||||
heated_play_cnt: number;
|
||||
total_play_cnt: number;
|
||||
total_interact: number;
|
||||
like_cnt: number;
|
||||
share_cnt: number;
|
||||
comment_cnt: number;
|
||||
new_a3_rate: number | null;
|
||||
after_view_search_uv: number;
|
||||
return_search_cnt: number;
|
||||
industry_id: string | null;
|
||||
industry_name: string | null;
|
||||
brand_id: string | null;
|
||||
brand_name: string | null;
|
||||
estimated_video_cost: number;
|
||||
estimated_natural_cpm: number | null;
|
||||
estimated_natural_search_uv: number | null;
|
||||
estimated_natural_search_cost: number | null;
|
||||
}
|
||||
|
||||
// 查询响应
|
||||
export interface QueryResponse {
|
||||
success: boolean;
|
||||
data: VideoData[];
|
||||
total: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
// 页面状态
|
||||
export type PageState = 'default' | 'input' | 'loading' | 'result' | 'empty' | 'error';
|
||||
|
||||
// 查询方式选项
|
||||
export const QUERY_TYPE_OPTIONS = [
|
||||
{ value: 'star_id' as QueryType, label: '星图ID' },
|
||||
{ value: 'unique_id' as QueryType, label: '达人unique_id' },
|
||||
{ value: 'nickname' as QueryType, label: '达人昵称' },
|
||||
];
|
||||
|
||||
// 查询方式对应的提示文本
|
||||
export const QUERY_PLACEHOLDER: Record<QueryType, string> = {
|
||||
star_id: '请输入星图ID,每行一个...',
|
||||
unique_id: '请输入达人unique_id,每行一个...',
|
||||
nickname: '请输入达人昵称关键词...',
|
||||
};
|
||||
|
||||
// 视频分析数据 (按用户字段对照表定义6大类)
|
||||
export interface VideoAnalysisData {
|
||||
// 基础信息
|
||||
base_info: {
|
||||
star_nickname: string;
|
||||
star_unique_id: string;
|
||||
vid: string;
|
||||
title: string;
|
||||
create_date: string | null;
|
||||
hot_type: string;
|
||||
industry_id: string;
|
||||
brand_id: string;
|
||||
brand_name: string;
|
||||
video_url: string;
|
||||
};
|
||||
// 触达指标
|
||||
reach_metrics: {
|
||||
natural_play_cnt: number;
|
||||
heated_play_cnt: number;
|
||||
total_play_cnt: number;
|
||||
total_interaction_cnt: number;
|
||||
digg_cnt: number;
|
||||
share_cnt: number;
|
||||
comment_cnt: number;
|
||||
};
|
||||
// A3指标
|
||||
a3_metrics: {
|
||||
total_new_a3_cnt: number;
|
||||
heated_new_a3_cnt: number;
|
||||
natural_new_a3_cnt: number;
|
||||
};
|
||||
// 搜索指标
|
||||
search_metrics: {
|
||||
back_search_uv: number;
|
||||
back_search_cnt: number;
|
||||
after_view_search_uv: number;
|
||||
after_view_search_cnt: number;
|
||||
estimated_natural_search_uv: number | null;
|
||||
};
|
||||
// 费用指标
|
||||
cost_metrics: {
|
||||
total_cost: number;
|
||||
heated_cost: number;
|
||||
estimated_video_cost: number;
|
||||
};
|
||||
// 成本指标(实时计算)
|
||||
calculated_metrics: {
|
||||
estimated_cpm: number | null;
|
||||
estimated_natural_cpm: number | null;
|
||||
estimated_cp_a3: number | null;
|
||||
estimated_natural_cp_a3: number | null;
|
||||
estimated_cp_search: number | null;
|
||||
estimated_natural_cp_search: number | null;
|
||||
};
|
||||
}
|
||||
|
||||
export interface VideoAnalysisResponse {
|
||||
success: boolean;
|
||||
data: VideoAnalysisData;
|
||||
error?: string;
|
||||
}
|
||||
Loading…
x
Reference in New Issue
Block a user