This commit is contained in:
Daniel
2026-03-04 16:48:17 +08:00
parent 64f4c438c3
commit 26938449f0
34 changed files with 956 additions and 500 deletions

39
scripts/check-db-and-crawler.sh Executable file
View File

@@ -0,0 +1,39 @@
#!/usr/bin/env bash
# Report the database's lastUpdated timestamp and row counts, and print
# instructions for refreshing the data with the crawler.
#
# Usage: ./scripts/check-db-and-crawler.sh
# Env overrides:
#   PROJECT_ROOT - repo root (default: parent of this script's directory)
#   DB_PATH      - sqlite database file (default: $PROJECT_ROOT/server/data.db)
set -euo pipefail

# Resolve the project root relative to this script unless overridden.
PROJECT_ROOT="${PROJECT_ROOT:-$(cd "$(dirname "$0")/.." && pwd)}"
DB_PATH="${DB_PATH:-$PROJECT_ROOT/server/data.db}"
readonly PROJECT_ROOT DB_PATH

echo "=========================================="
echo "数据库与爬虫状态"
echo "DB: $DB_PATH"
echo "=========================================="

# Without a database file there is nothing to inspect; tell the user how to
# create it and bail out.
if [[ ! -f "$DB_PATH" ]]; then
  echo "数据库文件不存在。请先执行: node server/seed.js"
  exit 1
fi

if command -v sqlite3 >/dev/null 2>&1; then
  # Each query falls back to "?" so a schema mismatch or read error does not
  # abort the report under `set -e`.
  UPDATED_AT=$(sqlite3 "$DB_PATH" "SELECT updated_at FROM situation WHERE id = 1;" 2>/dev/null || echo "?")
  SU_COUNT=$(sqlite3 "$DB_PATH" "SELECT COUNT(*) FROM situation_update;" 2>/dev/null || echo "?")
  NEWS_COUNT=$(sqlite3 "$DB_PATH" "SELECT COUNT(*) FROM news_content;" 2>/dev/null || echo "?")
  echo "situation.updated_at (前端 lastUpdated): $UPDATED_AT"
  echo "situation_update 条数: $SU_COUNT"
  echo "news_content 条数: $NEWS_COUNT"
else
  echo "未安装 sqlite3无法直接查库。可安装: brew install sqlite3"
fi

echo ""
echo "--- 为何数据停在旧日期? ---"
echo " • lastUpdated 来自 situation.updated_at。"
echo " • 已改为:每次爬虫运行都会更新该时间(不再仅在有新资讯时更新)。"
echo " • 若从未跑爬虫或很久没跑,请执行一次爬虫:"
echo ""
echo " cd $PROJECT_ROOT && python crawler/run_once.py"
echo " 或: npm run crawler:once"
echo ""
echo " 若需定时更新,可启动常驻爬虫: python crawler/main.py"
echo "=========================================="