89 lines
2.8 KiB
JavaScript
89 lines
2.8 KiB
JavaScript
const http = require('http')
|
||
const path = require('path')
|
||
const fs = require('fs')
|
||
const express = require('express')
|
||
const cors = require('cors')
|
||
const { WebSocketServer } = require('ws')
|
||
const db = require('./db')
|
||
const routes = require('./routes')
|
||
const { getSituation } = require('./situationData')
|
||
|
||
// Express application setup. Registration order below is load-bearing:
// CORS and JSON body parsing must be installed before any route that uses them.
const app = express()
const PORT = process.env.API_PORT || 3001

// Shared secret for crawler notifications: API_CRAWLER_TOKEN (passed only
// between the server and the crawler process). Empty/unset disables the check.
const CRAWLER_TOKEN = process.env.API_CRAWLER_TOKEN || ''

// Trust the first proxy hop so req.ip/req.protocol reflect the real client
// (NOTE(review): assumes deployment behind exactly one reverse proxy — confirm).
app.set('trust proxy', 1)
app.use(cors())
app.use(express.json())

// All API routes are mounted under /api (see ./routes).
app.use('/api', routes)
// Liveness probe. NOTE(review): registered AFTER the /api router, so a
// /health route inside `routes` would shadow this one — confirm intended.
app.get('/api/health', (_, res) => res.json({ ok: true }))
|
||
// Notification hook for the crawler process: reload the DB and broadcast.
// When API_CRAWLER_TOKEN is configured, the caller must present the same
// secret in the X-Crawler-Token header. The comparison is constant-time
// (crypto.timingSafeEqual) so the token cannot be recovered via timing probes,
// which the original `!==` comparison allowed.
app.post('/api/crawler/notify', (req, res) => {
  if (CRAWLER_TOKEN) {
    const { timingSafeEqual } = require('crypto')
    const token = req.headers['x-crawler-token']
    const expected = Buffer.from(CRAWLER_TOKEN)
    const provided = typeof token === 'string' ? Buffer.from(token) : Buffer.alloc(0)
    // timingSafeEqual throws on length mismatch, so compare lengths first;
    // this leaks only the token's length, never its bytes.
    const authorized =
      provided.length === expected.length && timingSafeEqual(provided, expected)
    if (!authorized) {
      return res.status(401).json({ error: 'unauthorized' })
    }
  }
  notifyCrawlerUpdate()
  res.json({ ok: true })
})
|
||
|
||
// Production mode: serve the built frontend from ../dist when it exists.
const distPath = path.join(__dirname, '..', 'dist')
if (fs.existsSync(distPath)) {
  app.use(express.static(distPath))
  // SPA history fallback: any path that is not an API or WebSocket route
  // gets index.html; reserved paths fall through to later handlers.
  app.get('*', (req, res, next) => {
    const reserved = req.path.startsWith('/api') || req.path === '/ws'
    if (reserved) {
      next()
    } else {
      res.sendFile(path.join(distPath, 'index.html'))
    }
  })
}
|
||
|
||
// Wrap the Express app in a plain HTTP server so the WebSocket endpoint
// can share the same port under the /ws path.
const server = http.createServer(app)

const { getStats } = require('./stats')

const wss = new WebSocketServer({ server, path: '/ws' })

// Push a full situation + stats snapshot to each client on connect.
wss.on('connection', (socket) => {
  const snapshot = { type: 'situation', data: getSituation(), stats: getStats() }
  socket.send(JSON.stringify(snapshot))
})
|
||
|
||
// Broadcast the current situation snapshot (plus stats) to every connected
// WebSocket client. Best-effort: a serialization or send failure must not
// kill the interval timer — but it should not be invisible either, so the
// previously silent `catch (_) {}` now logs the error.
function broadcastSituation() {
  try {
    const payload = JSON.stringify({ type: 'situation', data: getSituation(), stats: getStats() })
    wss.clients.forEach((client) => {
      // 1 === WebSocket.OPEN; skip sockets still connecting or closing.
      if (client.readyState === 1) client.send(payload)
    })
  } catch (err) {
    console.error('[broadcastSituation]', err?.message || err)
  }
}
// Expose for route handlers (via req.app.get('broadcastSituation'))
// and push a fresh snapshot every 3 seconds.
app.set('broadcastSituation', broadcastSituation)
setInterval(broadcastSituation, 3000)
|
||
|
||
// Called by the crawler (via POST /api/crawler/notify): reload the DB from
// disk so the crawler's writes become visible, bump updated_at, then
// broadcast the refreshed situation to all WebSocket clients immediately.
function notifyCrawlerUpdate() {
  try {
    // Use the module-level `db` handle; the original re-required './db' here,
    // shadowing it — require caching returns the same instance anyway.
    db.reloadFromFile()
    db.prepare("INSERT OR REPLACE INTO situation (id, data, updated_at) VALUES (1, '{}', ?)").run(new Date().toISOString())
    broadcastSituation()
    const n = db.prepare('SELECT COUNT(*) as c FROM situation_update').get().c
    console.log('[crawler/notify] DB 已重载并广播,situation_update 条数:', n)
  } catch (e) {
    console.error('[crawler/notify]', e?.message || e)
  }
}
|
||
|
||
// Initialise the database, then start accepting HTTP/WebSocket traffic.
// Any init failure (or a synchronous listen error) is fatal.
;(async () => {
  try {
    await db.initDb()
    server.listen(PORT, () => {
      console.log(`API + WebSocket running at http://localhost:${PORT}`)
      console.log(`Swagger docs at http://localhost:${PORT}/api-docs`)
    })
  } catch (err) {
    console.error('DB init failed:', err)
    process.exit(1)
  }
})()
|