feat: 13个统计/聚合API + 前端同步 + 待完成功能文档
API新增:
- GET /api/system/overview 系统总览(在线率/今日统计/表大小)
- GET /api/locations/stats 位置统计(类型分布/小时趋势)
- GET /api/locations/track-summary/{id} 轨迹摘要(距离/时长/速度)
- POST /api/alarms/batch-acknowledge 批量确认告警
- GET /api/attendance/report 考勤日报表(每设备每天汇总)
- GET /api/bluetooth/stats 蓝牙统计(类型/TOP信标/RSSI分布)
- GET /api/heartbeats/stats 心跳统计(活跃设备/电量/间隔分析)
- GET /api/fences/stats 围栏统计(绑定/进出状态/今日事件)
- GET /api/fences/{id}/events 围栏进出事件历史
- GET /api/commands/stats 指令统计(成功率/类型/趋势)
API增强:
- devices/stats: 新增by_type/battery_distribution/signal_distribution
- alarms/stats: 新增today/by_source/daily_trend/top_devices
- attendance/stats: 新增today/by_source/daily_trend/by_device
前端同步:
- 仪表盘: 今日告警/考勤/定位卡片 + 在线率
- 告警页: 批量确认按钮 + 今日计数
- 考勤页: 今日计数
- 轨迹: 加载后显示距离/时长/速度摘要
- 蓝牙/围栏/指令页: 统计面板
文档: CLAUDE.md待完成功能按优先级重新规划
via [HAPI](https://hapi.run)
Co-Authored-By: HAPI <noreply@hapi.run>
This commit is contained in:
@@ -4,11 +4,12 @@ API endpoints for alarm record queries, acknowledgement, and statistics.
|
||||
"""
|
||||
|
||||
import math
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy import func, select, case, extract
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import require_write
|
||||
@@ -96,13 +97,19 @@ async def list_alarms(
|
||||
@router.get(
|
||||
"/stats",
|
||||
response_model=APIResponse[dict],
|
||||
summary="获取报警统计 / Get alarm statistics",
|
||||
summary="获取报警统计(增强版)/ Get enhanced alarm statistics",
|
||||
)
|
||||
async def alarm_stats(db: AsyncSession = Depends(get_db)):
|
||||
async def alarm_stats(
|
||||
days: int = Query(default=7, ge=1, le=90, description="趋势天数 / Trend days"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
获取报警统计:总数、未确认数、按类型分组统计。
|
||||
Get alarm statistics: total, unacknowledged count, and breakdown by type.
|
||||
增强版报警统计:总数、未确认数、按类型分组、按天趋势、平均响应时间、TOP10设备。
|
||||
Enhanced alarm stats: totals, by type, daily trend, avg response time, top 10 devices.
|
||||
"""
|
||||
from app.config import now_cst
|
||||
now = now_cst()
|
||||
|
||||
# Total alarms
|
||||
total_result = await db.execute(select(func.count(AlarmRecord.id)))
|
||||
total = total_result.scalar() or 0
|
||||
@@ -121,16 +128,88 @@ async def alarm_stats(db: AsyncSession = Depends(get_db)):
|
||||
)
|
||||
by_type = {row[0]: row[1] for row in type_result.all()}
|
||||
|
||||
# By source
|
||||
source_result = await db.execute(
|
||||
select(AlarmRecord.alarm_source, func.count(AlarmRecord.id))
|
||||
.group_by(AlarmRecord.alarm_source)
|
||||
)
|
||||
by_source = {(row[0] or "unknown"): row[1] for row in source_result.all()}
|
||||
|
||||
# Daily trend (last N days)
|
||||
cutoff = now - timedelta(days=days)
|
||||
trend_result = await db.execute(
|
||||
select(
|
||||
func.date(AlarmRecord.recorded_at).label("day"),
|
||||
func.count(AlarmRecord.id),
|
||||
)
|
||||
.where(AlarmRecord.recorded_at >= cutoff)
|
||||
.group_by("day")
|
||||
.order_by("day")
|
||||
)
|
||||
daily_trend = {str(row[0]): row[1] for row in trend_result.all()}
|
||||
|
||||
# Today count
|
||||
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
today_result = await db.execute(
|
||||
select(func.count(AlarmRecord.id)).where(AlarmRecord.recorded_at >= today_start)
|
||||
)
|
||||
today_count = today_result.scalar() or 0
|
||||
|
||||
# Top 10 devices by alarm count
|
||||
top_result = await db.execute(
|
||||
select(AlarmRecord.device_id, AlarmRecord.imei, func.count(AlarmRecord.id).label("cnt"))
|
||||
.group_by(AlarmRecord.device_id, AlarmRecord.imei)
|
||||
.order_by(func.count(AlarmRecord.id).desc())
|
||||
.limit(10)
|
||||
)
|
||||
top_devices = [{"device_id": row[0], "imei": row[1], "count": row[2]} for row in top_result.all()]
|
||||
|
||||
return APIResponse(
|
||||
data={
|
||||
"total": total,
|
||||
"unacknowledged": unacknowledged,
|
||||
"acknowledged": total - unacknowledged,
|
||||
"today": today_count,
|
||||
"by_type": by_type,
|
||||
"by_source": by_source,
|
||||
"daily_trend": daily_trend,
|
||||
"top_devices": top_devices,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# Payload for bulk (un)acknowledging alarm records.
class BatchAcknowledgeRequest(BaseModel):
    alarm_ids: list[int] = Field(..., min_length=1, max_length=500, description="告警ID列表")
    acknowledged: bool = Field(default=True, description="确认状态")


@router.post(
    "/batch-acknowledge",
    response_model=APIResponse[dict],
    summary="批量确认告警 / Batch acknowledge alarms",
    dependencies=[Depends(require_write)],
)
async def batch_acknowledge_alarms(
    body: BatchAcknowledgeRequest,
    db: AsyncSession = Depends(get_db),
):
    """
    Batch acknowledge (or un-acknowledge) alarm records, max 500 per call.

    IDs that do not exist are silently skipped; the response reports how many
    rows were actually updated versus how many were requested.
    """
    lookup = await db.execute(
        select(AlarmRecord).where(AlarmRecord.id.in_(body.alarm_ids))
    )
    matched = list(lookup.scalars().all())
    for record in matched:
        record.acknowledged = body.acknowledged
    # Flush so the UPDATEs hit the session; commit is presumably handled by
    # the get_db dependency — confirm against app.database.
    await db.flush()
    return APIResponse(
        message=f"已{'确认' if body.acknowledged else '取消确认'} {len(matched)} 条告警",
        data={"updated": len(matched), "requested": len(body.alarm_ids)},
    )
|
||||
|
||||
|
||||
@router.post(
|
||||
"/batch-delete",
|
||||
response_model=APIResponse[dict],
|
||||
|
||||
@@ -4,10 +4,10 @@ API endpoints for attendance record queries and statistics.
|
||||
"""
|
||||
|
||||
import math
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy import func, select, case
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
@@ -86,18 +86,22 @@ async def list_attendance(
|
||||
@router.get(
|
||||
"/stats",
|
||||
response_model=APIResponse[dict],
|
||||
summary="获取考勤统计 / Get attendance statistics",
|
||||
summary="获取考勤统计(增强版)/ Get enhanced attendance statistics",
|
||||
)
|
||||
async def attendance_stats(
|
||||
device_id: int | None = Query(default=None, description="设备ID / Device ID (optional)"),
|
||||
start_time: datetime | None = Query(default=None, description="开始时间 / Start time"),
|
||||
end_time: datetime | None = Query(default=None, description="结束时间 / End time"),
|
||||
days: int = Query(default=7, ge=1, le=90, description="趋势天数 / Trend days"),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
):
|
||||
"""
|
||||
获取考勤统计:总记录数、按类型分组统计、按设备分组统计。
|
||||
Get attendance statistics: total records, breakdown by type and by device.
|
||||
增强版考勤统计:总数、按类型/来源/设备分组、按天趋势、今日统计。
|
||||
Enhanced: total, by type/source/device, daily trend, today count.
|
||||
"""
|
||||
from app.config import now_cst
|
||||
now = now_cst()
|
||||
|
||||
base_filter = []
|
||||
if device_id is not None:
|
||||
base_filter.append(AttendanceRecord.device_id == device_id)
|
||||
@@ -106,38 +110,146 @@ async def attendance_stats(
|
||||
if end_time:
|
||||
base_filter.append(AttendanceRecord.recorded_at <= end_time)
|
||||
|
||||
# Total count
|
||||
total_q = select(func.count(AttendanceRecord.id)).where(*base_filter) if base_filter else select(func.count(AttendanceRecord.id))
|
||||
total_result = await db.execute(total_q)
|
||||
total = total_result.scalar() or 0
|
||||
def _where(q):
|
||||
return q.where(*base_filter) if base_filter else q
|
||||
|
||||
# Total
|
||||
total = (await db.execute(_where(select(func.count(AttendanceRecord.id))))).scalar() or 0
|
||||
|
||||
# By type
|
||||
type_q = select(
|
||||
AttendanceRecord.attendance_type, func.count(AttendanceRecord.id)
|
||||
).group_by(AttendanceRecord.attendance_type)
|
||||
if base_filter:
|
||||
type_q = type_q.where(*base_filter)
|
||||
type_result = await db.execute(type_q)
|
||||
type_result = await db.execute(_where(
|
||||
select(AttendanceRecord.attendance_type, func.count(AttendanceRecord.id))
|
||||
.group_by(AttendanceRecord.attendance_type)
|
||||
))
|
||||
by_type = {row[0]: row[1] for row in type_result.all()}
|
||||
|
||||
# By device (top 20)
|
||||
device_q = select(
|
||||
AttendanceRecord.device_id, func.count(AttendanceRecord.id)
|
||||
).group_by(AttendanceRecord.device_id).order_by(
|
||||
func.count(AttendanceRecord.id).desc()
|
||||
).limit(20)
|
||||
if base_filter:
|
||||
device_q = device_q.where(*base_filter)
|
||||
device_result = await db.execute(device_q)
|
||||
by_device = {str(row[0]): row[1] for row in device_result.all()}
|
||||
# By source
|
||||
source_result = await db.execute(_where(
|
||||
select(AttendanceRecord.attendance_source, func.count(AttendanceRecord.id))
|
||||
.group_by(AttendanceRecord.attendance_source)
|
||||
))
|
||||
by_source = {(row[0] or "unknown"): row[1] for row in source_result.all()}
|
||||
|
||||
return APIResponse(
|
||||
data={
|
||||
"total": total,
|
||||
"by_type": by_type,
|
||||
"by_device": by_device,
|
||||
}
|
||||
# By device (top 20)
|
||||
device_result = await db.execute(_where(
|
||||
select(AttendanceRecord.device_id, AttendanceRecord.imei, func.count(AttendanceRecord.id))
|
||||
.group_by(AttendanceRecord.device_id, AttendanceRecord.imei)
|
||||
.order_by(func.count(AttendanceRecord.id).desc())
|
||||
.limit(20)
|
||||
))
|
||||
by_device = [{"device_id": row[0], "imei": row[1], "count": row[2]} for row in device_result.all()]
|
||||
|
||||
# Daily trend (last N days)
|
||||
cutoff = now - timedelta(days=days)
|
||||
trend_result = await db.execute(
|
||||
select(
|
||||
func.date(AttendanceRecord.recorded_at).label("day"),
|
||||
func.count(AttendanceRecord.id),
|
||||
)
|
||||
.where(AttendanceRecord.recorded_at >= cutoff)
|
||||
.group_by("day").order_by("day")
|
||||
)
|
||||
daily_trend = {str(row[0]): row[1] for row in trend_result.all()}
|
||||
|
||||
# Today
|
||||
today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
today_count = (await db.execute(
|
||||
select(func.count(AttendanceRecord.id)).where(AttendanceRecord.recorded_at >= today_start)
|
||||
)).scalar() or 0
|
||||
|
||||
return APIResponse(data={
|
||||
"total": total,
|
||||
"today": today_count,
|
||||
"by_type": by_type,
|
||||
"by_source": by_source,
|
||||
"by_device": by_device,
|
||||
"daily_trend": daily_trend,
|
||||
})
|
||||
|
||||
|
||||
@router.get(
    "/report",
    response_model=APIResponse[dict],
    summary="考勤报表 / Attendance report",
)
async def attendance_report(
    device_id: int | None = Query(default=None, description="设备ID (可选,不传则所有设备)"),
    start_date: str = Query(..., description="开始日期 YYYY-MM-DD"),
    end_date: str = Query(..., description="结束日期 YYYY-MM-DD"),
    db: AsyncSession = Depends(get_db),
):
    """
    考勤报表:按设备+日期聚合,返回每个设备每天的签到次数、首次签到时间、末次签到时间。

    Attendance report: per device per day aggregation.

    Returns one row per (device, day) plus summary fields: total days in the
    range, number of devices with records, and an attendance rate computed as
    device-days with records over expected device-days.

    Raises:
        HTTPException 400: malformed dates or start_date > end_date.
    """
    try:
        s_date = datetime.strptime(start_date, "%Y-%m-%d")
        # End-of-day must include microseconds, otherwise records stamped at
        # 23:59:59.x were silently excluded by the old 23:59:59 cutoff.
        e_date = datetime.strptime(end_date, "%Y-%m-%d").replace(
            hour=23, minute=59, second=59, microsecond=999999
        )
    except ValueError:
        raise HTTPException(status_code=400, detail="日期格式需为 YYYY-MM-DD")

    if s_date > e_date:
        raise HTTPException(status_code=400, detail="start_date must be <= end_date")

    filters = [
        AttendanceRecord.recorded_at >= s_date,
        AttendanceRecord.recorded_at <= e_date,
    ]
    if device_id is not None:
        filters.append(AttendanceRecord.device_id == device_id)

    # NOTE(review): func.group_concat is SQLite/MySQL-specific — confirm before
    # any move to PostgreSQL (string_agg there).
    result = await db.execute(
        select(
            AttendanceRecord.device_id,
            AttendanceRecord.imei,
            func.date(AttendanceRecord.recorded_at).label("day"),
            func.count(AttendanceRecord.id).label("punch_count"),
            func.min(AttendanceRecord.recorded_at).label("first_punch"),
            func.max(AttendanceRecord.recorded_at).label("last_punch"),
            func.group_concat(AttendanceRecord.attendance_source).label("sources"),
        )
        .where(*filters)
        .group_by(AttendanceRecord.device_id, AttendanceRecord.imei, "day")
        .order_by(AttendanceRecord.device_id, "day")
    )

    report = []
    for row in result.all():
        report.append({
            "device_id": row[0],
            "imei": row[1],
            "date": str(row[2]),
            "punch_count": row[3],
            "first_punch": str(row[4]) if row[4] else None,
            "last_punch": str(row[5]) if row[5] else None,
            # De-duplicate the concatenated source list; ordering is not meaningful.
            "sources": list(set(row[6].split(","))) if row[6] else [],
        })

    # Summary: total days in range, devices with records, attendance rate.
    total_days = (e_date - s_date).days + 1
    unique_devices = len({r["device_id"] for r in report})
    device_days = len(report)

    from app.models import Device

    if device_id is not None:
        total_device_count = 1
    else:
        total_device_count = (await db.execute(select(func.count(Device.id)))).scalar() or 1

    expected_device_days = total_days * total_device_count
    attendance_rate = round(device_days / expected_device_days * 100, 1) if expected_device_days else 0

    return APIResponse(data={
        "start_date": start_date,
        "end_date": end_date,
        "total_days": total_days,
        "total_devices": unique_devices,
        "attendance_rate": attendance_rate,
        "records": report,
    })
|
||||
|
||||
|
||||
@router.get(
|
||||
|
||||
@@ -86,6 +86,75 @@ async def list_bluetooth_records(
|
||||
)
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=APIResponse[dict],
    summary="蓝牙数据统计 / Bluetooth statistics",
)
async def bluetooth_stats(
    start_time: datetime | None = Query(default=None, description="开始时间"),
    end_time: datetime | None = Query(default=None, description="结束时间"),
    db: AsyncSession = Depends(get_db),
):
    """
    蓝牙数据统计:总记录数、按类型分布、按信标MAC分组TOP20、RSSI分布。

    Bluetooth stats: total, by type, top beacons, RSSI distribution.
    """
    from sqlalchemy import case

    conditions = []
    if start_time:
        conditions.append(BluetoothRecord.recorded_at >= start_time)
    if end_time:
        conditions.append(BluetoothRecord.recorded_at <= end_time)

    def _scoped(stmt):
        # Apply the optional time-range filter to any statement.
        return stmt.where(*conditions) if conditions else stmt

    total = (await db.execute(_scoped(select(func.count(BluetoothRecord.id))))).scalar() or 0

    # Record count per record_type.
    type_rows = (await db.execute(_scoped(
        select(BluetoothRecord.record_type, func.count(BluetoothRecord.id))
        .group_by(BluetoothRecord.record_type)
    ))).all()
    by_type = {record_type: count for record_type, count in type_rows}

    # Twenty most frequently seen beacon MACs (NULL MACs excluded).
    beacon_rows = (await db.execute(_scoped(
        select(BluetoothRecord.beacon_mac, func.count(BluetoothRecord.id).label("cnt"))
        .where(BluetoothRecord.beacon_mac.is_not(None))
        .group_by(BluetoothRecord.beacon_mac)
        .order_by(func.count(BluetoothRecord.id).desc())
        .limit(20)
    ))).all()
    top_beacons = [{"beacon_mac": mac, "count": count} for mac, count in beacon_rows]

    # Bucket RSSI into strong / medium / weak / unknown in one aggregate pass.
    dist_row = (await db.execute(_scoped(
        select(
            func.sum(case(((BluetoothRecord.rssi.is_not(None)) & (BluetoothRecord.rssi >= -50), 1), else_=0)).label("strong"),
            func.sum(case(((BluetoothRecord.rssi < -50) & (BluetoothRecord.rssi >= -70), 1), else_=0)).label("medium"),
            func.sum(case(((BluetoothRecord.rssi < -70) & (BluetoothRecord.rssi.is_not(None)), 1), else_=0)).label("weak"),
            func.sum(case((BluetoothRecord.rssi.is_(None), 1), else_=0)).label("unknown"),
        )
    ))).one()
    rssi_distribution = {
        "strong_above_-50": int(dist_row.strong or 0),
        "medium_-50_-70": int(dist_row.medium or 0),
        "weak_below_-70": int(dist_row.weak or 0),
        "unknown": int(dist_row.unknown or 0),
    }

    return APIResponse(data={
        "total": total,
        "by_type": by_type,
        "top_beacons": top_beacons,
        "rssi_distribution": rssi_distribution,
    })
|
||||
|
||||
|
||||
@router.get(
|
||||
"/device/{device_id}",
|
||||
response_model=APIResponse[PaginatedList[BluetoothRecordResponse]],
|
||||
|
||||
@@ -147,6 +147,74 @@ async def _send_to_device(
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=APIResponse[dict],
    summary="指令统计 / Command statistics",
)
async def command_stats(
    days: int = Query(default=7, ge=1, le=90, description="趋势天数"),
    db: AsyncSession = Depends(get_db),
):
    """
    指令统计:总数、按状态分布、按类型分布、成功率、按天趋势。

    Command stats: total, by status, by type, success rate, daily trend.

    Args:
        days: size of the daily-trend window, 1-90.
    """
    from datetime import timedelta

    from sqlalchemy import func, select

    # Local import for consistency with the other routers; without it
    # now_cst() below would raise NameError if the module header lacks it.
    from app.config import now_cst
    from app.models import CommandLog

    total = (await db.execute(select(func.count(CommandLog.id)))).scalar() or 0

    # Breakdown by delivery status.
    status_result = await db.execute(
        select(CommandLog.status, func.count(CommandLog.id))
        .group_by(CommandLog.status)
    )
    by_status = {row[0]: row[1] for row in status_result.all()}

    # Breakdown by command type.
    type_result = await db.execute(
        select(CommandLog.command_type, func.count(CommandLog.id))
        .group_by(CommandLog.command_type)
    )
    by_type = {row[0]: row[1] for row in type_result.all()}

    # Success rate: "sent" and "success" both count as delivered; only an
    # explicit "failed" counts against the rate. Pending statuses are ignored.
    sent = by_status.get("sent", 0) + by_status.get("success", 0)
    failed = by_status.get("failed", 0)
    total_attempted = sent + failed
    success_rate = round(sent / total_attempted * 100, 1) if total_attempted else 0

    # Daily trend over the last `days` days.
    now = now_cst()
    cutoff = now - timedelta(days=days)
    trend_result = await db.execute(
        select(
            func.date(CommandLog.created_at).label("day"),
            func.count(CommandLog.id),
        )
        .where(CommandLog.created_at >= cutoff)
        .group_by("day").order_by("day")
    )
    daily_trend = {str(row[0]): row[1] for row in trend_result.all()}

    # Commands issued since local (CST) midnight.
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    today_count = (await db.execute(
        select(func.count(CommandLog.id)).where(CommandLog.created_at >= today_start)
    )).scalar() or 0

    return APIResponse(data={
        "total": total,
        "today": today_count,
        "by_status": by_status,
        "by_type": by_type,
        "success_rate": success_rate,
        "daily_trend": daily_trend,
    })
|
||||
|
||||
|
||||
@router.get(
|
||||
"",
|
||||
response_model=APIResponse[PaginatedList[CommandResponse]],
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
"""Fences Router - geofence management API endpoints."""
|
||||
|
||||
import math
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import require_write
|
||||
from app.database import get_db
|
||||
from app.models import AttendanceRecord, DeviceFenceBinding, DeviceFenceState, FenceConfig
|
||||
from app.schemas import (
|
||||
APIResponse,
|
||||
DeviceFenceBindRequest,
|
||||
@@ -39,6 +42,73 @@ async def list_fences(
|
||||
)
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=APIResponse[dict],
    summary="围栏统计 / Fence statistics",
)
async def fence_stats(db: AsyncSession = Depends(get_db)):
    """
    围栏统计:总数、活跃数、绑定设备总数、各围栏当前在内设备数、今日进出事件数。

    Fence stats: totals, bindings, devices currently inside, today's events.
    """
    from app.config import now_cst
    now = now_cst()

    total = (await db.execute(select(func.count(FenceConfig.id)))).scalar() or 0
    active = (await db.execute(
        select(func.count(FenceConfig.id)).where(FenceConfig.is_active == True)  # noqa: E712
    )).scalar() or 0
    total_bindings = (await db.execute(select(func.count(DeviceFenceBinding.id)))).scalar() or 0

    # Devices currently flagged inside, keyed by fence id.
    inside_rows = (await db.execute(
        select(
            DeviceFenceState.fence_id,
            func.count(DeviceFenceState.id),
        )
        .where(DeviceFenceState.is_inside == True)  # noqa: E712
        .group_by(DeviceFenceState.fence_id)
    )).all()
    devices_inside = {fid: count for fid, count in inside_rows}
    total_inside = sum(devices_inside.values())

    # Fence-sourced attendance events since local midnight.
    today_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
    today_events = (await db.execute(
        select(func.count(AttendanceRecord.id))
        .where(
            AttendanceRecord.attendance_source == "fence",
            AttendanceRecord.recorded_at >= today_start,
        )
    )).scalar() or 0

    # One summary entry per configured fence (active or not).
    fence_rows = (await db.execute(
        select(FenceConfig.id, FenceConfig.name, FenceConfig.fence_type, FenceConfig.is_active)
    )).all()
    fence_summary = [
        {
            "fence_id": fid,
            "name": name,
            "type": fence_type,
            "is_active": bool(is_active),
            "devices_inside": devices_inside.get(fid, 0),
        }
        for fid, name, fence_type, is_active in fence_rows
    ]

    return APIResponse(data={
        "total": total,
        "active": active,
        "inactive": total - active,
        "total_bindings": total_bindings,
        "total_devices_inside": total_inside,
        "today_events": today_events,
        "fences": fence_summary,
    })
|
||||
|
||||
|
||||
@router.get("/all-active", response_model=APIResponse[list[FenceConfigResponse]])
|
||||
async def get_all_active(db: AsyncSession = Depends(get_db)):
|
||||
fences = await fence_service.get_all_active_fences(db)
|
||||
@@ -74,6 +144,73 @@ async def delete_fence(fence_id: int, db: AsyncSession = Depends(get_db)):
|
||||
return APIResponse(message="Fence deleted")
|
||||
|
||||
|
||||
@router.get(
    "/{fence_id}/events",
    response_model=APIResponse[PaginatedList[dict]],
    summary="围栏进出事件历史 / Fence events history",
)
async def fence_events(
    fence_id: int,
    start_time: datetime | None = Query(default=None, description="开始时间"),
    end_time: datetime | None = Query(default=None, description="结束时间"),
    page: int = Query(default=1, ge=1),
    page_size: int = Query(default=20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
):
    """
    获取围栏的进出事件历史(来源为 fence 的考勤记录)。

    Get fence entry/exit events (attendance records with source=fence).

    Results are newest-first and paginated.

    Raises:
        HTTPException 404: fence does not exist.
    """
    fence = await fence_service.get_fence(db, fence_id)
    if fence is None:
        raise HTTPException(status_code=404, detail="Fence not found")

    filters = [
        AttendanceRecord.attendance_source == "fence",
    ]
    if start_time:
        filters.append(AttendanceRecord.recorded_at >= start_time)
    if end_time:
        filters.append(AttendanceRecord.recorded_at <= end_time)

    # Match fence_id inside the lbs_data JSON blob. SQLite JSON support is
    # limited, so fall back to LIKE — but the value must be terminated by ","
    # or "}", otherwise fence 1 would also match fences 10, 11, 123, ...
    # (assumes json.dumps default formatting with a space after the colon —
    # TODO confirm against the writer of lbs_data).
    filters.append(
        AttendanceRecord.lbs_data.like(f'%"fence_id": {fence_id},%')
        | AttendanceRecord.lbs_data.like(f'%"fence_id": {fence_id}}}%')
    )

    count_q = select(func.count(AttendanceRecord.id)).where(*filters)
    total = (await db.execute(count_q)).scalar() or 0

    offset = (page - 1) * page_size
    result = await db.execute(
        select(AttendanceRecord)
        .where(*filters)
        .order_by(AttendanceRecord.recorded_at.desc())
        .offset(offset).limit(page_size)
    )
    records = result.scalars().all()

    items = []
    for r in records:
        items.append({
            "id": r.id,
            "device_id": r.device_id,
            "imei": r.imei,
            "event_type": r.attendance_type,
            "latitude": r.latitude,
            "longitude": r.longitude,
            "address": r.address,
            "recorded_at": str(r.recorded_at),
        })

    return APIResponse(data=PaginatedList(
        items=items,
        total=total, page=page, page_size=page_size,
        total_pages=math.ceil(total / page_size) if total else 0,
    ))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Device-Fence Binding endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@@ -73,6 +73,99 @@ async def list_heartbeats(
|
||||
)
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=APIResponse[dict],
    summary="心跳统计 / Heartbeat statistics",
)
async def heartbeat_stats(
    hours: int = Query(default=24, ge=1, le=168, description="统计时间范围(小时)"),
    db: AsyncSession = Depends(get_db),
):
    """
    心跳数据统计:总记录数、活跃设备数、平均电量、按设备心跳间隔分析。

    Heartbeat stats: total, active devices, avg battery, interval analysis.

    Args:
        hours: size of the analysis window, 1-168 (one week).
    """
    from datetime import timedelta

    from app.config import now_cst
    from app.models import Device

    now = now_cst()
    cutoff = now - timedelta(hours=hours)

    # Total heartbeats in the window.
    total = (await db.execute(
        select(func.count(HeartbeatRecord.id)).where(HeartbeatRecord.created_at >= cutoff)
    )).scalar() or 0

    # Distinct devices that sent at least one heartbeat in the window.
    active_devices = (await db.execute(
        select(func.count(func.distinct(HeartbeatRecord.device_id)))
        .where(HeartbeatRecord.created_at >= cutoff)
    )).scalar() or 0

    # Total registered devices, used for the inactive count below.
    total_devices = (await db.execute(select(func.count(Device.id)))).scalar() or 0

    # Window averages for battery level and GSM signal.
    avg_result = await db.execute(
        select(
            func.avg(HeartbeatRecord.battery_level),
            func.avg(HeartbeatRecord.gsm_signal),
        ).where(HeartbeatRecord.created_at >= cutoff)
    )
    avg_row = avg_result.one()
    # Compare against None, not truthiness: an average of exactly 0 is a
    # valid reading and must not be reported as "no data".
    avg_battery = round(float(avg_row[0]), 1) if avg_row[0] is not None else None
    avg_signal = round(float(avg_row[1]), 1) if avg_row[1] is not None else None

    # Per-device heartbeat count plus first/last timestamps in the window,
    # busiest devices first — used to estimate the average report interval.
    per_device_result = await db.execute(
        select(
            HeartbeatRecord.device_id,
            HeartbeatRecord.imei,
            func.count(HeartbeatRecord.id).label("hb_count"),
            func.min(HeartbeatRecord.created_at).label("first_hb"),
            func.max(HeartbeatRecord.created_at).label("last_hb"),
        )
        .where(HeartbeatRecord.created_at >= cutoff)
        .group_by(HeartbeatRecord.device_id, HeartbeatRecord.imei)
        .order_by(func.count(HeartbeatRecord.id).desc())
    )
    device_intervals = []
    anomalous_devices = []
    for row in per_device_result.all():
        hb_count = row[2]
        first_hb = row[3]
        last_hb = row[4]
        if hb_count > 1 and first_hb and last_hb:
            # Mean gap between consecutive heartbeats over the observed span.
            span_min = (last_hb - first_hb).total_seconds() / 60
            avg_interval_min = round(span_min / (hb_count - 1), 1)
        else:
            avg_interval_min = 0
        entry = {
            "device_id": row[0], "imei": row[1],
            "heartbeat_count": hb_count,
            "avg_interval_minutes": avg_interval_min,
        }
        device_intervals.append(entry)
        # Flag devices averaging fewer than ~2 heartbeats per hour as anomalous.
        if hb_count < max(1, hours * 2):
            anomalous_devices.append(entry)

    return APIResponse(data={
        "period_hours": hours,
        "total_heartbeats": total,
        "active_devices": active_devices,
        "total_devices": total_devices,
        "inactive_devices": total_devices - active_devices,
        "avg_battery_level": avg_battery,
        "avg_gsm_signal": avg_signal,
        "device_intervals": device_intervals[:20],
        "anomalous_devices": anomalous_devices[:10],
    })
|
||||
|
||||
|
||||
@router.post(
|
||||
"/batch-delete",
|
||||
response_model=APIResponse[dict],
|
||||
|
||||
@@ -4,10 +4,10 @@ API endpoints for querying location records and device tracks.
|
||||
"""
|
||||
|
||||
import math
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from fastapi import APIRouter, Body, Depends, HTTPException, Query
|
||||
from sqlalchemy import func, select, delete
|
||||
from sqlalchemy import func, select, delete, case, extract
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import require_write
|
||||
@@ -63,6 +63,154 @@ async def list_locations(
|
||||
)
|
||||
|
||||
|
||||
@router.get(
    "/stats",
    response_model=APIResponse[dict],
    summary="位置数据统计 / Location statistics",
)
async def location_stats(
    device_id: int | None = Query(default=None, description="设备ID (可选)"),
    start_time: datetime | None = Query(default=None, description="开始时间"),
    end_time: datetime | None = Query(default=None, description="结束时间"),
    db: AsyncSession = Depends(get_db),
):
    """
    位置数据统计:总记录数、按定位类型分布、有坐标率、按小时分布(24h)。

    Location statistics: total, by type, coordinate rate, hourly distribution.
    """
    conditions = []
    if device_id is not None:
        conditions.append(LocationRecord.device_id == device_id)
    if start_time:
        conditions.append(LocationRecord.recorded_at >= start_time)
    if end_time:
        conditions.append(LocationRecord.recorded_at <= end_time)

    def _scoped(stmt):
        # Apply the optional device/time filters to any statement.
        return stmt.where(*conditions) if conditions else stmt

    # Total records in scope.
    total = (await db.execute(_scoped(select(func.count(LocationRecord.id))))).scalar() or 0

    # Records carrying both latitude and longitude.
    with_coords = (await db.execute(_scoped(
        select(func.count(LocationRecord.id)).where(
            LocationRecord.latitude.is_not(None), LocationRecord.longitude.is_not(None)
        )
    ))).scalar() or 0

    # Breakdown by location_type.
    type_rows = (await db.execute(_scoped(
        select(LocationRecord.location_type, func.count(LocationRecord.id))
        .group_by(LocationRecord.location_type)
    ))).all()
    by_type = {loc_type: count for loc_type, count in type_rows}

    # Distribution over hour-of-day (0-23).
    hour_rows = (await db.execute(_scoped(
        select(
            extract("hour", LocationRecord.recorded_at).label("hour"),
            func.count(LocationRecord.id),
        ).group_by("hour").order_by("hour")
    ))).all()
    hourly = {int(hour): count for hour, count in hour_rows}

    return APIResponse(data={
        "total": total,
        "with_coordinates": with_coords,
        "without_coordinates": total - with_coords,
        "coordinate_rate": round(with_coords / total * 100, 1) if total else 0,
        "by_type": by_type,
        "hourly_distribution": hourly,
    })
|
||||
|
||||
|
||||
@router.get(
    "/track-summary/{device_id}",
    response_model=APIResponse[dict],
    summary="轨迹摘要 / Track summary",
)
async def track_summary(
    device_id: int,
    start_time: datetime = Query(..., description="开始时间"),
    end_time: datetime = Query(..., description="结束时间"),
    db: AsyncSession = Depends(get_db),
):
    """
    轨迹统计摘要:总距离(km)、运动时长、最高速度、平均速度、轨迹点数。

    Track summary: total distance, duration, max/avg speed, point count.

    Raises:
        HTTPException 404: unknown device.
        HTTPException 400: start_time >= end_time.
    """
    import math as _math

    device = await device_service.get_device(db, device_id)
    if device is None:
        raise HTTPException(status_code=404, detail=f"Device {device_id} not found")
    if start_time >= end_time:
        raise HTTPException(status_code=400, detail="start_time must be before end_time")

    records = await location_service.get_device_track(db, device_id, start_time, end_time, max_points=50000)

    if not records:
        # Keep the same keys as the non-empty response so clients can rely on
        # a stable schema (start_time/end_time were previously missing here).
        return APIResponse(data={
            "device_id": device_id,
            "point_count": 0,
            "total_distance_km": 0,
            "duration_minutes": 0,
            "max_speed_kmh": 0,
            "avg_speed_kmh": 0,
            "start_time": None,
            "end_time": None,
            "by_type": {},
        })

    def _haversine(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
        """Great-circle distance between two lat/lon points, in kilometres."""
        R = 6371.0  # mean Earth radius, km
        dlat = _math.radians(lat2 - lat1)
        dlon = _math.radians(lon2 - lon1)
        a = _math.sin(dlat / 2) ** 2 + _math.cos(_math.radians(lat1)) * _math.cos(_math.radians(lat2)) * _math.sin(dlon / 2) ** 2
        return R * 2 * _math.atan2(_math.sqrt(a), _math.sqrt(1 - a))

    total_distance = 0.0
    max_speed = 0.0  # assumes LocationRecord.speed is already km/h — TODO confirm
    type_counts: dict[str, int] = {}
    prev = None  # last record that had usable coordinates

    for r in records:
        t = r.location_type or "unknown"
        type_counts[t] = type_counts.get(t, 0) + 1

        if r.speed is not None and r.speed > max_speed:
            max_speed = r.speed

        # Sum segment distances only between consecutive points that both have
        # a fix; points without coordinates are skipped, not treated as zero.
        # (prev is only ever assigned below when its coordinates are present,
        # so re-checking prev.latitude/longitude here is unnecessary.)
        if prev is not None and r.latitude is not None and r.longitude is not None:
            total_distance += _haversine(prev.latitude, prev.longitude, r.latitude, r.longitude)

        if r.latitude is not None and r.longitude is not None:
            prev = r

    first_time = records[0].recorded_at
    last_time = records[-1].recorded_at
    duration_min = (last_time - first_time).total_seconds() / 60 if last_time > first_time else 0
    # Average speed over the whole window, including stationary time.
    avg_speed = (total_distance / (duration_min / 60)) if duration_min > 0 else 0

    return APIResponse(data={
        "device_id": device_id,
        "point_count": len(records),
        "total_distance_km": round(total_distance, 2),
        "duration_minutes": round(duration_min, 1),
        "max_speed_kmh": round(max_speed, 1),
        "avg_speed_kmh": round(avg_speed, 1),
        "start_time": str(first_time),
        "end_time": str(last_time),
        "by_type": type_counts,
    })
|
||||
|
||||
|
||||
@router.get(
|
||||
"/latest/{device_id}",
|
||||
response_model=APIResponse[LocationRecordResponse | None],
|
||||
|
||||
Reference in New Issue
Block a user