|
|
from __future__ import annotations
|
|
|
|
|
|
import asyncio
|
|
|
import base64
|
|
|
import logging
|
|
|
import os
|
|
|
import re
|
|
|
import struct
|
|
|
import time
|
|
|
from typing import Any, Dict, List, Tuple, Optional
|
|
|
|
|
|
import cv2
|
|
|
import numpy as np
|
|
|
import websockets
|
|
|
from websockets import serve
|
|
|
|
|
|
from . import config, state
|
|
|
from . import alarms  # <-- used for MAC normalization and alarm dispatch
|
|
|
from .config import VUE_CONTROL_WS_PORT, VUE_VIDEO_WS_PORT
|
|
|
from .model import yolo_forward
|
|
|
from .postprocess import postprocess_control
|
|
|
from .preview import maybe_downscale, publish_preview
|
|
|
from .utils import json_dumps
|
|
|
from .wiper import trigger_wiper_once  # optional
|
|
|
|
|
|
# hot calibration / patrol
|
|
|
from .calibration import calibrate_presets, autocal_pan_sign
|
|
|
from .patrol import ensure_patrol_resume
|
|
|
from .sector import sector_init_on_startup, sector_autocal_from_presets
|
|
|
|
|
|
logger = logging.getLogger("PTZTracker.CTRL")

log_main = logging.getLogger("PTZTracker")

# Pre-compiled binary frame header: big-endian u16 camera id + u32 frame id.
PACKER_HI = struct.Struct(">HI")

# ---- ALARM bridge state (frame-level debounce) ----
# cam_id -> monotonic timestamp when an ON candidate was first observed
_alarm_on_since: Dict[int, float] = {}
# cam_id -> monotonic timestamp when an OFF candidate was first observed
_alarm_off_since: Dict[int, float] = {}
# cam_id -> current debounced alarm state
_alarm_is_on: Dict[int, bool] = {}
|
|
|
|
|
|
def _alarm_gate(cam_id: int, has_target_now: bool) -> bool | None:
    """
    Debounced alarm gate.

    Returns True/False when it is time to send ON/OFF.
    Returns None while waiting for confirmation (debounce).
    The source of has_target_now is state.ptz_states[cam_id]["rec_active"].
    """
    now = time.monotonic()
    currently_on = _alarm_is_on.get(cam_id, False)

    if has_target_now:
        if currently_on:
            # already on -> presence re-confirmed, drop any pending disarm
            _alarm_off_since.pop(cam_id, None)
            return None
        # candidate for switching ON
        armed_at = _alarm_on_since.get(cam_id)
        if armed_at is None:
            _alarm_on_since[cam_id] = now
            log_main.debug("[ALARM][GATE] cam %s arm start", cam_id)
            return None
        if (now - armed_at) < float(getattr(config, "REC_ON_CONFIRM_SEC", 0.35)):
            return None
        _alarm_is_on[cam_id] = True
        _alarm_off_since.pop(cam_id, None)
        _alarm_on_since.pop(cam_id, None)
        log_main.info("[ALARM][GATE] cam %s ARMED (≥%.2fs)", cam_id, getattr(config, "REC_ON_CONFIRM_SEC", 0.35))
        return True

    if not currently_on:
        # already off -> nothing pending
        _alarm_on_since.pop(cam_id, None)
        return None
    # candidate for switching OFF
    disarmed_at = _alarm_off_since.get(cam_id)
    if disarmed_at is None:
        _alarm_off_since[cam_id] = now
        log_main.debug("[ALARM][GATE] cam %s disarm start", cam_id)
        return None
    if (now - disarmed_at) < float(getattr(config, "REC_OFF_GRACE_SEC", 1.2)):
        return None
    _alarm_is_on[cam_id] = False
    _alarm_on_since.pop(cam_id, None)
    _alarm_off_since.pop(cam_id, None)
    log_main.info("[ALARM][GATE] cam %s DISARMED (≥%.2fs)", cam_id, getattr(config, "REC_OFF_GRACE_SEC", 1.2))
    return False
|
|
|
|
|
|
async def alarm_gate_loop(poll_sec: float = 0.05) -> None:
    """
    Poll state.ptz_states[cam]['rec_active'] at ~20 Hz and dispatch alarms.

    Runs on top of the already-existing recording/detection logic.
    """
    log_main.info("[ALARM][BRIDGE] gate loop started (poll=%.3fs)", poll_sec)
    while True:
        try:
            # Iterate over the known cameras (new ones show up in config.CAMERA_CONFIG)
            for cam_id in list(config.CAMERA_CONFIG.keys()):
                cam_state = state.ptz_states.get(cam_id, {})
                target_present = bool(cam_state.get("rec_active", False))

                decision = _alarm_gate(cam_id, target_present)
                if decision is None:
                    continue

                if decision:
                    log_main.info("[ALARM][DECIDE] cam %s -> ON (rec_active=True)", cam_id)
                    alarms.queue_alarm(cam_id, True)  # MAC-only/Swagger format is decided inside alarms.py
                else:
                    log_main.info("[ALARM][DECIDE] cam %s -> OFF (rec_active=False)", cam_id)
                    alarms.queue_alarm(cam_id, False)

        except Exception as e:
            log_main.error("[ALARM][BRIDGE] loop error: %s", e)

        await asyncio.sleep(poll_sec)
|
|
|
|
|
|
def _cameras_toml_path() -> str:
    """Resolve the cameras.toml location from config, falling back to the CWD."""
    for attr in ("CAMERAS_TOML_PATH", "CAMERAS_TOML", "CONFIG_TOML"):
        value = getattr(config, attr, None)
        if not value:
            continue
        return str(value)
    return os.path.join(os.getcwd(), "cameras.toml")
|
|
|
|
|
|
def _toml_load(path: str) -> dict:
|
|
|
try:
|
|
|
import tomllib
|
|
|
with open(path, "rb") as f:
|
|
|
return tomllib.load(f)
|
|
|
except Exception:
|
|
|
return {}
|
|
|
|
|
|
def _next_free_id(used: List[int]) -> int:
    """Return the smallest non-negative integer not present in *used*."""
    taken = {int(x) for x in used}
    candidate = 0
    while candidate in taken:
        candidate += 1
    return candidate
|
|
|
|
|
|
def _append_camera_block(path: str, block: str) -> None:
    """
    Append a camera TOML block to *path*, always separated from any
    previous content by a newline.

    Fix: the original skipped the separating newline whenever the caller
    pre-pended "\n" to the block — but since the block is stripped anyway,
    that produced exactly the opposite of the intended separation
    (`_write_or_update_camera` passes "\n" + block and got no separator).
    The pre-touch via open(...).close() was redundant: append mode creates
    the file if it does not exist.
    """
    with open(path, "a", encoding="utf-8") as f:
        f.write("\n" + block.strip() + "\n")
|
|
|
|
|
|
def _emit_cam_block_for_toml(c: dict) -> str:
    """
    Emit a single [[camera]] TOML block with extended fields.

    Only optional fields that are present and type-valid are written.

    Fix: ``bool`` is a subclass of ``int``, so a boolean stored under an
    int/float key (e.g. pan_sign=True) used to be emitted as the bare
    Python literal ``True`` / ``1.000000`` — ``True`` is not valid TOML.
    Booleans are now excluded from the numeric branches and handled only
    by the dedicated bool branch (``true``/``false``).
    """
    # required + base fields
    lines = [
        "[[camera]]",
        f"id = {int(c['id'])}",
        f"ip = \"{c['ip']}\"",
        f"username = \"{c['username']}\"",
        f"password = \"{c['password']}\"",
        f"ptz_channel = {int(c.get('ptz_channel', 1))}",
        "wiper_channel = 1",
    ]

    # optional MAC address
    mac = c.get("mac")
    if isinstance(mac, str) and mac:
        lines.append(f"mac = \"{mac}\"")

    # PTZ flags and presets
    if c.get("is_ptz", False):
        lines.append(f"preset1 = \"{c.get('preset1', '1')}\"")
        lines.append(f"preset2 = \"{c.get('preset2', '2')}\"")
        lines.append(f"sweep_sign = {int(c.get('sweep_sign', 1))}")

    # --- extended optional fields ---
    opt_float_keys = [
        "north_offset_deg", "bullet_hfov_deg", "hfov_deg",
        "preset1_deg", "preset2_deg",
        "preset1_tilt_deg", "preset2_tilt_deg",
        "sector_min_deg", "sector_max_deg",
        "pan_offset_deg", "tilt_offset_deg",
    ]
    opt_int_keys = ["pan_sign", "tilt_sign", "port", "port_http", "port_https", "ptz_timeout_sec"]
    opt_bool_keys = ["https", "verify_tls", "is_ptz"]
    opt_str_keys = ["scheme", "focus_override"]

    for k in opt_float_keys:
        v = c.get(k)
        # exclude bool: isinstance(True, int) is True
        if isinstance(v, (int, float)) and not isinstance(v, bool):
            lines.append(f"{k} = {float(v):.6f}")
    for k in opt_int_keys:
        v = c.get(k)
        if isinstance(v, int) and not isinstance(v, bool):
            lines.append(f"{k} = {v}")
    for k in opt_bool_keys:
        v = c.get(k)
        if isinstance(v, bool):
            lines.append(f"{k} = {'true' if v else 'false'}")
    for k in opt_str_keys:
        v = c.get(k)
        if isinstance(v, str) and v:
            lines.append(f"{k} = \"{v}\"")

    return "\n".join(lines) + "\n"
|
|
|
|
|
|
def _write_or_update_camera(path: str, cam: dict) -> None:
    """
    Append a new camera to the TOML file unless its id already exists.

    Extended fields (north/HFOV/sector/signs/overrides) are written only
    when present and valid.
    """
    current = _toml_load(path).get("camera", [])
    known_ids = [entry.get("id") for entry in current if isinstance(entry, dict)]
    if cam["id"] in known_ids:
        return
    _append_camera_block(path, "\n" + _emit_cam_block_for_toml(cam))
|
|
|
|
|
|
def _rewrite_all_cameras(path: str, cams: List[dict]) -> None:
    """
    Atomically rewrite cameras.toml, keeping every useful field of each
    camera: write to a temp file, fsync, then rename over the original.
    """
    ordered = sorted(cams, key=lambda c: int(c["id"]))
    blocks = [_emit_cam_block_for_toml(cam) for cam in ordered]
    if blocks:
        text = "\n".join(b.strip() for b in blocks if b.strip()) + "\n"
    else:
        text = ""
    tmp_path = path + ".tmp"
    with open(tmp_path, "w", encoding="utf-8") as f:
        f.write(text)
        f.flush()
        # make sure bytes hit disk before the rename
        os.fsync(f.fileno())
    os.replace(tmp_path, path)
|
|
|
|
|
|
def _reload_config_best_effort() -> bool:
    """Try the known config-reload hooks in order; True on first success."""
    hook_names = ("reload_cameras", "reload_config_from_toml", "reload_from_file", "load_cameras")
    for hook_name in hook_names:
        hook = getattr(config, hook_name, None)
        if not callable(hook):
            continue
        try:
            hook()
        except Exception as e:
            log_main.warning("[CFG] reload failed via %s: %s", hook_name, e)
        else:
            return True
    return False
|
|
|
|
|
|
# ------------- Hot camera addition -------------
def maybe_reload_cameras_when_unknown(idx: int) -> bool:
    """If camera *idx* is unknown, try a config reload; return whether it is known afterwards."""
    from . import config as C
    if idx not in C.CAMERA_CONFIG:
        _reload_config_best_effort()
    return idx in C.CAMERA_CONFIG
|
|
|
|
|
|
async def _broadcast_init_to_vue() -> None:
    """Push a fresh "init" message (camera count + ids) to every Vue control client."""
    try:
        clients = getattr(state, "vue_control_clients", None)
        if not clients:
            return
        ids = sorted(config.CAMERA_CONFIG.keys())
        if not ids and getattr(state, "detected_cameras", None):
            ids = sorted(state.detected_cameras)
        state.last_broadcast_ids = list(ids)
        # canvas indices are 1-based on the Vue side
        state.index_to_cam_id = {pos + 1: cam for pos, cam in enumerate(ids)}
        raw = json_dumps({"type": "init", "data": [len(ids)] + ids})
        for client in list(clients):
            try:
                await client.send(raw)
            except Exception:
                pass
    except Exception as e:
        logger.warning("[CTRL] broadcast init failed: %s", e)
|
|
|
|
|
|
def ensure_runtime_state_for_camera(cam_id: int) -> None:
    """Make sure state.ptz_states[cam_id] exists with every runtime key.

    Only missing keys are filled (setdefault), so values already set by
    the tracking/patrol loops are never clobbered.
    """
    st = state.ptz_states.setdefault(cam_id, {})
    defaults = {
        # per-frame processing guard
        "proc_busy": False,
        # previous target position and velocity estimate
        "prev_t": None, "prev_cx": 0.5, "prev_cy": 0.5,
        "vx": 0.0, "vy": 0.0,
        # target width tracking (EMA + growth) used for zoom decisions
        "w_frac_ema": None, "prev_w_frac": None, "w_growth": 0.0,
        "zoom_state": 0, "zoom_last_change": 0.0,
        "zoom_int": 0.0, "zoom_prev_cmd": 0.0, "zoom_ramp_start": 0.0,
        # control terms: last errors (last_dx/last_dy) and accumulators (ix/iy)
        "last_dx": 0.0, "last_dy": 0.0, "ix": 0.0, "iy": 0.0,
        # target bookkeeping
        "last_seen": 0.0, "loss_since": None, "last_bbox": None,
        "alarm_sent": False, "stable_frames": 0,
        "fallback_done": False, "zoom_lock": False,
        "lock_candidate_since": None, "last_return_at": 0.0,
        # patrol state
        "patrol_active": False, "patrol_dir": +1,
        "leg_start": 0.0, "leg_end": 0.0, "endpoint_latch": 0,
        "is_ptz": (cam_id in config.PTZ_CAM_IDS),
        # last PTZ command bookkeeping
        "last_ptz_send": 0.0,
        "last_cmd": (0.0, 0.0, 0.0),
        "last_cmd_ts": 0.0,
        "last_pan": 0.0, "last_tilt": 0.0, "last_zoom": 0.0,
        "rec_active": False, "was_zoomed_in": False,
        # timers / pending preset moves
        "zoom_reset_timer": None, "preset_timer": None, "preset_target": None,
        "ptz_busy_until": 0.0, "pan_sign": 1, "mode": "IDLE",
    }
    for k, v in defaults.items():
        st.setdefault(k, v)
|
|
|
|
|
|
# ---- Hot PTZ initialization right after a camera is added ----
async def _hot_setup_ptz(cam_id: int) -> None:
    """
    Wait for frames, calibrate presets with retries, auto-calibrate
    pan_sign/sector, and only then start the patrol.
    """
    try:
        ensure_runtime_state_for_camera(cam_id)
        st = state.ptz_states[cam_id]
        # keep the camera idle while calibrating
        st["patrol_active"] = False
        st["mode"] = "IDLE"

        # 1) wait for the first frames (up to 3 s)
        t0 = time.time()
        while cam_id not in state.detected_cameras and (time.time() - t0) < 3.0:
            await asyncio.sleep(0.1)

        # 2) calibrate_presets with 3 attempts
        ok = False
        for attempt in range(3):
            try:
                logger.info("Calibrating presets for camera %s (attempt %d)", cam_id, attempt + 1)
                ok = await calibrate_presets(cam_id)
                if ok:
                    break
            except Exception as e:
                logger.warning("calibrate_presets cam %s failed (attempt %d): %s", cam_id, attempt + 1, e)
            await asyncio.sleep(0.5)
        if not ok:
            logger.warning("Failed to calibrate camera %s, using fallback for sector clamp.", cam_id)

        # 3) pan_sign + sector
        try:
            await autocal_pan_sign(cam_id)
        except Exception as e:
            logger.debug("autocal pan_sign cam %s: %s", cam_id, e)

        if getattr(config, "USE_PRESET_EDGES_FOR_SECTOR", False):
            try:
                await sector_autocal_from_presets()
            except Exception as e:
                logger.warning("[SECTOR] autocal from presets failed: %s", e)

        try:
            sector_init_on_startup()
        except Exception:
            pass

        # 4) start the patrol
        try:
            ensure_patrol_resume(cam_id, delay=1.0)
        except Exception as e:
            logger.warning("[PATROL] ensure_patrol_resume failed: %s", e)

    except Exception as e:
        logger.warning("[HOTCAL] _hot_setup_ptz cam %s failed: %s", cam_id, e)
|
|
|
|
|
|
# ---------------- VIDEO WS ----------------
async def video_ws_handler(ws, path=None) -> None:
    """Track one video WebSocket client; frames are pushed by video_broadcaster()."""
    addr = getattr(ws, "remote_address", None)
    logger.info("[VIDEO WS] client connected: %s path=%s", addr, path)
    state.video_clients.add(ws)
    try:
        # the handler only keeps the connection open; sending happens elsewhere
        await ws.wait_closed()
    finally:
        state.video_clients.discard(ws)
        logger.info("[VIDEO WS] client disconnected: %s", addr)
|
|
|
|
|
|
async def video_broadcaster() -> None:
    """
    Fan out the newest JPEG of every camera to all connected video clients.

    Each client gets its own pacing state: a per-client send interval that
    adapts between VIDEO_MIN_INTERVAL_FAST and VIDEO_MIN_INTERVAL_SLOW
    depending on how long sends take and how full the socket write buffer
    is. Clients whose write buffer exceeds the hard limit are dropped.
    """
    # per-client bookkeeping: last frame id sent per camera, send interval, last send time
    client_state: Dict[Any, Dict[str, Any]] = {}
    min_interval_fast = getattr(config, "VIDEO_MIN_INTERVAL_FAST", 1.0 / 60.0)
    min_interval_slow = getattr(config, "VIDEO_MIN_INTERVAL_SLOW", 1.0 / 15.0)
    send_slow_sec = 0.02          # a send slower than this marks the client as slow
    buf_soft = 4 * 1024 * 1024    # soft write-buffer limit -> back off the send rate
    buf_hard = 8 * 1024 * 1024    # hard write-buffer limit -> disconnect the client
    loop_tick = 1.0 / 200.0

    while True:
        try:
            if not state.video_clients or not state.latest_jpeg_by_cam:
                await asyncio.sleep(loop_tick)
                continue

            now = time.perf_counter()
            dead: set[Any] = set()

            for ws in list(state.video_clients):
                st = client_state.setdefault(
                    ws, {"last_fid": {}, "min_interval": min_interval_fast, "last_sent": 0.0}
                )
                # respect this client's adaptive pacing
                if (now - st["last_sent"]) < st["min_interval"]:
                    continue

                # drop clients that cannot drain their socket buffer at all
                transport = getattr(ws, "transport", None)
                buf_sz = transport.get_write_buffer_size() if transport else 0
                if buf_sz > buf_hard:
                    try:
                        await ws.close()
                    except Exception:
                        pass
                    dead.add(ws)
                    continue

                # collect frames this client has not seen yet
                pending: List[Tuple[int, int, bytes]] = []
                last_fid: Dict[int, int] = st["last_fid"]
                for cam_id, (fid, jpeg) in state.latest_jpeg_by_cam.items():
                    if last_fid.get(cam_id, -1) != fid:
                        payload = PACKER_HI.pack(int(cam_id), int(fid)) + jpeg
                        pending.append((cam_id, fid, payload))

                # optionally re-send the latest frames even when nothing changed
                if getattr(config, "SEND_DUPLICATES_WHEN_IDLE", False) and not pending:
                    for cam_id, (fid, jpeg) in state.latest_jpeg_by_cam.items():
                        payload = PACKER_HI.pack(int(cam_id), int(fid)) + jpeg
                        pending.append((cam_id, fid, payload))

                if not pending:
                    continue

                # cap the burst per tick; prefer the newest frames (highest fid)
                max_per_tick = 12 if st["min_interval"] <= (1.0 / 30.0) else 6
                if len(pending) > max_per_tick:
                    pending.sort(key=lambda x: x[1], reverse=True)
                    pending = pending[:max_per_tick]

                t0 = time.perf_counter()
                try:
                    send = ws.send
                    for cam_id, fid, payload in pending:
                        await send(payload)
                        last_fid[cam_id] = fid
                except Exception:
                    dead.add(ws)
                    continue
                finally:
                    st["last_sent"] = time.perf_counter()

                # adapt the per-client interval: back off when slow, speed up otherwise
                send_time = st["last_sent"] - t0
                if (send_time > send_slow_sec) or (buf_sz > buf_soft):
                    st["min_interval"] = min(st["min_interval"] * 1.25, min_interval_slow)
                else:
                    st["min_interval"] = max(st["min_interval"] * 0.90, min_interval_fast)

            for d in dead:
                state.video_clients.discard(d)
                client_state.pop(d, None)

        except Exception as exc:
            logger.error("video_broadcaster error: %s", exc)

        await asyncio.sleep(loop_tick)
|
|
|
|
|
|
# ---------------- CTRL WS ----------------
def _parse_config_camera_str(s: str) -> Tuple[str, str, str]:
    """
    Parse a "login:password@ip [vendor]" string into (user, password, ip).

    Raises:
        ValueError: if the first token does not match login:password@ip.
    """
    head = s.strip().split()[0]
    match = re.fullmatch(r"(?P<u>[^:@\s]+):(?P<p>[^@\s]+)@(?P<ip>[^\s]+)", head)
    if match is None:
        raise ValueError("Bad config format (expected login:password@ip vendor)")
    return match.group("u"), match.group("p"), match.group("ip")
|
|
|
|
|
|
def _inmem_add_camera(cfg: dict) -> None:
    """Register a freshly added camera in the in-memory config structures."""
    cam_id = int(cfg["id"])
    # base fields
    rec = {
        "ip": cfg["ip"],
        "username": cfg["username"],
        "password": cfg["password"],
        "ptz_channel": int(cfg.get("ptz_channel", 1)),
        "wiper_channel": 1,
    }
    # carry the MAC in memory when present
    mac = cfg.get("mac")
    if isinstance(mac, str) and mac:
        rec["mac"] = mac

    # for PTZ cameras store presets/direction right away — simplifies calibration
    is_ptz = cfg.get("is_ptz", False)
    if is_ptz:
        rec["preset1"] = str(cfg.get("preset1", "1"))
        rec["preset2"] = str(cfg.get("preset2", "2"))
        rec["sweep_sign"] = int(cfg.get("sweep_sign", 1))
    config.CAMERA_CONFIG[cam_id] = rec

    if is_ptz and cam_id not in config.PTZ_CAM_IDS:
        config.PTZ_CAM_IDS.append(cam_id)
|
|
|
|
|
|
def _find_ptz_for_same_ip(cam_id: int) -> Optional[int]:
    """Find a PTZ-capable camera id that shares this camera's IP, if any."""
    cfg = config.CAMERA_CONFIG.get(cam_id)
    if not cfg:
        return None
    ip = cfg.get("ip")
    if not ip:
        return None
    # first preference: a camera explicitly registered as PTZ
    for ptz_id in config.PTZ_CAM_IDS:
        other = config.CAMERA_CONFIG.get(ptz_id)
        if other and other.get("ip") == ip:
            return ptz_id
    # fallback: any camera on the same IP whose ptz_channel is 1
    for other_id, other in config.CAMERA_CONFIG.items():
        if other.get("ip") == ip and int(other.get("ptz_channel", 0)) == 1:
            return other_id
    return None
|
|
|
|
|
|
def _token_to_cam_id(token: int) -> Optional[int]:
    """Map a UI token (real id, 1-based canvas index, or 0-based index) to a camera id."""
    if token in config.CAMERA_CONFIG:
        return token
    mapping = getattr(state, "index_to_cam_id", None)
    if isinstance(mapping, dict) and token in mapping:
        return mapping[token]
    ordered = sorted(config.CAMERA_CONFIG.keys())
    count = len(ordered)
    if 1 <= token <= count:
        return ordered[token - 1]
    if 0 <= token < count:
        return ordered[token]
    return None
|
|
|
|
|
|
async def handle_vue_control(ws, path=None) -> None:
    """
    Control-channel WebSocket handler for the Vue frontend.

    On connect, sends an "init" message with the known camera ids, then
    serves these message types until the socket closes:
      - "policy":       toggle the auto-tracking strategy flag
      - "numberCamera": echo/ack a canvas id
      - "configCamera": hot-add a camera (TOML + in-memory + optional PTZ setup)
      - "deleteCamera": hot-remove a camera and rewrite cameras.toml
      - "wiper":        trigger the wiper on the PTZ camera for this token
    """
    import json, asyncio
    logger = logging.getLogger("PTZTracker.CTRL")

    async def _direct_wiper_resolved_cam(cam_id: int, sec: float):
        # Direct ISAPI fallback (PUT .../manualWiper) used when the
        # wiper module could not be imported.
        import requests
        from requests.auth import HTTPDigestAuth
        cfg = config.CAMERA_CONFIG[cam_id]
        scheme = cfg.get("scheme") or ("https" if cfg.get("https") else "http")
        host = cfg["ip"]
        port = int(cfg.get("port") or (443 if scheme == "https" else 80))
        ch = int(cfg.get("ptz_channel", 1))
        user = cfg.get("username") or cfg.get("user") or "admin"
        pwd = cfg.get("password") or cfg.get("pass") or ""
        verify = bool(cfg.get("verify_tls", False))
        timeout = int(cfg.get("ptz_timeout_sec", 6))
        base = f"{scheme}://{host}:{port}"
        url = f"{base}/ISAPI/PTZCtrl/channels/{ch}/manualWiper"
        auth = HTTPDigestAuth(user, pwd)
        def _do():
            # blocking HTTP call -> run in the default executor below
            s = requests.Session(); s.trust_env = False
            r = s.put(url, auth=auth, timeout=timeout, verify=verify)
            body = (r.text or "")[:300]
            ok = 200 <= r.status_code < 300
            return ok, "manualWiper", r.status_code, body, cam_id
        return await asyncio.get_running_loop().run_in_executor(None, _do)

    _wiper_fn = None
    try:
        from .wiper import trigger_wiper_once as _wiper_fn
        logger.info("[CTRL] wiper module loaded")
    except Exception as e:
        logger.warning("[CTRL] no wiper module, using direct fallback: %s", e)

    def _resolve_cam_for_wiper(token: int) -> Optional[int]:
        # map a UI token to the PTZ camera that should run the wiper
        real_id = _token_to_cam_id(token)
        if real_id is None:
            return None
        if real_id in config.PTZ_CAM_IDS:
            return real_id
        return _find_ptz_for_same_ip(real_id)

    addr = getattr(ws, "remote_address", None)
    logger.info("[CTRL] client connected: %s path=%s", addr, path)
    state.vue_control_clients.add(ws)

    try:
        # build the initial camera-id list from whatever state attribute exists
        streams = getattr(state, "detected_cameras", None) or getattr(state, "streams", None) or getattr(state, "stream_indices", None) or []
        if isinstance(streams, dict): streams = list(streams.keys())
        elif isinstance(streams, set): streams = sorted(streams)
        else: streams = list(streams)
        init_msg = {"type": "init", "data": [len(streams)] + streams}
        try:
            await ws.send(json_dumps(init_msg))
        except Exception:
            logger.exception("[CTRL] init send failed")

        async for raw in ws:
            try:
                data = json.loads(raw)
            except Exception:
                logger.warning("[CTRL] bad json: %r", raw)
                continue

            mtype = data.get("type")
            payload = data.get("data") or {}

            if mtype == "policy":
                auto = data.get("auto")
                config.AUTO_STRATEGY = bool(auto)
                await ws.send(json_dumps({"type": "policy_ack", "auto": config.AUTO_STRATEGY}))
                continue

            if mtype == "numberCamera":
                try:
                    cnum = payload.get("canvasId")
                    # normalize the canvas id to int regardless of the JSON type
                    if isinstance(cnum, str): cnum = int(cnum.strip())
                    elif isinstance(cnum, float): cnum = int(cnum)
                    await ws.send(json_dumps({"type": "numberCamera_ack", "ok": True, "canvasId": cnum}))
                except Exception as e:
                    await ws.send(json_dumps({"type": "numberCamera_ack", "ok": False, "error": str(e)}))
                continue

            if mtype == "configCamera":
                try:
                    cfg_str = str(payload.get("config", "")).strip()
                    is_ptz = bool(payload.get("ptz", False))
                    # <<< MAC from the client >>>
                    mac_raw = (payload.get("mac") or "").strip() if isinstance(payload.get("mac"), str) else ""
                    user, pwd, ip = _parse_config_camera_str(cfg_str)

                    path = _cameras_toml_path()
                    data_toml = _toml_load(path)
                    current = data_toml.get("camera", [])
                    used_ids = [int(c.get("id")) for c in current if isinstance(c, dict) and "id" in c]
                    new_id = _next_free_id(used_ids)
                    ptz_ch = 1 if is_ptz else 2

                    cam_rec = {
                        "id": new_id,
                        "ip": ip,
                        "username": user,
                        "password": pwd,
                        "ptz_channel": ptz_ch,
                        "is_ptz": is_ptz,
                    }

                    # normalize and store the MAC if provided
                    if mac_raw:
                        try:
                            cam_rec["mac"] = alarms._norm_mac(mac_raw)
                        except Exception:
                            await ws.send(json_dumps({"type": "configCamera_ack", "ok": False, "error": "bad_mac"}))
                            continue

                    _write_or_update_camera(path, cam_rec)
                    logger.info("[CFG] appended camera to %s: %s", path, cam_rec)

                    _inmem_add_camera(cam_rec)
                    ensure_runtime_state_for_camera(new_id)

                    # Do not start the patrol directly — run hot calibration in the background
                    if is_ptz:
                        asyncio.create_task(_hot_setup_ptz(new_id))

                    await _broadcast_init_to_vue()
                    await ws.send(json_dumps({
                        "type": "configCamera_ack", "ok": True, "id": new_id, "mac": cam_rec.get("mac")
                    }))
                except Exception as e:
                    logger.exception("[CTRL] configCamera error: %s", e)
                    try:
                        await ws.send(json_dumps({"type": "configCamera_ack", "ok": False, "error": str(e)}))
                    except Exception:
                        pass
                continue

            if mtype == "deleteCamera":
                try:
                    token = payload.get("canvasId")
                    if token is None:
                        token = payload.get("CameraID")
                    if token is None:
                        raise ValueError("canvasId is required")
                    token_int = int(token) if not isinstance(token, int) else token

                    real_id = _token_to_cam_id(token_int)
                    if real_id is None or real_id not in config.CAMERA_CONFIG:
                        await ws.send(json_dumps({"type": "deleteCamera_ack", "ok": False, "error": "unknown_canvasId"}))
                        continue

                    logger.info("[DELETE] canvasId=%s -> cam_id=%s", token_int, real_id)

                    # stop patrol/timers/PTZ
                    from .ptz_io import stop_ptz  # safe
                    try:
                        stop_ptz(real_id)
                    except Exception:
                        pass
                    st = state.ptz_states.get(real_id)
                    if st:
                        st["patrol_active"] = False
                        st["endpoint_latch"] = 0
                        st["mode"] = "IDLE"
                        for t in ("zoom_reset_timer", "preset_timer"):
                            try:
                                if st.get(t):
                                    st[t].cancel()
                            except Exception:
                                pass
                    try:
                        from . import ptz_io as _ptz_mod
                        if hasattr(_ptz_mod, "PTZ_AUTH"):
                            _ptz_mod.PTZ_AUTH.pop(real_id, None)
                    except Exception:
                        pass

                    # purge runtime state and config (in-memory)
                    was_ptz = (real_id in config.PTZ_CAM_IDS)
                    config.CAMERA_CONFIG.pop(real_id, None)
                    if was_ptz:
                        try:
                            config.PTZ_CAM_IDS.remove(real_id)
                        except ValueError:
                            pass
                    state.ptz_states.pop(real_id, None)
                    state.frame_id_by_cam.pop(real_id, None)
                    state.latest_jpeg_by_cam.pop(real_id, None)
                    state.detected_cameras.discard(real_id)

                    # rewrite cameras.toml from the current in-memory config (incl. extended fields)
                    path = _cameras_toml_path()
                    cams_left: List[dict] = []
                    for cid in sorted(config.CAMERA_CONFIG.keys()):
                        cfg = config.CAMERA_CONFIG[cid]
                        rec = {
                            "id": cid,
                            "ip": cfg["ip"],
                            "username": cfg["username"],
                            "password": cfg["password"],
                            "ptz_channel": int(cfg.get("ptz_channel", 1)),
                            "is_ptz": (cid in config.PTZ_CAM_IDS),
                        }
                        # carry over the MAC if present
                        if isinstance(cfg.get("mac"), str) and cfg.get("mac"):
                            rec["mac"] = cfg["mac"]
                        if rec["is_ptz"]:
                            rec["preset1"] = str(cfg.get("preset1", "1"))
                            rec["preset2"] = str(cfg.get("preset2", "2"))
                            rec["sweep_sign"] = int(cfg.get("sweep_sign", 1))

                        # copy any extended fields that exist
                        for k in (
                            "north_offset_deg","bullet_hfov_deg","hfov_deg",
                            "preset1_deg","preset2_deg","preset1_tilt_deg","preset2_tilt_deg",
                            "sector_min_deg","sector_max_deg","pan_offset_deg","tilt_offset_deg",
                            "pan_sign","tilt_sign","port","port_http","port_https",
                            "ptz_timeout_sec","https","verify_tls","scheme","focus_override",
                        ):
                            if k in cfg:
                                rec[k] = cfg[k]

                        cams_left.append(rec)

                    _rewrite_all_cameras(path, cams_left)
                    logger.info("[DELETE] cameras.toml rewritten at %s (left=%d)", path, len(cams_left))

                    _reload_config_best_effort()
                    await _broadcast_init_to_vue()
                    await ws.send(json_dumps({"type": "deleteCamera_ack", "ok": True, "id": real_id}))
                except Exception as e:
                    logger.exception("[CTRL] deleteCamera error: %s", e)
                    try:
                        await ws.send(json_dumps({"type": "deleteCamera_ack", "ok": False, "error": str(e)}))
                    except Exception:
                        pass
                continue

            if mtype == "wiper":
                try:
                    cam_token_raw = payload.get("cam")
                    sec = float(payload.get("sec", 3))
                    cam_token = int(cam_token_raw) if not isinstance(cam_token_raw, int) else cam_token_raw
                    logger.info("[CTRL] WIPER <- cam_token=%s sec=%.2f", cam_token, sec)

                    cam_id = _resolve_cam_for_wiper(cam_token)
                    if cam_id is None:
                        ack = {"type": "wiper_ack", "ok": False, "cam": cam_token, "error": "no_ptz_for_camera"}
                        await ws.send(json_dumps(ack))
                        continue

                    if _wiper_fn is not None:
                        ok, endpoint, http_status, detail, _cam_back = await _wiper_fn(cam_id, sec)
                    else:
                        ok, endpoint, http_status, detail, _cam_back = await _direct_wiper_resolved_cam(cam_id, sec)

                    ack = {
                        "type": "wiper_ack",
                        "ok": bool(ok),
                        "cam": cam_token,
                        "cam_id": cam_id,
                        "sec": int(round(sec)),
                        "endpoint": endpoint,
                        "status": http_status,
                        "detail": (detail[:200] if isinstance(detail, str) else detail),
                    }
                    logger.info("[CTRL] WIPER -> %s", ack)
                    await ws.send(json_dumps(ack))
                except Exception as e:
                    logger.exception("[CTRL] WIPER error: %s", e)
                    try:
                        await ws.send(json_dumps({"type": "wiper_ack", "ok": False, "error": str(e)}))
                    except Exception:
                        pass
                continue

    except Exception as e:
        logger.warning("[CTRL] ws loop error: %s", e)
    finally:
        state.vue_control_clients.discard(ws)
        logger.info("[CTRL] client disconnected: %s", addr)
|
|
|
|
|
|
# ------------- Inference micro-batcher -------------
class MicroBatcher:
    """
    Accumulates frames from many cameras into small batches for a single
    YOLO forward pass, waiting at most max_wait_ms for a batch to fill.

    When adaptive mode is enabled, max_batch floats between bmin and bmax
    to keep the EMA of inference time near target_ms.
    """

    __slots__ = ("max_batch","max_wait","_buf","_cond","_task","adaptive","bmin","bmax","target_ms","ema_ms")

    def __init__(
        self,
        max_batch: int = 12,
        max_wait_ms: float = 2.0,
        adaptive: bool = config.ADAPTIVE_BATCH_ENABLE,
        bmin: int = config.BATCH_MIN,
        bmax: int = config.BATCH_MAX,
        target_ms: float = config.BATCH_TARGET_MS,
    ) -> None:
        # NOTE: the keyword defaults above are evaluated from config at import time
        self.max_batch = max(1, int(max_batch))
        # queued items: (cam_idx, frame, is_ptz, future resolved after postprocess)
        self._buf: List[Tuple[int, np.ndarray, bool, asyncio.Future]] = []
        self._cond = asyncio.Condition()
        self.max_wait = max_wait_ms / 1000.0
        # requires a running event loop at construction time
        self._task = asyncio.create_task(self._loop())
        self.adaptive = adaptive
        self.bmin = max(1, int(bmin))
        self.bmax = max(self.bmin, int(bmax))
        self.target_ms = float(target_ms)
        # EMA of per-batch inference time in milliseconds
        self.ema_ms: float | None = None

    async def submit(self, cam_idx: int, frame_bgr: np.ndarray, is_ptz: bool) -> None:
        """Queue one frame and wait until its batch has been postprocessed."""
        loop = asyncio.get_running_loop()
        fut: asyncio.Future = loop.create_future()
        async with self._cond:
            self._buf.append((cam_idx, frame_bgr, is_ptz, fut))
            self._cond.notify()
        # the future is always resolved with None by _loop
        return await fut

    async def _loop(self) -> None:
        """Collect a batch, run the model in an executor, then postprocess."""
        run = yolo_forward
        while True:
            async with self._cond:
                while not self._buf:
                    await self._cond.wait()

                # wait up to max_wait for more frames to fill the batch
                # NOTE(review): wait_for around Condition.wait() is subtle
                # (cancellation re-acquires the lock) — behavior kept as-is.
                start_wait = time.perf_counter()
                while len(self._buf) < self.max_batch:
                    remaining = self.max_wait - (time.perf_counter() - start_wait)
                    if remaining <= 0:
                        break
                    try:
                        await asyncio.wait_for(self._cond.wait(), timeout=remaining)
                    except asyncio.TimeoutError:
                        break

                batch = self._buf[: self.max_batch]
                del self._buf[: self.max_batch]

            try:
                frames = [b[1] for b in batch]
                loop = asyncio.get_running_loop()

                infer_t0 = time.perf_counter()
                results = await loop.run_in_executor(None, run, frames)
                infer_ms = (time.perf_counter() - infer_t0) * 1000.0
                self.ema_ms = infer_ms if self.ema_ms is None else (0.8 * self.ema_ms + 0.2 * infer_ms)

                # adapt the batch size toward the target inference latency
                if self.adaptive:
                    if self.ema_ms < self.target_ms and len(self._buf) > 0 and self.max_batch < self.bmax:
                        self.max_batch += 1
                    elif self.ema_ms > self.target_ms * config.BATCH_DOWN_SCALE and self.max_batch > self.bmin:
                        self.max_batch -= 1

                # postprocess each frame in parallel in the executor
                post_out = await asyncio.gather(
                    *[
                        loop.run_in_executor(
                            None, postprocess_control, cam_idx, frame_bgr, res, is_ptz
                        )
                        for (cam_idx, frame_bgr, is_ptz, _), res in zip(batch, results)
                    ],
                    return_exceptions=True,
                )

                # resolve every waiter, logging (not raising) postprocess errors
                for (_, _, _, fut), po in zip(batch, post_out):
                    if isinstance(po, Exception):
                        logger.error("postprocess error: %s", po)
                    if not fut.done():
                        fut.set_result(None)

            except Exception as exc:
                logger.error("batch forward error: %s", exc)
                for _, _, _, fut in batch:
                    if not fut.done():
                        fut.set_result(None)
|
|
|
|
|
|
# ------------- Frame stream from the C++ side -------------
|
|
|
async def cpp_ws_loop() -> None:
    """Receive camera frames from the C++ capture process and feed the batcher.

    Connects to ``config.CPP_WS_URI`` and runs forever, reconnecting after a
    short pause on any error.  Two wire formats are accepted:

    * binary: ``0x01`` marker byte, big-endian u16 camera index, JPEG bytes;
    * JSON text: ``{"type": "image", "data": "<idx>|<base64 jpeg>"}``.

    Decoded frames update discovery state, are optionally published as raw
    previews, and are submitted to ``state.batcher`` for inference.  At most
    one frame per camera is in flight at a time (``proc_busy`` gate); extra
    frames are silently dropped (or counted as skipped when
    ``DROP_DECODE_WHEN_BUSY`` applies).
    """
    import json as _json  # hoisted: previously imported inside the per-message loop

    frames_total = 0
    skipped_total = 0
    last_log = time.time()

    def _decode(raw) -> Optional[Tuple[int, bool, Any]]:
        """Decode one WS message into ``(cam_idx, known_before, image)``.

        Returns None for malformed / non-image messages or undecodable JPEGs.
        ``known_before`` records whether the camera was already present in
        ``config.CAMERA_CONFIG`` *before* the hot-reload that an unknown
        index may trigger.
        """
        if isinstance(raw, (bytes, bytearray)):
            buf = memoryview(raw)  # zero-copy view over the JPEG payload
            if len(buf) < 3 or buf[0] != 1:
                return None
            idx = int.from_bytes(buf[1:3], "big")
            payload = buf[3:]
        else:
            try:
                msg = _json.loads(raw)
            except Exception:
                return None
            if msg.get("type") != "image":
                return None
            idx_str, b64 = msg["data"].split("|", 1)
            idx = int(idx_str)
            payload = base64.b64decode(b64)
        known_before = idx in config.CAMERA_CONFIG
        if not known_before:
            maybe_reload_cameras_when_unknown(idx)
        img = cv2.imdecode(np.frombuffer(payload, dtype=np.uint8), cv2.IMREAD_COLOR)
        if img is None:
            return None
        return idx, known_before, img

    async def _process_frame(cam_idx: int, frame) -> None:
        """Submit one frame to the batcher; always clears the busy flag."""
        try:
            await state.batcher.submit(
                cam_idx,
                frame,
                cam_idx in config.PTZ_CAM_IDS,  # type: ignore[arg-type]
            )
        except Exception as exc:
            logger.error("Error processing frame for camera %s: %s", cam_idx, exc)
        finally:
            state.ptz_states[cam_idx]["proc_busy"] = False

    while True:
        try:
            logger.info("[CPP WS] connecting to %s ...", config.CPP_WS_URI)
            async with websockets.connect(
                config.CPP_WS_URI,
                max_size=None,
                compression=None,
                ping_interval=None,
                close_timeout=1.0,
            ) as ws:
                logger.info("[CPP WS] connected")  # fixed stray ']' in message
                async for raw in ws:
                    decoded = _decode(raw)
                    if decoded is None:
                        continue
                    idx, known_before, img = decoded

                    frames_total += 1
                    h, w = img.shape[:2]

                    # Once-per-second throughput log.
                    now = time.time()
                    if now - last_log >= 1.0:
                        logger.info(
                            "[CPP WS] recv: %d fps, skipped=%d | last cam=%s | %dx%d | clients=%d",
                            frames_total, skipped_total, idx, w, h, len(state.video_clients),
                        )
                        frames_total = 0
                        skipped_total = 0
                        last_log = now

                    first_seen = idx not in state.detected_cameras
                    state.detected_cameras.add(idx)

                    ensure_runtime_state_for_camera(idx)

                    # Announce newly discovered (or just hot-loaded) cameras to Vue.
                    if first_seen or (not known_before and idx in config.CAMERA_CONFIG):
                        try:
                            await _broadcast_init_to_vue()
                        except Exception as e:
                            logger.warning("[CPP WS] broadcast init failed: %s", e)

                    busy = state.ptz_states[idx]["proc_busy"]
                    no_clients = getattr(config, "PUBLISH_ONLY_IF_CLIENTS", False) and not state.video_clients
                    if getattr(config, "DROP_DECODE_WHEN_BUSY", False) and busy and no_clients:
                        skipped_total += 1
                        continue

                    # Publish a (possibly downscaled) raw preview before inference.
                    if (
                        getattr(config, "PUBLISH_RAW_BEFORE_INFER", False)
                        and (not getattr(config, "PUBLISH_ONLY_IF_CLIENTS", False) or state.video_clients)
                    ):
                        down, _, _ = maybe_downscale(img, getattr(config, "PREVIEW_TARGET_W", 640))
                        publish_preview(idx, down)

                    pst = state.ptz_states[idx]
                    if pst["proc_busy"]:
                        continue  # previous frame of this camera is still in flight
                    pst["proc_busy"] = True

                    asyncio.create_task(_process_frame(idx, img))

        except Exception as exc:
            logger.error("[CPP WS] error: %s", exc)
            await asyncio.sleep(0.5)
|
|
|
|
|
|
# ------------- C++ control channel (detections) -------------
|
|
|
async def cpp_detection_loop() -> None:
    """Forward per-camera detection-state messages to the C++ control WS.

    Runs forever: connects to ``config.CPP_CTRL_WS_URI``, sends a full
    snapshot of every camera's ``rec_active`` flag on connect, then drains
    ``state.detection_queue``.  A full snapshot is re-sent every
    ``RESYNC_EVERY_SEC`` seconds of queue inactivity so the C++ side cannot
    drift out of sync.  On any send failure the payload is re-queued (best
    effort) and the connection is rebuilt after a 1 s pause.
    """
    assert state.detection_queue is not None
    # Minimum idle interval between periodic full-state resyncs.
    RESYNC_EVERY_SEC = 10.0

    def _snapshot_states() -> List[Dict[str, Any]]:
        # Build one {"IdCamera", "detection"} entry per configured camera,
        # defaulting to detection=False for cameras without runtime state yet.
        snap: List[Dict[str, Any]] = []
        for cid in config.CAMERA_CONFIG.keys():
            st = state.ptz_states.get(cid, {})
            snap.append({"IdCamera": int(cid), "detection": bool(st.get("rec_active", False))})
        return snap

    while True:
        try:
            logger.info("[CPP CTRL] connecting to %s ...", config.CPP_CTRL_WS_URI)
            async with websockets.connect(
                config.CPP_CTRL_WS_URI,
                max_size=None,
                compression=None,
                ping_interval=None,
                close_timeout=1.0,
            ) as ws:
                logger.info("[CPP CTRL] connected")

                # Initial resync: push the complete current state right away.
                # A failure here is fatal for this connection attempt.
                try:
                    for item in _snapshot_states():
                        await ws.send(json_dumps({"type": "detection", "data": item}))
                except Exception as e:
                    logger.error("[CPP CTRL] initial resync failed: %s", e)
                    raise

                last_resync = time.time()
                while True:
                    now = time.time()

                    # Periodic resync only when the queue is idle, so queued
                    # incremental updates are never reordered behind it.
                    if now - last_resync >= RESYNC_EVERY_SEC and state.detection_queue.empty():
                        try:
                            for item in _snapshot_states():
                                await ws.send(json_dumps({"type": "detection", "data": item}))
                        except Exception as e:
                            logger.error("[CPP CTRL] periodic resync failed: %s", e)
                            break
                        last_resync = now

                    # Short timeout keeps the loop responsive so the resync
                    # check above runs even when no updates are queued.
                    try:
                        payload = await asyncio.wait_for(state.detection_queue.get(), timeout=1.0)
                    except asyncio.TimeoutError:
                        continue

                    try:
                        await ws.send(json_dumps(payload))
                        # Any successful send counts as fresh state on the
                        # peer, so the resync timer restarts.
                        last_resync = time.time()
                    except Exception as exc:
                        logger.error("[CPP CTRL] send failed: %s", exc)
                        # Best-effort re-queue so the update is retried after
                        # reconnect; task_done() below still balances get().
                        try:
                            state.detection_queue.put_nowait(payload)
                        except Exception:
                            pass
                        break
                    finally:
                        state.detection_queue.task_done()

        except Exception as exc:
            logger.error("[CPP CTRL] error: %s", exc)

        # Back off before reconnecting.
        await asyncio.sleep(1.0)
|
|
|
|
|
|
# ------------- Starting the WS servers -------------
|
|
|
async def build_servers():
    """Start the Vue-facing control and video WebSocket servers.

    Both servers listen on all interfaces with unlimited message size, no
    keepalive pings, and no permessage-deflate compression.  Returns the
    ``(control_server, video_server)`` pair.
    """
    common_opts = dict(max_size=None, ping_interval=None, compression=None)
    ctrl_server = await serve(
        handle_vue_control, "0.0.0.0", VUE_CONTROL_WS_PORT, **common_opts
    )
    video_server = await serve(
        video_ws_handler, "0.0.0.0", VUE_VIDEO_WS_PORT, **common_opts
    )
    return ctrl_server, video_server
|