master
arychagov 2 months ago
parent 925c9f9008
commit 28e9690770

@ -2,7 +2,7 @@ import dotenv from 'dotenv';
// Load environment variables from .env before any process.env reads below.
// (Static imports are hoisted, so dotenv.config() actually runs after all
// module imports have been evaluated — values are only guaranteed for code
// in this module, not for side effects inside imported modules.)
dotenv.config();
import { InfluxDB, Point } from '@influxdata/influxdb-client';

// InfluxDB connection settings. The hostname is the docker-compose service
// name ('my_influxdb') so the backend resolves it on the shared network.
const url = 'http://my_influxdb:8086';
const token = process.env.TOKEN;
// Log only whether the token is present — never the secret value itself.
console.log('Influx token loaded:', Boolean(token));
const org = 'my-org';

@ -7,8 +7,8 @@ router.post('/data', async (req, res) => {
try {
const { timestamp, num_set, channel, data, block, dev_name } = req.body;
if (!timestamp || !num_set || !channel || !Array.isArray(data) || !dev_name || !block) {
// console.error(req.body);
if (!timestamp || !num_set || !channel || !Array.isArray(data) || !dev_name) {
console.error(req.body);
res.status(400).json({ error: 'Invalid payload' });
return
}
@ -41,7 +41,7 @@ router.post('/error', async (req, res) => {
const { timestamp, num_set, channel, dev_name, sys_error, fiber_error } = req.body;
if (!timestamp || !num_set || !channel || !sys_error || !dev_name || !fiber_error) {
// console.error(req.body);
console.error(req.body);
res.status(400).json({ error: 'Invalid payload' });
return
}

@ -0,0 +1,10 @@
# Multi-stage build for ToreksBackend.
FROM node:22-alpine3.22 AS base
WORKDIR /app
# Copy manifests first so the dependency layer is cached independently
# of source-code changes.
COPY ./ToreksBackend/package.json ./ToreksBackend/yarn.lock ./
# --frozen-lockfile: fail instead of silently updating yarn.lock, so the
# image always installs exactly the committed dependency versions.
RUN yarn install --frozen-lockfile
COPY ToreksBackend ./

# Production stage: compile the app on top of the installed dependencies.
# (AS must match the casing of FROM — lowercase 'as' triggers the BuildKit
# FromAsCasing warning.)
FROM base AS prod
WORKDIR /app
RUN yarn build

@ -16,14 +16,35 @@ services:
- DOCKER_INFLUXDB_INIT_BUCKET=my-bucket
- DOCKER_INFLUXDB_INIT_TOKEN=my-super-token
restart: unless-stopped
networks:
- app-network
grafana:
image: grafana/grafana:latest
container_name: grafana
backend-prod:
container_name: backend-prod
restart: always
env_file:
- ToreksBackend/.env
build:
context: .
target: prod
dockerfile: backend.Dockerfile
ports:
- "3005:3000"
volumes:
- grafana_data:/var/lib/grafana
- "3000:3000"
networks:
- app-network
command: yarn start
# grafana:
# image: grafana/grafana:latest
# container_name: grafana
# ports:
# - "3005:3000"
# volumes:
# - grafana_data:/var/lib/grafana
networks:
app-network:
driver: bridge
volumes:
influx_data:

Loading…
Cancel
Save