Browse Source

Add API routes for athlete data

main
Haris Razis 4 years ago
parent
commit
45ffb8bca5
  1. 2
      client/services/socket.js
  2. 2
      docker-compose.yml
  3. 157
      influxdb.conf
  4. 13
      server/actions/influx_actions.js
  5. 24
      server/routes/data.js
  6. 7
      server/services/socket.js

2
client/services/socket.js

@@ -30,6 +30,6 @@ socket.on('closeConn', () => {
}); });
setInterval(() => { setInterval(() => {
socket.emit('data', {measurement: 123, pointName: 'hey-ho'}); socket.emit('data', {measurement: 5, mac, pointName: 'leg-measurement'});
}, 3 * 1000); }, 3 * 1000);

2
docker-compose.yml

@@ -10,6 +10,8 @@ services:
container_name: "influx" container_name: "influx"
environment: environment:
- INFLUXDB_DB=db0 - INFLUXDB_DB=db0
volumes:
- ./influxdb.conf:/etc/influxdb/influxdb.conf
networks: networks:
- backend - backend
- monitor - monitor

157
influxdb.conf

@@ -0,0 +1,157 @@
reporting-disabled = false
bind-address = "127.0.0.1:8088"
[meta]
dir = "/var/lib/influxdb/meta"
retention-autocreate = true
logging-enabled = true
[data]
dir = "/var/lib/influxdb/data"
index-version = "inmem"
wal-dir = "/var/lib/influxdb/wal"
wal-fsync-delay = "0s"
validate-keys = false
query-log-enabled = true
cache-max-memory-size = 1073741824
cache-snapshot-memory-size = 26214400
cache-snapshot-write-cold-duration = "10m0s"
compact-full-write-cold-duration = "4h0m0s"
compact-throughput = 50331648
compact-throughput-burst = 50331648
max-series-per-database = 1000000
max-values-per-tag = 100000
max-concurrent-compactions = 0
max-index-log-file-size = 1048576
series-id-set-cache-size = 100
series-file-max-concurrent-snapshot-compactions = 0
trace-logging-enabled = false
tsm-use-madv-willneed = false
[coordinator]
write-timeout = "10s"
max-concurrent-queries = 0
query-timeout = "0s"
log-queries-after = "0s"
max-select-point = 0
max-select-series = 0
max-select-buckets = 0
[retention]
enabled = true
check-interval = "30m0s"
[shard-precreation]
enabled = true
check-interval = "10m0s"
advance-period = "30m0s"
[monitor]
store-enabled = true
store-database = "_internal"
store-interval = "10s"
[subscriber]
enabled = true
http-timeout = "30s"
insecure-skip-verify = false
ca-certs = ""
write-concurrency = 40
write-buffer-size = 1000
[http]
enabled = true
bind-address = ":8086"
auth-enabled = false
log-enabled = true
suppress-write-log = false
write-tracing = false
flux-enabled = true
flux-log-enabled = true
pprof-enabled = true
pprof-auth-enabled = false
debug-pprof-enabled = false
ping-auth-enabled = false
prom-read-auth-enabled = false
https-enabled = false
https-certificate = "/etc/ssl/influxdb.pem"
https-private-key = ""
max-row-limit = 0
max-connection-limit = 0
shared-secret = ""
realm = "InfluxDB"
unix-socket-enabled = false
unix-socket-permissions = "0777"
bind-socket = "/var/run/influxdb.sock"
max-body-size = 25000000
access-log-path = ""
max-concurrent-write-limit = 0
max-enqueued-write-limit = 0
enqueued-write-timeout = 30000000000
[logging]
format = "auto"
level = "info"
suppress-logo = false
[[graphite]]
enabled = false
bind-address = ":2003"
database = "graphite"
retention-policy = ""
protocol = "tcp"
batch-size = 5000
batch-pending = 10
batch-timeout = "1s"
consistency-level = "one"
separator = "."
udp-read-buffer = 0
[[collectd]]
enabled = false
bind-address = ":25826"
database = "collectd"
retention-policy = ""
batch-size = 5000
batch-pending = 10
batch-timeout = "10s"
read-buffer = 0
typesdb = "/usr/share/collectd/types.db"
security-level = "none"
auth-file = "/etc/collectd/auth_file"
parse-multivalue-plugin = "split"
[[opentsdb]]
enabled = false
bind-address = ":4242"
database = "opentsdb"
retention-policy = ""
consistency-level = "one"
tls-enabled = false
certificate = "/etc/ssl/influxdb.pem"
batch-size = 1000
batch-pending = 5
batch-timeout = "1s"
log-point-errors = true
[[udp]]
enabled = false
bind-address = ":8089"
database = "udp"
retention-policy = ""
batch-size = 5000
batch-pending = 10
read-buffer = 0
batch-timeout = "1s"
precision = ""
[continuous_queries]
log-enabled = true
enabled = true
query-stats-enabled = false
run-interval = "1s"
[tls]
min-version = ""
max-version = ""

13
server/actions/influx_actions.js

@@ -2,7 +2,6 @@ const {Point} = require('@influxdata/influxdb-client');
const chalk = require('chalk') const chalk = require('chalk')
const {writeApi, queryApi} = require('../connections/influx_conn') const {writeApi, queryApi} = require('../connections/influx_conn')
const {bucket} = require('../config/keys')
iWrite = (pointName, uuid, measurement) => { iWrite = (pointName, uuid, measurement) => {
const point = new Point(pointName) const point = new Point(pointName)
@@ -25,17 +24,15 @@ closeWrite = () => {
}); });
} }
iPoint = (timeFrame, filter) => { iQuery = (query) => {
const query = `from(bucket: "${bucket}") |> range(start: -${timeFrame}) |> group(columns: ["client"])
|> filter(fn: (r) => r._measurement == "${filter}")`;
return queryApi return queryApi
.collectRows(query) .collectRows(query)
.then(async (result) => { .then((result) => {
return result;
}) })
.catch(() => { .catch((err) => {
return [{Error: 'Error occurred'}]; return [{Error: 'Error occurred'}];
}); });
} }
module.exports = {iWrite, closeWrite, iPoint} module.exports = {iWrite, closeWrite, iQuery}

24
server/routes/data.js

@@ -1,9 +1,27 @@
const express = require('express') const express = require('express')
const router = express.Router(); const router = express.Router();
const mongoose = require('mongoose');
const {requireAuth} = require('../middlewares/middleware'); const {requireAuth} = require('../middlewares/middleware');
router.get('/yoda', requireAuth, (req, res) => { const Athlete = mongoose.model('Athlete');
res.send('Become powerful you have, the dark side in you I sense. Yrsssss.'); const {influx_bucket} = require('../config/keys')
}) const {iQuery} = require('../actions/influx_actions')
router.get('/api/data', requireAuth, async (req, res) => {
const query = `from(bucket: "${influx_bucket}") |> range(start: -1h)`;
const data = await iQuery(query);
res.send(data);
});
router.get('/api/data/:id', requireAuth, async (req, res) => {
const athlete = await Athlete.findById(req.params.id);
const query = `from(bucket: "${influx_bucket}") |> range(start: -1h) |> filter(fn: (r) => r.client == "${athlete.id}")`;
const data = await iQuery(query);
res.send(data)
});
module.exports = router; module.exports = router;

7
server/services/socket.js

@@ -3,9 +3,8 @@ const redisAdapter = require('socket.io-redis');
const mongoose = require('mongoose'); const mongoose = require('mongoose');
const chalk = require('chalk'); const chalk = require('chalk');
const {pub, sub} = require('../connections/redis_conn')
const {saveAthlete} = require('../actions/mongo_actions') const {saveAthlete} = require('../actions/mongo_actions')
const {iWrite, closeWrite, iQuery} = require('../actions/influx_actions') const {iWrite} = require('../actions/influx_actions')
const Athlete = mongoose.model('Athlete'); const Athlete = mongoose.model('Athlete');
module.exports = (server) => { module.exports = (server) => {
@@ -38,10 +37,10 @@ module.exports = (server) => {
}); });
socket.on('data', (data) => { socket.on('data', (data) => {
const {measurement, pointName} = data; const {measurement, pointName, mac} = data;
io.emit('console', {measurement}) io.emit('console', {measurement})
iWrite(pointName, socket.id, measurement) iWrite(pointName, mac, measurement)
}); });
}); });

Loading…
Cancel
Save