1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
|
#!/usr/bin/env python3
from apscheduler.schedulers.background import BlockingScheduler
from usv_driver import Usv
from influxdb import InfluxDBClient
import sys
import argparse
import logging
# Polling interval in seconds. One poll takes about 6 s, so 14 s leaves headroom.
INTERVAL = 14

logging.basicConfig(level=logging.INFO)

# Command-line configuration: InfluxDB target and serial UPS source.
parser = argparse.ArgumentParser(
    description='Poll a UPS over serial and push its parameters to InfluxDB.')
parser.add_argument('--host', help='Database host', default='localhost')
# type=int: argparse validates and converts, instead of passing strings around.
parser.add_argument('--port', help='Database port', type=int, default=8086)
parser.add_argument('--db', help='Database name', default='test')
parser.add_argument('--serial', help='Serial port', default='/dev/ttyU0')
parser.add_argument('--baud', help='Serial baudrate', type=int, default=600)
parser.add_argument('--param', help='Parameter JSON file', default='./usv_param.json')
# parse_args() reads sys.argv[1:] by default; no need to pass it explicitly.
args = parser.parse_args()

scheduler = BlockingScheduler(timezone="Europe/Berlin")
db_client = InfluxDBClient(host=args.host,
                           port=args.port,
                           database=args.db)
usv = Usv(args.serial, args.baud, args.param)
@scheduler.scheduled_job('interval', seconds=INTERVAL)
def timed_job():
    """Poll the UPS once and write every parameter as a point to InfluxDB.

    Scheduled to run every INTERVAL seconds on the blocking scheduler.
    Each parameter becomes one 'usv_parameters' point, tagged with the
    parameter name and a static device id.
    """
    parameters = usv.get_parameters()
    data = []
    # .items() avoids a second dict lookup per parameter.
    for name, value in parameters.items():
        data.append({
            'measurement': 'usv_parameters',
            'tags': {
                'name': name,
                # Static device identifier; presumably the UPS model string — TODO confirm.
                'id': 'LI????VA',
            },
            'fields': {'value': value},
        })
        # Lazy %-args: formatting is skipped entirely when INFO is disabled.
        logging.info("insert: %s: %s", name, value)
    db_client.write_points(data)
# Blocks forever, firing timed_job every INTERVAL seconds; Ctrl-C to stop.
scheduler.start()
|