#!/usr/bin/env python3
"""Periodically read parameters from a USV (UPS) over serial and push them to InfluxDB.

Runs a blocking APScheduler job every INTERVAL seconds that polls the device
via the project-local `usv_driver.Usv` class and writes one point per
parameter into the `usv_parameters` measurement.
"""
import argparse
import logging
import sys

# NOTE(fix): BlockingScheduler canonically lives in .blocking, not .background
# (the old import only worked because background.py re-exports it as a base class).
from apscheduler.schedulers.blocking import BlockingScheduler
from influxdb import InfluxDBClient

from usv_driver import Usv

# One polling cycle takes about 6 s on the slow serial link, so 14 s keeps
# comfortable headroom between runs.
INTERVAL = 14

logging.basicConfig(level=logging.INFO)


def _parse_args(argv):
    """Build and apply the command-line interface; returns the parsed namespace."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--host', help='Database host', default='localhost')
    parser.add_argument('--port', help='Database port', type=int, default=8086)
    parser.add_argument('--db', help='Database name', default='test')
    parser.add_argument('--serial', help='Serial port', default='/dev/ttyU0')
    parser.add_argument('--baud', help='Serial baudrate', type=int, default=600)
    parser.add_argument('--param', help='Parameter JSON file',
                        default='./usv_param.json')
    # Generalized: device id used to be hard-coded; same default keeps old behavior.
    parser.add_argument('--id', help='Device id tag written with each point',
                        default='LI????VA')
    return parser.parse_args(argv)


def main(argv=None):
    """Wire up scheduler, database client and USV driver, then block forever."""
    args = _parse_args(sys.argv[1:] if argv is None else argv)

    scheduler = BlockingScheduler(timezone="Europe/Berlin")
    db_client = InfluxDBClient(host=args.host, port=args.port, database=args.db)
    usv = Usv(args.serial, args.baud, args.param)

    @scheduler.scheduled_job('interval', seconds=INTERVAL)
    def timed_job():
        """Poll the USV once and write all parameters as a single batch."""
        parameters = usv.get_parameters()
        data = [
            {
                'measurement': 'usv_parameters',
                'tags': {
                    'name': name,
                    'id': args.id,
                },
                'fields': {'value': value},
            }
            for name, value in parameters.items()
        ]
        for name, value in parameters.items():
            # Lazy %-args: the string is only formatted if INFO is enabled.
            logging.info("insert: %s: %s", name, value)
        db_client.write_points(data)

    scheduler.start()  # blocks until interrupted


if __name__ == '__main__':
    main()