import fcntl
import hmac
import json
import logging
import logging.config
import os

from collections import defaultdict

from flask import (Flask, request, jsonify, abort, render_template, url_for)

from ltsdb_json import LTS
from dashboard import Dashboard

import config

logging.config.dictConfig(config.logging)

app = Flask(__name__)
log = logging.getLogger()
@app.route("/")
|
|
|
|
def home():
|
|
|
|
return jsonify({ "success": None })
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/report", methods=["POST"])
|
|
|
|
def report():
|
2023-01-07 12:55:21 +01:00
|
|
|
return record()
|
|
|
|
|
|
|
|
@app.route("/record", methods=["POST"])
|
|
|
|
def record():
|
2022-09-02 14:06:47 +02:00
|
|
|
data = request.get_json()
|
|
|
|
n_ts = 0
|
|
|
|
n_dp = 0
|
|
|
|
for d in data:
|
|
|
|
d["description"]["remote_addr"] = request.remote_addr
|
2022-09-02 18:41:19 +02:00
|
|
|
d["description"]["node"] = verify_node(d)
|
|
|
|
|
2022-09-02 14:06:47 +02:00
|
|
|
log.info("received %s", json.dumps(d))
|
|
|
|
ts = LTS(d["description"])
|
|
|
|
for dp in d["data"]:
|
|
|
|
ts.add(*dp)
|
|
|
|
ts.save()
|
|
|
|
n_dp += 1
|
|
|
|
n_ts += 1
|
|
|
|
return jsonify({ "success": True, "timeseries": n_ts, "datapoints": n_dp })
|
2022-09-02 18:41:19 +02:00
|
|
|
|
2022-09-04 17:58:17 +02:00
|
|
|
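
# Example request body for POST /record (hypothetical values; the field
# layout follows from record() and verify_node() above, and ts.add(*dp)
# suggests each datapoint is a (timestamp, value) pair):
#
# [
#     {
#         "description": {"hostname": "node1", "measure": "temperature"},
#         "data": [[1662100000, 21.5], [1662100060, 21.7]],
#         "auth": {"node": "node1", "timestamp": 1662100000, "hmac": "<hexdigest>"}
#     }
# ]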
@app.route("/ts/<id>")
|
|
|
|
def get_timeseries(id):
|
2022-09-04 21:03:17 +02:00
|
|
|
try:
|
|
|
|
ts = LTS(id=id)
|
|
|
|
except FileNotFoundError:
|
|
|
|
abort(404)
|
2022-09-04 17:58:17 +02:00
|
|
|
return jsonify({"description": ts.description, "data": ts.data})
|
|
|
|
|

@app.route("/dimensions")
def list_dimensions():
    with open("data/.index") as fh:
        fcntl.flock(fh, fcntl.LOCK_SH)
        index = json.load(fh)
    # Just return the number of timeseries for each dimension/member, not
    # the timeseries themselves.
    for d in index.keys():
        for m in index[d].keys():
            index[d][m] = len(index[d][m])
    return jsonify(index)
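
# Example /dimensions response (hypothetical dimension and member names):
#
# {"hostname": {"node1": 12, "node2": 7}, "measure": {"temperature": 19}}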
@app.route("/search")
|
|
|
|
def search():
|
|
|
|
log.debug("search: %s", request.args)
|
2022-12-09 22:55:07 +01:00
|
|
|
return jsonify(_search())
|
|
|
|
|
|
|
|

def _search():
    timeseries = None
    with open("data/.index") as fh:
        fcntl.flock(fh, fcntl.LOCK_SH)
        index = json.load(fh)
    for k, v in request.args.lists():
        log.debug("search: %s -> %s", k, v)
        if timeseries is None:
            # First dimension: start with all series matching any of its
            # members.
            timeseries = set()
            log.debug("search: %s: %s", k, index[k])
            for m in v:
                timeseries |= set(index[k][m])
        else:
            # Further dimensions narrow the result down.
            matching = set()
            for m in v:
                matching |= set(index[k][m])
            timeseries &= matching
    if timeseries is None:
        # No filter parameters were given; list(None) would raise, so
        # return no matches instead.
        return []
    return list(timeseries)
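
# The index file is assumed to map dimension -> member -> list of
# timeseries ids, e.g. (hypothetical names):
#
# {
#     "hostname": {"node1": ["a1b2c3...", "..."], "node2": ["..."]},
#     "measure": {"temperature": ["..."], "humidity": ["..."]}
# }
#
# so a query like /search?hostname=node1&hostname=node2&measure=temperature
# ORs members of the same dimension and ANDs different dimensions.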

def verify_node(d):
    node = d["auth"]["node"]
    timestamp = d["auth"]["timestamp"]
    digest1 = d["auth"]["hmac"]
    if "/" in node:
        # Node names become file names below, so path separators are
        # rejected.
        raise ValueError(f"invalid node name {node}")
    try:
        fn = "config/" + node
        log.info("getting client config from %s", fn)
        with open(fn) as fh:
            node_conf = json.load(fh)
    except Exception as e:
        log.warning("got %s opening %s", e, fn)
        abort(401, "unknown client")
    for key in node_conf["keys"]:
        msg = (node + " " + str(timestamp)).encode("UTF-8")
        hmac2 = hmac.new(key.encode("UTF-8"), msg, "SHA256")
        digest2 = hmac2.hexdigest()
        if hmac.compare_digest(digest1, digest2):
            if timestamp > node_conf["last"]:
                # Record the new timestamp and replace the config file
                # atomically, keeping the previous version as a backup.
                node_conf["last"] = timestamp
                tmpfn = fn + "." + str(os.getpid())
                oldfn = fn + ".old"
                with open(tmpfn, "w") as fh:
                    json.dump(node_conf, fh) # XXX
                try:
                    os.unlink(oldfn)
                except FileNotFoundError:
                    pass
                try:
                    os.link(fn, oldfn)
                except FileNotFoundError:
                    pass
                os.rename(tmpfn, fn)
                return node
            else:
                # Replayed or stale timestamp: reject.
                abort(409, "timestamp out of sync")
    abort(401, "auth failed")
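
# Sketch of the digest a client must send (it mirrors the verification
# above; "key" is one of the shared secrets from the node's config file):
#
#     msg = (node + " " + str(timestamp)).encode("UTF-8")
#     digest = hmac.new(key.encode("UTF-8"), msg, "SHA256").hexdigest()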
@app.get("/v")
|
|
|
|
def visualize():
|
|
|
|
timeseries_ids = request.args.getlist("ts")
|
2022-12-09 22:55:07 +01:00
|
|
|
if not timeseries_ids:
|
|
|
|
timeseries_ids = _search()
|
|
|
|
log.debug("timeseries_ids = %s", timeseries_ids)
|
2022-11-20 18:43:45 +01:00
|
|
|
timeseries_data = []
|
|
|
|
for id in timeseries_ids:
|
|
|
|
ts = LTS(id=id)
|
|
|
|
timeseries_data.append(ts)
|
|
|
|
return render_template("visualize.html", ts=timeseries_data)
|
2022-12-27 10:29:49 +01:00
|
|
|
|
|
|
|
@app.get("/dashboard/")
|
|
|
|
def dashboard_index():
|
|
|
|
d = Dashboard("dashboards/" + "index" + ".json")
|
|
|
|
return d.as_html()
|
|
|
|
|
|
|
|
@app.get("/dashboard/<dashboard>")
|
|
|
|
def dashboard_file(dashboard):
|
|
|
|
d = Dashboard("dashboards/" + dashboard + ".json")
|
|
|
|
return d.as_html()
|
2024-10-20 11:27:23 +02:00
|
|
|
|
|
|
|
@app.get("/nav")
|
|
|
|
def nav():
|
|
|
|
# Start with a list of all dimensions, the number of matching time series
|
|
|
|
# and a truncated list of series.
|
|
|
|
# If a dimension is chosen, display a choice of members
|
|
|
|
# choosing one or more members goes back to the list of
|
|
|
|
# (remaining) dimensions
|
|
|
|
with open("data/.index") as fh:
|
|
|
|
fcntl.flock(fh, fcntl.LOCK_SH)
|
|
|
|
index = json.load(fh)
|
|
|
|
timeseries = None
|
|
|
|
for k, v in request.args.lists():
|
|
|
|
if k[0] == ".":
|
|
|
|
continue
|
|
|
|
log.debug("search: %s -> %s", k, v)
|
|
|
|
if timeseries is None:
|
|
|
|
timeseries = set()
|
|
|
|
log.debug("search: %s: %s", k, index[k])
|
|
|
|
for m in v:
|
|
|
|
timeseries |= set(index[k][m])
|
|
|
|
else:
|
|
|
|
filter = set()
|
|
|
|
for m in v:
|
|
|
|
filter |= set(index[k][m])
|
|
|
|
timeseries &= filter
|
|
|
|
if timeseries is None:
|
|
|
|
timeseries = set()
|
|
|
|
for mc in index.values():
|
|
|
|
for tsl in mc.values():
|
|
|
|
timeseries |= set(tsl)
|
|
|
|
if d := request.args.get(".m"):
|
|
|
|
members = []
|
|
|
|
for m, tsl in index[d].items():
|
|
|
|
if set(tsl) & timeseries:
|
|
|
|
members.append(m)
|
|
|
|
return render_template("nav_member_select.html", dimension=d, members=members)
|
|
|
|
else:
|
|
|
|
params = request.args.to_dict(flat=False)
|
|
|
|
matching_dimensions = defaultdict(int)
|
|
|
|
for d, mc in index.items():
|
|
|
|
if d in params:
|
|
|
|
continue
|
|
|
|
for m, tsl in mc.items():
|
|
|
|
mtsl = set(tsl) & timeseries
|
|
|
|
if mtsl:
|
|
|
|
matching_dimensions[d] += len(mtsl)
|
|
|
|
matching_dimensions_list = []
|
|
|
|
for d in matching_dimensions:
|
|
|
|
params[".m"] = d
|
|
|
|
url = url_for("nav", **params)
|
|
|
|
app.logger.debug(f"{d=} {url=}")
|
|
|
|
matching_dimensions_list.append(
|
|
|
|
{"name": d, "count": matching_dimensions[d], "url": url}
|
|
|
|
)
|
|
|
|
total_timeseries = len(timeseries)
|
|
|
|
timeseries = [LTS(id=ts) for ts in list(timeseries)[:100]]
|
|
|
|
return render_template(
|
|
|
|
"nav_dimension_list.html",
|
|
|
|
matching_dimensions=matching_dimensions_list,
|
|
|
|
timeseries=timeseries, total_timeseries=total_timeseries)
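
# Example navigation flow (hypothetical dimension/member names):
#
#     GET /nav                            all dimensions with match counts
#     GET /nav?.m=hostname                member selection for "hostname"
#     GET /nav?hostname=node1             remaining dimensions after filtering
#     GET /nav?hostname=node1&.m=measure  members of "measure" among matches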