Compare commits

..

No commits in common. "master" and "r3.08" have entirely different histories.

9 changed files with 53 additions and 260 deletions

90
app.py
View File

@ -5,9 +5,7 @@ import logging
import logging.config
import os
from collections import defaultdict
from flask import (Flask, request, jsonify, abort, render_template, url_for)
from flask import (Flask, request, jsonify, abort, render_template)
from ltsdb_json import LTS
from dashboard import Dashboard
@ -99,9 +97,8 @@ def verify_node(d):
if "/" in node:
raise ValueError("invalid node name %s", node)
try:
fn = "config/" + node
log.info("getting client config from %s", fn)
with open(fn) as fh:
log.info("getting client config from %s", "config/" + node)
with open("config/" + node) as fh:
node_conf = json.load(fh)
except Exception as e:
log.warning("got %s opening %s", e, "config/" + node)
@ -115,19 +112,8 @@ def verify_node(d):
if timestamp > node_conf["last"]:
node_conf["last"] = timestamp
os.replace("config/" + node, "config/" + node + ".old")
tmpfn = fn + "." + str(os.getpid())
oldfn = fn + ".old"
with open(tmpfn, "w") as fh:
with open("config/" + node, "w") as fh:
json.dump(node_conf, fh) # XXX
try:
os.unlink(oldfn)
except FileNotFoundError:
pass
try:
os.link(fn, oldfn)
except FileNotFoundError:
pass
os.rename(tmpfn, fn)
return node
else:
abort(409, "timestamp out of sync")
@ -154,71 +140,3 @@ def dashboard_index():
def dashboard_file(dashboard):
d = Dashboard("dashboards/" + dashboard + ".json")
return d.as_html()
@app.get("/nav")
def nav():
    """Interactive navigation over the time-series index.

    Starts with a list of all dimensions, the number of matching time
    series and a truncated list of series.  If a dimension is chosen
    (query parameter ".m"), displays a choice of that dimension's
    members; choosing one or more members goes back to the list of
    (remaining) dimensions.
    """
    # The index maps dimension -> member -> list of time-series ids.
    # Shared lock so we don't read an index that is being rebuilt.
    with open("data/.index") as fh:
        fcntl.flock(fh, fcntl.LOCK_SH)
        index = json.load(fh)
    # Intersection of the series matching each requested filter.
    # None means "no filter applied yet" (distinct from an empty result).
    timeseries = None
    for k, v in request.args.lists():
        if k[0] == ".":
            # Parameters starting with "." are control parameters, not filters.
            continue
        log.debug("search: %s -> %s", k, v)
        if timeseries is None:
            timeseries = set()
            log.debug("search: %s: %s", k, index.get(k))
            for m in v:
                # Unknown dimensions or members contribute nothing instead
                # of raising KeyError on a hand-edited URL.
                timeseries |= set(index.get(k, {}).get(m, ()))
        else:
            # Renamed from "filter": don't shadow the builtin.
            matching = set()
            for m in v:
                matching |= set(index.get(k, {}).get(m, ()))
            timeseries &= matching
    if timeseries is None:
        # No filter given: start from every known time series.
        timeseries = set()
        for mc in index.values():
            for tsl in mc.values():
                timeseries |= set(tsl)
    if d := request.args.get(".m"):
        # A dimension was chosen: offer only members that still match.
        members = []
        for m, tsl in index[d].items():
            if set(tsl) & timeseries:
                members.append(m)
        return render_template("nav_member_select.html", dimension=d, members=members)
    else:
        params = request.args.to_dict(flat=False)
        matching_dimensions = defaultdict(int)
        for d, mc in index.items():
            if d in params:
                continue  # already filtered on this dimension
            for m, tsl in mc.items():
                mtsl = set(tsl) & timeseries
                if mtsl:
                    matching_dimensions[d] += len(mtsl)
        matching_dimensions_list = []
        for d in matching_dimensions:
            # ".m" is overwritten on each iteration; url_for builds the
            # member-selection link for this dimension.
            params[".m"] = d
            url = url_for("nav", **params)
            app.logger.debug(f"{d=} {url=}")
            matching_dimensions_list.append(
                {"name": d, "count": matching_dimensions[d], "url": url}
            )
        total_timeseries = len(timeseries)
        # Only show the first 100 series to keep the page small.
        timeseries = [LTS(id=ts) for ts in list(timeseries)[:100]]
        return render_template(
            "nav_dimension_list.html",
            matching_dimensions=matching_dimensions_list,
            timeseries=timeseries, total_timeseries=total_timeseries)
#

View File

@ -1,36 +0,0 @@
#!/usr/bin/python3
"""Record the PostgreSQL server version as a time-series observation."""
import re
import subprocess
import time

import ltsdb_record

p = subprocess.run(
    ["psql", "-c", "select version()", "-A", "-t", "-X", "-q"],
    stdout=subprocess.PIPE, universal_newlines=True,
)

# This works only for PostgreSQL 10.x and above. I don't expect to encounter
# older versions any more.
# Bug fix: the dots in the version string must be escaped — an unescaped "."
# matches any character, so e.g. "PostgreSQL 14x2" would have been accepted.
m = re.match(r"^PostgreSQL (\d+)\.(\d+) ", p.stdout)

# Initialize the report before the match check so a non-matching version
# string yields an empty report instead of a NameError.
report0 = []
if m:
    # Encode major.minor as a single float, e.g. 14.02 for version 14.2.
    version = int(m.group(1)) + int(m.group(2)) / 100
    report0.append({"measure": "postgresql_version", "unit": "version",
                    "value": version})

now = time.time()
report = [
    {
        "description": {
            "hostname": ltsdb_record.node,
            "measure": r["measure"],
            "unit": r["unit"],
        },
        "data": [
            [now, r["value"]],
        ],
    }
    for r in report0
]
success = ltsdb_record.record_observations(report)
# Shell convention: exit status 0 on success, 1 on failure.
exit(1 - success)

View File

@ -1,11 +1,14 @@
#!/usr/bin/python3
import argparse
import hmac
import json
import os
import socket
import ssl
import time
import ltsdb_record
import requests
ap = argparse.ArgumentParser()
ap.add_argument("--verbose", action="store_true")
@ -13,20 +16,23 @@ ap.add_argument("hostname")
ap.add_argument("port", type=int, default=443, nargs="?")
args = ap.parse_args()
# It's a bit weird that this works.
myhostname = socket.gethostbyaddr(socket.gethostname())[0]
now = time.time()
report0 = []
try:
with socket.create_connection((args.hostname, args.port)) as sock:
with socket.create_connection((args.hostname, args.port)) as sock:
context = ssl.create_default_context()
try:
with context.wrap_socket(sock, server_hostname=args.hostname) as ssock:
cert = ssock.getpeercert()
not_after = ssl.cert_time_to_seconds(cert["notAfter"])
delta = not_after - now
except (ssl.SSLCertVerificationError, ConnectionRefusedError) as e:
print("got error %s; setting delta to 0" % e)
except ssl.SSLCertVerificationError as e:
print("got error %s; setting delta to 0", e)
delta = 0
report0.append({ "measure": "tls_cert_ttl", "unit": "s", "value": delta })
report0.append({ "measure": "tls_cert_ttl", "unit": "s", "value": delta })
report = [
{
@ -43,5 +49,32 @@ report = [
for r in report0
]
success = ltsdb_record.record_observations(report)
exit(1 - success)
for dir in (".", os.environ["HOME"] + "/.config/ltsdb", "/etc/ltsdb"):
try:
with open(dir + "/config.json") as fh:
client_config = json.load(fh)
baseurl = client_config["server"]
break
except FileNotFoundError:
pass
while True:
for r in report:
node = myhostname
timestamp = time.time()
msg = (node + " " + str(timestamp)).encode("UTF-8")
digest = hmac.new(client_config["key"].encode("UTF-8"), msg, "SHA256").hexdigest()
r["auth"] = {
"node": node,
"timestamp": timestamp,
"hmac": digest,
}
#pprint.pp(report)
r = requests.post(baseurl + "report", json=report)
print(r)
if r.status_code == 200:
exit(0)
elif r.status_code == 409:
time.sleep(0.5 + random.random())
continue
else:
exit(1)

View File

@ -27,11 +27,7 @@ class Dashboard:
if w.get("multi"):
ts_list = LTS.find(w["data"][0])
for ts in ts_list:
try:
tso = LTS(id=ts)
except json.decoder.JSONDecodeError as e:
log.error("%s contains bad data: %s: Skipping", ts, e)
continue
if not tso.data:
log.warning("%s has no data: Skipping", tso.id)
continue
@ -392,13 +388,9 @@ class TimeSeries(Widget):
max_value = max([d[3] if len(d) >= 4 else d[1] for d in self.lts.data])
max_value = max(max_value, 0.001) # ensure positive
unit = self.lts.description["unit"]
if self.yscale == "log":
try:
min_value = min(d[1] for d in self.lts.data if d[1] > 0)
if unit == "s":
self.extra["min"] = "%g" % min_value + " (" + self.format_time(min_value) + ")"
else:
self.extra["min"] = "%g" % min_value
except ValueError:
# no non-negative values
@ -413,10 +405,6 @@ class TimeSeries(Widget):
# Make sure min_value is less than max_value
min_value /= 2
log.debug("min_value = %s, max_value = %s", min_value, max_value)
if unit == "s":
self.extra["max"] = "%g" % max_value + " (" + self.format_time(max_value) + ")"
self.extra["last"] = "%g" % data[-1][1] + " (" + self.format_time(data[-1][1]) + ")"
else:
self.extra["max"] = "%g" % max_value
self.extra["last"] = "%g" % data[-1][1]
log.debug("collecting data")
@ -472,29 +460,6 @@ class TimeSeries(Widget):
log.debug("in as_html")
return Markup(render_template("timeseries.html", widget=self))
def format_time(self, seconds):
    """Return *seconds* formatted as a human-readable duration.

    Picks the largest unit (years, days, h, m, s, ms, µs) for which the
    value is >= 1 and formats it with two decimal places, e.g.
    90 -> "1.50 m".  Does not read any instance state.
    """
    value = seconds
    unit = "s"
    if value >= 365.25 * 86400:
        # Julian year: keeps day/year conversions consistent.
        value /= 365.25 * 86400
        unit = "years"
    elif value >= 86400:
        value /= 86400
        unit = "days"
    elif value >= 3600:
        value /= 3600
        unit = "h"
    elif value >= 60:
        value /= 60
        unit = "m"
    elif value >= 1:
        pass
    elif value >= 0.001:
        value *= 1000
        unit = "ms"
    elif value > 0:
        # Bug fix: positive sub-millisecond durations used to render as
        # the useless "0.00 s"; show them as microseconds instead.
        # Zero (and negative) values still fall through as seconds.
        value *= 1_000_000
        unit = "µs"
    return f"{value:.2f} {unit}"
class Gauge(Widget):
def __init__(self, d):
super().__init__(d)
@ -536,4 +501,3 @@ class Gauge(Widget):
self.lastvalue_formatted = Markup(f"<span class='value'>{value:.2f}</span><span class='unit'>{unit}</unit>")
return Markup(render_template("gauge.html", widget=self))
# vim: sw=4

View File

@ -41,11 +41,7 @@ class LTS:
with open(self.filename, "x+") as fh:
fcntl.flock(fh, fcntl.LOCK_EX)
json.dump({"description": self.description, "data": self.data}, fh)
log.info(f"Created {self.filename}")
self.rebuild_index()
except json.decoder.JSONDecodeError as e:
log.exception(f"Cannot decode JSON in {self.filename}: {e}")
raise
def pop(self, i):
# Pop the element at index i and adjust the min/max values of the
@ -131,11 +127,7 @@ class LTS:
(_, _, hash) = fn.rpartition("/")
with open(fn, "r") as fh:
fcntl.flock(fh, fcntl.LOCK_SH)
try:
d = json.load(fh)
except json.decoder.JSONDecodeError as e:
log.exception(f"Cannot decode JSON in {fn}: {e}")
raise
for k, v in d["description"].items():
d1 = index.setdefault(k, {})
d2 = d1.setdefault(v, [])

View File

@ -2,7 +2,6 @@
import logging
import logging.config
import math
import os
import socket
import statistics
@ -54,29 +53,6 @@ class DiskFullPredictor:
log.info("d = %s, current_used_bytes = %s, current_usable_bytes = %s", m, current_used_bytes, current_usable_bytes)
tuf = now - lts.data[i][0]
break
else:
# Try always use the minimum of a range.
# We prefer the first datapoint
first_used_bytes = lts.data[0][2] if len(lts.data[0]) >= 4 else lts.data[0][1]
# But if that's not useable we search the whole timeseries for the
# minimum
if first_used_bytes >= current_used_bytes:
first_used_bytes = current_used_bytes
first_i = None
for i in range(len(lts.data)):
used_bytes = lts.data[i][2] if len(lts.data[i]) >= 4 else lts.data[i][1]
if used_bytes < first_used_bytes:
first_used_bytes = used_bytes
first_i = i
else:
first_i = 0
if first_i is not None:
historic_growth = current_used_bytes / first_used_bytes
future_growth = current_usable_bytes / current_used_bytes
tuf = math.log(future_growth) / math.log(historic_growth) * (now - lts.data[first_i][0])
tuf = max(tuf, now - lts.data[first_i][0])
tuf = min(tuf, 1E9)
desc = {**lts.description,
"measure": "time_until_disk_full",
"node": node,

View File

@ -8,9 +8,8 @@
body {
font-family: sans-serif;
}
th, td {
th {
text-align: left;
vertical-align: baseline;
}
main {
display: flex;

View File

@ -1,26 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<!-- Dimension list: each remaining dimension links to its member-selection
     page and shows how many time series still match; below is a truncated
     listing of the matching series themselves. -->
<!-- Bug fix: viewport properties are comma-separated, not semicolon-separated;
     some browsers reject the whole declaration otherwise. -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta charset="utf-8">
<style>
</style>
</head>
<body>
<ul>
{% for d in matching_dimensions %}
<li><a href="{{d['url']}}">{{d.name}}</a> ({{d.count}})</li>
{% endfor %}
</ul>
{{timeseries|length}}/{{total_timeseries}} timeseries:
<ul>
{% for ts in timeseries %}
<li>
<a href="/v?ts={{ts.id}}">{{ts.description}}</a>
</li>
{% endfor %}
</ul>
</body>
</html>

View File

@ -1,27 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<!-- Member selection for one dimension: existing filters are carried along
     as hidden inputs; the <select> offers the members of the chosen
     dimension (context variables "dimension" and "members"). -->
<!-- Bug fix: viewport properties are comma-separated, not semicolon-separated;
     some browsers reject the whole declaration otherwise. -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta charset="utf-8">
<style>
</style>
</head>
<body>
<form>
{# Loop variables renamed (dim/val): the old name "dimension" shadowed the
   context variable "dimension" used by the <select> below. #}
{% for dim, vals in request.args.lists() %}
{% for val in vals %}
{% if dim[0] != "." %}
<input name="{{dim}}" value="{{val}}" type="hidden">
{% endif %}
{% endfor %}
{% endfor %}
<select name="{{dimension}}" multiple size="{{members|length}}">
{% for member in members %}
<option>{{member}}</option>
{% endfor %}
</select>
<input type="submit">
</form>
</body>
</html>