diff --git a/app.py b/app.py
index 3108b9d..7827653 100644
--- a/app.py
+++ b/app.py
@@ -36,6 +36,11 @@ def report():
             n_ts += 1
     return jsonify({ "success": True, "timeseries": n_ts, "datapoints": n_dp })
 
+@app.route("/ts/<id>")
+def get_timeseries(id):
+    ts = LTS(id=id)
+    return jsonify({"description": ts.description, "data": ts.data})
+
 def verify_node(d):
     node = d["auth"]["node"]
     timestamp = d["auth"]["timestamp"]
diff --git a/ltsdb_json.py b/ltsdb_json.py
index 6eb2f3a..bb155cd 100644
--- a/ltsdb_json.py
+++ b/ltsdb_json.py
@@ -11,19 +11,29 @@ class LTS:
     base_dir = "data"
     limit = 1000
 
-    def __init__(self, description):
-        # Oh, I think we need to be able to load by hash, too
-        canonical_description = {x: description[x] for x in sorted(description.keys())}
-        self.description = canonical_description
-        serialized_description = json.dumps(canonical_description)
-        m = hashlib.sha256()
-        m.update(bytes(serialized_description, encoding="UTF-8"))
-        self.filename = self.base_dir + "/" + m.hexdigest()
+    def __init__(self, description=None, id=None):
+        # Construct either from a description (canonicalized, then hashed
+        # to an id) or directly from a previously issued id.
+        if description is not None:
+            canonical_description = {x: description[x] for x in sorted(description.keys())}
+            self.description = canonical_description
+            serialized_description = json.dumps(canonical_description)
+            m = hashlib.sha256()
+            m.update(bytes(serialized_description, encoding="UTF-8"))
+            id = m.hexdigest()
+        elif id is None:
+            raise ValueError("LTS() requires either a description or an id")
+        # ids are SHA-256 hex digests; reject anything else so an
+        # attacker-supplied id cannot escape base_dir (path traversal).
+        if len(id) != 64 or any(c not in "0123456789abcdef" for c in id):
+            raise ValueError("invalid timeseries id")
+        self.filename = self.base_dir + "/" + id
         try:
             with open(self.filename, "r") as fh:
                 fcntl.flock(fh, fcntl.LOCK_SH)
                 d = json.load(fh)
                 self.new = False
+                self.description = d["description"]
                 self.data = d["data"]
         except FileNotFoundError as e:
             self.new = True