diff --git a/api/niflheim-api.py b/api/niflheim-api.py
index 6e4aa27..5fd4528 100644
--- a/api/niflheim-api.py
+++ b/api/niflheim-api.py
@@ -93,6 +93,25 @@ class nodes24h(Resource):
         return nodeinfo
+
+# alive nodes count for latest 30 days
+class nodes30d(Resource):
+    def get(self):
+        cur = dbconn.cursor()
+        nodes = {}
+        cur.execute("SELECT * FROM timeseries ORDER BY unixtstamp DESC LIMIT 24 * 30")
+        for i in cur.fetchall():
+            nodes[i[1]] = i[0]
+
+        dbconn.commit()
+        cur.close()
+
+        nodeinfo = {}
+        nodeinfo['nodes30d'] = nodes
+
+        return nodeinfo
+
+
 # alive nodes count for latest 24 hours
 class crawlResult(Resource):
     def get(self):
@@ -122,6 +141,7 @@ def fpage():
 api.add_resource(nodesCurrent, '/current')
 api.add_resource(nodesInfo, '/nodeinfo')
 api.add_resource(nodes24h, '/nodes24h')
+api.add_resource(nodes30d, '/nodes30d')
 api.add_resource(crawlResult, '/result.json')
 # regirster signal handler
diff --git a/api/templates/index.html b/api/templates/index.html
index 56c96c7..6d1cf2e 100644
--- a/api/templates/index.html
+++ b/api/templates/index.html
@@ -34,11 +34,17 @@
 http://[31a:fb8a:c43e:ca59::2]/nodes24h [⇗] [DNS]
+ Active nodes count for last 30 days:
+
+ http://[31a:fb8a:c43e:ca59::2]/nodes30d [⇗] [DNS]
+
 Get latest crawler data
- You can download data in raw json provided by yggcrawl: result.json [⇗] [DNS]
-
 
+ You can download data in raw json provided by yggcrawl:
+
+ http://[31a:fb8a:c43e:ca59::2]/result.json [⇗] [DNS]
+
 Made with fork of Niflheim-API by yakamok
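
Not part of the diff: a minimal usage sketch for the new endpoint, assuming the API is reachable at the IPv6 address shown in index.html above and that the Python requests package is available. The response shape follows the nodes30d handler in the diff: a single "nodes30d" key whose value is a dict built from the newest 24 * 30 rows of the timeseries table.

# query_nodes30d.py -- illustrative sketch only, not part of this change
import requests

# Address taken from index.html above; substitute your own instance.
URL = "http://[31a:fb8a:c43e:ca59::2]/nodes30d"

resp = requests.get(URL, timeout=10)
resp.raise_for_status()

# The handler maps column 1 of each timeseries row to column 0,
# for the newest 24 * 30 rows ordered by unixtstamp.
nodes30d = resp.json()["nodes30d"]
for key, value in sorted(nodes30d.items()):
    print(key, value)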