mirror of
https://github.com/r4sas/Niflheim-api
synced 2025-01-25 22:24:13 +00:00
add importer from yggcrawl, update template
This commit is contained in:
parent
e68961f131
commit
a37f5b0ec4
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/venv
|
||||
results.json
|
||||
yggcrawl.log
|
@ -13,12 +13,14 @@ import traceback
|
||||
from threading import Lock, Thread
|
||||
from queue import Queue
|
||||
|
||||
#####
|
||||
|
||||
# Configuration to use TCP connection or unix domain socket for admin connection to yggdrasil
|
||||
useAdminSock = True
|
||||
yggAdminTCP = ('localhost', 9001)
|
||||
yggAdminSock = ('/var/run/yggdrasil.sock')
|
||||
|
||||
DB_PASSWORD = "password"
|
||||
DB_PASS = "password"
|
||||
DB_USER = "yggindex"
|
||||
DB_NAME = "yggindex"
|
||||
DB_HOST = "localhost"
|
||||
@ -27,6 +29,8 @@ DB_HOST = "localhost"
|
||||
saveDefaultNodeInfo = False
|
||||
removableFileds = ['buildname', 'buildarch', 'buildplatform', 'buildversion', 'board_name', 'kernel', 'model', 'system']
|
||||
|
||||
#####
|
||||
|
||||
class Worker(Thread):
|
||||
def __init__(self, tasks):
|
||||
Thread.__init__(self)
|
||||
@ -140,7 +144,7 @@ def insert_new_entry(ipv6, coords):
|
||||
nodejson = json.dumps(nodeinfo[ipv6])
|
||||
nodename = nodeinfo[ipv6]["name"] if "name" in nodeinfo[ipv6] else ""
|
||||
|
||||
dbconn = psycopg2.connect(host=DB_HOST, database=DB_NAME, user=DB_USER, password=DB_PASSWORD)
|
||||
dbconn = psycopg2.connect(host=DB_HOST, database=DB_NAME, user=DB_USER, password=DB_PASS)
|
||||
cur = dbconn.cursor()
|
||||
timestamp = str(int(time.time()))
|
||||
|
||||
|
69
api/importer.py
Normal file
69
api/importer.py
Normal file
@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env python
"""Import yggcrawl results (api/results.json) into the yggindex database.

Reads the JSON file produced by yggcrawl and upserts one row per node into
the ``yggindex`` and ``yggnodeinfo`` tables. Intended to be run as a script
right after the crawler finishes (see ygg-crawl.sh).
"""

import psycopg2, json, traceback

#####

# Database connection settings
DB_PASS = "password"
DB_USER = "yggindex"
DB_NAME = "yggindex"
DB_HOST = "localhost"

## Save in database node info fields like buildname, buildarch, etc. (True/False)?
saveDefaultNodeInfo = False
# NOTE(review): name kept as-is (misspelled "Fileds") so existing deployments
# that edit this config block keep working.
removableFileds = ['buildname', 'buildarch', 'buildplatform', 'buildversion', 'board_name', 'kernel', 'model', 'system']

#####

with open('api/results.json', 'r') as f:
    data = json.load(f)

# Crawl timestamp, shared by every row inserted in this run.
timestamp = data['meta']['generated_at_utc']

# connect to database
dbconn = psycopg2.connect(host=DB_HOST, database=DB_NAME, user=DB_USER, password=DB_PASS)
cur = dbconn.cursor()

try:
    # start importing
    for node in data['topology']:
        topo = data['topology'][node]
        ipv6 = topo['ipv6_addr']
        coords = '[%s]' % (' '.join(str(e) for e in topo['coords']))

        # Copy so that stripping default fields below does not mutate the
        # shared `data` structure.
        nodeinfo = dict(data['nodeinfo'].get(node, {}))

        if not saveDefaultNodeInfo:
            # remove default node info fields
            for field in removableFileds:
                nodeinfo.pop(field, None)

        # Prefer the node's self-reported name; fall back to the coords for
        # nodes the crawler could not reach, and to the address otherwise.
        if "name" in nodeinfo:
            nodename = nodeinfo['name']
        elif not topo['found']:
            nodename = '? %s' % coords
        else:
            nodename = ipv6

        nodejson = json.dumps(nodeinfo)

        try:
            # Parameterized upserts keyed on ipv6.
            cur.execute(
                "INSERT INTO yggindex (ipv6, coords, unixtstamp, name) VALUES(%s, %s, %s, %s) ON CONFLICT (ipv6) DO UPDATE SET unixtstamp=%s, coords=%s, name=%s;",
                (ipv6, coords, timestamp, nodename, timestamp, coords, nodename)
            )
            cur.execute(
                "INSERT INTO yggnodeinfo (ipv6, nodeinfo, timestamp) VALUES(%s, %s, %s) ON CONFLICT (ipv6) DO UPDATE SET nodeinfo=%s, timestamp=%s;",
                (ipv6, nodejson, timestamp, nodejson, timestamp)
            )
        except Exception:
            # Best-effort per node: log and continue with the next one.
            print("database error inserting")
            traceback.print_exc()

    dbconn.commit()
finally:
    # Always release the cursor/connection, even if the import aborts.
    cur.close()
    dbconn.close()
|
||||
|
@ -7,7 +7,7 @@ import time
|
||||
|
||||
#run every hour
|
||||
|
||||
DB_PASSWORD = "password"
|
||||
DB_PASS = "password"
|
||||
DB_USER = "yggindex"
|
||||
DB_NAME = "yggindex"
|
||||
DB_HOST = "localhost"
|
||||
@ -24,7 +24,7 @@ def age_calc(ustamp):
|
||||
return False
|
||||
|
||||
def get_nodes_for_count():
|
||||
dbconn = psycopg2.connect(host=DB_HOST,database=DB_NAME, user=DB_USER, password=DB_PASSWORD)
|
||||
dbconn = psycopg2.connect(host=DB_HOST,database=DB_NAME, user=DB_USER, password=DB_PASS)
|
||||
cur = dbconn.cursor()
|
||||
nodes = {}
|
||||
cur.execute("select * from yggindex")
|
||||
@ -39,7 +39,7 @@ def get_nodes_for_count():
|
||||
return str(len(nodes))
|
||||
|
||||
def add_to_db():
|
||||
dbconn = psycopg2.connect(host=DB_HOST,database=DB_NAME, user=DB_USER, password=DB_PASSWORD)
|
||||
dbconn = psycopg2.connect(host=DB_HOST,database=DB_NAME, user=DB_USER, password=DB_PASS)
|
||||
cur = dbconn.cursor()
|
||||
|
||||
cur.execute('''INSERT INTO timeseries(max, unixtstamp) VALUES(''' + "'" + get_nodes_for_count() + "'," + str(int(time.time())) + ''')''')
|
||||
|
@ -1,16 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import time
|
||||
import signal, sys, time
|
||||
from flask import Flask, render_template
|
||||
from flask_restful import Resource, Api
|
||||
import requests
|
||||
import psycopg2
|
||||
import json
|
||||
|
||||
app = Flask(__name__)
|
||||
api = Api(app)
|
||||
######
|
||||
|
||||
DB_PASSWORD = "password"
|
||||
DB_PASS = "password"
|
||||
DB_USER = "yggindex"
|
||||
DB_NAME = "yggindex"
|
||||
DB_HOST = "localhost"
|
||||
@ -19,6 +18,19 @@ DB_HOST = "localhost"
|
||||
# I'm using 1 hour because the crawler runs every 15 minutes
|
||||
ALIVE_SECONDS = 3600 # 1 hour
|
||||
|
||||
######
|
||||
|
||||
app = Flask(__name__)
|
||||
api = Api(app)
|
||||
|
||||
dbconn = psycopg2.connect(host=DB_HOST,\
|
||||
database=DB_NAME,\
|
||||
user=DB_USER,\
|
||||
password=DB_PASS)
|
||||
|
||||
def signal_handler(sig, frame):
|
||||
dbconn.close()
|
||||
sys.exit(0)
|
||||
|
||||
def age_calc(ustamp):
|
||||
if (time.time() - ustamp) <= ALIVE_SECONDS:
|
||||
@ -29,10 +41,6 @@ def age_calc(ustamp):
|
||||
# active nodes
|
||||
class nodesCurrent(Resource):
|
||||
def get(self):
|
||||
dbconn = psycopg2.connect(host=DB_HOST,\
|
||||
database=DB_NAME,\
|
||||
user=DB_USER,\
|
||||
password=DB_PASSWORD)
|
||||
cur = dbconn.cursor()
|
||||
nodes = {}
|
||||
cur.execute("select * from yggindex")
|
||||
@ -42,7 +50,6 @@ class nodesCurrent(Resource):
|
||||
|
||||
dbconn.commit()
|
||||
cur.close()
|
||||
dbconn.close()
|
||||
|
||||
nodelist = {}
|
||||
nodelist['yggnodes'] = nodes
|
||||
@ -53,10 +60,6 @@ class nodesCurrent(Resource):
|
||||
# nodes info
|
||||
class nodesInfo(Resource):
|
||||
def get(self):
|
||||
dbconn = psycopg2.connect(host=DB_HOST,\
|
||||
database=DB_NAME,\
|
||||
user=DB_USER,\
|
||||
password=DB_PASSWORD)
|
||||
cur = dbconn.cursor()
|
||||
nodes = {}
|
||||
cur.execute("select * from yggnodeinfo")
|
||||
@ -66,7 +69,6 @@ class nodesInfo(Resource):
|
||||
|
||||
dbconn.commit()
|
||||
cur.close()
|
||||
dbconn.close()
|
||||
|
||||
nodeinfo = {}
|
||||
nodeinfo['yggnodeinfo'] = nodes
|
||||
@ -77,10 +79,6 @@ class nodesInfo(Resource):
|
||||
# alive nodes count for latest 24 hours
|
||||
class nodes24h(Resource):
|
||||
def get(self):
|
||||
dbconn = psycopg2.connect(host=DB_HOST,\
|
||||
database=DB_NAME,\
|
||||
user=DB_USER,\
|
||||
password=DB_PASSWORD)
|
||||
cur = dbconn.cursor()
|
||||
nodes = {}
|
||||
cur.execute("SELECT * FROM timeseries ORDER BY unixtstamp DESC LIMIT 24")
|
||||
@ -89,21 +87,23 @@ class nodes24h(Resource):
|
||||
|
||||
dbconn.commit()
|
||||
cur.close()
|
||||
dbconn.close()
|
||||
|
||||
nodeinfo = {}
|
||||
nodeinfo['nodes24h'] = nodes
|
||||
|
||||
return nodeinfo
|
||||
|
||||
# alive nodes count for latest 24 hours
|
||||
class crawlResult(Resource):
|
||||
def get(self):
|
||||
with open('api/results.json', 'r') as f:
|
||||
data = json.load(f)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def fpage():
|
||||
dbconn = psycopg2.connect(host=DB_HOST,\
|
||||
database=DB_NAME,\
|
||||
user=DB_USER,\
|
||||
password=DB_PASSWORD)
|
||||
cur = dbconn.cursor()
|
||||
nodes = 0
|
||||
cur.execute("select * from yggindex")
|
||||
@ -114,15 +114,19 @@ def fpage():
|
||||
|
||||
dbconn.commit()
|
||||
cur.close()
|
||||
dbconn.close()
|
||||
|
||||
return render_template('index.html', nodes=nodes)
|
||||
|
||||
|
||||
#sort out the api request here for the url
|
||||
# sort out the api request here for the url
|
||||
api.add_resource(nodesCurrent, '/current')
|
||||
api.add_resource(nodesInfo, '/nodeinfo')
|
||||
api.add_resource(nodes24h, '/nodes24h')
|
||||
api.add_resource(crawlResult, '/result.json')
|
||||
|
||||
# register signal handler
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(host='::', port=3000)
|
||||
|
@ -13,9 +13,10 @@
|
||||
<center>
|
||||
<strong>Current Nodes online<br />
|
||||
<font size="18">{{ nodes }}</font></strong>
|
||||
<br /><br />
|
||||
You can see them in <i>Yggdrasil Interactive World map</i> <sup><a href="http://[21f:dd73:7cdb:773b:a924:7ec0:800b:221e]/">[⇗]</a> <a href="http://map.ygg/" target="_blank">[DNS]</a></sup>
|
||||
</center>
|
||||
<br /><br />
|
||||
<a href="http://[21f:dd73:7cdb:773b:a924:7ec0:800b:221e]/">Yggdrasil Interactive World map</a><br /><br />
|
||||
|
||||
<div class="wide"></div>
|
||||
<strong>Make an API request</strong><br />
|
||||
@ -23,17 +24,22 @@
|
||||
|
||||
Get a current list of active and online nodes:<br />
|
||||
<div class="apireq">
|
||||
http://[31a:fb8a:c43e:ca59::2]/current <sup><a href="/current" target="_blank">⇗</a></sup>
|
||||
http://[31a:fb8a:c43e:ca59::2]/current <sup><a href="http://[31a:fb8a:c43e:ca59::2]/current" target="_blank">[⇗]</a> <a href="http://nodelist.ygg/current" target="_blank">[DNS]</a></sup>
|
||||
</div>
|
||||
Nodeinfo from all current active nodes:<br />
|
||||
<div class="apireq">
|
||||
http://[31a:fb8a:c43e:ca59::2]/nodeinfo <sup><a href="/nodeinfo" target="_blank">⇗</a></sup>
|
||||
http://[31a:fb8a:c43e:ca59::2]/nodeinfo <sup><a href="http://[31a:fb8a:c43e:ca59::2]/nodeinfo" target="_blank">[⇗]</a> <a href="http://nodelist.ygg/nodeinfo" target="_blank">[DNS]</a></sup>
|
||||
</div>
|
||||
Active nodes count for last 24 hours:<br />
|
||||
<div class="apireq">
|
||||
http://[31a:fb8a:c43e:ca59::2]/nodes24h <sup><a href="/nodes24h" target="_blank">⇗</a></sup>
|
||||
http://[31a:fb8a:c43e:ca59::2]/nodes24h <sup><a href="http://[31a:fb8a:c43e:ca59::2]/nodes24h" target="_blank">[⇗]</a> <a href="http://nodelist.ygg/nodes24h" target="_blank">[DNS]</a></sup>
|
||||
</div>
|
||||
|
||||
<div class="wide"></div>
|
||||
<strong>Get latest crawler data</strong><br />
|
||||
You can download data in raw json provided by <a href="https://github.com/neilalexander/yggcrawl" target="_blank">yggcrawl</a>: result.json <sup><a href="http://[31a:fb8a:c43e:ca59::2]/result.json" target="_blank">[⇗]</a> <a href="http://nodelist.ygg/result.json" target="_blank">[DNS]</a></sup>
|
||||
<br /><br />
|
||||
|
||||
<div class="wide"></div>
|
||||
<small>Made with <a href="https://github.com/r4sas/Niflheim-api" target="_blank">fork</a> of <a href="https://github.com/yakamok/Niflheim-api" target="_blank">Niflheim-API</a> by yakamok</small>
|
||||
</div>
|
||||
|
12
ygg-crawl.sh
Executable file
12
ygg-crawl.sh
Executable file
@ -0,0 +1,12 @@
|
||||
#!/bin/sh
# Crawl the Yggdrasil network with yggcrawl, then import the results into
# the Niflheim-API database. Intended to be run periodically (e.g. cron).

YGGCRAWL="/opt/yggcrawl/yggcrawl" # path to yggcrawl binary
YGGAPIPATH="/opt/yggdrasil-api"   # path to Niflheim-API directory

CRAWLPEER="tcp://127.0.0.1:12345" # Yggdrasil peer address
CRAWLFILE="api/results.json"      # crawler output, read by api/importer.py
CRAWLRETR=3                       # crawler retry count

# Quote all expansions to survive paths with spaces; abort if the working
# directory is missing rather than crawling/importing in the wrong place.
cd "$YGGAPIPATH" || exit 1
"$YGGCRAWL" -peer "$CRAWLPEER" -retry "$CRAWLRETR" -file "$CRAWLFILE" > api/yggcrawl.log 2>&1
venv/bin/python api/importer.py >> api/yggcrawl.log 2>&1
|
Loading…
x
Reference in New Issue
Block a user