mirror of https://github.com/r4sas/Niflheim-api
R4SAS
2 years ago
21 changed files with 1119 additions and 101 deletions
@ -0,0 +1,14 @@
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env python
"""Dump the 128-bit yggdrasil address of every crawled node key to api/addresses.txt."""

import json

from pk2addr import keyTo128BitAddress

with open('api/result.json', 'r') as f:
    data = json.load(f)

# Context manager guarantees the output file is flushed and closed even if
# a conversion raises (the original leaked the handle on exceptions).
with open("api/addresses.txt", "w") as addlist:
    # Iterate keys directly; the values were never used.
    for key in data['yggnodes']:
        addlist.write(keyTo128BitAddress(key) + "\n")
@ -0,0 +1,24 @@
@@ -0,0 +1,24 @@
|
||||
### Configuration file ###

### Database configuration ###
# Credentials for the index database (backend/driver not visible here —
# confirm against the importer code).
DB_PASS = "password"
DB_USER = "yggindex"
DB_NAME = "yggindex"
DB_HOST = "localhost"

# Number of connection attempts before giving up.
DB_RETRIES = 3
# Seconds to wait while idle before reconnecting.
DB_RECONNIDLE = 2

# Count a peer as alive if it was available not more than this many seconds
# ago.  1 hour is used because the crawler runs every 30 minutes.
ALIVE_SECONDS = 3600  # 1 hour

### Built-in crawler configuration ###
# Use a unix domain socket (True) or a TCP connection (False) for the admin
# connection to yggdrasil.
useAdminSock = True
yggAdminTCP = ('localhost', 9001)
# NOTE(review): the parentheses do NOT make this a tuple — it is a plain
# string path (a 1-tuple would need a trailing comma).
yggAdminSock = ('/var/run/yggdrasil.sock')

# Save in database node info fields like buildname, buildarch, etc. (True/False)?
saveDefaultNodeInfo = False
# Fields stripped from nodeinfo when saveDefaultNodeInfo is False.
# NOTE(review): "Fileds" is a typo for "Fields", kept as-is because other
# modules import this name.
removableFileds = ['buildname', 'buildarch', 'buildplatform', 'buildversion', 'board_name', 'kernel', 'model', 'system']
@ -0,0 +1,112 @@
@@ -0,0 +1,112 @@
|
||||
import pygraphviz as pgv |
||||
import time |
||||
import json |
||||
import networkx as nx |
||||
from networkx.algorithms import centrality |
||||
import urllib.request |
||||
|
||||
def position_nodes(nodes, edges):
    """Build a graphviz graph from nodes/edges, lay it out with 'neato',
    and return the positioned pygraphviz AGraph."""
    graph = pgv.AGraph(strict=True, directed=False, size='10!')

    for node in nodes.values():
        graph.add_node(node.ip, label=node.label, coords=node.coords)

    for link in edges:
        graph.add_edge(link.a.ip, link.b.ip, len=1.0)

    # Tight epsilon and a high iteration cap give a stable, converged layout.
    graph.layout(prog='neato', args='-Gepsilon=0.0001 -Gmaxiter=100000')

    return graph
||||
|
||||
def compute_betweenness(G):
    """Return the betweenness-centrality dict for every node of G.

    G is a pygraphviz AGraph; it is first mirrored into a networkx Graph
    because the centrality algorithms operate on networkx structures.
    """
    ng = nx.Graph()
    for start in G.iternodes():
        for other in G.neighbors(start):
            ng.add_edge(start, other)

    # The original re-assigned every value of the result dict onto itself in
    # a loop; that was a no-op and has been removed.
    return centrality.betweenness_centrality(ng)
||||
|
||||
def canonalize_ip(ip):
    """Left-pad every colon-separated group of *ip* to 4 hex digits.

    Note: this does not expand a '::' abbreviation — empty groups simply
    become '0000'.
    """
    padded = [group.rjust(4, '0') for group in ip.split(':')]
    return ':'.join(padded)
||||
|
||||
def load_db():
    """Fetch the crawler result over yggdrasil and return a {canonical_ip: name} dict.

    NOTE(review): this iterates the *keys* of "yggnodes" and splits each key
    on whitespace, keeping an entry only when the split yields at least two
    fields.  If the keys are plain node keys/addresses containing no spaces,
    the `len(v) > 1` filter drops everything and this returns an empty dict —
    confirm the expected key format against the API producing result.json.
    """
    url = "http://[316:c51a:62a3:8b9::2]/result.json"
    f = urllib.request.urlopen(url)
    return dict(
        [
            (canonalize_ip(v[0]), v[1]) for v in
            [
                l.split(None)[:2] for l in
                json.loads(f.read())["yggnodes"].keys()
            ]
            if len(v) > 1
        ]
    )
||||
|
||||
def get_graph_json(G):
    """Serialize the positioned graph G into the JSON blob consumed by the web map.

    Returns a JSON string with 'created' (unix timestamp), 'nodes' (position,
    label, colour, size, centrality per node) and 'edges' (endpoint id pairs).
    """
    # Largest neighbour count, used to normalise each node's colour ratio.
    max_neighbors = 1
    for n in G.iternodes():
        neighbors = len(G.neighbors(n))
        if neighbors > max_neighbors:
            max_neighbors = neighbors
    print('Max neighbors: %d' % max_neighbors)

    out_data = {
        'created': int(time.time()),
        'nodes': [],
        'edges': []
    }

    centralities = compute_betweenness(G)
    db = load_db()  # fallback display names keyed by canonical IP

    for n in G.iternodes():
        neighbor_ratio = len(G.neighbors(n)) / float(max_neighbors)
        # 'pos' is written by the layout pass as "x,y".
        pos = n.attr['pos'].split(',', 1)
        centrality = centralities.get(n.name, 0)
        # Node size grows linearly with betweenness centrality.
        size = 5*(1 + 1*centrality)
        name = db.get(canonalize_ip(n.name))
        # If label isn't the default value, set name to that instead
        if n.attr['label'] != n.name.split(':')[-1]: name = n.attr['label']

        out_data['nodes'].append({
            'id': n.name,
            'label': name if name else n.attr['label'],
            'name': name,
            'coords': n.attr['coords'],
            'x': float(pos[0]),
            'y': float(pos[1]),
            # Grey-to-black gradient: better-connected nodes are darker.
            'color': _gradient_color(neighbor_ratio, [(100, 100, 100), (0, 0, 0)]),
            'size': size,
            'centrality': '%.4f' % centrality
        })

    for e in G.iteredges():
        out_data['edges'].append({
            'sourceID': e[0],
            'targetID': e[1]
        })

    return json.dumps(out_data)
||||
|
||||
|
||||
def _gradient_color(ratio, colors): |
||||
jump = 1.0 / (len(colors) - 1) |
||||
gap_num = int(ratio / (jump + 0.0000001)) |
||||
|
||||
a = colors[gap_num] |
||||
b = colors[gap_num + 1] |
||||
|
||||
ratio = (ratio - gap_num * jump) * (len(colors) - 1) |
||||
|
||||
r = int(a[0] + (b[0] - a[0]) * ratio) |
||||
g = int(a[1] + (b[1] - a[1]) * ratio) |
||||
b = int(a[2] + (b[2] - a[2]) * ratio) |
||||
|
||||
return '#%02x%02x%02x' % (r, g, b) |
@ -0,0 +1,32 @@
@@ -0,0 +1,32 @@
|
||||
# Author: silentfamiliar@matrix |
||||
|
||||
def keyTo128BitAddress(key): |
||||
key260bits = int("1" + key, 16) # "1" to avoid trimming leading 0s |
||||
source_cursor = 4 # skip the "1" |
||||
|
||||
# loop over each bit while NOT(bit) is 1 |
||||
while (1 & ~(key260bits >> (260 - source_cursor - 1))) == 1: |
||||
source_cursor = source_cursor + 1 |
||||
|
||||
ones_count = source_cursor - 4 # 1s to count minus 4 which was our initial offset |
||||
source_cursor = source_cursor + 1 # skipping trailing 0 |
||||
|
||||
dest = (0x2 << 8) | ones_count # set header |
||||
bitsToAdd = 128 - 16 # header was 2 bytes which is 16 bit |
||||
|
||||
# append needed amount of NOT key starting from source_cursor |
||||
dest = (dest << bitsToAdd) | ((2**bitsToAdd - 1) & ~(key260bits >> (260 - source_cursor - bitsToAdd))) |
||||
|
||||
# the long addr |
||||
dest_hex = "0" + hex(dest)[2:] |
||||
# format ipv6 128bit addr |
||||
|
||||
addr = "" |
||||
for i in range(8): |
||||
piece = int(dest_hex[i*4:i*4+4], 16) |
||||
if (len(addr) != 0) and not (addr[len(addr)-2:len(addr)] == "::"): |
||||
addr += ":" |
||||
if (piece != 0): |
||||
addr += hex(piece)[2:] |
||||
|
||||
return addr |
File diff suppressed because one or more lines are too long
@ -0,0 +1,202 @@
@@ -0,0 +1,202 @@
|
||||
* { |
||||
margin: 0; |
||||
padding: 0; |
||||
} |
||||
html, body {
    background: #F5F5F5;
    /* Generic family keyword must be unquoted and hyphenated: the original
       quoted 'sans serif' names a (nonexistent) specific font, so browsers
       fell back to their default instead of the sans-serif generic. */
    font-family: sans-serif;
}
||||
|
||||
#header { |
||||
background: #FFF; |
||||
height: 48px; |
||||
line-height: 48px; |
||||
position: absolute; |
||||
top: 0; |
||||
left: 0; |
||||
right: 0; |
||||
} |
||||
|
||||
h1 { |
||||
font-family: 'Inconsolata', 'Consolas', 'Ubuntu Mono', monospace; |
||||
font-size: 32px; |
||||
float: left; |
||||
padding: 0 40px; |
||||
font-weight: 100; |
||||
color: #333; |
||||
} |
||||
|
||||
small { |
||||
font-size: 16px; |
||||
} |
||||
|
||||
.grey { |
||||
color: #999; |
||||
} |
||||
|
||||
ul { |
||||
list-style-type: none; |
||||
height: 100%; |
||||
} |
||||
li { |
||||
float: left; |
||||
height: 100%; |
||||
} |
||||
|
||||
#header a { |
||||
color: #777; |
||||
padding: 0 20px; |
||||
font-family: 'Open Sans', 'sans-serif'; |
||||
font-size: 14px; |
||||
text-decoration: none; |
||||
height: 100%; |
||||
display: block; |
||||
|
||||
} |
||||
|
||||
#header a.selected { |
||||
background: #DDD; |
||||
} |
||||
|
||||
#header a:hover { |
||||
background: #EEE; |
||||
} |
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#general-info { |
||||
position: absolute; |
||||
bottom: 0; |
||||
left: 0; |
||||
font-size: 10px; |
||||
padding: 5px; |
||||
line-height: 150%; |
||||
z-index: 999; |
||||
} |
||||
|
||||
|
||||
#sidebar { |
||||
padding: 20px 20px; |
||||
background: rgba(220, 220, 220, 0.8); |
||||
position: absolute; |
||||
top: 0; |
||||
right: 0; |
||||
/*bottom: 0;*/ |
||||
max-height: calc(100vh - 40px); |
||||
min-width: 250px; |
||||
z-index: 999; |
||||
overflow-y: scroll; |
||||
font-size: 12px; |
||||
} |
||||
|
||||
#search-wrapper { |
||||
width: 100%; |
||||
margin-bottom: 20px; |
||||
/*position: absolute;*/ |
||||
/*top: 0;*/ |
||||
/*right: 10px;*/ |
||||
/*z-index: 5;*/ |
||||
font-size: 10px; |
||||
} |
||||
#search-box { |
||||
width: 100%; |
||||
padding: 5px; |
||||
outline: none; |
||||
border: none; |
||||
/*border: 1px solid #CCC;*/ |
||||
margin: -5px; |
||||
font-size: inherit; |
||||
} |
||||
|
||||
#sidebar a { |
||||
color: #333; |
||||
text-decoration: none; |
||||
} |
||||
#sidebar a:hover { |
||||
color: #AAA; |
||||
} |
||||
|
||||
#sidebar h2 { |
||||
text-align: center; |
||||
margin-bottom: 5px; |
||||
color: #29BBFF; |
||||
} |
||||
|
||||
#node-info table { |
||||
width: 100%; |
||||
} |
||||
|
||||
#node-info td + td { |
||||
text-align: right; |
||||
} |
||||
|
||||
#node-info strong { |
||||
color: #29BBFF; |
||||
letter-spacing: 1px; |
||||
} |
||||
|
||||
.tt { |
||||
font-family: monospace; |
||||
font-size: 10px; |
||||
} |
||||
|
||||
.autocomplete-suggestions { |
||||
font-family: monospace; |
||||
font-size: 10px; |
||||
border: 1px solid #FFF; |
||||
background: #FFF; |
||||
overflow: auto; |
||||
color: #555; |
||||
} |
||||
.autocomplete-suggestion { |
||||
padding: 2px 5px; |
||||
white-space: nowrap; |
||||
overflow: hidden; |
||||
} |
||||
.autocomplete-selected { background: #7FD6FF; } |
||||
.autocomplete-suggestions strong { |
||||
color: #000000; |
||||
} |
||||
|
||||
#content-wrapper { |
||||
position: absolute; |
||||
top: 48px; |
||||
left: 0; |
||||
right: 0; |
||||
bottom: 0; |
||||
} |
||||
|
||||
#map { |
||||
position: absolute; |
||||
width:100%; |
||||
height:100%; |
||||
} |
||||
|
||||
#content { |
||||
width: 500px; |
||||
margin: 30px auto; |
||||
font-family: sans-serif; |
||||
font-size: 16px; |
||||
color: #333; |
||||
line-height: 28px; |
||||
letter-spacing: 0.2px; |
||||
} |
||||
|
||||
#content h2 { |
||||
text-align: center; |
||||
margin-bottom: 20px; |
||||
color: #29BBFF; |
||||
} |
||||
|
||||
#content h3 { |
||||
margin-top: 20px; |
||||
text-align: center; |
||||
color: #29BBFF; |
||||
} |
||||
|
||||
#content a { |
||||
color: #29BBFF; |
||||
} |
@ -0,0 +1,17 @@
@@ -0,0 +1,17 @@
|
||||
{% extends "map/base.html" %} |
||||
|
||||
{% block content %} |
||||
<div id="content-wrapper"> |
||||
<div id="content"> |
||||
<h2>About</h2> |
||||
<p>This is a project that aims to demystify what the <a href="https://yggdrasil-network.github.io/">Yggdrasil</a> network is like. Currently the only thing we have here is a map of the spanning tree subset of the network. The full source code is at <a href="https://github.com/Arceliar/yggdrasil-map">GitHub</a>.</p> |
||||
|
||||
<h3>Network map</h3> |
||||
<p>The network page has a map of Yggdrasil's spanning tree as it is now. The map is not complete since it is hard/impossible to get a full picture of the network, and it only includes the minimum subset of links needed to construct the spanning tree. The known nodes and tree coordinates are taken from <a href="http://y.yakamo.org:3000/">Yakamo's API</a>. Node names can be configured by setting a "name" field in <a href="https://yggdrasil-network.github.io/configuration.html">NodeInfo</a>, or from <a href="https://github.com/yakamok/yggdrasil-nodelist">Yakamo's node list</a> as a fallback.</p> |
||||
|
||||
<h3>Contact</h3> |
||||
<p>This project was forked from <em>zielmicha</em>'s fork of <em>Randati</em>'s fc00. |
||||
The yggdrasil developers can be contacted over matrix or IRC, for more info see: <a href="https://yggdrasil-network.github.io/">yggdrasil-network.github.io</a>.</p> |
||||
</div> |
||||
</div> |
||||
{% endblock %} |
@ -0,0 +1,25 @@
@@ -0,0 +1,25 @@
|
||||
<!DOCTYPE html> |
||||
<html> |
||||
<head> |
||||
<meta charset="UTF-8"> |
||||
<title>0200::/7 – Mapping The Yggdrasil Network</title> |
||||
<script src="{{ url_for('static', filename='map/jquery-2.0.3.min.js')}}"></script> |
||||
<script src="{{ url_for('static', filename='map/jquery.autocomplete.min.js')}}"></script> |
||||
<link href="{{ url_for('static', filename='map/style.css')}}" rel="stylesheet" type="text/css"> |
||||
</head> |
||||
<body> |
||||
<div id="header"> |
||||
<h1>0200<span class="grey">::/7</span></h1> |
||||
|
||||
<ul> |
||||
<li><a href="/">Status</a></li> |
||||
<li><a href="/map" {% if page == 'network' %} class="selected" {% endif %}>Map</a></li> |
||||
<li><a href="/map/about"{% if page == 'about' %} class="selected" {% endif %}>About</a></li> |
||||
<li><a href="https://github.com/r4sas/yggdrasil-monitor">Source</a></li> |
||||
<li><tt>{% if ip is not none %}{{ ip }}{% endif %}</tt></li> |
||||
</ul> |
||||
</div> |
||||
|
||||
{% block content %}{% endblock %} |
||||
</body> |
||||
</html> |
@ -0,0 +1,22 @@
@@ -0,0 +1,22 @@
|
||||
{% extends "map/base.html" %} |
||||
|
||||
{% block content %} |
||||
<div id="general-info"> |
||||
Nodes: <span id="number-of-nodes">-</span><br> |
||||
Links: <span id="number-of-connections">-</span><br> |
||||
Updated <span id="update-time">-</span><br> |
||||
</div> |
||||
|
||||
<div id="sidebar"> |
||||
<div id="search-wrapper"> |
||||
<input id="search-box" class="tt" type="text" placeholder="Search nodes..."> |
||||
</div> |
||||
<div id="node-info"></div> |
||||
</div> |
||||
|
||||
<div id="content-wrapper"> |
||||
<canvas id="map"></canvas> |
||||
</div> |
||||
|
||||
<script type="text/javascript" src="static/map/network.js"></script> |
||||
{% endblock %} |
@ -0,0 +1,78 @@
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python |
||||
import graphPlotter |
||||
import html |
||||
|
||||
import urllib.request, json |
||||
url = "http://[316:c51a:62a3:8b9::2]/result.json" |
||||
|
||||
# nodes indexed by coords |
||||
class NodeInfo: |
||||
def __init__(self, ip, coords): |
||||
self.ip = str(ip) |
||||
self.label = str(ip).split(":")[-1] |
||||
self.coords = str(coords) |
||||
self.version = "unknown" |
||||
def getCoordList(self): |
||||
return self.coords.strip("[]").split(" ") |
||||
def getParent(self): |
||||
p = self.getCoordList() |
||||
if len(p) > 0: p = p[:-1] |
||||
return "[" + " ".join(p).strip() + "]" |
||||
def getLink(self): |
||||
c = self.getCoordList() |
||||
return int(self.getCoordList()[-1].strip() or "0") |
||||
|
||||
class LinkInfo:
    """An undirected spanning-tree link between two NodeInfo endpoints."""
    def __init__(self, a, b):
        self.a = a  # NodeInfo
        self.b = b  # NodeInfo
||||
|
||||
def generate_graph(time_limit=60*60*3):
    """Fetch crawler results, build the spanning-tree graph and write
    api/static/graph.json.

    time_limit is accepted for backward compatibility but is currently unused.
    """
    response = urllib.request.urlopen(url)
    data = json.loads(response.read())["yggnodes"]

    # Collect every node that reports both an address and tree coordinates.
    toAdd = []
    for key in data:
        if 'address' not in data[key] or 'coords' not in data[key]: continue
        ip = data[key]['address']
        coords = data[key]['coords']
        info = NodeInfo(ip, coords)
        try:
            # Prefer the self-reported nodeinfo name when it is reasonably short.
            if 'nodeinfo' in data[key]:
                if 'name' in data[key]['nodeinfo']:
                    label = str(data[key]['nodeinfo']['name'])
                    if len(label) <= 64:
                        info.label = label
        except Exception:
            # Malformed nodeinfo must not abort the crawl; keep the default label.
            pass
        info.label = html.escape(info.label)
        toAdd.append(info)

    # Synthesize placeholder nodes for every ancestor coordinate so edges can
    # be drawn even through intermediate hops we never crawled directly.
    nodes = dict()
    def addAncestors(info):
        coords = info.getParent()
        parent = NodeInfo("{} {}".format("?", coords), coords)
        parent.label = parent.ip
        nodes[parent.coords] = parent
        if parent.coords != parent.getParent(): addAncestors(parent)

    for info in toAdd: addAncestors(info)
    # Real nodes overwrite placeholders with the same coordinates.
    for info in toAdd: nodes[info.coords] = info

    sortedNodes = sorted(nodes.values(), key=(lambda x: x.getLink()))

    edges = []
    for node in sortedNodes:
        # BUGFIX: was `node.coords == node.getParent` — comparing a string to
        # the bound method itself is always False, so the root node produced
        # a self-loop edge.  The method must be called.
        if node.coords == node.getParent(): continue
        edges.append(LinkInfo(node, nodes[node.getParent()]))

    print('%d nodes, %d edges' % (len(nodes), len(edges)))

    graph = graphPlotter.position_nodes(nodes, edges)
    js = graphPlotter.get_graph_json(graph)

    with open('api/static/graph.json', 'w') as f:
        f.write(js)

if __name__ == '__main__':
    generate_graph()
@ -0,0 +1,7 @@
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env python
"""FastCGI entry point: serve the WSGI app from the `web` module via flup."""

from flup.server.fcgi import WSGIServer
from web import app

if __name__ == '__main__':
    # Blocks forever, handling FastCGI requests from the front-end web server.
    WSGIServer(app).run()
@ -1,12 +1,32 @@
@@ -1,12 +1,32 @@
|
||||
#!/bin/sh
#!/bin/bash
# NOTE(review): two shebang lines — this file looks like an unresolved merge
# of two script versions; only the first line acts as the interpreter.

YGGCRAWL="/opt/yggcrawl/yggcrawl" # path to yggcrawl binary
YGGAPIPATH="/opt/yggdrasil-api" # path to Niflheim-API directory
ulimit -n 4096

CRAWLPEER="tcp://127.0.0.1:12345" # Yggdrasil peer address
CRAWLFILE="api/results.json"
CRAWLRETR=3
# NOTE(review): the next assignments silently override the ones above (last
# assignment wins in shell) — confirm which crawler binary/paths are intended.
YGGCRAWL="/opt/yggdrasil-crawler/crawler"
YGGAPIPATH="/opt/yggdrasil-api"

TMPFILE="api/current.json"
CRAWLFILE="api/result.json"

# Crawler timeout in minutes. It must be less than the crontab job period.
# Increased to 50 minutes and crontab runs hourly due to network instability.
#CRAWLTIMEOUT=50

##############################################################################

cd $YGGAPIPATH
$YGGCRAWL -peer $CRAWLPEER -retry $CRAWLRETR -file $CRAWLFILE > api/yggcrawl.log 2>&1
venv/bin/python api/importer.py >> api/yggcrawl.log 2>&1
# NOTE(review): the crawler is invoked twice with different conventions
# (flags + logfile above, stdout redirected to TMPFILE below) — apparently
# the old and new script versions merged together; keep only one.

#let "TIMEOUT = $CRAWLTIMEOUT * 60"
#timeout $TIMEOUT $YGGCRAWL > $TMPFILE 2>logs/crawler.log

$YGGCRAWL > $TMPFILE 2>logs/crawler.log

if [[ $? == 0 ]] # Crawler exited cleanly (no error, not killed)
then
# add a little delay...
sleep 3
# Atomically publish the fresh result, then refresh DB, addresses and graph.
mv -f $TMPFILE $CRAWLFILE
venv/bin/python api/importer.py > logs/importer.log 2>&1
venv/bin/python api/addresses.py > logs/addresses.log 2>&1
venv/bin/python api/updateGraph.py > logs/graph.log 2>&1
fi
Loading…
Reference in new issue