global: convert to InfluxDB

This converts the entire setup to use InfluxDB 1.x for stats tables
instead of MySQL.

Signed-off-by: Adrian Schmutzler <freifunk@adrianschmutzler.de>
Adrian Schmutzler 2020-11-24 10:27:49 +01:00
parent 0169efa303
commit 6eda60f537
9 changed files with 283 additions and 367 deletions
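
At its core, the change replaces per-table MySQL INSERTs with InfluxDB points written under named retention policies. A minimal sketch of the new write path (plain influxdb 1.x Python client; the database name matches the one used throughout this commit, host and values are illustrative):

from influxdb import InfluxDBClient

# illustrative values; the real code goes through ffmap.influxtools.FreifunkInflux
client = InfluxDBClient(host="localhost", port=8086, database="fff-monitoring")
point = {
    "measurement": "stat",
    "tags": {"router": "42"},        # tag values are stored as strings
    "time": 1606210069,              # epoch seconds, matching time_precision='s'
    "fields": {"clients": 13, "loadavg": 0.42},
}
client.write_points([point], time_precision='s', retention_policy="router_default")
client.close()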

ffmap/db/init_influx.py (new executable file, +24)

@@ -0,0 +1,24 @@
#!/usr/bin/python3
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '../..'))
from ffmap.influxtools import FreifunkInflux
policies = {
"router_default": 90,
"router_neighbor": 90,
"router_netif": 90,
"router_gw": 90,
"global_default": 90,
"global_gw": 90,
"global_hoods": 90
}
influ = FreifunkInflux()
for k, v in policies.items():
influ.query('CREATE RETENTION POLICY {} ON "fff-monitoring" DURATION {}d REPLICATION 1'.format(k,v))
influ.close()
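
The created retention policies can be verified with the same client; a small sketch assuming the database above (get_list_retention_policies() is part of the influxdb 1.x client, and create_retention_policy() would be an alternative to the raw query used here):

from influxdb import InfluxDBClient

client = InfluxDBClient(database="fff-monitoring")
for rp in client.get_list_retention_policies("fff-monitoring"):
    # each entry carries name, duration, replicaN and default
    print(rp["name"], rp["duration"])
client.close()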

ffmap/influxtools.py (new executable file, +40)

@@ -0,0 +1,40 @@
#!/usr/bin/python3
from influxdb import InfluxDBClient
from ffmap.influconfig import infludata
from ffmap.misc import *
import datetime
class FreifunkInflux:
client = None
def __init__(self):
self.client = InfluxDBClient(host=infludata["host"],port=infludata["port"],username=infludata["user"],password=infludata["pw"],database=infludata["db"])
#self.client.switch_database(infludata["db"])
def close(self):
self.client.close()
def write(self,json,retention):
self.client.write_points(json,time_precision='s',retention_policy=retention)
def query(self,sql,params=None):
if params:
return self.client.query(sql,bind_params=params,epoch='s')
else:
return self.client.query(sql,epoch='s')
def fetchlist(self,sql,params=None):
return list(self.query(sql,params).get_points())
def utcawareint(self,data,keys=None):
if keys:
for k in keys:
data[k] = datetime.datetime.fromtimestamp(data[k],datetime.timezone.utc)
else:
data = datetime.datetime.fromtimestamp(data,datetime.timezone.utc)
return data
def utctimestamp(self):
return int(utcnow().timestamp())
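
Typical use of this wrapper, as the rest of the commit does it: write a list of points under a retention policy, then read them back with $-placeholders bound via bind_params (a sketch; tag and field names are illustrative):

from ffmap.influxtools import FreifunkInflux

influ = FreifunkInflux()
influ.write([{
    "measurement": "stat",
    "tags": {"router": "42"},
    "time": influ.utctimestamp(),    # epoch seconds, matching time_precision='s'
    "fields": {"clients": 13},
}], "router_default")
# the bound value stays out of the query string; note that tag values are strings
rows = influ.fetchlist('SELECT clients, time FROM router_default.stat WHERE router = $router',
                       {"router": "42"})
influ.close()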


@@ -5,6 +5,7 @@ import sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap.misc import *
from ffmap.config import CONFIG
import MySQLdb as my
@@ -26,10 +27,6 @@ def delete_router(mysql,dbid):
mysql.execute("DELETE FROM router_neighbor WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_gw WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_events WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_stats WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_stats_neighbor WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_stats_netif WHERE router = %s",(dbid,))
mysql.execute("DELETE FROM router_stats_gw WHERE router = %s",(dbid,))
mysql.commit()
def ban_router(mysql,dbid):
@@ -145,7 +142,7 @@ def import_nodewatcher_xml(mysql, mac, xml, banned, hoodsv2, netifdict, hoodsdic
if diff_active:
router_update["w2_airtime"] = diff_busy / diff_active # auto float-division in Python3
else:
router_update["w2_airtime"] = 0
router_update["w2_airtime"] = 0.0
fields_w5 = (router_update["w5_active"], router_update["w5_busy"], olddata["w5_busy"], olddata["w5_active"],)
if not any(w == None for w in fields_w5):
diff_active = router_update["w5_active"] - olddata["w5_active"]
@@ -153,7 +150,7 @@ def import_nodewatcher_xml(mysql, mac, xml, banned, hoodsv2, netifdict, hoodsdic
if diff_active:
router_update["w5_airtime"] = diff_busy / diff_active # auto float-division in Python3
else:
router_update["w5_airtime"] = 0
router_update["w5_airtime"] = 0.0
if olddata:
# statistics
@@ -429,9 +426,9 @@ def delete_orphaned_routers(mysql):
def delete_unlinked_routers(mysql):
# Delete entries in router_* tables without corresponding router in master table
tables = ["router_events","router_gw","router_ipv6","router_neighbor","router_netif","router_stats","router_stats_gw","router_stats_neighbor","router_stats_netif","router_stats_old","router_stats_old_gw","router_stats_old_neighbor","router_stats_old_netif"]
tables = ["router_events","router_gw","router_ipv6","router_neighbor","router_netif"]
for t in tables:
start_time = time.time()
mysql.execute("""
@@ -449,104 +446,8 @@ def delete_unlinked_routers(mysql):
print("--- Deleted %i rows from %s: %.3f seconds ---" % (mysql.cursor().rowcount,t,time.time() - start_time))
time.sleep(1)
def delete_stats_helper(mysql,label,query,tuple):
minustime=0
rowsaffected=1
allrows=0
start_time = time.time()
while rowsaffected > 0:
try:
rowsaffected = mysql.execute(query,tuple)
mysql.commit()
allrows += rowsaffected
except my.OperationalError:
rowsaffected = 1
time.sleep(10)
minustime += 10
end_time = time.time()
writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from %s stats: %.3f seconds" % (allrows,label,end_time - start_time - minustime))
print("--- Deleted %i rows from %s stats: %.3f seconds ---" % (allrows,label,end_time - start_time - minustime))
def delete_old_stats(mysql):
threshold = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_days"])).timestamp()
threshold_netif = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_netif"])).timestamp()
threshold_gw = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_gw"])).timestamp()
threshold_gw_netif = mysql.formatdt(utcnow() - datetime.timedelta(hours=CONFIG["gw_netif_threshold_hours"]))
old = (utcnow() - datetime.timedelta(days=CONFIG["router_oldstat_days"])).timestamp()
old_netif = (utcnow() - datetime.timedelta(days=CONFIG["router_oldstat_netif"])).timestamp()
old_gw = (utcnow() - datetime.timedelta(days=CONFIG["router_oldstat_gw"])).timestamp()
limit = "100000"
limit = "500000"
start_time = time.time()
rowsaffected = mysql.execute("""
DELETE s FROM router_stats AS s
LEFT JOIN router AS r ON s.router = r.id
WHERE s.time < %s AND (r.status = 'online' OR r.status IS NULL)
""",(threshold,))
mysql.commit()
writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from stats: %.3f seconds" % (rowsaffected,time.time() - start_time))
print("--- Deleted %i rows from stats: %.3f seconds ---" % (rowsaffected,time.time() - start_time))
time.sleep(10)
start_time = time.time()
rowsaffected = mysql.execute("""
DELETE s FROM router_stats_old AS s
LEFT JOIN router AS r ON s.router = r.id
WHERE s.time < %s AND (r.status = 'online' OR r.status IS NULL)
""",(old,))
mysql.commit()
writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from stats (old): %.3f seconds" % (rowsaffected,time.time() - start_time))
print("--- Deleted %i rows from stats (old): %.3f seconds ---" % (rowsaffected,time.time() - start_time))
time.sleep(10)
query = """
DELETE FROM router_stats_gw
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"gw-stats",query,(threshold_gw,))
time.sleep(10)
query = """
DELETE FROM router_stats_old_gw
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"gw-stats (old)",query,(old_gw,))
time.sleep(30)
query = """
DELETE FROM router_stats_neighbor
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"neighbor-stats",query,(threshold,))
time.sleep(30)
query = """
DELETE FROM router_stats_old_neighbor
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"neighbor-stats (old)",query,(old,))
time.sleep(30)
query = """
DELETE FROM router_stats_netif
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"netif-stats",query,(threshold_netif,))
time.sleep(30)
query = """
DELETE FROM router_stats_old_netif
WHERE time < %s
LIMIT """+limit+"""
"""
delete_stats_helper(mysql,"netif-stats (old)",query,(old_netif,))
start_time = time.time()
allrows = mysql.execute("DELETE FROM gw_netif WHERE last_contact < %s",(threshold_gw_netif,))
@@ -561,7 +462,7 @@ def delete_old_stats(mysql):
SELECT router, COUNT(time) AS count FROM router_events
GROUP BY router
""")
for e in events:
delnum = int(e["count"] - CONFIG["event_num_entries"])
if delnum > 0:
@@ -599,122 +500,93 @@ def set_status(mysql,router_id,status):
def new_router_stats(mysql, router_id, uptime, router_update, netifdict, statstime):
#if not (uptime + CONFIG["router_stat_mindiff_secs"]) < router_update["sys_uptime"]:
# return
time = mysql.formattimestamp(statstime)
#time = mysql.formattimestamp(statstime)
stattime = mysql.findone("SELECT time FROM router_stats WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
oldstattime = mysql.findone("SELECT time FROM router_stats_old WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
influ = FreifunkInflux()
routerdata = (
time,
router_id,
router_update["memory"]['free'],
router_update["memory"]['buffering'],
router_update["memory"]['caching'],
router_update["sys_loadavg"],
router_update["processes"]['runnable'],
router_update["processes"]['total'],
router_update["clients"],
router_update["clients_eth"],
router_update["clients_w2"],
router_update["clients_w5"],
router_update["w2_airtime"],
router_update["w5_airtime"],
)
#stattime = mysql.findone("SELECT time FROM router_stats WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
#oldstattime = mysql.findone("SELECT time FROM router_stats_old WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
mysql.execute("""
INSERT INTO router_stats (time, router, sys_memfree, sys_membuff, sys_memcache, loadavg, sys_procrun, sys_proctot,
clients, clients_eth, clients_w2, clients_w5, airtime_w2, airtime_w5)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",routerdata)
if not oldstattime or (oldstattime + CONFIG["router_oldstat_mindiff_default"]) < time:
mysql.execute("""
INSERT INTO router_stats_old (time, router, sys_memfree, sys_membuff, sys_memcache, loadavg, sys_procrun, sys_proctot,
clients, clients_eth, clients_w2, clients_w5, airtime_w2, airtime_w5)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",routerdata)
netiftime = mysql.findone("SELECT time FROM router_stats_netif WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
oldnetiftime = mysql.findone("SELECT time FROM router_stats_old_netif WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
stats_json = [{
"measurement": "stat",
"tags": {
"router": router_id,
},
"time": statstime,
"fields": {
"sys_memfree": router_update["memory"]['free'],
"sys_membuff": router_update["memory"]['buffering'],
"sys_memcache": router_update["memory"]['caching'],
"loadavg": router_update["sys_loadavg"],
"sys_procrun": router_update["processes"]['runnable'],
"sys_proctot": router_update["processes"]['total'],
"clients": router_update["clients"],
"clients_eth": router_update["clients_eth"],
"clients_w2": router_update["clients_w2"],
"clients_w5": router_update["clients_w5"],
"airtime_w2": router_update["w2_airtime"],
"airtime_w5": router_update["w5_airtime"]
}
}]
influ.write(stats_json,"router_default")
ndata = []
nkeys = []
#if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
#netiftime = mysql.findone("SELECT time FROM router_stats_netif WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
#oldnetiftime = mysql.findone("SELECT time FROM router_stats_old_netif WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
stats_json = []
for netif in router_update["netifs"]:
# sanitize name
name = netif["name"].replace(".", "").replace("$", "")
with suppress(KeyError):
if name in netifdict.keys():
ndata.append((time,router_id,netifdict[name],netif["traffic"]["rx"],netif["traffic"]["tx"],))
else:
writelog(CONFIG["debug_dir"] + "/test_yellow.txt", "{}".format(name))
nkeys.append((name,))
# 99.9 % of the routers will NOT enter this, so the doubled code is not a problem
if nkeys:
mysql.executemany("""
INSERT INTO netifs (name)
VALUES (%s)
ON DUPLICATE KEY UPDATE name=name
""",nkeys)
netifdict = mysql.fetchdict("SELECT id, name FROM netifs",(),"name","id")
ndata = []
for netif in router_update["netifs"]:
# sanitize name
name = netif["name"].replace(".", "").replace("$", "")
with suppress(KeyError):
ndata.append((time,router_id,netifdict[name],netif["traffic"]["rx"],netif["traffic"]["tx"],))
if not netiftime or (netiftime + CONFIG["router_stat_mindiff_netif"]) < time:
mysql.executemany("""
INSERT INTO router_stats_netif (time, router, netif, rx, tx)
VALUES (%s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",ndata)
if not oldnetiftime or (oldnetiftime + CONFIG["router_oldstat_mindiff_netif"]) < time:
mysql.executemany("""
INSERT INTO router_stats_old_netif (time, router, netif, rx, tx)
VALUES (%s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",ndata)
stats_json.append({
"measurement": "stat",
"tags": {
"router": router_id,
"netif": name,
},
"time": statstime,
"fields": {
"rx": int(netif["traffic"]["rx"]),
"tx": int(netif["traffic"]["tx"])
}
})
influ.write(stats_json,"router_netif")
# reuse timestamp from router_stats to avoid additional queries
nbdata = []
stats_json = []
for neighbour in router_update["neighbours"]:
with suppress(KeyError):
nbdata.append((time,router_id,neighbour["mac"],neighbour["quality"],))
if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
mysql.executemany("""
INSERT INTO router_stats_neighbor (time, router, mac, quality)
VALUES (%s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",nbdata)
if not oldstattime or (oldstattime + CONFIG["router_oldstat_mindiff_default"]) < time:
mysql.executemany("""
INSERT INTO router_stats_old_neighbor (time, router, mac, quality)
VALUES (%s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",nbdata)
stats_json.append({
"measurement": "stat",
"tags": {
"router": router_id,
"mac": int2shortmac(neighbour["mac"]),
},
"time": statstime,
"fields": {
"quality": float(neighbour["quality"])
}
})
influ.write(stats_json,"router_neighbor")
# reuse timestamp from router_stats to avoid additional queries
gwdata = []
stats_json = []
for gw in router_update["gws"]:
with suppress(KeyError):
gwdata.append((time,router_id,gw["mac"],gw["quality"],))
if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
mysql.executemany("""
INSERT INTO router_stats_gw (time, router, mac, quality)
VALUES (%s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",gwdata)
if not oldstattime or (oldstattime + CONFIG["router_oldstat_mindiff_default"]) < time:
mysql.executemany("""
INSERT INTO router_stats_old_gw (time, router, mac, quality)
VALUES (%s, %s, %s, %s)
ON DUPLICATE KEY UPDATE time=time
""",gwdata)
stats_json.append({
"measurement": "stat",
"tags": {
"router": router_id,
"mac": gw["mac"],
},
"time": statstime,
"fields": {
"quality": float(gw["quality"])
}
})
influ.write(stats_json,"router_gw")
def calculate_network_io(mysql, router_id, uptime, router_update):
"""
@@ -843,8 +715,8 @@ def parse_nodewatcher_xml(xml,statstime):
router_update["processes"]["runnable"] = int(processboth.split("/")[0])
router_update["processes"]["total"] = int(processboth.split("/")[1])
else:
router_update["processes"]["runnable"] = 0
router_update["processes"]["total"] = 0
router_update["processes"]["runnable"] = int(0)
router_update["processes"]["total"] = int(0)
try:
lng = evalxpathfloat(tree,"/data/system_data/geo/lng/text()")
@@ -871,8 +743,8 @@ def parse_nodewatcher_xml(xml,statstime):
"traffic": {
"rx_bytes": evalxpathint(netif,"traffic_rx/text()"),
"tx_bytes": evalxpathint(netif,"traffic_tx/text()"),
"rx": 0,
"tx": 0,
"rx": int(0),
"tx": int(0),
},
"ipv4_addr": ipv4toint(evalxpath(netif,"ipv4_addr/text()")),
"mac": mac2int(evalxpath(netif,"mac_addr/text()")),

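The explicit casts introduced in this file (0.0 for the airtime fields, int(0) for process counts and rx/tx, float(quality) for link quality) are not cosmetic: InfluxDB fixes a field's type per shard on first write, and later writes that switch between integer and float for the same field are rejected. A short illustration of that failure mode, assuming a fresh measurement name:

from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError

client = InfluxDBClient(database="fff-monitoring")
client.write_points([{"measurement": "demo", "fields": {"airtime": 0}}])        # stored as integer
try:
    client.write_points([{"measurement": "demo", "fields": {"airtime": 0.5}}])  # float now conflicts
except InfluxDBClientError as err:
    print("field type conflict:", err)
client.close()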

@@ -5,6 +5,7 @@ import sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap.gwtools import gw_name, gw_bat
from ffmap.misc import *
from ffmap.config import CONFIG
@@ -118,7 +119,7 @@ def router_traffic_hood(mysql):
dict[d["hood"]]["tx"] += d["tx"]
for h in allhoods:
if not h["hood"] in dict:
dict[h["hood"]] = {"hood": h["hood"], "rx": 0, "tx": 0}
dict[h["hood"]] = {"hood": h["hood"], "rx": int(0), "tx": int(0)}
return dict
def total_clients_gw(mysql):
@@ -423,92 +424,83 @@ def gws_admin(mysql,selectgw):
""",(mac2int(selectgw),),"name")
return data
def record_global_stats(mysql):
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_days"])).timestamp()
time = mysql.utctimestamp()
def record_global_stats(influ,mysql):
#threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_days"])).timestamp()
time = influ.utctimestamp()
status = router_status(mysql)
traffic = router_traffic(mysql)
mysql.execute("""
INSERT INTO stats_global (time, clients, online, offline, unknown, orphaned, rx, tx)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
clients=VALUES(clients),
online=VALUES(online),
offline=VALUES(offline),
unknown=VALUES(unknown),
orphaned=VALUES(orphaned),
rx=VALUES(rx),
tx=VALUES(tx)
""",(time,total_clients(mysql),status.get("online",0),status.get("offline",0),status.get("unknown",0),status.get("orphaned",0),traffic["rx"],traffic["tx"],))
stats_json = [{
"measurement": "stat",
"time": time,
"fields": {
"clients": int(total_clients(mysql)),
"online": int(status.get("online",0)),
"offline": int(status.get("offline",0)),
"unknown": int(status.get("unknown",0)),
"orphaned": int(status.get("orphaned",0)),
"rx": int(traffic["rx"]),
"tx": int(traffic["tx"])
}
}]
mysql.execute("""
DELETE FROM stats_global
WHERE time < %s
""",(threshold,))
influ.write(stats_json,"global_default")
mysql.commit()
def record_hood_stats(mysql):
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_days"])).timestamp()
time = mysql.utctimestamp()
def record_hood_stats(influ,mysql):
#threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_days"])).timestamp()
time = influ.utctimestamp()
status = router_status_hood(mysql)
clients = total_clients_hood(mysql)
traffic = router_traffic_hood(mysql)
hdata = []
stats_json = []
for hood in clients.keys():
if not hood:
hood = "Default"
hdata.append((time,hood,clients[hood],status[hood].get("online",0),status[hood].get("offline",0),status[hood].get("unknown",0),status[hood].get("orphaned",0),traffic[hood]["rx"],traffic[hood]["tx"],))
hood = 1
mysql.executemany("""
INSERT INTO stats_hood (time, hood, clients, online, offline, unknown, orphaned, rx, tx)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
clients=VALUES(clients),
online=VALUES(online),
offline=VALUES(offline),
unknown=VALUES(unknown),
orphaned=VALUES(orphaned),
rx=VALUES(rx),
tx=VALUES(tx)
""",hdata)
stats_json.append({
"measurement": "stat",
"tags": {
"hood": hood,
},
"time": time,
"fields": {
"clients": int(clients[hood]),
"online": int(status[hood].get("online",0)),
"offline": int(status[hood].get("offline",0)),
"unknown": int(status[hood].get("unknown",0)),
"orphaned": int(status[hood].get("orphaned",0)),
"rx": int(traffic[hood]["rx"]),
"tx": int(traffic[hood]["tx"])
}
})
mysql.execute("""
DELETE FROM stats_hood
WHERE time < %s
""",(threshold,))
influ.write(stats_json,"global_hoods")
mysql.commit()
def record_gw_stats(mysql):
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_gwstat_days"])).timestamp()
time = mysql.utctimestamp()
def record_gw_stats(influ,mysql):
#threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_gwstat_days"])).timestamp()
time = influ.utctimestamp()
status = router_status_gw(mysql)
clients = total_clients_gw(mysql)
gdata = []
stats_json = []
for mac in clients.keys():
gdata.append((time,mac,clients[mac],status[mac].get("online",0),status[mac].get("offline",0),status[mac].get("unknown",0),status[mac].get("orphaned",0),))
stats_json.append({
"measurement": "stat",
"tags": {
"mac": int2shortmac(mac),
},
"time": time,
"fields": {
"clients": int(clients[mac]),
"online": int(status[mac].get("online",0)),
"offline": int(status[mac].get("offline",0)),
"unknown": int(status[mac].get("unknown",0)),
"orphaned": int(status[mac].get("orphaned",0))
}
})
mysql.executemany("""
INSERT INTO stats_gw (time, mac, clients, online, offline, unknown, orphaned)
VALUES (%s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE
clients=VALUES(clients),
online=VALUES(online),
offline=VALUES(offline),
unknown=VALUES(unknown),
orphaned=VALUES(orphaned)
""",gdata)
mysql.execute("""
DELETE FROM stats_gw
WHERE time < %s
""",(threshold,))
mysql.commit()
influ.write(stats_json,"global_gw")
def router_user_sum(mysql):
data = mysql.fetchall("""

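Because hood and mac are modelled as tags here, their values are always stored as strings; that is why the web code below binds str(selecthood) and int2shortmac(mac) rather than raw integers. A sketch of reading back the per-hood series written above (hood value illustrative):

from ffmap.influxtools import FreifunkInflux

influ = FreifunkInflux()
# hood is a tag, so the bound value must be a string even for numeric hood ids
rows = influ.fetchlist(
    'SELECT clients, online, offline FROM global_hoods.stat WHERE hood = $hood AND time > now() - 7d',
    {"hood": "1"})
for r in rows:
    print(r["time"], r["clients"])
influ.close()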

@@ -4,6 +4,7 @@ from ffmap.routertools import *
from ffmap.gwtools import *
from ffmap.maptools import *
from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap.stattools import record_global_stats, record_hood_stats
from ffmap.config import CONFIG
from ffmap.misc import *
@@ -24,49 +25,29 @@ api = Blueprint("api", __name__)
@api.route('/load_netif_stats/<dbid>')
def load_netif_stats(dbid):
netif = request.args.get("netif","")
mysql = FreifunkMySQL()
influ = FreifunkInflux()
threshold = mysql.findone("SELECT time FROM router_stats_netif WHERE router = %s ORDER BY time ASC LIMIT 1",(dbid,),"time")
netiffetch = mysql.fetchall("""
(SELECT netifs.name AS netif, rx, tx, time
FROM router_stats_old_netif
INNER JOIN netifs ON router_stats_old_netif.netif = netifs.id
WHERE router = %s AND netifs.name = %s AND time < %s
ORDER BY time ASC)
UNION
(SELECT netifs.name AS netif, rx, tx, time
FROM router_stats_netif
INNER JOIN netifs ON router_stats_netif.netif = netifs.id
WHERE router = %s AND netifs.name = %s
ORDER BY time ASC)
""",(dbid,netif,threshold,dbid,netif,))
mysql.close()
netiffetch = influ.fetchlist('SELECT netif, rx, tx, time FROM router_netif.stat WHERE router = $router AND netif = $netif ORDER BY time ASC',{"router": dbid, "netif": netif})
for ns in netiffetch:
ns["time"] = {"$date": int(mysql.utcawareint(ns["time"]).timestamp()*1000)}
ns["time"] = {"$date": int(influ.utcawareint(ns["time"]).timestamp()*1000)}
r = make_response(json.dumps(netiffetch))
r.mimetype = 'application/json'
return r
#return make_response(json.dumps({}))
# Load router neighbor statistics
@api.route('/load_neighbor_stats/<dbid>')
def load_neighbor_stats(dbid):
mysql = FreifunkMySQL()
influ = FreifunkInflux()
threshold = mysql.findone("SELECT time FROM router_stats_neighbor WHERE router = %s ORDER BY time ASC LIMIT 1",(dbid,),"time")
neighfetch = mysql.fetchall("""
(SELECT quality, mac, time FROM router_stats_old_neighbor WHERE router = %s AND time < %s ORDER BY time ASC)
UNION (SELECT quality, mac, time FROM router_stats_neighbor WHERE router = %s ORDER BY time ASC)
""",(dbid,threshold,dbid,))
mysql.close()
neighfetch = influ.fetchlist('SELECT quality, mac, time FROM router_neighbor.stat WHERE router = $router ORDER BY time ASC',{"router": dbid})
neighdata = {}
for ns in neighfetch:
ns["time"] = {"$date": int(mysql.utcawareint(ns["time"]).timestamp()*1000)}
ns["time"] = {"$date": int(influ.utcawareint(ns["time"]).timestamp()*1000)}
if not ns["mac"] in neighdata:
neighdata[ns["mac"]] = []
neighdata[ns["mac"]].append(ns)
@@ -74,6 +55,7 @@ def load_neighbor_stats(dbid):
r = make_response(json.dumps(neighdata))
r.mimetype = 'application/json'
return r
#return make_response(json.dumps({}))
# map ajax
@api.route('/get_nearest_router')

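Both endpoints now pass request-derived values through bind_params placeholders instead of formatting them into the InfluxQL string, mirroring the parameterised MySQL queries they replace; epoch='s' in the wrapper returns integer timestamps, which utcawareint() turns back into timezone-aware datetimes. A condensed sketch of that pattern (router id and interface name illustrative):

from ffmap.influxtools import FreifunkInflux

influ = FreifunkInflux()
rows = influ.fetchlist(
    'SELECT netif, rx, tx, time FROM router_netif.stat '
    'WHERE router = $router AND netif = $netif ORDER BY time ASC',
    {"router": "1234", "netif": "br-mesh"})
for ns in rows:
    # integer seconds -> aware datetime -> milliseconds for the frontend
    ns["time"] = {"$date": int(influ.utcawareint(ns["time"]).timestamp() * 1000)}
influ.close()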

@@ -7,6 +7,7 @@ sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '../..'))
from ffmap.web.api import api
from ffmap.web.filters import filters
from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap import stattools
from ffmap.usertools import *
from ffmap.routertools import delete_router, ban_router
@@ -127,6 +128,8 @@ def router_mac(mac):
def router_info(dbid):
try:
mysql = FreifunkMySQL()
influ = FreifunkInflux()
router = mysql.findone("""
SELECT router.*, hoods.id AS hoodid, hoods.name AS hoodname FROM router
INNER JOIN hoods ON router.hood = hoods.id
@@ -267,54 +270,53 @@ def router_info(dbid):
for n in router["neighbours"]:
n["color"] = neighbor_color(n["quality"],n["netif"],router["routing_protocol"])
statsthreshold = mysql.findone("SELECT time FROM router_stats WHERE router = %s ORDER BY time ASC LIMIT 1",(dbid,),"time")
router["stats"] = mysql.fetchall("""
(SELECT * FROM router_stats_old WHERE router = %s AND time < %s ORDER BY time ASC)
UNION (SELECT * FROM router_stats WHERE router = %s ORDER BY time ASC)
""",(dbid,statsthreshold,dbid,))
router["stats"] = influ.fetchlist('SELECT * FROM router_default.stat WHERE router = $router ORDER BY time ASC',{"router": dbid})
for s in router["stats"]:
s["time"] = mysql.utcawareint(s["time"])
threshold_neighstats = (utcnow() - datetime.timedelta(hours=24)).timestamp()
neighfetch = mysql.fetchall("""
SELECT quality, mac, time FROM router_stats_neighbor WHERE router = %s AND time > %s ORDER BY time ASC
""",(dbid,threshold_neighstats,))
neighfetch = influ.fetchlist('SELECT quality, mac, time FROM router_neighbor.stat WHERE router = $router AND time > now() - 24h ORDER BY time ASC',{"router": dbid})
neighdata = {}
neighmacint = []
for ns in neighfetch:
ns["time"] = {"$date": int(mysql.utcawareint(ns["time"]).timestamp()*1000)}
if not ns["mac"] in neighdata:
neighdata[ns["mac"]] = []
neighmacint.append(mac2int(ns["mac"]))
neighdata[ns["mac"]].append(ns)
neighident = mysql.fetchall("""
SELECT snb.mac, r.hostname, n.netif
FROM router_stats_neighbor AS snb
INNER JOIN router_netif AS n ON snb.mac = n.mac
INNER JOIN router AS r ON n.router = r.id
WHERE snb.router = %s AND n.netif <> 'w2ap' AND n.netif <> 'w5ap'
GROUP BY snb.mac, r.hostname, n.netif
""",(dbid,))
#neighident = mysql.fetchall("""
# SELECT snb.mac, r.hostname, n.netif
# FROM router_stats_neighbor AS snb
# INNER JOIN router_netif AS n ON snb.mac = n.mac
# INNER JOIN router AS r ON n.router = r.id
# WHERE snb.router = %s AND n.netif <> 'w2ap' AND n.netif <> 'w5ap'
# GROUP BY snb.mac, r.hostname, n.netif
#""",(dbid,))
neighlabel = {}
for ni in neighident:
label = ni["hostname"]
# add network interface when there are multiple links to same node
for ni2 in neighident:
if label == ni2["hostname"] and ni["mac"] != ni2["mac"]:
# This shows the NEIGHBOR'S interface name
label += "@" + ni["netif"]
append = " (old)"
for nnn in router["neighbours"]:
if nnn["mac"] == ni["mac"]:
append = ""
neighlabel[ni["mac"]] = label + append
if neighmacint:
format_macs = ','.join(['%s'] * len(neighmacint))
neighident = mysql.fetchall("""
SELECT n.mac, r.hostname, n.netif
FROM router_netif AS n
INNER JOIN router AS r ON n.router = r.id
WHERE n.netif <> 'w2ap' AND n.netif <> 'w5ap' AND n.mac IN ({})
GROUP BY n.mac, r.hostname, n.netif
""".format(format_macs),tuple(neighmacint))
for ni in neighident:
label = ni["hostname"]
# add network interface when there are multiple links to same node
for ni2 in neighident:
if label == ni2["hostname"] and ni["mac"] != ni2["mac"]:
# This shows the NEIGHBOR'S interface name
label += "@" + ni["netif"]
append = " (old)"
for nnn in router["neighbours"]:
if nnn["mac"] == ni["mac"]:
append = ""
neighlabel[int2shortmac(ni["mac"])] = label + append
gwthreshold = mysql.findone("SELECT time FROM router_stats_gw WHERE router = %s ORDER BY time ASC LIMIT 1",(dbid,),"time")
gwfetch = mysql.fetchall("""
(SELECT quality, mac, time FROM router_stats_old_gw WHERE router = %s AND time < %s ORDER BY time ASC)
UNION (SELECT quality, mac, time FROM router_stats_gw WHERE router = %s ORDER BY time ASC)
""",(dbid,gwthreshold,dbid,))
gwfetch = influ.fetchlist('SELECT quality, mac, time FROM router_gw.stat WHERE router = $router ORDER BY time ASC',{"router": dbid})
for ns in gwfetch:
ns["time"] = mysql.utcawareint(ns["time"])
@@ -521,55 +523,53 @@ def user_info(nickname):
@app.route('/statistics')
def global_statistics():
mysql = FreifunkMySQL()
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_show_days"])).timestamp()
stats = mysql.fetchall("SELECT * FROM stats_global WHERE time > %s",(threshold,))
return helper_statistics(mysql,stats,None,None)
influ = FreifunkInflux()
stats = influ.fetchlist('SELECT * FROM global_default.stat WHERE time > now() - {}d'.format(CONFIG["global_stat_show_days"]))
return helper_statistics(stats,None,None)
@app.route('/hoodstatistics/<selecthood>')
def global_hoodstatistics(selecthood):
selecthood = int(selecthood)
mysql = FreifunkMySQL()
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_stat_show_days"])).timestamp()
stats = mysql.fetchall("SELECT * FROM stats_hood WHERE hood = %s AND time > %s",(selecthood,threshold,))
return helper_statistics(mysql,stats,selecthood,None)
influ = FreifunkInflux()
stats = influ.fetchlist('SELECT * FROM global_hoods.stat WHERE hood = $hood AND time > now() - {}d'.format(CONFIG["global_stat_show_days"]),{"hood": str(selecthood)})
return helper_statistics(stats,selecthood,None)
@app.route('/gwstatistics/<selectgw>')
def global_gwstatistics(selectgw):
mysql = FreifunkMySQL()
threshold=(utcnow() - datetime.timedelta(days=CONFIG["global_gwstat_show_days"])).timestamp()
stats = mysql.fetchall("SELECT * FROM stats_gw WHERE mac = %s AND time > %s",(mac2int(selectgw),threshold,))
influ = FreifunkInflux()
stats = influ.fetchlist('SELECT * FROM global_gw.stat WHERE mac = $mac AND time > now() - {}d'.format(CONFIG["global_gwstat_show_days"]),{"mac": selectgw})
selectgw = shortmac2mac(selectgw)
return helper_statistics(mysql,stats,None,selectgw)
return helper_statistics(stats,None,selectgw)
def helper_statistics(mysql,stats,selecthood,selectgw):
def helper_statistics(stats,selecthood,selectgw):
try:
mysql = FreifunkMySQL()
hoods = stattools.hoods(mysql,selectgw)
gws = stattools.gws_ifs(mysql,selecthood)
if selecthood:
selecthoodname = mysql.findone("SELECT name FROM hoods WHERE id = %s",(selecthood,),'name')
else:
selecthoodname = None
if selectgw:
selectgwint = mac2int(selectgw)
else:
selectgwint = None
if selecthood and not selecthoodname:
mysql.close()
return "Hood not found"
if selectgw and not selectgwint in gws:
mysql.close()
return "Gateway not found"
stats = mysql.utcawaretupleint(stats,"time")
numnew = len(hoods)-27
if numnew < 1:
numnew = 1
if selectgw:
newest_routers = mysql.fetchall("""
SELECT router.id, hostname, hoods.id AS hoodid, hoods.name AS hood, created
@@ -596,7 +596,7 @@ def helper_statistics(mysql,stats,selecthood,selectgw):
LIMIT %s
""".format(where),tup)
newest_routers = mysql.utcawaretuple(newest_routers,"created")
clients = stattools.total_clients(mysql)
router_status = stattools.router_status(mysql)
router_models = stattools.router_models(mysql,selecthood,selectgw)
@@ -607,7 +607,7 @@ def helper_statistics(mysql,stats,selecthood,selectgw):
gws_info = stattools.gws_info(mysql,selecthood)
gws_admin = stattools.gws_admin(mysql,selectgw)
mysql.close()
return render_template("statistics.html",
selecthood = selecthood,
selecthoodname = selecthoodname,

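On the statistics pages the MySQL threshold computation (utcnow() minus the configured number of days) is replaced by InfluxQL's relative time filter, so the time window is evaluated by the database. The day count is formatted into the query rather than bound, which is acceptable because it comes from CONFIG, not from request input; a sketch:

from ffmap.influxtools import FreifunkInflux
from ffmap.config import CONFIG

influ = FreifunkInflux()
days = CONFIG["global_stat_show_days"]   # trusted config value, not user input
stats = influ.fetchlist(
    'SELECT * FROM global_default.stat WHERE time > now() - {}d'.format(days))
influ.close()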

@@ -110,7 +110,7 @@ function neighbour_graph(neigh_label) {
if(j in neigh_label) {
label = neigh_label[j];
} else {
label = int2mac(j);
label = shortmac2mac(j);
}
for (len=dataset.length, i=0; i<len; i++) {
try {


@@ -4,3 +4,8 @@ function int2mac(input) {
.match( /.{1,2}/g ) // ["4a", "89", "26", "c4", "45", "78"]
.join( ':' ) // "78:45:c4:26:89:4a"
}
function shortmac2mac(input) {
return input.match( /.{1,2}/g ) // ["4a", "89", "26", "c4", "45", "78"]
.join( ':' ) // "78:45:c4:26:89:4a"
}


@@ -9,6 +9,7 @@ sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
from ffmap.routertools import *
from ffmap.maptools import *
from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap.stattools import record_global_stats, record_hood_stats, record_gw_stats
from ffmap.hoodtools import update_hoods_v2
@@ -16,14 +17,14 @@ import time
start_time = time.time()
mysql = FreifunkMySQL()
influ = FreifunkInflux()
detect_offline_routers(mysql)
detect_orphaned_routers(mysql)
delete_orphaned_routers(mysql)
#delete_old_stats(mysql) # Only execute once daily, takes 2 minutes
update_hoods_v2(mysql)
record_global_stats(mysql)
record_hood_stats(mysql)
record_gw_stats(mysql)
record_global_stats(influ,mysql)
record_hood_stats(influ,mysql)
record_gw_stats(influ,mysql)
update_mapnik_csv(mysql)
mysql.close()
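
A condensed sketch of the pattern the cron script now follows; the explicit influ.close() at the end is an assumption added here for symmetry with mysql.close(), not part of the commit:

from ffmap.mysqltools import FreifunkMySQL
from ffmap.influxtools import FreifunkInflux
from ffmap.stattools import record_global_stats, record_hood_stats, record_gw_stats

mysql = FreifunkMySQL()
influ = FreifunkInflux()
record_global_stats(influ, mysql)
record_hood_stats(influ, mysql)
record_gw_stats(influ, mysql)
influ.close()   # assumption: not in the commit, added for symmetry with mysql.close()
mysql.close()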