2015-10-10 17:42:44 +02:00
#!/usr/bin/python3
2015-10-07 16:19:07 +02:00
2015-10-10 17:42:44 +02:00
import os
import sys
sys . path . insert ( 0 , os . path . abspath ( os . path . dirname ( __file__ ) + ' / ' + ' .. ' ) )
2017-11-05 19:48:29 +01:00
from ffmap . mysqltools import FreifunkMySQL
2016-03-10 14:22:23 +01:00
from ffmap . misc import *
2017-11-19 00:43:48 +01:00
from ffmap . config import CONFIG
2017-12-04 16:09:42 +01:00
import MySQLdb as my
2015-10-07 16:19:07 +02:00
import lxml . etree
import datetime
2015-10-10 17:42:44 +02:00
import requests
2017-12-04 16:09:42 +01:00
import time
2018-05-10 18:27:28 +02:00
import ipaddress
2015-10-16 18:08:14 +02:00
from bson import SON
2015-11-03 16:03:14 +01:00
from contextlib import suppress
2015-10-07 16:19:07 +02:00
api/alfred: Disable router_rate_limit_list
With router_rate_limit_list, routers were not processed if the
time difference between calls was less than 5 minutes for the
same MAC address.
While this is generally not bad, there are some drawbacks:
- Not having been aware of this fact, we have established other
mechanisms to dilute data density, which might have interfered
- With KeyXchangeV2, two gateways will send data with less than
5 min. difference. As gateways are not connected, we know that
we alternately receive newer and older data. With
router_rate_limit_list, some of this data has been discarded
before its "age" was evaluated. This caused an unwanted additional
dilution of data which might have caused "offline" routers not
being actually offline (for a short period)
- With KeyXchangeV1, if the second call was a little earlier,
  a big share of the data would not be "new enough" and would just be
  discarded
- With KeyXchangeV1, the same would happen for the order of records
  varying between alfred calls, where some records would have more and
  some less than 5 minutes time difference
To get rid of these issues, we remove router_rate_limit_list and
test whether the newer measures to dilute data are effective.
Signed-off-by: Adrian Schmutzler <freifunk@adrianschmutzler.de>
2018-03-15 16:35:59 +01:00
#router_rate_limit_list = {}
2016-07-26 22:17:39 +02:00
2017-11-05 19:48:29 +01:00
def delete_router(mysql, dbid):
	"""Delete a router and every row referencing it from all related tables.

	mysql: open FreifunkMySQL connection wrapper
	dbid:  primary key (router.id) of the router to remove
	"""
	mysql.execute("DELETE FROM router WHERE id = %s", (dbid,))
	# All dependent tables reference the master row via their 'router' column.
	for table in ("router_netif", "router_ipv6", "router_neighbor", "router_gw",
			"router_events", "router_stats", "router_stats_neighbor",
			"router_stats_netif", "router_stats_gw"):
		mysql.execute("DELETE FROM {} WHERE router = %s".format(table), (dbid,))
	mysql.commit()
2017-12-11 23:16:05 +01:00
def ban_router(mysql, dbid):
	"""Put the mesh MAC address of a router onto the ban list.

	Looks up the br-mesh interface MAC of the given router and, if one is
	found, inserts it into the 'banned' table together with the current
	UTC timestamp. Commits in any case.
	"""
	mac = mysql.findone("""
		SELECT mac
		FROM router_netif
		WHERE router = %s AND netif = 'br-mesh'
	""", (dbid,), "mac")
	added = mysql.utcnow()
	if mac:
		mysql.execute("INSERT INTO banned (mac, added) VALUES (%s, %s)", (mac, added,))
	mysql.commit()
2018-08-28 15:28:25 +02:00
def import_nodewatcher_xml(mysql, mac, xml, banned, hoodsv2, netifdict, hoodsdict, statstime):
	"""Parse one nodewatcher XML report and write it into the database.

	mysql:     open FreifunkMySQL connection wrapper
	mac:       MAC address the report was received for
	xml:       raw nodewatcher XML payload
	banned:    collection of banned MAC addresses (report is dropped on match)
	hoodsv2:   names of all known V2 hoods (used to flag unknown hoods as "local")
	netifdict: netif name -> id mapping, passed through to new_router_stats()
	hoodsdict: hood name -> id mapping (refreshed here if a new hood appears)
	statstime: timestamp of the current collection run
	"""
	# Rate limiting disabled: it discarded records before their age could be
	# evaluated and interfered with the newer data-dilution mechanisms.
	#global router_rate_limit_list
	#if mac in router_rate_limit_list:
	#	if (statstime - router_rate_limit_list[mac]) < datetime.timedelta(minutes=5):
	#		return
	#router_rate_limit_list[mac] = statstime

	# The following values should stay available after router reset
	keepvalues = ['lat', 'lng', 'description', 'position_comment', 'contact']

	router_id = None
	olddata = False
	uptime = 0
	events = []
	status_comment = ""
	reset = False

	try:
		findrouter = mysql.findone("SELECT router FROM router_netif WHERE mac = %s LIMIT 1", (mac2int(mac),))
		router_update = parse_nodewatcher_xml(xml, statstime)
		# A V2 router announcing a hood we do not know is treated as local/test
		router_update["local"] = bool(router_update["v2"] and router_update["hood"] and not router_update["hood"] in hoodsv2)

		# cancel if banned mac found
		for n in router_update["netifs"]:
			if n["mac"] in banned:
				return

		if router_update["status"] == "wrongpos":
			router_update["status"] = "unknown"
			status_comment = "Coordinates are wrong"
		status = router_update["status"]

		if findrouter:
			router_id = findrouter["router"]
			olddata = mysql.findone("""
				SELECT sys_uptime, sys_time, firmware, hostname, hoods.id AS hoodid, hoods.name AS hood, status, lat, lng, contact, description, position_comment, w2_active, w2_busy, w5_active, w5_busy
				FROM router
				LEFT JOIN hoods ON router.hood = hoods.id
				WHERE router.id = %s
				LIMIT 1
			""", (router_id,))
			if olddata:
				uptime = olddata["sys_uptime"]

				# Filter old data (Alfred keeps data for 10 min.; old and new can mix if gateways do not sync)
				# We only use data where system time is bigger than before (last entry) or more than 1 hour smaller (to catch cases without timeserver)
				newtime = router_update["sys_time"].timestamp()
				oldtime = olddata["sys_time"].timestamp()
				if not (newtime > oldtime or newtime < (oldtime - 3600)):
					return

				# Keep hood if not set (V2 only, "" != None)
				if router_update["hood"] == "":
					router_update["hood"] = olddata["hood"]
			else:
				# Interface row exists but master row is gone: purge leftovers
				delete_router(mysql, router_id)

		# keep hood up to date (extremely rare case where no olddata but no hood)
		if router_update["hood"] == "":
			router_update["hood"] = "NoHood"
		if not router_update["hood"]:
			# V1 Router
			router_update["hood"] = "Legacy"

		if not router_update['lat'] and not router_update['lng'] and olddata and olddata['lat'] and olddata['lng']:
			# Enable reset state; do before variable fallback
			reset = True

		if not router_update["hood"] in hoodsdict.keys():
			checkagain = mysql.findone("SELECT id FROM hoods WHERE name = %s", (router_update["hood"],), "id")
			# Prevent adding the same hood for all routers (won't break, but each would raise the AUTO_INCREMENT)
			if not checkagain:
				mysql.execute("""
					INSERT INTO hoods (name)
					VALUES (%s)
					ON DUPLICATE KEY UPDATE name = name
				""", (router_update["hood"],))
			hoodsdict = mysql.fetchdict("SELECT id, name FROM hoods", (), "name", "id")
		router_update["hoodid"] = hoodsdict[router_update["hood"]]

		if not router_update['hostname']:
			router_update['hostname'] = 'Give Me A Name'

		if olddata:
			# Has to be done after hood detection, so default hood is selected if no lat/lng
			for v in keepvalues:
				if not router_update[v]:
					router_update[v] = olddata[v]  # preserve contact information after router reset

		# Calculate airtime
		router_update["w2_airtime"] = None
		router_update["w5_airtime"] = None
		# Only use new data
		if olddata and router_update["sys_uptime"] > olddata["sys_uptime"]:
			fields_w2 = (router_update["w2_active"], router_update["w2_busy"], olddata["w2_busy"], olddata["w2_active"],)
			if not any(w is None for w in fields_w2):
				diff_active = router_update["w2_active"] - olddata["w2_active"]
				diff_busy = router_update["w2_busy"] - olddata["w2_busy"]
				if diff_active:
					router_update["w2_airtime"] = diff_busy / diff_active  # auto float-division in Python3
				else:
					router_update["w2_airtime"] = 0
			fields_w5 = (router_update["w5_active"], router_update["w5_busy"], olddata["w5_busy"], olddata["w5_active"],)
			if not any(w is None for w in fields_w5):
				diff_active = router_update["w5_active"] - olddata["w5_active"]
				diff_busy = router_update["w5_busy"] - olddata["w5_busy"]
				if diff_active:
					router_update["w5_airtime"] = diff_busy / diff_active  # auto float-division in Python3
				else:
					router_update["w5_airtime"] = 0

		if olddata:
			# statistics
			calculate_network_io(mysql, router_id, uptime, router_update)
			ru = router_update
			mysql.execute("""
				UPDATE router
				SET status = %s, hostname = %s, last_contact = %s, sys_time = %s, sys_uptime = %s, sys_memfree = %s, sys_membuff = %s, sys_memcache = %s,
					sys_loadavg = %s, sys_procrun = %s, sys_proctot = %s, clients = %s, clients_eth = %s, clients_w2 = %s, clients_w5 = %s,
					w2_active = %s, w2_busy = %s, w5_active = %s, w5_busy = %s, w2_airtime = %s, w5_airtime = %s, wan_uplink = %s, tc_enabled = %s, tc_in = %s, tc_out = %s,
					cpu = %s, chipset = %s, hardware = %s, os = %s,
					batman = %s, routing_protocol = %s, kernel = %s, nodewatcher = %s, firmware = %s, firmware_rev = %s, description = %s, position_comment = %s, community = %s, hood = %s, v2 = %s, local = %s, gateway = %s,
					status_text = %s, contact = %s, lng = %s, lat = %s, neighbors = %s, reset = %s
				WHERE id = %s
			""", (
				ru["status"], ru["hostname"], ru["last_contact"], ru["sys_time"].strftime('%Y-%m-%d %H:%M:%S'), ru["sys_uptime"], ru["memory"]["free"], ru["memory"]["buffering"], ru["memory"]["caching"],
				ru["sys_loadavg"], ru["processes"]["runnable"], ru["processes"]["total"], ru["clients"], ru["clients_eth"], ru["clients_w2"], ru["clients_w5"],
				ru["w2_active"], ru["w2_busy"], ru["w5_active"], ru["w5_busy"], ru["w2_airtime"], ru["w5_airtime"], ru["has_wan_uplink"], ru["tc_enabled"], ru["tc_in"], ru["tc_out"],
				ru["cpu"], ru["chipset"], ru["hardware"], ru["os"],
				ru["batman_adv"], ru["rt_protocol"], ru["kernel"], ru["nodewatcher"], ru["firmware"], ru["firmware_rev"], ru["description"], ru["position_comment"], ru["community"], ru["hoodid"], ru["v2"], ru["local"], ru["gateway"],
				ru["status_text"], ru["contact"], ru["lng"], ru["lat"], ru["visible_neighbours"], reset, router_id,))

			# Previously all entries were deleted and recreated with INSERT.
			# Deleting only the rows really gone since the last update causes
			# a lot less write IO, since netifs/neighbors rarely change.
			nkeys = []
			akeys = []
			for n in router_update["netifs"]:
				nkeys.append(n["name"])
				if n["name"] == 'br-mesh':  # Only br-mesh will normally have assigned IPv6 addresses
					akeys = n["ipv6_addrs"]
			if nkeys:
				ndata = mysql.fetchall("SELECT netif FROM router_netif WHERE router = %s", (router_id,), "netif")
				for n in ndata:
					if n in nkeys:
						continue
					mysql.execute("DELETE FROM router_netif WHERE router = %s AND netif = %s", (router_id, n,))
			else:
				mysql.execute("DELETE FROM router_netif WHERE router = %s", (router_id,))
			if akeys:
				adata = mysql.fetchall("SELECT netif, ipv6 FROM router_ipv6 WHERE router = %s", (router_id,))
				for a in adata:
					if a["netif"] == 'br-mesh' and a["ipv6"] in akeys:
						continue
					mysql.execute("DELETE FROM router_ipv6 WHERE router = %s AND netif = %s AND ipv6 = %s", (router_id, a["netif"], a["ipv6"],))
			else:
				mysql.execute("DELETE FROM router_ipv6 WHERE router = %s", (router_id,))
			nbkeys = []
			for n in router_update["neighbours"]:
				nbkeys.append(n["mac"])
			if nbkeys:
				nbdata = mysql.fetchall("SELECT mac FROM router_neighbor WHERE router = %s", (router_id,), "mac")
				for n in nbdata:
					if n in nbkeys:
						continue
					mysql.execute("DELETE FROM router_neighbor WHERE router = %s AND mac = %s", (router_id, n,))
			else:
				mysql.execute("DELETE FROM router_neighbor WHERE router = %s", (router_id,))

			gwkeys = []
			for g in router_update["gws"]:
				gwkeys.append(g["mac"])
			if gwkeys:
				gwdata = mysql.fetchall("SELECT mac FROM router_gw WHERE router = %s", (router_id,), "mac")
				for g in gwdata:
					if g in gwkeys:
						continue
					mysql.execute("DELETE FROM router_gw WHERE router = %s AND mac = %s", (router_id, g,))
			else:
				mysql.execute("DELETE FROM router_gw WHERE router = %s", (router_id,))
		else:
			# insert new router
			created = mysql.formatdt(statstime)
			#events = []  # don't fire sub-events of created events
			ru = router_update
			router_update["status"] = "online"  # use 'online' here, as anything different is only evaluated if olddata is present
			mysql.execute("""
				INSERT INTO router (status, hostname, created, last_contact, sys_time, sys_uptime, sys_memfree, sys_membuff, sys_memcache,
					sys_loadavg, sys_procrun, sys_proctot, clients, clients_eth, clients_w2, clients_w5,
					w2_active, w2_busy, w5_active, w5_busy, w2_airtime, w5_airtime, wan_uplink, tc_enabled, tc_in, tc_out,
					cpu, chipset, hardware, os,
					batman, routing_protocol, kernel, nodewatcher, firmware, firmware_rev, description, position_comment, community, hood, v2, local, gateway,
					status_text, contact, lng, lat, neighbors)
				VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
			""", (
				ru["status"], ru["hostname"], created, ru["last_contact"], ru["sys_time"].strftime('%Y-%m-%d %H:%M:%S'), ru["sys_uptime"], ru["memory"]["free"], ru["memory"]["buffering"], ru["memory"]["caching"],
				ru["sys_loadavg"], ru["processes"]["runnable"], ru["processes"]["total"], ru["clients"], ru["clients_eth"], ru["clients_w2"], ru["clients_w5"],
				None, None, None, None, None, None, ru["has_wan_uplink"], ru["tc_enabled"], ru["tc_in"], ru["tc_out"],
				ru["cpu"], ru["chipset"], ru["hardware"], ru["os"],
				ru["batman_adv"], ru["rt_protocol"], ru["kernel"], ru["nodewatcher"], ru["firmware"], ru["firmware_rev"], ru["description"], ru["position_comment"], ru["community"], ru["hoodid"], ru["v2"], ru["local"], ru["gateway"],
				ru["status_text"], ru["contact"], ru["lng"], ru["lat"], ru["visible_neighbours"],))
			router_id = mysql.cursor().lastrowid
			events_append(mysql, router_id, "created", "")

		ndata = []
		adata = []
		for n in router_update["netifs"]:
			ndata.append((router_id, n["name"], n["mtu"], n["traffic"]["rx_bytes"], n["traffic"]["tx_bytes"], n["traffic"]["rx"], n["traffic"]["tx"], n["fe80_addr"], n["ipv4_addr"], n["mac"], n["wlan_channel"], n["wlan_type"], n["wlan_width"], n["wlan_ssid"], n["wlan_txpower"],))
			for a in n["ipv6_addrs"]:
				adata.append((router_id, n["name"], a,))

		# As for deletion, ON DUPLICATE KEY UPDATE is more complex than plain
		# DELETE + INSERT, but causes much less IO, and UPDATE is better than
		# INSERT in terms of locking.
		mysql.executemany("""
			INSERT INTO router_netif (router, netif, mtu, rx_bytes, tx_bytes, rx, tx, fe80_addr, ipv4_addr, mac, wlan_channel, wlan_type, wlan_width, wlan_ssid, wlan_txpower)
			VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE
				mtu = VALUES(mtu),
				rx_bytes = VALUES(rx_bytes),
				tx_bytes = VALUES(tx_bytes),
				rx = VALUES(rx),
				tx = VALUES(tx),
				fe80_addr = VALUES(fe80_addr),
				ipv4_addr = VALUES(ipv4_addr),
				mac = VALUES(mac),
				wlan_channel = VALUES(wlan_channel),
				wlan_type = VALUES(wlan_type),
				wlan_width = VALUES(wlan_width),
				wlan_ssid = VALUES(wlan_ssid),
				wlan_txpower = VALUES(wlan_txpower)
		""", ndata)
		mysql.executemany("""
			INSERT INTO router_ipv6 (router, netif, ipv6)
			VALUES (%s, %s, %s)
			ON DUPLICATE KEY UPDATE
				ipv6 = ipv6
		""", adata)
		nbdata = []
		for n in router_update["neighbours"]:
			nbdata.append((router_id, n["mac"], n["netif"], n["quality"], n["type"],))
		mysql.executemany("""
			INSERT INTO router_neighbor (router, mac, netif, quality, type)
			VALUES (%s, %s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE
				netif = VALUES(netif),
				quality = VALUES(quality),
				type = VALUES(type)
		""", nbdata)

		gwdata = []
		for g in router_update["gws"]:
			gwdata.append((router_id, g["mac"], g["quality"], g["nexthop"], g["netif"], g["gw_class"], g["selected"],))
		mysql.executemany("""
			INSERT INTO router_gw (router, mac, quality, nexthop, netif, gw_class, selected)
			VALUES (%s, %s, %s, %s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE
				quality = VALUES(quality),
				nexthop = VALUES(nexthop),
				netif = VALUES(netif),
				gw_class = VALUES(gw_class),
				selected = VALUES(selected)
		""", gwdata)
		if router_id:
			new_router_stats(mysql, router_id, uptime, router_update, netifdict, statstime)

	except ValueError as e:
		import traceback
		writefulllog("Warning: Unable to parse xml from %s: %s\n__%s" % (mac, e, traceback.format_exc().replace("\n", "\n__")))
		if router_id:
			set_status(mysql, router_id, "unknown")
		status = "unknown"
		status_comment = "Invalid XML"
	except OverflowError as e:
		import traceback
		writefulllog("Warning: Overflow Error when saving %s: %s\n__%s" % (mac, e, traceback.format_exc().replace("\n", "\n__")))
		if router_id:
			set_status(mysql, router_id, "unknown")
		status = "unknown"
		status_comment = "Integer Overflow"
	except my.OperationalError as e:
		import traceback
		writefulllog("Warning: Operational error in MySQL when saving %s: %s\n__%s" % (mac, e, traceback.format_exc().replace("\n", "\n__")))
		writelog(CONFIG["debug_dir"] + "/fail_readrouter.txt", "MySQL Error: {} - {}".format(router_update["hostname"], e))
	except Exception as e:
		import traceback
		writefulllog("Warning: Exception occurred when saving %s: %s\n__%s" % (mac, e, traceback.format_exc().replace("\n", "\n__")))
		if router_id:
			set_status(mysql, router_id, "unknown")
		status = "unknown"
		status_comment = "Exception occurred"
		writelog(CONFIG["debug_dir"] + "/fail_readrouter.txt", "General Exception: {} - {}".format(router_update["hostname"], e))

	if olddata:
		# fire events (suppress() guards against partially-parsed updates;
		# NOTE(review): 'status' may be unbound after an OperationalError,
		# the last suppress() below does not cover UnboundLocalError)
		with suppress(KeyError, TypeError, UnboundLocalError):
			#if (olddata["sys_uptime"] - 300) > router_update["sys_uptime"]:
			if olddata["sys_uptime"] > router_update["sys_uptime"]:
				events_append(mysql, router_id, "reboot", "")

		with suppress(KeyError, TypeError, UnboundLocalError):
			if olddata["firmware"] != router_update["firmware"]:
				events_append(mysql, router_id, "update",
					"%s -> %s" % (olddata["firmware"], router_update["firmware"]))

		with suppress(KeyError, TypeError, UnboundLocalError):
			if olddata["hostname"] != router_update["hostname"]:
				events_append(mysql, router_id, "hostname",
					"%s -> %s" % (olddata["hostname"], router_update["hostname"]))

		with suppress(KeyError, TypeError, UnboundLocalError):
			if olddata["hood"] != router_update["hood"]:
				events_append(mysql, router_id, "hood",
					"%s -> %s" % (olddata["hood"], router_update["hood"]))

		with suppress(KeyError, TypeError):
			if olddata["status"] != status:
				events_append(mysql, router_id, status, status_comment)
def detect_offline_routers(mysql):
	"""Mark routers as offline whose last contact is too long ago.

	Fires an 'offline' event per affected router, zeroes the per-netif
	rates, then flips the status (online -> offline) and client count.
	"""
	# Offline after X minutes (online -> offline)
	threshold = mysql.formatdt(utcnow() - datetime.timedelta(minutes=CONFIG["offline_threshold_minutes"]))
	now = mysql.utcnow()
	result = mysql.fetchall("""
		SELECT id
		FROM router
		WHERE last_contact < %s AND status <> 'offline' AND status <> 'orphaned'
	""", (threshold,))
	rdata = []
	for r in result:
		rdata.append((r["id"], now,))
	mysql.executemany("""
		INSERT INTO router_events (router, time, type, comment)
		VALUES (%s, %s, 'offline', '')
	""", rdata)

	# Reset current rx/tx rates of all netifs of routers going offline
	mysql.execute("""
		UPDATE router_netif AS n
		INNER JOIN router AS r ON r.id = n.router
		SET n.rx = 0, n.tx = 0
		WHERE r.last_contact < %s AND r.status <> 'offline' AND r.status <> 'orphaned'
	""", (threshold,))
	# Online to Offline has to be updated after other queries!
	mysql.execute("""
		UPDATE router
		SET status = 'offline', clients = 0
		WHERE last_contact < %s AND status <> 'offline' AND status <> 'orphaned'
	""", (threshold,))
	mysql.commit()
2017-11-15 20:33:31 +01:00
def detect_orphaned_routers(mysql):
	"""Mark long-offline routers as orphaned (offline -> orphaned)."""
	# Orphan after X days (offline -> orphaned)
	threshold = mysql.formatdt(utcnow() - datetime.timedelta(days=CONFIG["orphan_threshold_days"]))
	mysql.execute("""
		UPDATE router
		SET status = 'orphaned'
		WHERE last_contact < %s AND status = 'offline'
	""", (threshold,))
	mysql.commit()
def delete_orphaned_routers(mysql):
	"""Delete routers (and joined child rows) past the deletion threshold."""
	# Deleted after X days (orphaned -> deletion)
	threshold = mysql.formatdt(utcnow() - datetime.timedelta(days=CONFIG["delete_threshold_days"]))
	# NOTE(review): condition is status <> 'offline', not status = 'orphaned';
	# orphaned routers match, but verify this is intentional for other states.
	mysql.execute("""
		DELETE r, e, i, nb, net FROM router AS r
		INNER JOIN router_events AS e ON r.id = e.router
		INNER JOIN router_ipv6 AS i ON r.id = i.router
		INNER JOIN router_neighbor AS nb ON r.id = nb.router
		INNER JOIN router_netif AS net ON r.id = net.router
		WHERE r.last_contact < %s AND r.status <> 'offline'
	""", (threshold,))
	mysql.commit()
2018-01-13 00:00:27 +01:00
def delete_unlinked_routers(mysql):
	"""Delete rows in router_* tables whose router no longer exists.

	Iterates over all child tables and removes entries without a
	corresponding row in the router master table, printing timing info
	per table.
	"""
	tables = ["router_events", "router_gw", "router_ipv6", "router_neighbor", "router_netif",
		"router_stats", "router_stats_gw", "router_stats_neighbor", "router_stats_netif"]
	for t in tables:
		start_time = time.time()
		# LEFT JOIN ... IS NULL finds child rows without a master row
		mysql.execute("""
			DELETE d FROM {} AS d
			LEFT JOIN router AS r ON r.id = d.router
			WHERE r.id IS NULL
		""".format(t))
		# Alternative formulation, kept for reference (slower on MySQL):
		#mysql.execute("""
		#	DELETE FROM {}
		#	WHERE {}.router NOT IN (
		#		SELECT id FROM router
		#	)
		#""".format(t,t))
		print("--- Deleted %i rows from %s: %.3f seconds ---" % (mysql.cursor().rowcount, t, time.time() - start_time))
	mysql.commit()
2018-02-12 16:56:21 +01:00
def delete_stats_helper(mysql, label, query, params):
	"""Run a chunked DELETE query until no rows are affected.

	label:  name used in the log/print output
	query:  DELETE statement with a LIMIT clause (executed repeatedly)
	params: parameter tuple for the query

	On a MySQL OperationalError (e.g. lock timeout) the loop sleeps 10 s
	and retries; the sleep time is subtracted from the reported duration.
	(Parameter renamed from 'tuple' to avoid shadowing the builtin; all
	callers in this module pass it positionally.)
	"""
	minustime = 0
	rowsaffected = 1
	allrows = 0
	start_time = time.time()
	while rowsaffected > 0:
		try:
			rowsaffected = mysql.execute(query, params)
			mysql.commit()
			allrows += rowsaffected
		except my.OperationalError:
			rowsaffected = 1  # force another attempt
			time.sleep(10)
			minustime += 10
	end_time = time.time()
	writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from %s stats: %.3f seconds" % (allrows, label, end_time - start_time - minustime))
	print("--- Deleted %i rows from %s stats: %.3f seconds ---" % (allrows, label, end_time - start_time - minustime))
2017-11-05 19:48:29 +01:00
def delete_old_stats(mysql):
	"""Purge stats, gw_netif and event rows past their retention period.

	Runs as a sequence of deletes with sleeps in between to keep the
	database responsive; the large stats tables are deleted in batches
	of 100000 rows via delete_stats_helper(). Progress is appended to
	deletetime.txt in the debug directory.
	"""
	# Retention thresholds: unix timestamps for the stats tables,
	# a formatted datetime string for gw_netif.last_contact
	threshold = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_days"])).timestamp()
	threshold_netif = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_netif"])).timestamp()
	threshold_gw = (utcnow() - datetime.timedelta(days=CONFIG["router_stat_gw"])).timestamp()
	threshold_gw_netif = mysql.formatdt(utcnow() - datetime.timedelta(hours=CONFIG["gw_netif_threshold_hours"]))

	# General router stats: only for online routers (or orphaned rows)
	start_time = time.time()
	rowsaffected = mysql.execute("""
		DELETE s FROM router_stats AS s
		LEFT JOIN router AS r ON s.router = r.id
		WHERE s.time < %s AND (r.status = 'online' OR r.status IS NULL)
	""",(threshold,))
	mysql.commit()
	writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from stats: %.3f seconds" % (rowsaffected, time.time() - start_time))
	print("--- Deleted %i rows from stats: %.3f seconds ---" % (rowsaffected, time.time() - start_time))

	time.sleep(10)

	# Gateway stats, batched
	query = """
		DELETE FROM router_stats_gw
		WHERE router_stats_gw.time < %s
		LIMIT 100000
	"""
	delete_stats_helper(mysql, "gw-stats", query, (threshold_gw,))

	time.sleep(30)
	# Neighbor stats, batched
	query = """
		DELETE FROM router_stats_neighbor
		WHERE router_stats_neighbor.time < %s
		LIMIT 100000
	"""
	delete_stats_helper(mysql, "neighbor-stats", query, (threshold,))

	time.sleep(30)
	# Network interface stats, batched
	query = """
		DELETE FROM router_stats_netif
		WHERE router_stats_netif.time < %s
		LIMIT 100000
	"""
	delete_stats_helper(mysql, "netif-stats", query, (threshold_netif,))

	# Stale gateway network interfaces
	start_time = time.time()
	allrows = mysql.execute("DELETE FROM gw_netif WHERE last_contact < %s", (threshold_gw_netif,))
	mysql.commit()
	writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from gw_netif: %.3f seconds" % (allrows, time.time() - start_time))
	print("--- Deleted %i rows from gw_netif: %.3f seconds ---" % (allrows, time.time() - start_time))
	time.sleep(10)

	# Router events: keep only the newest CONFIG["event_num_entries"] per router
	start_time = time.time()
	allrows = 0
	events = mysql.fetchall("""
		SELECT router, COUNT(time) AS count FROM router_events
		GROUP BY router
	""")
	for e in events:
		delnum = int(e["count"] - CONFIG["event_num_entries"])
		if delnum > 0:
			# Drop the oldest surplus entries of this router
			allrows += mysql.execute("""
				DELETE FROM router_events
				WHERE router = %s
				ORDER BY time ASC
				LIMIT %s
			""",(e["router"],delnum,))
	mysql.commit()
	writelog(CONFIG["debug_dir"] + "/deletetime.txt", "Deleted %i rows from events: %.3f seconds" % (allrows, time.time() - start_time))
	writelog(CONFIG["debug_dir"] + "/deletetime.txt", "-------")
	print("--- Deleted %i rows from events: %.3f seconds ---" % (allrows, time.time() - start_time))
2017-11-05 19:48:29 +01:00
def events_append(mysql, router_id, event, comment):
	"""Record a single event (type + comment) for a router, timestamped now."""
	row = (router_id, mysql.utcnow(), event, comment,)
	mysql.execute("""
		INSERT INTO router_events (router, time, type, comment)
		VALUES (%s, %s, %s, %s)
	""", row)
def set_status(mysql, router_id, status):
	"""Set a router's status and refresh its last_contact to now."""
	row = (status, mysql.utcnow(), router_id,)
	mysql.execute("""
		UPDATE router
		SET status = %s, last_contact = %s
		WHERE id = %s
	""", row)
2018-03-15 16:30:22 +01:00
def new_router_stats(mysql, router_id, uptime, router_update, netifdict, statstime):
	"""Write one statistics sample for a router into the stats tables.

	mysql:         FreifunkMySQL wrapper
	router_id:     primary key in the `router` table
	uptime:        previously stored uptime; only used by the
	               commented-out rate-limit check below
	router_update: parsed nodewatcher data dict
	netifdict:     mapping of netif name -> id from the `netifs` table
	statstime:     sample timestamp

	Each stats table is rate-limited via CONFIG["router_stat_mindiff_*"]:
	nothing is inserted when the most recent sample is too fresh.
	"""
	#if not (uptime + CONFIG["router_stat_mindiff_secs"]) < router_update["sys_uptime"]:
	#	return

	# NOTE: this local shadows the `time` module (which is not used here)
	time = mysql.formattimestamp(statstime)

	# Most recent general stats sample for this router (None if none yet)
	stattime = mysql.findone("SELECT time FROM router_stats WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
	if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
		mysql.execute("""
			INSERT INTO router_stats (time, router, sys_memfree, sys_membuff, sys_memcache, loadavg, sys_procrun, sys_proctot,
				clients, clients_eth, clients_w2, clients_w5, airtime_w2, airtime_w5)
			VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE time=time
		""",(
			time,
			router_id,
			router_update["memory"]['free'],
			router_update["memory"]['buffering'],
			router_update["memory"]['caching'],
			router_update["sys_loadavg"],
			router_update["processes"]['runnable'],
			router_update["processes"]['total'],
			router_update["clients"],
			router_update["clients_eth"],
			router_update["clients_w2"],
			router_update["clients_w5"],
			router_update["w2_airtime"],
			router_update["w5_airtime"],
			))

	# Per-interface traffic stats have their own (shorter) interval
	netiftime = mysql.findone("SELECT time FROM router_stats_netif WHERE router = %s ORDER BY time DESC LIMIT 1",(router_id,),"time")
	if not netiftime or (netiftime + CONFIG["router_stat_mindiff_netif"]) < time:
		ndata = []
		nkeys = []
		for netif in router_update["netifs"]:
			# sanitize name
			name = netif["name"].replace(".","").replace("$","")
			with suppress(KeyError):
				if name in netifdict.keys():
					ndata.append((time,router_id,netifdict[name],netif["traffic"]["rx"],netif["traffic"]["tx"],))
				else:
					# interface name not yet in the `netifs` lookup table
					nkeys.append((name,))

		# 99.9 % of the routers will NOT enter this, so the doubled code is not a problem
		if nkeys:
			# Register the new interface names, refresh the lookup dict
			# and rebuild ndata with the now-complete mapping
			mysql.executemany("""
				INSERT INTO netifs (name)
				VALUES (%s)
				ON DUPLICATE KEY UPDATE name=name
			""",nkeys)
			netifdict = mysql.fetchdict("SELECT id, name FROM netifs",(),"name","id")

			ndata = []
			for netif in router_update["netifs"]:
				# sanitize name
				name = netif["name"].replace(".","").replace("$","")
				with suppress(KeyError):
					ndata.append((time,router_id,netifdict[name],netif["traffic"]["rx"],netif["traffic"]["tx"],))

		mysql.executemany("""
			INSERT INTO router_stats_netif (time, router, netif, rx, tx)
			VALUES (%s, %s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE time=time
		""",ndata)

	# reuse timestamp from router_stats to avoid additional queries
	if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
		nbdata = []
		for neighbour in router_update["neighbours"]:
			with suppress(KeyError):
				nbdata.append((time,router_id,neighbour["mac"],neighbour["quality"],))
		mysql.executemany("""
			INSERT INTO router_stats_neighbor (time, router, mac, quality)
			VALUES (%s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE time=time
		""",nbdata)

	# reuse timestamp from router_stats to avoid additional queries
	if not stattime or (stattime + CONFIG["router_stat_mindiff_default"]) < time:
		gwdata = []
		for gw in router_update["gws"]:
			with suppress(KeyError):
				gwdata.append((time,router_id,gw["mac"],gw["quality"],))
		mysql.executemany("""
			INSERT INTO router_stats_gw (time, router, mac, quality)
			VALUES (%s, %s, %s, %s)
			ON DUPLICATE KEY UPDATE time=time
		""",gwdata)
2017-11-05 19:48:29 +01:00
2017-11-16 12:51:35 +01:00
def calculate_network_io(mysql, router_id, uptime, router_update):
	"""Compute per-interface rx/tx byte rates into router_update["netifs"].

	uptime:        previously stored uptime of the router (seconds)
	router_update: new router dict (which will be updated with new data)

	If the router kept running (uptime increased), the rate is the byte
	delta against the stored counters divided by the uptime delta; after
	a reboot (uptime reset) it is the total counter averaged over the new
	uptime. Returns the (unchanged) previous uptime.

	Fix: the suppress(KeyError, StopIteration) previously wrapped the
	whole loop, so a single netif present in router_netif but missing
	from the update aborted rate computation for ALL remaining
	interfaces. Suppression is now per row, skipping only that netif.
	"""
	results = mysql.fetchall("SELECT netif, rx_bytes, tx_bytes FROM router_netif WHERE router = %s",(router_id,))

	with suppress(KeyError):
		if uptime < router_update["sys_uptime"]:
			# Router kept running: rate = counter delta / uptime delta
			timediff = router_update["sys_uptime"] - uptime
			for row in results:
				with suppress(KeyError, StopIteration):
					netif_update = next(filter(lambda n: n["name"] == row["netif"], router_update["netifs"]))
					rx_diff = netif_update["traffic"]["rx_bytes"] - int(row["rx_bytes"])
					tx_diff = netif_update["traffic"]["tx_bytes"] - int(row["tx_bytes"])
					# Negative deltas mean counter wrap/reset: leave rates at 0
					if rx_diff >= 0 and tx_diff >= 0:
						netif_update["traffic"]["rx"] = int(rx_diff / timediff)
						netif_update["traffic"]["tx"] = int(tx_diff / timediff)
		else:
			# Reboot detected: average the totals over the new uptime
			for row in results:
				with suppress(KeyError, StopIteration):
					netif_update = next(filter(lambda n: n["name"] == row["netif"], router_update["netifs"]))
					netif_update["traffic"]["rx"] = int(netif_update["traffic"]["rx_bytes"] / router_update["sys_uptime"])
					netif_update["traffic"]["tx"] = int(netif_update["traffic"]["tx_bytes"] / router_update["sys_uptime"])
	return uptime
2015-11-03 16:03:14 +01:00
2017-11-14 22:22:55 +01:00
def evalxpath(tree, p, default=""):
	"""Return the first result of XPath `p` on `tree`, or `default` if empty."""
	try:
		return tree.xpath(p)[0]
	except IndexError:
		return default
2017-12-29 15:24:37 +01:00
def evalxpathfloat(tree, p, default=0):
	"""First XPath result converted to float, or `default` if the path is empty.

	A match that is not parseable as float still raises ValueError —
	callers rely on that (e.g. for geo coordinate validation)."""
	try:
		return float(tree.xpath(p)[0])
	except IndexError:
		return default
2017-12-29 15:24:37 +01:00
def evalxpathint(tree, p, default=0):
	"""First XPath result converted to int, or `default` if the path is empty.

	A match that is not parseable as int still raises ValueError."""
	try:
		return int(tree.xpath(p)[0])
	except IndexError:
		return default
2018-02-10 14:52:06 +01:00
def evalxpathbool(tree, p, default=False):
	"""Interpret the first result of XPath `p` as a boolean.

	Returns True when the matched text is "true" (any case) or "1",
	False for any other non-empty text, and `default` when the path
	matches nothing or the matched text is empty/falsy.

	Fix: the original ran the string test on `default` itself when the
	path was empty, so a truthy non-string default (e.g. True) raised
	AttributeError on .lower(); the test now only runs on a real match.
	"""
	with suppress(IndexError):
		tmp = tree.xpath(p)[0]
		if tmp:
			return (tmp.lower() == "true" or tmp == "1")
	return default
2018-01-09 13:35:47 +01:00
2018-03-15 16:30:22 +01:00
def parse_nodewatcher_xml(xml, statstime):
	"""Parse a nodewatcher XML status report into a router_update dict.

	xml:       raw XML string as delivered by the router's nodewatcher
	statstime: datetime used as the router's last_contact timestamp

	Returns a dict with system/hardware/config fields plus the lists
	"netifs", "neighbours" and "gws". Raises ValueError (wrapping
	AssertionError, XMLSyntaxError or IndexError) on malformed input.
	"""
	try:
		assert xml != ""
		tree = lxml.etree.fromstring(xml)
		router_update = {
			"status": evalxpath(tree, "/data/system_data/status/text()"),
			"hostname": evalxpath(tree, "/data/system_data/hostname/text()"),
			"last_contact": statstime.strftime('%Y-%m-%d %H:%M:%S'),
			"gws": [],
			"neighbours": [],
			"netifs": [],
			# hardware
			"chipset": evalxpath(tree, "/data/system_data/chipset/text()", "Unknown"),
			"cpu": evalxpath(tree, "/data/system_data/cpu/text()"),
			"hardware": evalxpath(tree, "/data/system_data/model/text()", "Legacy"),
			# config
			"description": evalxpath(tree, "/data/system_data/description/text()"),
			"position_comment": evalxpath(tree, "/data/system_data/position_comment/text()"),
			"community": evalxpath(tree, "/data/system_data/firmware_community/text()"),
			# return None if not present and "" if empty => distinction between V1 and V2 with no hood
			"hood": evalxpath(tree, "/data/system_data/hood/text()", None),
			"status_text": evalxpath(tree, "/data/system_data/status_text/text()"),
			"contact": evalxpath(tree, "/data/system_data/contact/text()"),
			# system
			"sys_time": datetime.datetime.fromtimestamp(evalxpathint(tree, "/data/system_data/local_time/text()")),
			"sys_uptime": int(evalxpathfloat(tree, "/data/system_data/uptime/text()")),
			"memory": {
				"free": evalxpathint(tree, "/data/system_data/memory_free/text()"),
				"buffering": evalxpathint(tree, "/data/system_data/memory_buffering/text()"),
				"caching": evalxpathint(tree, "/data/system_data/memory_caching/text()"),
			},
			# processes is reported as "runnable/total"
			"processes": {
				"runnable": int(evalxpath(tree, "/data/system_data/processes/text()").split("/")[0]),
				"total": int(evalxpath(tree, "/data/system_data/processes/text()").split("/")[1]),
			},
			"clients": evalxpathint(tree, "/data/client_count/text()"),
			"clients_eth": evalxpathint(tree, "/data/clients/*[starts-with(name(), 'eth')]/text()", None),
			"clients_w2": evalxpathint(tree, "/data/clients/w2ap/text()", None),
			"clients_w5": evalxpathint(tree, "/data/clients/w5ap/text()", None),
			"w2_busy": evalxpathint(tree, "/data/airtime2/busy/text()", None),
			"w2_active": evalxpathint(tree, "/data/airtime2/active/text()", None),
			"w5_busy": evalxpathint(tree, "/data/airtime5/busy/text()", None),
			"w5_active": evalxpathint(tree, "/data/airtime5/active/text()", None),
			# WAN uplink detected via the vpn_active flag or presence of any
			# of the configured VPN interfaces
			"has_wan_uplink": (
				(len(tree.xpath("/data/system_data/vpn_active")) > 0
					and evalxpathint(tree, "/data/system_data/vpn_active/text()") == 1)
				or len(tree.xpath("/data/interface_data/%s" % CONFIG["vpn_netif"])) > 0
				or len(tree.xpath("/data/interface_data/*[starts-with(name(), '%s')]" % CONFIG["vpn_netif_l2tp"])) > 0
				or len(tree.xpath("/data/interface_data/%s" % CONFIG["vpn_netif_aux"])) > 0),
			# traffic control (shaping) settings of the WAN port
			"tc_enabled": evalxpathbool(tree, "/data/traffic_control/wan/enabled/text()", None),
			"tc_in": evalxpathfloat(tree, "/data/traffic_control/wan/in/text()", None),
			"tc_out": evalxpathfloat(tree, "/data/traffic_control/wan/out/text()", None),
			# software
			"os": "%s (%s)" % (evalxpath(tree, "/data/system_data/distname/text()"),
				evalxpath(tree, "/data/system_data/distversion/text()")),
			"batman_adv": evalxpath(tree, "/data/system_data/batman_advanced_version/text()"),
			"rt_protocol": evalxpath(tree, "/data/system_data/rt_protocol/text()", None),
			"kernel": evalxpath(tree, "/data/system_data/kernel_version/text()"),
			"nodewatcher": evalxpath(tree, "/data/system_data/nodewatcher_version/text()"),
			#"fastd": evalxpath(tree,"/data/system_data/fastd_version/text()"),
			"firmware": evalxpath(tree, "/data/system_data/firmware_version/text()"),
			"firmware_rev": evalxpath(tree, "/data/system_data/firmware_revision/text()"),
		}
		router_update["v2"] = bool(router_update["hood"] is not None)  # None = V1, "" or content = V2

		# Prefer the 1-minute load average; fall back to the 5-minute value
		loadavg = evalxpathfloat(tree, "/data/system_data/loadavg/text()", None)
		if not loadavg == None:
			router_update["sys_loadavg"] = loadavg
		else:
			router_update["sys_loadavg"] = evalxpathfloat(tree, "/data/system_data/loadavg5/text()")

		# Geo position: unparseable values flag the router as "wrongpos",
		# exact zero is treated as "no position"
		try:
			lng = evalxpathfloat(tree, "/data/system_data/geo/lng/text()")
		except ValueError:
			lng = None
			router_update["status"] = "wrongpos"
		try:
			lat = evalxpathfloat(tree, "/data/system_data/geo/lat/text()")
		except ValueError:
			lat = None
			router_update["status"] = "wrongpos"
		if lng == 0:
			lng = None
		if lat == 0:
			lat = None
		router_update["lng"] = lng
		router_update["lat"] = lat

		# Network interfaces
		for netif in tree.xpath("/data/interface_data/*"):
			interface = {
				"name": evalxpath(netif, "name/text()"),
				"mtu": evalxpathint(netif, "mtu/text()"),
				"traffic": {
					"rx_bytes": evalxpathint(netif, "traffic_rx/text()"),
					"tx_bytes": evalxpathint(netif, "traffic_tx/text()"),
					# rates are filled in later by calculate_network_io()
					"rx": 0,
					"tx": 0,
				},
				"ipv4_addr": ipv4toint(evalxpath(netif, "ipv4_addr/text()")),
				"mac": mac2int(evalxpath(netif, "mac_addr/text()")),
				"wlan_channel": evalxpathint(netif, "wlan_channel/text()", None),
				"wlan_type": evalxpath(netif, "wlan_type/text()", None),
				"wlan_width": evalxpathint(netif, "wlan_width/text()", None),
				"wlan_ssid": evalxpath(netif, "wlan_ssid/text()", None),
				"wlan_txpower": evalxpath(netif, "wlan_tx_power/text()", None),
			}
			# link-local address without prefix length; stays None if absent
			with suppress(IndexError):
				interface["fe80_addr"] = None
				interface["fe80_addr"] = ipv6tobin(netif.xpath("ipv6_link_local_addr/text()")[0].split("/")[0])
			interface["ipv6_addrs"] = []
			if len(netif.xpath("ipv6_addr/text()")) > 0:
				for ipv6_addr in netif.xpath("ipv6_addr/text()"):
					interface["ipv6_addrs"].append(ipv6tobinmasked(ipv6_addr.split("/")[0]))
			router_update["netifs"].append(interface)

		# Layer-2 (batman-adv) neighbours: only direct neighbours
		# (originator == nexthop) that are not VPN uplinks
		visible_neighbours = 0
		for originator in tree.xpath("/data/batman_adv_originators/*"):
			visible_neighbours += 1
			o_mac = mac2int(evalxpath(originator, "originator/text()"))
			o_nexthop = mac2int(evalxpath(originator, "nexthop/text()"))
			# mac is the mac of the neighbour w2/5mesh if
			# (which might also be called wlan0-1)
			o_out_if = evalxpath(originator, "outgoing_interface/text()")
			if o_mac == o_nexthop:
				# skip vpn server
				if o_out_if == CONFIG["vpn_netif"]:
					continue
				elif o_out_if.startswith(CONFIG["vpn_netif_l2tp"]):
					continue
				elif o_out_if == CONFIG["vpn_netif_aux"]:
					continue
				neighbour = {
					"mac": o_mac,
					"netif": o_out_if,
					"quality": evalxpathfloat(originator, "link_quality/text()"),
					"type": "l2"
				}
				router_update["neighbours"].append(neighbour)

		# Layer-3 (babel) neighbours
		l3_neighbours = get_l3_neighbours(tree)
		visible_neighbours += len(l3_neighbours)
		router_update["visible_neighbours"] = visible_neighbours
		router_update["neighbours"] += l3_neighbours

		router_update["gateway"] = False  # Default: false
		for gw in tree.xpath("/data/batman_adv_gateway_list/*"):
			gw_mac = evalxpath(gw, "gateway/text()")
			if (gw_mac and len(gw_mac) > 12):  # Throw away headline
				# Trim trailing characters after the 17-char MAC
				if len(gw_mac) > 17:
					gw_mac = gw_mac[0:17]
				# NOTE: rebinds the loop variable `gw` to the result dict
				gw = {
					"mac": mac2int(gw_mac),
					"quality": evalxpath(gw, "link_quality/text()"),
					"nexthop": mac2int(evalxpath(gw, "nexthop/text()", None)),
					"netif": evalxpath(gw, "outgoing_interface/text()", None),
					"gw_class": evalxpath(gw, "gw_class/text()", None),
					"selected": evalxpathbool(gw, "selected/text()")
				}
				if gw["netif"] == "internal":
					router_update["gateway"] = True  # If "internal" exists, device must be a gateway
				# Some firmwares prefix the quality value with "false"
				if gw["quality"].startswith("false"):
					gw["quality"] = gw["quality"][5:]
				if gw["quality"]:
					gw["quality"] = float(gw["quality"])
				else:
					gw["quality"] = 0
				# Old format: netif missing, first token of gw_class is the netif
				if gw["netif"] == "false":
					tmp = gw["gw_class"].split(None, 1)
					gw["netif"] = tmp[0]
					gw["gw_class"] = tmp[1]
				router_update["gws"].append(gw)

		if not router_update["gws"]:
			# Only change if list is empty (this will keep previously set value in all other cases)
			router_update["gateway"] = True
		return router_update
	except (AssertionError, lxml.etree.XMLSyntaxError, IndexError) as e:
		raise ValueError("%s: %s" % (e.__class__.__name__, str(e)))
2017-04-17 13:31:31 +02:00
def get_l3_neighbours(tree):
	"""Collect the babel (layer-3) neighbours from a nodewatcher XML tree."""
	result = []
	for node in tree.xpath("/data/babel_neighbours/*"):
		# Newer nodewatcher versions put the address in <ip>,
		# older ones deliver it as the element's own text
		ip = evalxpath(node, "ip/text()", None)
		if not ip:
			ip = node.text
		result.append({
			"mac": mac2int(get_mac_from_v6_link_local(ip)),
			"netif": node.xpath("outgoing_interface/text()")[0],
			"quality": -1.0 * evalxpathfloat(node, "link_cost/text()", 1),
			"type": "l3"
		})
	return result
def get_mac_from_v6_link_local(v6_fe80):
	"""Derive the MAC address from an EUI-64 IPv6 link-local address."""
	hexdigits = ipaddress.ip_address(v6_fe80).exploded.replace(":", "")
	# interface identifier = lower 64 bits, with ff:fe inserted in the middle
	iid = hexdigits[16:]
	octets = [iid[0:2], iid[2:4], iid[4:6], iid[10:12], iid[12:14], iid[14:16]]
	# flip the universal/local bit of the first octet (EUI-64 reversal)
	octets[0] = '%02x' % (int(octets[0], 16) ^ 2)
	return ':'.join(octets)