#!/usr/bin/python3
|
|
|
|
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
|
|
|
|
|
2017-11-05 19:48:29 +01:00
|
|
|
from ffmap.mysqltools import FreifunkMySQL
|
2017-11-19 00:43:48 +01:00
|
|
|
from ffmap.config import CONFIG
|
2015-10-10 17:42:44 +02:00
|
|
|
|
|
|
|
import math
|
|
|
|
import numpy as np
|
|
|
|
from scipy.spatial import Voronoi
|
|
|
|
|
2017-10-14 16:42:13 +02:00
|
|
|
import urllib.request, json
|
|
|
|
|
|
|
|
# Earth radius in meters (WGS84 semi-major axis), as used by the spherical
# ("web") Mercator projection helpers below.
EARTH_RADIUS = 6378137.0
|
|
|
|
|
2015-10-10 17:42:44 +02:00
|
|
|
def touch(fname, times=None):
    """Emulate the Unix ``touch`` command.

    Creates *fname* if it does not exist (without truncating an existing
    file) and updates its access/modification times.

    fname: path of the file to touch.
    times: optional ``(atime, mtime)`` tuple forwarded to ``os.utime()``;
           ``None`` sets both timestamps to the current time.
    """
    # Append mode creates the file if missing and never clobbers content.
    open(fname, 'a').close()
    os.utime(fname, times)
|
|
|
|
|
2017-10-14 16:42:13 +02:00
|
|
|
def merc_sphere(lng, lat, radius=6378137.0):
    """Project geographic coordinates onto a spherical-Mercator plane.

    lng, lat: longitude/latitude in degrees.
    radius:   sphere radius in meters; defaults to the Earth radius used
              throughout this module (EARTH_RADIUS). Parameterized so the
              projection can be reused/tested with a unit sphere.

    Returns (x, y) in meters. Note: lat = +/-90 is a pole and has no finite
    Mercator y (math range error).
    """
    x = math.radians(lng) * radius
    # Standard spherical-Mercator latitude stretch: ln(tan(pi/4 + phi/2)).
    y = math.log(math.tan(math.pi/4 + math.radians(lat)/2)) * radius
    return (x, y)
|
|
|
|
|
|
|
|
def merc_sphere_inv(x, y, radius=6378137.0):
    """Inverse of merc_sphere(): map Mercator-plane meters back to degrees.

    x, y:   spherical-Mercator coordinates in meters.
    radius: sphere radius in meters; defaults to the Earth radius used
            throughout this module (EARTH_RADIUS). Parameterized to mirror
            merc_sphere().

    Returns (lng, lat) in degrees.
    """
    lng = math.degrees(x / radius)
    # Gudermannian function: inverts ln(tan(pi/4 + phi/2)).
    lat = math.degrees(2*math.atan(math.exp(y / radius)) - math.pi/2)
    return (lng, lat)
|
|
|
|
|
|
|
|
def draw_voronoi_lines(csv, hoods):
    """Write the Voronoi diagram of *hoods* to *csv* as WKT LINESTRING rows.

    csv:   an open, writable file object (the caller has already written the
           CSV header line).
    hoods: list of [x, y] points in spherical-Mercator meters.

    Finite ridges are written directly; infinite ridges are extended from
    their finite vertex far beyond the map area before being written. All
    coordinates are converted back to lng/lat via merc_sphere_inv().
    """
    points = np.array(hoods)
    vor = Voronoi(points)
    #mp = voronoi_plot_2d(vor)
    #mp.show()

    # Ridges whose vertex list contains no -1 have two finite endpoints and
    # can be emitted as-is.
    lines = [vor.vertices[line] for line in vor.ridge_vertices if -1 not in line]

    for line in lines:
        x = [line[0][0], line[1][0]]
        y = [line[0][1], line[1][1]]
        # len(x) is always 2 here, so this loop runs exactly once per ridge.
        for i in range(len(x)-1):
            # convert mercator coordinates back into lng/lat
            lng1, lat1 = merc_sphere_inv(x[i], y[i])
            lng2, lat2 = merc_sphere_inv(x[i+1], y[i+1])
            csv.write("\"LINESTRING (%f %f,%f %f)\"\n" % (lng1, lat1, lng2, lat2))

    # "Far away" extension length for infinite ridges.
    # NOTE(review): merc_sphere(180, 360) evaluates to (pi*EARTH_RADIUS, ~0);
    # only the larger component is used (via .max()) as the ray length.
    ptp_bound = np.array(merc_sphere(180, 360))
    center = vor.points.mean(axis=0)

    # Infinite ridges (one vertex index is -1): extend a ray from the finite
    # vertex away from the diagram center, the same construction scipy's
    # voronoi_plot_2d uses.
    for pointidx, simplex in zip(vor.ridge_points, vor.ridge_vertices):
        simplex = np.asarray(simplex)
        if np.any(simplex < 0):
            i = simplex[simplex >= 0][0] # finite end Voronoi vertex
            t = vor.points[pointidx[1]] - vor.points[pointidx[0]] # tangent
            t /= np.linalg.norm(t)
            n = np.array([-t[1], t[0]]) # normal
            # Point the normal away from the centroid so the ray leaves the map.
            midpoint = vor.points[pointidx].mean(axis=0)
            direction = np.sign(np.dot(midpoint - center, n)) * n
            far_point = vor.vertices[i] + direction * ptp_bound.max()
            # convert mercator coordinates back into lng/lat
            lng1, lat1 = merc_sphere_inv(vor.vertices[i,0], vor.vertices[i,1])
            lng2, lat2 = merc_sphere_inv(far_point[0], far_point[1])
            csv.write("\"LINESTRING (%f %f,%f %f)\"\n" % (lng1, lat1, lng2, lat2))
|
|
|
|
|
|
|
|
|
2017-11-05 19:48:29 +01:00
|
|
|
def update_mapnik_csv(mysql):
    """Regenerate all CSV layers consumed by the mapnik tile renderer.

    mysql: an open FreifunkMySQL handle (must provide fetchall()).

    Writes into CONFIG["csv_dir"]:
      - routers.csv / routers_v2.csv       router positions with status
      - links.csv / links_v2.csv           layer-2 links with quality
      - l3_links.csv / l3_links_v2.csv     layer-3 (routed) links
      - hood-points.csv / hoods.csv        v1 hood centers + Voronoi borders
      - hood-points-v2.csv / hoods_v2.csv  v2 hood centers + Voronoi borders
        (v2 hood data is fetched from the keyserver over HTTP)

    Routers/links whose router matches no v1 hood row go into the "*_v2"
    files. Finally the mapnik XML files are touch()ed so the tilelite
    watcher re-reads the data.
    """
    # Router positions joined with the (v1) hood name, if one matches.
    routers = mysql.fetchall("""
        SELECT router.status, router.lat, router.lng, hoods.name AS hood FROM router
        LEFT JOIN hoods ON router.hood = hoods.name
        WHERE router.lat IS NOT NULL AND router.lng IS NOT NULL
        """)

    rv1 = "lng,lat,status\n"  # routers with a known v1 hood
    rv2 = "lng,lat,status\n"  # routers without one (-> v2 layer)

    for router in routers:
        tmp = "%f,%f,%s\n" % (
            router["lng"],
            router["lat"],
            router["status"]
        )
        if router["hood"]:
            rv1 += tmp
        else:
            rv2 += tmp

    with open(os.path.join(CONFIG["csv_dir"], "routers.csv"), "w") as csv:
        csv.write(rv1)
    with open(os.path.join(CONFIG["csv_dir"], "routers_v2.csv"), "w") as csv:
        csv.write(rv2)

    # One row per (router, neighbor, netif, type); MAX(quality) collapses
    # multiple samples per interface pair.
    dblinks = mysql.fetchall("""
        SELECT r1.id AS rid, r2.id AS nid, r1.lat AS rlat, r1.lng AS rlng, r2.lat AS nlat, r2.lng AS nlng, n.netif AS netif, n.type AS type, MAX(quality) AS quality, hoods.name AS hood
        FROM router AS r1
        LEFT JOIN hoods ON r1.hood = hoods.name
        INNER JOIN router_neighbor AS n ON r1.id = n.router
        INNER JOIN (
            SELECT router, mac FROM router_netif GROUP BY mac, router
        ) AS net ON n.mac = net.mac
        INNER JOIN router AS r2 ON net.router = r2.id
        WHERE r1.lat IS NOT NULL AND r1.lng IS NOT NULL AND r2.lat IS NOT NULL AND r2.lng IS NOT NULL
        AND r1.status = 'online'
        GROUP BY r1.id, r1.lat, r1.lng, r2.id, r2.lat, r2.lng, n.netif, n.type, hoods.name
        """)

    links = []      # l2 links inside a v1 hood
    linksl3 = []    # l3 links inside a v1 hood
    linksv2 = []    # l2 links without a v1 hood
    linksl3v2 = []  # l3 links without a v1 hood
    dictl3 = {}     # rid -> [nid, ...]    : l3 de-duplication
    dictl2 = {}     # rid -> {nid: {...}}  : l2 de-duplication / best-link choice
    # The following code is very ugly, but works and is not too slow. Maybe make it nicer at some point ...
    for row in dblinks:
        if row.get("type")=="l3":
            # Check for duplicate (same link already seen from the other end)
            if row["nid"] in dictl3.keys() and row["rid"] in dictl3[row["nid"]]:
                continue
            # Write current set to dict
            if not row["rid"] in dictl3.keys():
                dictl3[row["rid"]] = []
            dictl3[row["rid"]].append(row["nid"])

            tmp = (
                row["rlng"],
                row["rlat"],
                row["nlng"],
                row["nlat"],
            )
            if row["hood"]:
                linksl3.append(tmp)
            else:
                linksl3v2.append(tmp)
        else:
            # Check for duplicate seen from the reverse direction
            if row["nid"] in dictl2.keys() and row["rid"] in dictl2[row["nid"]].keys():
                # data[4] is the stored quality; 0 marks an ethernet link
                oldqual = dictl2[row["nid"]][row["rid"]]["data"][4]
                # - Check for ethernet (ethernet always wins)
                # - Take maximum quality (thus continue if current is lower)
                if oldqual == 0 or oldqual > row["quality"]:
                    continue
                # Delete old entry; the current, better row replaces it below
                del dictl2[row["nid"]][row["rid"]]
            # Same check for an earlier row in the same direction
            if row["rid"] in dictl2.keys() and row["nid"] in dictl2[row["rid"]].keys():
                oldqual = dictl2[row["rid"]][row["nid"]]["data"][4]
                # - Check for ethernet (ethernet always wins)
                # - Take maximum quality (thus continue if current is lower)
                if oldqual == 0 or oldqual > row["quality"]:
                    continue
                # No need to delete, since we overwrite later
            # Write current set to dict
            if not row["rid"] in dictl2.keys():
                dictl2[row["rid"]] = {}
            # Check for ethernet: wired links are stored with quality 0.
            # NOTE(review): this override runs AFTER the comparisons above, so
            # an ethernet row is compared using its raw DB quality — confirm
            # that is intended.
            if row["netif"].startswith("eth"):
                row["quality"] = 0

            tmp = (
                row["rlng"],
                row["rlat"],
                row["nlng"],
                row["nlat"],
                row["quality"],
            )
            dictl2[row["rid"]][row["nid"]] = {'hood':row["hood"],'data':tmp}

    # Flatten the de-duplicated l2 dict into the hood / no-hood lists.
    for d1 in dictl2.values():
        for d2 in d1.values():
            if d2["hood"]:
                links.append(d2["data"])
            else:
                linksv2.append(d2["data"])

    with open(os.path.join(CONFIG["csv_dir"], "links.csv"), "w") as csv:
        csv.write("WKT,quality\n")
        # Sorted by quality so better links are written (and drawn) last.
        for link in sorted(links, key=lambda l: l[4]):
            csv.write("\"LINESTRING (%f %f,%f %f)\",%i\n" % link)

    with open(os.path.join(CONFIG["csv_dir"], "links_v2.csv"), "w") as csv:
        csv.write("WKT,quality\n")
        for link in sorted(linksv2, key=lambda l: l[4]):
            csv.write("\"LINESTRING (%f %f,%f %f)\",%i\n" % link)

    with open(os.path.join(CONFIG["csv_dir"], "l3_links.csv"), "w") as csv:
        csv.write("WKT\n")
        for link in linksl3:
            csv.write("\"LINESTRING (%f %f,%f %f)\"\n" % link)

    with open(os.path.join(CONFIG["csv_dir"], "l3_links_v2.csv"), "w") as csv:
        csv.write("WKT\n")
        for link in linksl3v2:
            csv.write("\"LINESTRING (%f %f,%f %f)\"\n" % link)

    # v1 hoods come from the local database.
    dbhoods = mysql.fetchall("""
        SELECT name, lat, lng FROM hoods
        WHERE lat IS NOT NULL AND lng IS NOT NULL
        """)
    with open(os.path.join(CONFIG["csv_dir"], "hood-points.csv"), "w", encoding="UTF-8") as csv:
        csv.write("lng,lat,name\n")
        for hood in dbhoods:
            csv.write("%f,%f,\"%s\"\n" % (
                hood["lng"],
                hood["lat"],
                hood["name"]
            ))

    with open(os.path.join(CONFIG["csv_dir"], "hoods.csv"), "w") as csv:
        csv.write("WKT\n")
        hoods = []
        for hood in dbhoods:
            # convert coordinates onto the Mercator sphere as Voronoi doesn't work with lng/lat
            x, y = merc_sphere(hood["lng"], hood["lat"])
            hoods.append([x, y])
        draw_voronoi_lines(csv, hoods)

    # v2 hoods are fetched live from the keyserver.
    with urllib.request.urlopen("http://keyserver.freifunk-franken.de/v2/hoods.php") as url:
        dbhoodsv2 = json.loads(url.read().decode())

    with open(os.path.join(CONFIG["csv_dir"], "hood-points-v2.csv"), "w", encoding="UTF-8") as csv:
        csv.write("lng,lat,name\n")

        for hood in dbhoodsv2:
            # Hoods without a position cannot be drawn.
            if not ( 'lon' in hood and 'lat' in hood ):
                continue
            csv.write("%f,%f,\"%s\"\n" % (
                hood["lon"],
                hood["lat"],
                hood["name"]
            ))

    with open(os.path.join(CONFIG["csv_dir"], "hoods_v2.csv"), "w") as csv:
        csv.write("WKT\n")
        hoods = []

        for hood in dbhoodsv2:
            if not ( 'lon' in hood and 'lat' in hood ):
                continue
            # convert coordinates onto the Mercator sphere as Voronoi doesn't work with lng/lat
            x, y = merc_sphere(hood["lon"], hood["lat"])
            hoods.append([x, y])
        draw_voronoi_lines(csv, hoods)

    # touch mapnik XML files to trigger tilelite watcher
    touch("/usr/share/ffmap/hoods.xml")
    touch("/usr/share/ffmap/hoods_v2.xml")
    touch("/usr/share/ffmap/routers.xml")
    touch("/usr/share/ffmap/routers_v2.xml")
|
2016-03-10 13:56:06 +01:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    # BUG FIX: update_mapnik_csv(mysql) requires a database handle, but the
    # script previously called it with no argument — a guaranteed TypeError.
    # Pass a fresh FreifunkMySQL connection instead.
    # NOTE(review): if FreifunkMySQL exposes close(), consider closing the
    # connection explicitly; relying on process exit for now.
    update_mapnik_csv(FreifunkMySQL())
|