Add scripts for generating graph JSON files
Note: a substantial amount of unrelated code was stripped out before committing, so these scripts may not run as-is.
This commit is contained in:
parent
dfc9dc96a9
commit
76fc3918bb
160
megahub_auto.py
Normal file
160
megahub_auto.py
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
from time import time
|
||||||
|
import os.path, os
|
||||||
|
|
||||||
|
# Output containers for the four graph variants.  Each gets its own dict;
# they must not share "nodes"/"links" lists.
graph = {"nodes": [], "links": []}          # 2D graph, triangle-filtered links
graph3d = {"nodes": [], "links": []}        # 3D graph, triangle-filtered links (pub_key endpoints)
graph_full = {"nodes": [], "links": []}     # 2D graph, every channel
graph3d_full = {"nodes": [], "links": []}   # 3D graph, every channel (pub_key endpoints)

# All input/output paths are resolved relative to this script's directory.
dir_path = os.path.dirname(os.path.realpath(__file__))
graph_path = os.path.join(dir_path, 'plebnet/graphs/')
megahubs_path = os.path.join(dir_path, 'known_plebs.json')
ln_path = os.path.join(dir_path, 'describegraph.json')

# Shared working state, filled in by search_nodes()/search_edges().
neighbours_dict = {}
known_megahubs = set()
nodes_list = []
nodes_set = set()
edges_set = set()
add_everything = False  # True -> keep every LN node, not just known megahubs
i = 0
|
||||||
|
|
||||||
|
def load_ln():
    """Load the Lightning Network graph dump (describegraph.json) and
    return the parsed JSON object."""
    with open(ln_path, 'r', encoding='utf8') as fh:
        return json.load(fh)
|
||||||
|
|
||||||
|
|
||||||
|
def search_nodes():
    """Collect the nodes of interest from the LN graph dump.

    Walks ``ln_graph["nodes"]`` and, for every node that is a known megahub
    (or every node when ``add_everything`` is set), records its pub_key in
    ``nodes_list``/``nodes_set`` and appends one display record (shared by
    reference) to all four output graphs.

    NOTE(review): this reads the module-global ``ln_graph``; ensure it is
    assigned (e.g. ``ln_graph = load_ln()``) before this runs.
    """
    count = 0
    for node in ln_graph["nodes"]:
        pub_key = node.get("pub_key")
        if not (pub_key in known_megahubs or add_everything):
            continue
        nodes_list.append(pub_key)
        nodes_set.add(pub_key)
        count += 1
        alias = node.get("alias")
        if alias == "":
            # Unnamed node: fall back to a pub_key prefix for display.
            alias = pub_key[:20]
        record = {
            "name": alias,
            "id": pub_key,
            "color": node.get("color"),
            "group": 1
        }
        for target in (graph, graph3d, graph_full, graph3d_full):
            target["nodes"].append(record)
|
||||||
|
|
||||||
|
|
||||||
|
def search_edges():
    """Collect the channels between previously collected nodes.

    For every edge in ``ln_graph["edges"]`` whose endpoints are both in
    ``nodes_set``, records the pair in ``edges_set`` and appends one link to
    each of the two "full" output graphs (list-index endpoints for
    ``graph_full``, pub_key endpoints for ``graph3d_full``).

    NOTE(review): reads the module-global ``ln_graph``; ensure it is
    assigned before this runs.
    """
    # PERF FIX: the original called nodes_list.index() twice per edge, which
    # is O(V) each and made the whole pass O(V*E).  Precompute the same
    # first-occurrence index once, then every lookup is O(1).
    index_of = {}
    for pos, key in enumerate(nodes_list):
        index_of.setdefault(key, pos)

    for edge in ln_graph["edges"]:
        node1 = edge.get("node1_pub")
        if node1 not in nodes_set:
            continue
        node2 = edge.get("node2_pub")
        if node2 not in nodes_set:
            continue
        edges_set.add((node1, node2))
        graph_full["links"].append({
            "source": index_of[node1],
            "target": index_of[node2],
            "value": 1
        })
        graph3d_full["links"].append({
            "source": node1,
            "target": node2,
            "value": 1
        })
|
||||||
|
|
||||||
|
|
||||||
|
def remove_non_triangles():
    """Build the triangle-filtered link lists.

    For every collected node this records its neighbour list in
    ``neighbours_dict`` and keeps only edges that close a triangle, i.e.
    edges whose two endpoints are both neighbours of some third node.  The
    surviving edges are appended to ``graph["links"]`` (list-index
    endpoints) and ``graph3d["links"]`` (pub_key endpoints).
    """
    triangle_edges = set()
    for node in set(nodes_set):
        # Neighbours of `node`: the opposite endpoint of each incident edge.
        neighbours = set()
        for a, b in edges_set:
            if node == a or node == b:
                neighbours.add(b if a == node else a)
        neighbours_dict[node] = list(neighbours)
        # An edge lying entirely inside this neighbourhood closes a triangle
        # with `node`, so it survives the filter.
        for a, b in edges_set:
            if a in neighbours and b in neighbours:
                triangle_edges.add((a, b))

    for node1, node2 in triangle_edges:
        graph["links"].append({
            "source": nodes_list.index(node1),
            "target": nodes_list.index(node2),
            "value": 1
        })
        graph3d["links"].append({
            "source": node1,
            "target": node2,
            "value": 1
        })
|
||||||
|
|
||||||
|
|
||||||
|
def save_graph(file, graph_to_save):
    """Serialize ``graph_to_save`` as pretty-printed JSON into the graphs
    directory under the given file name."""
    with open(graph_path + file, "w") as out:
        json.dump(graph_to_save, out, indent=2)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # plebnet graph generation

    t0 = time()
    # Seed the megahub set from known_plebs.json when present (produced by
    # megahub_getknowns.py); otherwise known_megahubs stays empty.
    if os.path.isfile(megahubs_path):
        with open(megahubs_path, 'r', encoding='utf8') as file:
            known_megahubs = set(json.loads(file.read()))

    t1 = time()
    # BUG FIX: ln_graph was never assigned anywhere (load_ln() existed but
    # was never called), so search_nodes()/search_edges() crashed with a
    # NameError.  Load the LN dump before searching it.
    ln_graph = load_ln()
    search_nodes()
    search_edges()

    t2 = time()
    remove_non_triangles()

    t3 = time()
    save_graph("graph.json", graph)
    save_graph("graph3d.json", graph3d)
    save_graph("graph_full.json", graph_full)
    save_graph("graph3d_full.json", graph3d_full)
    save_graph("neighbours.json", neighbours_dict)

    t4 = time()
    print("\nPlebnet")
    print("Fetching known megahubs took: " + str(round(t1 - t0, 2)) + "s")
    print("Searching nodes took: " + str(round(t2 - t1, 2)) + "s")
    print("Finding and removing triangles took: " + str(round(t3 - t2, 2)) + "s")
    print("Saving files took: " + str(round(t4 - t3, 2)) + "s")
    print("Total: " + str(round(t4 - t0, 2)) + "s")
    print("\nNodes: " + str(len(graph["nodes"])))
    print("Channels (triangles): " + str(len(graph["links"])))
    print("Channels: " + str(len(graph_full["links"])))
|
||||||
|
|
||||||
|
|
41
megahub_getknowns.py
Normal file
41
megahub_getknowns.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
import os.path, os
|
||||||
|
import requests
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
|
# Pub keys of plebnet members collected so far.
known_plebs = set()

# known_plebs.json lives next to this script.
dir_path = os.path.dirname(os.path.realpath(__file__))
plebs_path = os.path.join(dir_path, 'known_plebs.json')
|
||||||
|
|
||||||
|
|
||||||
|
def get_plebs(url='https://lightningwiki.net/g/?g=-1001234988097&t=Group&ns=False'):
    """Scrape a lightningwiki.net group graph page and add every node's
    pub key to the global ``known_plebs`` set.

    Args:
        url: Page to scrape.  Defaults to the plebnet group graph, so
            existing ``get_plebs()`` callers are unaffected.
    """
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')
    # The page is an SVG where each node is <g class="node"><title>PUBKEY</title>….
    # findAll() is the deprecated bs4 alias of find_all(); use the modern name.
    nodes = soup.find_all('g', attrs={"class": "node"})
    for node in nodes:
        # Renamed from `id`, which shadowed the builtin.
        # NOTE(review): assumes every node <g> contains a <title>; a missing
        # title would raise AttributeError here — confirm against the page.
        pub_key = node.title.text
        known_plebs.add(pub_key)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Plebnet: refresh the known-plebs list.

    # Start from the previously saved set when one exists.
    if os.path.isfile(plebs_path):
        with open(plebs_path, 'r', encoding='utf8') as file:
            data = file.read()
            known_plebs = set(json.loads(data))

    t0 = time.time()
    n0 = len(known_plebs)

    get_plebs()

    # Persist the (possibly grown) set back to disk.
    with open(plebs_path, "w") as outfile:
        outfile.write(json.dumps(list(known_plebs), indent=2))
    t3 = time.time()
    n3 = len(known_plebs)
    print("\nPlebnet")
    print("Total new nodes: " + str(n3-n0))
    print("Total time: " + str(round(t3-t0, 2)) + "s")
|
Loading…
Reference in New Issue
Block a user