422 lines
14 KiB
Python
Raw Normal View History

2020-10-12 21:32:21 -07:00
import re
import logging
from random import shuffle
2020-11-21 22:30:31 -08:00
from datetime import datetime, timedelta
import geoip2.database
import arrow
import requests
2021-04-19 10:07:09 -07:00
from flask import Flask, request, redirect, jsonify
from flask import render_template, flash, Response
2022-03-18 23:29:41 -07:00
from urllib.parse import urlparse, urlencode
from xmrnodes.helpers import determine_crypto, is_onion, make_request
from xmrnodes.helpers import retrieve_peers, rw_cache, get_highest_block
from xmrnodes.forms import SubmitNode
from xmrnodes.models import Node, HealthCheck, Peer
2020-10-21 22:12:24 -07:00
from xmrnodes import config
2020-10-12 21:32:21 -07:00
# Root-logger configuration for both the web app and the CLI commands.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
)

# Flask application; secrets come from the file pointed at by FLASK_SECRETS.
app = Flask(__name__)
app.config.from_envvar("FLASK_SECRETS")
app.secret_key = app.config["SECRET_KEY"]

# A node whose height lags the best-known block by more than this many
# blocks is considered unhealthy.
HEALTHY_BLOCK_DIFF = 500  # idc to config this. hardcode is fine.
2020-10-12 21:32:21 -07:00
@app.route("/", methods=["GET", "POST"])
def index():
form = SubmitNode()
2023-01-19 11:15:31 -08:00
nettype = request.args.get("network", "mainnet")
crypto = request.args.get("chain", "monero")
2020-10-23 23:46:14 -07:00
onion = request.args.get("onion", False)
2022-03-18 23:29:41 -07:00
show_all = "true" == request.args.get("all", "false")
2023-01-19 11:15:31 -08:00
web_compatible = request.args.get("cors", False)
highest_block = get_highest_block(nettype, crypto)
healthy_block = highest_block - HEALTHY_BLOCK_DIFF
2020-10-18 01:18:35 -07:00
nodes = Node.select().where(
2022-03-18 23:29:41 -07:00
Node.validated == True,
Node.nettype == nettype,
Node.crypto == crypto
)
if web_compatible:
nodes = nodes.where(Node.web_compatible == True)
2022-03-18 23:29:41 -07:00
nodes_all = nodes.count()
nodes_unhealthy = nodes.where(
(Node.available == False) | (Node.last_height < healthy_block)
).count()
2022-03-18 23:29:41 -07:00
if not show_all:
nodes = nodes.where(
Node.available == True,
Node.last_height > healthy_block
)
2022-03-18 23:29:41 -07:00
nodes = nodes.order_by(
Node.datetime_entered.desc()
2020-10-17 15:01:38 -07:00
)
2020-10-23 23:46:14 -07:00
if onion:
nodes = nodes.where(Node.is_tor == True)
2020-10-24 01:29:10 -07:00
nodes = [n for n in nodes]
shuffle(nodes)
return render_template(
"index.html",
nodes=nodes,
2022-03-18 23:29:41 -07:00
nodes_all=nodes_all,
nodes_unhealthy=nodes_unhealthy,
nettype=nettype,
crypto=crypto,
form=form,
web_compatible=web_compatible
)
2020-10-12 21:32:21 -07:00
2021-04-19 10:18:16 -07:00
@app.route("/nodes.json")
def nodes_json():
nodes = Node.select().where(
Node.validated==True
).where(
Node.nettype=="mainnet"
)
xmr_nodes = [n for n in nodes if n.crypto == "monero"]
wow_nodes = [n for n in nodes if n.crypto == "wownero"]
return jsonify({
"monero": {
"clear": [n.url for n in xmr_nodes if n.is_tor == False],
"onion": [n.url for n in xmr_nodes if n.is_tor == True],
"web_compatible": [n.url for n in xmr_nodes if n.web_compatible == True],
2021-04-19 10:18:16 -07:00
},
"wownero": {
"clear": [n.url for n in wow_nodes if n.is_tor == False],
"onion": [n.url for n in wow_nodes if n.is_tor == True]
}
})
2023-02-11 23:04:16 -08:00
@app.route("/health.json")
def health_json():
data = {}
nodes = Node.select().where(
Node.validated == True
)
for node in nodes:
if node.crypto not in data:
data[node.crypto] = {}
_d = {
"available": node.available,
"last_height": node.last_height,
"datetime_entered": node.datetime_entered,
"datetime_checked": node.datetime_checked,
"datetime_failed": node.datetime_failed,
"checks": [c.health for c in node.get_all_checks()]
}
nettype = "clear"
if node.is_tor:
nettype = "onion"
elif node.web_compatible:
if "web_compatible" not in data[node.crypto]:
data[node.crypto]["web_compatible"] = {}
data[node.crypto]["web_compatible"][node.url] = _d
if nettype not in data[node.crypto]:
data[node.crypto][nettype] = {}
data[node.crypto][nettype][node.url] = _d
return jsonify(data)
@app.route("/haproxy.cfg")
def haproxy():
crypto = request.args.get('chain') or 'monero'
nettype = request.args.get('network') or 'mainnet'
cors = request.args.get('cors') or False
tor = request.args.get('onion') or False
nodes = Node.select().where(
Node.validated == True,
Node.nettype == nettype,
Node.crypto == crypto,
Node.is_tor == tor,
Node.web_compatible == cors
)
tpl = render_template("haproxy.html", nodes=nodes)
print(tpl)
res = Response(tpl)
res.headers['Content-Disposition'] = f'attachment; filename="haproxy-{crypto}-{nettype}-cors_{cors}-tor_{tor}.cfg"'
return res
2021-04-19 10:18:16 -07:00
@app.route("/wow_nodes.json")
def wow_nodes_json():
2021-04-19 10:07:09 -07:00
nodes = Node.select().where(
Node.validated==True
2021-04-19 10:13:50 -07:00
).where(
Node.nettype=="mainnet"
).where(
2021-04-19 10:18:16 -07:00
Node.crypto=="wownero"
2021-04-19 10:07:09 -07:00
)
nodes = [n for n in nodes]
return jsonify({
2021-04-19 10:13:50 -07:00
"clear": [n.url for n in nodes if n.is_tor == False],
"onion": [n.url for n in nodes if n.is_tor == True]
2021-04-19 10:07:09 -07:00
})
@app.route("/map")
def map():
try:
peers = rw_cache('map_peers')
except:
flash('Couldn\'t load the map. Try again later.')
return redirect('/')
return render_template(
"map.html",
peers=peers,
source_node=config.NODE_HOST
)
2020-10-24 02:35:50 -07:00
@app.route("/resources")
def resources():
return render_template("resources.html")
2020-10-12 21:32:21 -07:00
@app.route("/add", methods=["GET", "POST"])
def add():
if request.method == "POST":
url = request.form.get("node_url")
2020-10-12 21:32:21 -07:00
regex = re.compile(
2020-10-21 22:12:24 -07:00
r"^(?:http)s?://" # http:// or https://
r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" #domain...
r"localhost|" #localhost...
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
r"(?::\d+)?" # optional port
r"(?:/?|[/?]\S+)$", re.IGNORECASE
)
2020-10-12 21:32:21 -07:00
re_match = re.match(regex, url)
if re_match is None:
2020-10-21 22:12:24 -07:00
flash("This doesn\"t look like a valid URL")
2020-10-12 21:32:21 -07:00
else:
_url = urlparse(url)
2021-03-06 23:10:06 -08:00
url = f"{_url.scheme}://{_url.netloc}".lower()
if Node.select().where(Node.url == url).exists():
flash("This node is already in the database.")
else:
flash("Seems like a valid node URL. Added to the database and will check soon.")
node = Node(url=url)
node.save()
2020-10-12 21:32:21 -07:00
return redirect("/")
2020-11-21 22:30:31 -08:00
def cleanup_health_checks():
    """Delete HealthCheck rows older than 24 hours."""
    cutoff = datetime.utcnow() - timedelta(hours=24)
    stale_checks = HealthCheck.select().where(HealthCheck.datetime <= cutoff)
    for stale in stale_checks:
        print("Deleting check", stale.id)
        stale.delete_instance()
2020-10-17 16:27:54 -07:00
@app.cli.command("check")
def check():
2020-11-21 22:30:31 -08:00
cleanup_health_checks()
2020-10-17 16:27:54 -07:00
nodes = Node.select().where(Node.validated == True)
for node in nodes:
now = datetime.utcnow()
2020-10-24 01:29:10 -07:00
hc = HealthCheck(node=node)
2020-10-17 16:27:54 -07:00
logging.info(f"Attempting to check {node.url}")
try:
2020-10-23 22:55:34 -07:00
r = make_request(node.url)
2020-10-17 16:27:54 -07:00
assert "status" in r.json()
assert "offline" in r.json()
assert "height" in r.json()
has_cors = 'Access-Control-Allow-Origin' in r.headers
is_ssl = node.url.startswith('https://')
2020-10-17 16:27:54 -07:00
if r.json()["status"] == "OK":
node.web_compatible = has_cors and is_ssl
2020-10-17 16:27:54 -07:00
node.last_height = r.json()["height"]
2020-10-24 01:29:10 -07:00
hc.health = True
highest_block = get_highest_block(node.nettype, node.crypto)
healthy_block = highest_block - HEALTHY_BLOCK_DIFF
if r.json()["height"] < healthy_block:
node.available = False
logging.info("unhealthy")
else:
node.available = True
logging.info("success")
2020-10-17 16:27:54 -07:00
else:
raise
except:
logging.info("fail")
node.datetime_failed = now
node.available = False
2020-10-24 01:29:10 -07:00
hc.health = False
2020-10-17 16:27:54 -07:00
finally:
node.datetime_checked = now
node.save()
2020-10-24 01:29:10 -07:00
hc.save()
if node.get_failed_checks().count() == node.get_all_checks().count() and node.get_all_checks().count() > 5:
print('this node fails all of its health checks - deleting it!')
for _hc in node.get_all_checks():
_hc.delete_instance()
node.delete_instance()
2020-10-17 16:27:54 -07:00
2021-03-26 14:27:21 -07:00
@app.cli.command("get_peers")
def get_peers():
all_peers = []
print('[+] Preparing to crawl Monero p2p network')
print(f'[.] Retrieving initial peers from {config.NODE_HOST}:{config.NODE_PORT}')
initial_peers = retrieve_peers(config.NODE_HOST, config.NODE_PORT)
with geoip2.database.Reader('./data/GeoLite2-City.mmdb') as reader:
for peer in initial_peers:
if peer not in all_peers:
all_peers.append(peer)
_url = urlparse(peer)
url = f"{_url.scheme}://{_url.netloc}".lower()
if not Peer.select().where(Peer.url == peer).exists():
response = reader.city(_url.hostname)
p = Peer(
url=peer,
country=response.country.name,
city=response.city.name,
postal=response.postal.code,
lat=response.location.latitude,
lon=response.location.longitude,
)
p.save()
print(f'{peer} - saving new peer')
else:
p = Peer.select().where(Peer.url == peer).first()
p.datetime = datetime.now()
p.save()
try:
print(f'[.] Retrieving crawled peers from {_url.netloc}')
new_peers = retrieve_peers(_url.hostname, _url.port)
for peer in new_peers:
if peer not in all_peers:
all_peers.append(peer)
_url = urlparse(peer)
url = f"{_url.scheme}://{_url.netloc}".lower()
if not Peer.select().where(Peer.url == peer).exists():
response = reader.city(_url.hostname)
p = Peer(
url=peer,
country=response.country.name,
city=response.city.name,
postal=response.postal.code,
lat=response.location.latitude,
lon=response.location.longitude,
)
p.save()
print(f'{peer} - saving new peer')
else:
p = Peer.select().where(Peer.url == peer).first()
p.datetime = datetime.now()
p.save()
except:
pass
print(f'[+] Found {len(all_peers)} peers from {config.NODE_HOST}:{config.NODE_PORT}')
print('[+] Deleting old Monero p2p peers')
for p in Peer.select():
if p.hours_elapsed() > 24:
print(f'[.] Deleting {p.url}')
p.delete_instance()
rw_cache('map_peers', list(Peer.select().execute()))
2021-03-26 14:27:21 -07:00
2023-04-03 22:24:05 -07:00
@app.cli.command('init')
def init():
    """CLI placeholder: no initialization is currently performed."""
    pass
@app.cli.command("validate")
def validate():
nodes = Node.select().where(Node.validated == False)
for node in nodes:
now = datetime.utcnow()
logging.info(f"Attempting to validate {node.url}")
try:
2020-10-23 22:55:34 -07:00
r = make_request(node.url)
assert "height" in r.json()
assert "nettype" in r.json()
has_cors = 'Access-Control-Allow-Origin' in r.headers
is_ssl = node.url.startswith('https://')
nettype = r.json()["nettype"]
2020-10-18 01:18:35 -07:00
crypto = determine_crypto(node.url)
logging.info("success")
if nettype in ["mainnet", "stagenet", "testnet"]:
node.nettype = nettype
node.available = True
node.validated = True
node.web_compatible = has_cors and is_ssl
2020-10-17 16:27:54 -07:00
node.last_height = r.json()["height"]
node.datetime_checked = now
2020-10-18 01:18:35 -07:00
node.crypto = crypto
2020-10-23 22:55:34 -07:00
node.is_tor = is_onion(node.url)
node.save()
else:
logging.info("unexpected nettype")
except requests.exceptions.ConnectTimeout:
logging.info("connection timed out")
node.delete_instance()
except requests.exceptions.SSLError:
logging.info("invalid certificate")
node.delete_instance()
except requests.exceptions.ConnectionError:
logging.info("connection error")
node.delete_instance()
except requests.exceptions.HTTPError:
logging.info("http error, 4xx or 5xx")
node.delete_instance()
except Exception as e:
logging.info("failed for reasons unknown")
node.delete_instance()
2020-10-12 21:32:21 -07:00
2020-10-21 22:13:20 -07:00
@app.cli.command("export")
def export():
all_nodes = []
ts = int(arrow.get().timestamp())
2020-10-21 22:13:20 -07:00
export_dir = f"{config.DATA_DIR}/export.txt"
export_dir_stamped = f"{config.DATA_DIR}/export-{ts}.txt"
2020-10-21 22:13:20 -07:00
nodes = Node.select().where(Node.validated == True)
for node in nodes:
2020-10-21 22:50:40 -07:00
logging.info(f"Adding {node.url}")
2020-10-21 22:13:20 -07:00
all_nodes.append(node.url)
with open(export_dir, "w") as f:
f.write("\n".join(all_nodes))
with open(export_dir_stamped, "w") as f:
f.write("\n".join(all_nodes))
logging.info(f"{nodes.count()} nodes written to {export_dir} and {export_dir_stamped}")
2020-10-21 22:13:20 -07:00
2020-10-21 22:50:40 -07:00
@app.cli.command("import")
def import_():
all_nodes = []
export_dir = f"{config.DATA_DIR}/export.txt"
2021-04-19 10:07:09 -07:00
with open(export_dir, "r") as f:
2020-10-21 22:50:40 -07:00
for url in f.readlines():
2020-10-24 01:29:10 -07:00
try:
2021-03-06 23:10:06 -08:00
n = url.rstrip().lower()
2020-10-24 01:29:10 -07:00
logging.info(f"Adding {n}")
node = Node(url=n)
node.save()
all_nodes.append(n)
except:
pass
2020-10-21 22:50:40 -07:00
logging.info(f"{len(all_nodes)} node urls imported and ready to be validated")
2020-10-21 22:12:24 -07:00
@app.template_filter("humanize")
def humanize(d):
2020-10-21 22:12:24 -07:00
t = arrow.get(d, "UTC")
return t.humanize()
2020-10-12 21:32:21 -07:00
@app.template_filter("hours_elapsed")
def hours_elapsed(d):
now = datetime.utcnow()
diff = now - d
return diff.total_seconds() / 60 / 60
2022-03-18 23:29:41 -07:00
@app.template_filter("pop_arg")
def trim_arg(all_args, arg_to_trim):
d = all_args.to_dict()
d.pop(arg_to_trim)
return urlencode(d)
2020-10-12 21:32:21 -07:00
if __name__ == "__main__":
app.run()