Mirror of https://github.com/NeoCloud/NeoNetwork, synced 2024-11-24 16:40:42 +08:00
format code (#174)
This commit is contained in: parent 54c02595ea, commit fb8fece6f2
5 changed files with 113 additions and 38 deletions
@@ -3,19 +3,22 @@ import sys
 from pathlib import Path
 
 import toml
 
 from rfc2317 import gen_reverse_pointers
 
 RESOLVE_FILE = Path("dns", "db.10.127")
 RFC2317_FILE = Path("dns", "rfc2317.toml")
 
 
 def iter_rfc2317_entry():
     entries = toml.loads(RFC2317_FILE.read_text())
     for (route, attributes) in entries.items():
-        ns = attributes.get('NS')
-        ds = attributes.get('DS', list())
-        ttl = attributes.get('TTL', -1)
+        ns = attributes.get("NS")
+        ds = attributes.get("DS", list())
+        ttl = attributes.get("TTL", -1)
         yield (route, ns, ds, ttl)
 
 
 def main():
     orignal = RESOLVE_FILE.read_text()
     records = [orignal, "; AUTOGENERATED"]
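
For context, iter_rfc2317_entry treats each top-level table in dns/rfc2317.toml as one delegated route with an NS list plus optional DS and TTL fields. The file's exact schema is not part of this diff, so the entry below is a hypothetical illustration shaped after the keys the code reads, not a copy of the repository file:

import toml

# Hypothetical rfc2317.toml entry (NS required, DS and TTL optional);
# the real file in the repo may be laid out differently.
sample = '''
["10.127.8.64/26"]
NS = ["ns1.jerry.neo."]
DS = ["18792 13 2 2F335456EEE70FC4833886E5EEDC28E7195E90E2A337860B3E805D5EB9F3A804"]
TTL = 1500
'''

for route, attributes in toml.loads(sample).items():
    ns = attributes.get("NS")
    ds = attributes.get("DS", list())
    ttl = attributes.get("TTL", -1)
    print(route, ns, ds, ttl)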
@@ -6,11 +6,11 @@ import argparse
 from pathlib import Path
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser('named-formatzone')
+    parser = argparse.ArgumentParser("named-formatzone")
     parser.add_argument("file")
     args = parser.parse_args()
     zonefile = Path(args.file)
-    zonelines = zonefile.read_text().split('\n')
+    zonelines = zonefile.read_text().split("\n")
     formatted = list()
     max_length = [0, 0, 0, 0, 0]
     in_soa = False
@@ -18,7 +18,7 @@ if __name__ == "__main__":
     def iter_lines(scan_only=True):
         soafound = None
         for rline in zonelines:
-            line, *comments = rline.split(';')
+            line, *comments = rline.split(";")
             comments = ";".join(comments)
             line = line.strip()
             if "SOA" in line and soafound is None:
@@ -50,6 +50,7 @@ if __name__ == "__main__":
             else:
                 if not scan_only:
                     formatted.append(rline)
 
     iter_lines()
     iter_lines(False)
 
 
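
The formatter works in two passes: iter_lines() first scans the zone to record the widest value seen in each of the five record columns (max_length), then iter_lines(False) re-emits every line padded to those widths. A minimal sketch of that two-pass alignment idea, leaving out the repo's SOA and comment handling; the sample records are made up:

# Sample rows standing in for parsed zone-file fields (hypothetical values).
rows = [
    ["@", "3600", "IN", "NS", "ns1.example."],
    ["host1", "300", "IN", "A", "10.127.0.1"],
]

# Pass 1: find the widest entry in each column.
widths = [0] * 5
for row in rows:
    for i, field in enumerate(row):
        widths[i] = max(widths[i], len(field))

# Pass 2: re-emit every row padded to the recorded widths.
for row in rows:
    print(" ".join(field.ljust(width) for field, width in zip(row, widths)))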
@@ -2,12 +2,15 @@
 
 import ipaddress
 
-ZONE = '.127.10.in-addr.arpa'
+ZONE = ".127.10.in-addr.arpa"
 
 
 def truncate(rev: str) -> str:
     assert rev.endswith(ZONE)
     rev = rev[: -len(ZONE)]
     return rev
 
 
 def gen_reverse_pointers(network: str, ns: list, ds: list = [], ttl: int = -1) -> list:
     ttl = f"{ttl} " if 900 <= ttl <= 86400 else ""
     buf = list()
@@ -25,5 +28,17 @@ def gen_reverse_pointers(network: str, ns: list, ds: list = [], ttl: int = -1) -> list:
         buf.append(f"{cnamefr} {ttl}IN CNAME {cnameto}")
     return buf
 
 
 if __name__ == "__main__":
-    print("\n".join(gen_reverse_pointers('10.127.8.64/26', ['ns1.jerry.neo.'], ['18792 13 2 2F335456EEE70FC4833886E5EEDC28E7195E90E2A337860B3E805D5EB9F3A804'], ttl=1500)))
+    print(
+        "\n".join(
+            gen_reverse_pointers(
+                "10.127.8.64/26",
+                ["ns1.jerry.neo."],
+                [
+                    "18792 13 2 2F335456EEE70FC4833886E5EEDC28E7195E90E2A337860B3E805D5EB9F3A804"
+                ],
+                ttl=1500,
+            )
+        )
+    )
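
gen_reverse_pointers applies RFC 2317 classless in-addr.arpa delegation: a sub-/24 block such as 10.127.8.64/26 gets its own child zone label, the downstream name servers (and optional DS records) are published for it, and each address's PTR owner name is CNAMEd into that child zone. The function body is only partially visible in this diff, so the sketch below is an independent illustration of the same idea with simplified zone labels, not the repository's implementation:

from ipaddress import ip_network


def rfc2317_delegation(network: str, ns: list) -> list:
    # Illustrative only: delegate a sub-/24 reverse range by publishing NS
    # records for a synthesized child label and CNAMEs pointing each host's
    # PTR owner name into that child zone.
    net = ip_network(network)
    first = int(net.network_address) & 0xFF   # last octet of the first address
    last = int(net.broadcast_address) & 0xFF  # last octet of the last address
    child = f"{first}-{last}"                 # e.g. "64-127"
    records = [f"{child} IN NS {server}" for server in ns]
    for host in net:
        octet = int(host) & 0xFF
        records.append(f"{octet} IN CNAME {octet}.{child}")
    return records


print("\n".join(rfc2317_delegation("10.127.8.64/26", ["ns1.jerry.neo."])))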
@@ -3,22 +3,22 @@ import argparse
 import json
 import re
 import time
 
+# dnssec
+from base64 import b64decode
 from collections import defaultdict
 from contextlib import redirect_stdout
+from functools import wraps
 from io import StringIO
 from ipaddress import IPv4Network, IPv6Network, ip_network
 from itertools import combinations
 from pathlib import Path
-from functools import wraps
 
 import netaddr
 import toml
-from tabulate import tabulate
-# dnssec
-from base64 import b64decode
 from dns.dnssec import make_ds
 from dns.rdtypes.ANY.DNSKEY import DNSKEY
+from tabulate import tabulate
 
 NEO_NETWORK_POOL = [ip_network("10.127.0.0/16"), ip_network("fd10:127::/32")]
 
@@ -66,8 +66,11 @@ def iter_toml_file(path: str):
 def _sort_as_iterator(func):
     @wraps(func)
     def wrapped(*args, **kwargs):
-        for item in sorted(list(func(*args, **kwargs)), key=lambda x: x[0], reverse=False):
+        for item in sorted(
+            list(func(*args, **kwargs)), key=lambda x: x[0], reverse=False
+        ):
             yield item
 
     return wrapped
 
 
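
_sort_as_iterator turns a generator function into one whose items come back ordered by their first element, which keeps the TOML-driven outputs deterministic. A usage sketch; the decorated generator here is hypothetical, since the real callers in this file are not shown in the hunk:

from functools import wraps


def _sort_as_iterator(func):
    @wraps(func)
    def wrapped(*args, **kwargs):
        for item in sorted(
            list(func(*args, **kwargs)), key=lambda x: x[0], reverse=False
        ):
            yield item

    return wrapped


@_sort_as_iterator
def unordered_pairs():
    # Hypothetical generator; the repo's callers yield (name, parsed-data) pairs.
    yield ("charlie", 3)
    yield ("alpha", 1)
    yield ("bravo", 2)


print(list(unordered_pairs()))  # [('alpha', 1), ('bravo', 2), ('charlie', 3)]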
@@ -144,7 +147,9 @@ def route_to_roa(asn_table: dict):
         try:
             assert net1["prefix"] != net2["prefix"]
         except AssertionError:
-            assert net1['asn'] != net2['asn'] and entity_from_net(net1) == entity_from_net(net2)
+            assert net1["asn"] != net2["asn"] and entity_from_net(
+                net1
+            ) == entity_from_net(net2)
             continue
         assert net1["prefix"].supernet_of(net2["prefix"])
         s1net, s2net = (net1["supernet"], net2["supernet"])
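
The overlap checks above lean on the standard library: ipaddress networks can test containment directly via supernet_of/subnet_of (Python 3.7+). A quick standalone illustration using the NeoNetwork IPv4 pool and a hypothetical member prefix:

from ipaddress import ip_network

outer = ip_network("10.127.0.0/16")
inner = ip_network("10.127.8.0/24")

print(outer.supernet_of(inner))  # True: the /16 contains the /24
print(inner.subnet_of(outer))    # True: same relationship, asked the other way
print(inner.supernet_of(outer))  # False: a /24 cannot contain its /16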
@@ -181,10 +186,19 @@ def prehandle_roa(asn_table: dict, args):
         r["prefix"] = r["prefix"].with_prefixlen
     return roa4, roa6
 
 
 def export_dnssec_dnskey():
     def ds_from_dnskey(zone, flags, protocol, algorithm, *key):
-        dnspy_dnskey = DNSKEY("IN", "DNSKEY", int(flags), int(protocol), int(algorithm), b64decode(" ".join(key)))
+        dnspy_dnskey = DNSKEY(
+            "IN",
+            "DNSKEY",
+            int(flags),
+            int(protocol),
+            int(algorithm),
+            b64decode(" ".join(key)),
+        )
         return make_ds(zone, dnspy_dnskey, "SHA256").to_text()
 
     dnskey_path = Path("dns") / "dnssec"
     dnskeys = list()
     for f in dnskey_path.iterdir():
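
ds_from_dnskey rebuilds a dnspython DNSKEY rdata from the fields stored under dns/dnssec and derives the matching DS record with dns.dnssec.make_ds. A standalone sketch of the same call sequence, mirroring the script's argument style; the zone name and key bytes below are placeholders, not material from the repository:

from base64 import b64decode, b64encode

from dns.dnssec import make_ds
from dns.rdtypes.ANY.DNSKEY import DNSKEY

# Placeholder key material: valid base64, but not a real public key.
key_b64 = b64encode(b"placeholder-public-key-bytes").decode()
fields = ("257", "3", "13", key_b64)  # flags, protocol, algorithm, key

flags, protocol, algorithm, *key = fields
dnspy_dnskey = DNSKEY(
    "IN",
    "DNSKEY",
    int(flags),
    int(protocol),
    int(algorithm),
    b64decode(" ".join(key)),
)
# Derive the SHA-256 DS for a hypothetical zone name.
print(make_ds("example.neo.", dnspy_dnskey, "SHA256").to_text())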
@@ -199,14 +213,17 @@ def export_dnssec_dnskey():
             zonekey["zone"] = zone
         else:
             assert zonekey["zone"] == zone
-        zonekey["records"].append({
+        zonekey["records"].append(
+            {
                 "dnskey": " ".join(dnskey),
                 "ds": ds_from_dnskey(zone, *dnskey),
-        })
+            }
+        )
         if zonekey["zone"]:
             dnskeys.append(zonekey)
     return dnskeys
 
 
 def make_export(roa4, roa6):
     def modify_entity(entity):
         entity["nic_hdl"] = name_to_nic_hdl(entity["name"])
@@ -245,7 +262,7 @@ def make_export(roa4, roa6):
             }
             for owner, entity in entities.items()
         },
-        "dnssec": export_dnssec_dnskey()
+        "dnssec": export_dnssec_dnskey(),
     }
     return json.dumps(output, indent=2)
 
@@ -274,7 +291,10 @@ def make_rfc8416(roa4, roa6):
         "bgpsecAssertions": [],
         "prefixAssertions": [
             pick(
-                roa, ["asn", "prefix"], maxLength="maxPrefixLength", name="comment",
+                roa,
+                ["asn", "prefix"],
+                maxLength="maxPrefixLength",
+                name="comment",
             )
             for roa in (*roa4, *roa6)
         ],
@@ -382,19 +402,51 @@ def make_summary():
         print(prefix)
     print("```")
     IP_VRSIONS = {4, 6}
-    total_ip_count = {ver: sum([prefix.num_addresses for prefix in NEO_NETWORK_POOL if prefix.version == ver]) for ver in IP_VRSIONS}
-    used_ip_count = {ver: sum([ip_network(str(prefix)).num_addresses for prefix in prefixes if prefix.version == ver]) for ver in IP_VRSIONS}
+    total_ip_count = {
+        ver: sum(
+            [
+                prefix.num_addresses
+                for prefix in NEO_NETWORK_POOL
+                if prefix.version == ver
+            ]
+        )
+        for ver in IP_VRSIONS
+    }
+    used_ip_count = {
+        ver: sum(
+            [
+                ip_network(str(prefix)).num_addresses
+                for prefix in prefixes
+                if prefix.version == ver
+            ]
+        )
+        for ver in IP_VRSIONS
+    }
     print()
     print("## Address Space Usage")
     print()
     address_space_usage_table = tabulate(
         (
-            (f"IPv{ver}", f"{(t:=total_ip_count.get(ver)):.5g}", f"{(u:=used_ip_count.get(ver)):.5g}", f"{t-u:.5g}", f"{u/t*100:.2f}%", f"{(t-u)/t*100:.2f}%")
+            (
+                f"IPv{ver}",
+                f"{(t:=total_ip_count.get(ver)):.5g}",
+                f"{(u:=used_ip_count.get(ver)):.5g}",
+                f"{t-u:.5g}",
+                f"{u/t*100:.2f}%",
+                f"{(t-u)/t*100:.2f}%",
+            )
             for ver in IP_VRSIONS
         ),
-        headers=["IP Version", "Total", "Used", "Free", "Percent Used", "Percent Free"],
+        headers=[
+            "IP Version",
+            "Total",
+            "Used",
+            "Free",
+            "Percent Used",
+            "Percent Free",
+        ],
         tablefmt="github",
-        disable_numparse=True
+        disable_numparse=True,
     )
     print(address_space_usage_table)
     return stream.getvalue()
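
The summary table packs assignment expressions into its f-strings: `(t:=total_ip_count.get(ver))` formats the total and simultaneously binds it to t, so the Free and percentage columns can reuse t and u without repeating the lookups (Python 3.8+). A self-contained sketch of the same pattern with made-up counts:

total_ip_count = {4: 65536, 6: 2**96}
used_ip_count = {4: 24576, 6: 2**64}

for ver in (4, 6):
    row = (
        f"IPv{ver}",
        f"{(t := total_ip_count.get(ver)):.5g}",  # binds t while formatting
        f"{(u := used_ip_count.get(ver)):.5g}",   # binds u while formatting
        f"{t - u:.5g}",                           # t and u stay available here
        f"{u / t * 100:.2f}%",
        f"{(t - u) / t * 100:.2f}%",
    )
    print(row)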
@@ -1,19 +1,20 @@
 #!/usr/bin/env python3
-from pathlib import Path
 import subprocess
-from time import time
-from re import match
 from os import chdir
+from pathlib import Path
+from re import match
+from time import time
 
 zone_files = [
-    'neonetwork',
-    'db.10.127',
-    'db.fd10.127',
+    "neonetwork",
+    "db.10.127",
+    "db.fd10.127",
 ]
 
 serial_base = 1586876035
 new_serial = int(time()) - serial_base
 
 
 def update_serial_to(zone: Path, serial: int = 0) -> int:
     lines = zone.read_text().split("\n")
     processed = list()
@@ -34,6 +35,7 @@ def update_serial_to(zone: Path, serial: int = 0) -> int:
     zone.write_text("\n".join(processed))
     return old_serial
 
 
 for zone in zone_files:
     gen_zone = Path("generated") / "dns" / zone
     repo_zone = Path("dns") / zone
@@ -42,7 +44,9 @@ for zone in zone_files:
     old_serial = update_serial_to(gen_zone)
     update_serial_to(repo_zone, old_serial)
     gen_zone.write_text(repo_zone.read_text())
-    p = subprocess.run(['git', 'diff', '--exit-code', gen_zone.name], cwd=gen_zone.parent)
+    p = subprocess.run(
+        ["git", "diff", "--exit-code", gen_zone.name], cwd=gen_zone.parent
+    )
     if p.returncode == 0:
         print(f"skip {repo_zone.name}")
     else:
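
update_serial_to itself is only partially visible in this diff; broadly, it swaps the SOA serial in a zone file and returns the value it replaced, which the loop above uses to keep generated and committed zones comparable under git diff. A rough, independent sketch of that idea, assuming the serial sits on a line tagged with a "; serial" comment (the repository's matching logic may differ):

from pathlib import Path
from re import match
from time import time

serial_base = 1586876035


def bump_serial(zone: Path, serial: int = 0) -> int:
    # Sketch only: replace the first number found on a line carrying a
    # "; serial" comment; return the value that was there before.
    lines = zone.read_text().split("\n")
    processed = []
    old_serial = 0
    for line in lines:
        m = match(r"\s*(\d+)\s*;\s*serial", line)
        if m and old_serial == 0:
            old_serial = int(m.group(1))
            new_value = serial or int(time()) - serial_base
            line = line.replace(m.group(1), str(new_value), 1)
        processed.append(line)
    zone.write_text("\n".join(processed))
    return old_serial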