#!/usr/bin/env python3
#
# This script generates a global equivalent domains JSON file from
# the upstream Bitwarden source repo.
#
import json
import re
import sys
import urllib.request

from collections import OrderedDict

# Exactly one argument (the output file path) is required.
if len(sys.argv) != 2:
    print(f"usage: {sys.argv[0]} <OUTPUT-FILE>")
    print()
    print("This script generates a global equivalent domains JSON file from")
    print("the upstream Bitwarden source repo.")
    sys.exit(1)

# Path of the JSON file to write.
OUTPUT_FILE = sys.argv[1]
# Upstream Bitwarden source files that the equivalent-domains data is
# scraped from.
BASE_URL = 'https://github.com/bitwarden/server/raw/master'
ENUMS_URL = f'{BASE_URL}/src/Core/Enums/GlobalEquivalentDomainsType.cs'
DOMAIN_LISTS_URL = f'{BASE_URL}/src/Core/Utilities/StaticStore.cs'
# Matches one C# enum member line, e.g.:
#
#   EnumName0 = 0,
#   EnumName1 = 1,
#
# Capture group 1 is the enum name; capture group 2 is its numeric value.
ENUM_RE = re.compile(r'\s*([_0-9a-zA-Z]+)\s*=\s*([0-9]+)')
# Matches one global-domains registration line, e.g.:
#
#   GlobalDomains.Add(GlobalEquivalentDomainsType.EnumName, new List<string> { "x.com", "y.com" });
#
# Capture group 1 is the enum name; capture group 2 is the raw
# comma-separated, double-quoted domain list.
DOMAIN_LIST_RE = re.compile(
    r'\s*GlobalDomains\.Add\(GlobalEquivalentDomainsType\.'
    r'([_0-9a-zA-Z]+)\s*,\s*new List<string>\s*{([^}]+)}\);'
)
# Enum name -> numeric value, parsed from the upstream enum source.
enums = dict()
# Enum name -> list of equivalent domains, kept in upstream order.
domain_lists = OrderedDict()

# Read in the enum names and values.
with urllib.request.urlopen(ENUMS_URL) as response:
    enum_source = response.read().decode('utf-8')

for line in enum_source.split('\n'):
    match = ENUM_RE.match(line)
    if match is None:
        continue
    enums[match.group(1)] = int(match.group(2))
# Read in the domain lists.
with urllib.request.urlopen(DOMAIN_LISTS_URL) as response:
    store_source = response.read().decode('utf-8')

for line in store_source.split('\n'):
    match = DOMAIN_LIST_RE.match(line)
    if match is None:
        continue
    # Strip double quotes and extraneous spaces in each domain.
    domain_lists[match.group(1)] = [
        domain.strip(' "') for domain in match.group(2).split(",")
    ]
# Build the global domains data structure: one entry per enum name,
# pairing the enum's numeric value with its equivalent-domain list.
global_domains = []
for name, domains in domain_lists.items():
    global_domains.append(OrderedDict([
        ("Type", enums[name]),
        ("Domains", domains),
        ("Excluded", False),
    ]))
# Write out the global domains JSON file.
# Explicit UTF-8 avoids depending on the platform's default encoding
# (json.dump escapes non-ASCII by default, but be explicit anyway).
with open(OUTPUT_FILE, 'w', encoding='utf-8') as f:
    json.dump(global_domains, f, indent=2)