Fixes
This commit is contained in: parent 6d5f8781aa · commit f85f5c6bd8
4 changed files with 341 additions and 172 deletions
trustor_poc.py
@@ -10,8 +10,8 @@ import datetime
 import requests
 from stem.control import Controller
 from stem.util.tor_tools import *
-from urllib.parse import urlparse
-from urllib3.util.retry import Retry
+# from urllib.parse import urlparse
+from urllib3.util import parse_url as urlparse
 
 try:
     # unbound is not on pypi
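For context on the swapped import: urllib3's parse_url returns a Url object whose .host, .port, .path and .netloc attributes cover the uses made of urlparse later in this file. A minimal sketch of the substitution (illustrative URL, not from the commit):

    # Sketch only: urllib3's parse_url standing in for urllib.parse.urlparse.
    from urllib3.util import parse_url as urlparse

    u = urlparse('https://example.com/.well-known/tor-relay/rsa-fingerprint.txt')
    print(u.host)    # 'example.com'
    print(u.netloc)  # 'example.com' (would include ':port' if one were given)
    print(u.path)    # '/.well-known/tor-relay/rsa-fingerprint.txt'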
@@ -207,8 +207,7 @@ def find_validation_candidates(controller, trusted_domains=[],validation_cache=[
             result[domain] = {prooftype: [fingerprint]}
     return result
 
-def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
-    uri = "https://" + domain + "/.well-known/tor-relay/rsa-fingerprint.txt"
+def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
     # socks proxy used for outbound web requests (for validation of proofs)
     proxy = {'https': 'socks5h://' + host + ':' + str(port)}
     # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
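Since oDownloadUrlRequests now takes the full URI instead of a bare domain, callers build the well-known URL themselves. A hedged sketch of the new call shape (the domain and CA-file path are illustrative, not from the commit):

    # Illustrative call; the sCAfile path is an assumption.
    domain = 'example.com'
    uri = 'https://' + domain + '/.well-known/tor-relay/rsa-fingerprint.txt'
    resp = oDownloadUrlRequests(uri, '/etc/ssl/certs/ca-certificates.crt',
                                timeout=30, host='127.0.0.1', port=9050)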
@@ -229,24 +228,12 @@ def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
         raise TrustorError(f"HTTP Content-Type != text/plain")
     if not os.path.exists(sCAfile):
         raise TrustorError(f"File not found CAfile {sCAfile}")
 
-    try:
-        from https_adapter import HTTPSAdapter
-    except Exception as e:
-        LOG.warn(f"Could not import HTTPSAdapter {e}")
-        HTTPSAdapter = None
-        raise SystemExit(f"{e}")
-    else:
-        LOG.info(f"Loaded HTTPSAdapter")
     try:
         with requests.sessions.Session() as session:
-            if HTTPSAdapter:
-                retries = Retry(connect=3, read=2, redirect=0)
-                # FixMe: upgrade to TLS1.3
-                session.mount("https://", HTTPSAdapter(pool_maxsize=1,
-                                                       max_retries=retries,))
             oReqResp = session.request(method="get", url=uri,
-                                       proxies=proxy, timeout=timeout,
+                                       proxies=proxy,
+                                       timeout=timeout,
                                        headers=headers,
                                        allow_redirects=False,
                                        verify=True
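One detail worth knowing when reading the proxy setup used by these requests: the socks5h scheme (unlike plain socks5) resolves hostnames through the SOCKS proxy itself rather than locally, so proof fetching over Tor does not leak DNS. A minimal standalone sketch, assuming a Tor SOCKS port on 127.0.0.1:9050:

    # Needs requests[socks] (PySocks) installed.
    import requests

    proxy = {'https': 'socks5h://127.0.0.1:9050'}  # 'h': DNS resolved by the proxy
    r = requests.get('https://check.torproject.org/', proxies=proxy, timeout=30)
    print(r.status_code)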
@@ -265,8 +252,182 @@ def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
         raise TrustorError(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.url))
     return oReqResp
 
+logging.getLogger("urllib3").setLevel(logging.INFO)
+# import urllib3.contrib.pyopenssl
+# urllib3.contrib.pyopenssl.inject_into_urllib3()
+
+import urllib3.util
+import ipaddress
+
+def ballow_subdomain_matching(hostname, dnsnames):
+    for elt in dnsnames:
+        # Require the dot boundary so that e.g. 'www.evil-example.com'
+        # cannot match a listed parent domain 'example.com'.
+        if len(hostname.split('.')) > len(elt.split('.')) and \
+           hostname.endswith('.' + elt):
+            # hostname is a subdomain of the listed parent domain
+            return True
+    return False
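Illustrative behaviour of the helper (hostnames are hypothetical):

    # A deeper name under a listed parent domain matches; anything else does not.
    ballow_subdomain_matching('www.example.com', ['example.com'])  # True
    ballow_subdomain_matching('example.com', ['example.com'])      # False (exact matches are handled by _dnsname_match)
    ballow_subdomain_matching('www.other.org', ['example.com'])    # False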
+
+from urllib3.util.ssl_match_hostname import (CertificateError,
+                                             match_hostname,
+                                             _dnsname_match,
+                                             _ipaddress_match,
+                                             )
+
+def my_match_hostname(cert, hostname):
+    """Verify that *cert* (in decoded format as returned by
+    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+    rules are followed, but IP addresses are not accepted for *hostname*.
+
+    CertificateError is raised on failure. On success, the function
+    returns nothing.
+    """
+    if not cert:
+        raise ValueError(
+            "empty or no certificate, match_hostname needs a "
+            "SSL socket or SSL context with either "
+            "CERT_OPTIONAL or CERT_REQUIRED"
+        )
+    try:
+        # Divergence from upstream: ipaddress can't handle byte str
+        host_ip = ipaddress.ip_address(hostname)
+    except (UnicodeError, ValueError):
+        # ValueError: not an IP address (common case)
+        # UnicodeError: divergence from upstream; ipaddress does not take
+        # byte strings. Addresses should be all-ASCII, so we treat this
+        # as "not an IP address".
+        host_ip = None
+    except AttributeError:
+        # Divergence from upstream: make the ipaddress library optional
+        if ipaddress is None:
+            host_ip = None
+        else:  # defensive
+            raise
+    dnsnames = []
+    san = cert.get("subjectAltName", ())
+    for key, value in san:
+        if key == "DNS":
+            if host_ip is None and _dnsname_match(value, hostname):
+                return
+            dnsnames.append(value)
+        elif key == "IP Address":
+            if host_ip is not None and _ipaddress_match(value, host_ip):
+                return
+            dnsnames.append(value)
+    if not dnsnames:
+        # The subject is only checked when there is no dNSName entry
+        # in subjectAltName
+        for sub in cert.get("subject", ()):
+            for key, value in sub:
+                # XXX according to RFC 2818, the most specific Common Name
+                # must be used.
+                if key == "commonName":
+                    if _dnsname_match(value, hostname):
+                        return
+                    dnsnames.append(value)
+    if len(dnsnames) > 1:
+        # soften this to allow subdomain matching
+        if ballow_subdomain_matching(hostname, dnsnames):
+            LOG.warn(f"Allowing {hostname} in {dnsnames}")
+            return
+        raise CertificateError(
+            "hostname %r "
+            "doesn't match any of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+        )
+    elif len(dnsnames) == 1:
+        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+    else:
+        raise CertificateError(
+            "no appropriate commonName or subjectAltName fields were found"
+        )
+
+match_hostname = my_match_hostname
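For readers unfamiliar with the decoded-certificate format, a hedged sketch of how the softened matcher behaves (certificate values are hypothetical, not from the commit):

    # Shaped like ssl.SSLSocket.getpeercert() output.
    cert = {
        'subject': ((('commonName', 'example.com'),),),
        'subjectAltName': (('DNS', 'example.com'), ('DNS', 'www.example.com')),
    }
    my_match_hostname(cert, 'example.com')      # exact SAN match, returns None
    my_match_hostname(cert, 'sub.example.com')  # no SAN matches, but the subdomain
                                                # softening accepts it with a warning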
+
+from urllib3.util.ssl_ import (
+    is_ipaddress,
+)
+
+def _my_match_hostname(cert, asserted_hostname):
+    # Our upstream implementation of ssl.match_hostname()
+    # only applies this normalization to IP addresses, not to DNS SANs,
+    # so we do the same thing!
+    stripped_hostname = asserted_hostname.strip("u[]")
+    if is_ipaddress(stripped_hostname):
+        asserted_hostname = stripped_hostname
+    try:
+        my_match_hostname(cert, asserted_hostname)
+    except CertificateError as e:
+        LOG.warning(
+            "Certificate did not match hostname: %s. Certificate: %s",
+            asserted_hostname,
+            cert,
+        )
+        # Add the cert to the exception and re-raise so client code can
+        # inspect the cert when catching the exception, if it wants to
+        e._peer_cert = cert
+        raise
+
+from urllib3.connection import _match_hostname, HTTPSConnection
+urllib3.connection._match_hostname = _my_match_hostname
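The assignment above is a process-wide monkeypatch: every HTTPSConnection that urllib3 opens afterwards consults urllib3.connection._match_hostname during certificate verification, so the softened matcher applies to all later connections, not just this module's. If one wanted to confine the patch, a sketch (not part of the commit) could scope it with a context manager:

    import contextlib
    import urllib3.connection

    @contextlib.contextmanager
    def patched_match_hostname(matcher):
        # Swap the module-level hook, restoring the original afterwards.
        saved = urllib3.connection._match_hostname
        urllib3.connection._match_hostname = matcher
        try:
            yield
        finally:
            urllib3.connection._match_hostname = saved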
+
+from urllib3.contrib.socks import SOCKSProxyManager
+from urllib3 import Retry
+
+def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
+    """There's no need to use requests here, and it adds too many
+    layers on top of the SSL machinery to be able to get at things.
+    """
+    domain = urlparse(uri).netloc
+    # socks proxy used for outbound web requests (for validation of proofs)
+    proxy = SOCKSProxyManager(f'socks5h://{host}:{port}/',
+                              num_pools=1,
+                              timeout=timeout,
+                              cert_reqs='CERT_REQUIRED',
+                              assert_hostname=domain,
+                              ca_certs=sCAfile)
+
+    # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
+    # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
+    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}
+
+    LOG.debug("fetching %s...." % uri)
+    try:
+        # grr. fix urllib3
+        # Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+        # retries are disabled, in which case the causing exception will be raised.
+        head = proxy.request('HEAD', uri,
+                             headers=headers,
+                             redirect=False,
+                             retries=False)
+    except Exception as e:
+        LOG.error(f"HTTP HEAD request failed for {uri} {e}")
+        raise
+
+    if head.status >= 300:
+        raise TrustorError(f"HTTP Errorcode {head.status}")
+    if not head.headers['Content-Type'].startswith('text/plain'):
+        raise TrustorError(f"HTTP Content-Type != text/plain")
+    if not os.path.exists(sCAfile):
+        raise TrustorError(f"File not found CAfile {sCAfile}")
+
+    try:
+        oReqResp = proxy.request("GET", uri,
+                                 headers=headers,
+                                 redirect=False,
+                                 )
+    except Exception as e:
+        LOG.warn(f"HTTP GET request failed for {uri} {e}")
+        raise
+    if oReqResp.status != 200:
+        raise TrustorError(f"HTTP Errorcode {oReqResp.status}")
+    if not oReqResp.headers['Content-Type'].startswith('text/plain'):
+        raise TrustorError(f"HTTP Content-Type != text/plain")
+
+    # check for redirects (not allowed as per the spec)
+    if oReqResp.geturl() != uri:
+        LOG.error('Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
+        raise TrustorError('Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
+    oReqResp.decode_content = True
+
+    return oReqResp
+
+import urllib3.connectionpool
+urllib3.connectionpool.VerifiedHTTPSConnection = HTTPSConnection
 
 def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
-    o = oDownloadUrl(domain, sCAfile, timeout=timeout, host=host, port=port)
+    uri = "https://" + domain + "/.well-known/tor-relay/rsa-fingerprint.txt"
+    o = oDownloadUrlRequests(uri, sCAfile, timeout=timeout, host=host, port=port)
     well_known_content = o.text.upper().strip().split('\n')
     well_known_content = [i for i in well_known_content if i and len(i) == 40]
     return well_known_content
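End to end, the fetcher returns the uppercase 40-hex-character fingerprint lines from the operator's proof file. A hedged usage sketch (domain and CA path are illustrative):

    # Each entry is a 40-character relay fingerprint advertised by the operator.
    fps = lDownloadUrlFps('example.com', '/etc/ssl/certs/ca-certificates.crt',
                          timeout=30, host='127.0.0.1', port=9050)
    for fp in fps:
        print(fp)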
@@ -365,6 +526,8 @@ def configure_tor(controller, trusted_fingerprints, exitonly=True):
         LOG.exception('Failed to set ExitNodes tor config to trusted relays')
         sys.exit(20)
 
+
+
 if __name__ == '__main__':
     trust_config = 'trust_config'
     assert os.path.exists(trust_config)