 manual tests/11 wrap imposter/meson.build            |  8
 manual tests/11 wrap imposter/subprojects/zlib.wrap  | 10
 manual tests/12 wrap mirror/meson.build              |  4
 manual tests/12 wrap mirror/subprojects/zlib.wrap    | 10
 mesonbuild/wrap/wrap.py                              | 44
 5 files changed, 58 insertions(+), 18 deletions(-)
diff --git a/manual tests/11 wrap imposter/meson.build b/manual tests/11 wrap imposter/meson.build
new file mode 100644
index 0000000..d0575ac
--- /dev/null
+++ b/manual tests/11 wrap imposter/meson.build
@@ -0,0 +1,8 @@
+project('evil URL')
+# shows that the new Meson wrap.py code stops imposter WrapDB URLs;
+# a WrapException is raised:
+#
+# ERROR: https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip may be a WrapDB-impersonating URL
+#
+
+subproject('zlib')
\ No newline at end of file
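A minimal sketch (not part of the patch) of why this URL trips the error above, assuming the whitelist_subdomain constant and the checks added to get_data() further down: the imposter hostname does not end with the whitelisted domain, but the domain string still appears inside the URL.

    import urllib.parse

    u = 'https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip'
    host = urllib.parse.urlparse(u).hostname       # 'wrapdb.mesonbuild.com.invalid'
    print(host.endswith('wrapdb.mesonbuild.com'))  # False -> not treated as WrapDB
    print('wrapdb.mesonbuild.com' in u)            # True  -> flagged as impersonation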
diff --git a/manual tests/11 wrap imposter/subprojects/zlib.wrap b/manual tests/11 wrap imposter/subprojects/zlib.wrap
new file mode 100644
index 0000000..b88f8f2
--- /dev/null
+++ b/manual tests/11 wrap imposter/subprojects/zlib.wrap
@@ -0,0 +1,10 @@
+[wrap-file]
+directory = zlib-1.2.8
+
+source_url = https://zlib.net/zlib-1.2.11.tar.gz
+source_filename = zlib-1.2.11.tar.gz
+source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
+
+patch_url = https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip
+patch_filename = zlib-1.2.11-4-wrap.zip
+patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4
\ No newline at end of file
diff --git a/manual tests/12 wrap mirror/meson.build b/manual tests/12 wrap mirror/meson.build
new file mode 100644
index 0000000..6645bdf
--- /dev/null
+++ b/manual tests/12 wrap mirror/meson.build
@@ -0,0 +1,4 @@
+project('downloader')
+# this test will time out, showing that a subdomain is not caught as a masquerading URL
+
+subproject('zlib')
diff --git a/manual tests/12 wrap mirror/subprojects/zlib.wrap b/manual tests/12 wrap mirror/subprojects/zlib.wrap
new file mode 100644
index 0000000..de0b9ad
--- /dev/null
+++ b/manual tests/12 wrap mirror/subprojects/zlib.wrap
@@ -0,0 +1,10 @@
+[wrap-file]
+directory = zlib-1.2.8
+
+source_url = https://zlib.net/zlib-1.2.11.tar.gz
+source_filename = zlib-1.2.11.tar.gz
+source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
+
+patch_url = https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip
+patch_filename = zlib-1.2.11-4-wrap.zip
+patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4
\ No newline at end of file
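The mirror test relies on the plain endswith() subdomain check sketched below (names follow the patch; the mirror1 host is a placeholder that is not expected to resolve, which is why the test times out rather than erroring):

    whitelist_subdomain = 'wrapdb.mesonbuild.com'
    assert 'mirror1.wrapdb.mesonbuild.com'.endswith(whitelist_subdomain)      # accepted, fetched via open_wrapdburl()
    assert not 'wrapdb.mesonbuild.com.invalid'.endswith(whitelist_subdomain)  # fails the check, then flagged as impersonation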
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index a95cfd2..915640b 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -45,9 +45,7 @@ except ImportError:
req_timeout = 600.0
ssl_warning_printed = False
-whitelist_domain = 'https://wrapdb.mesonbuild.com/'
-whitelist_domain_nossl = 'http://wrapdb.mesonbuild.com/'
-masquerade_str = 'wrapdb.mesonbuild.com'
+whitelist_subdomain = 'wrapdb.mesonbuild.com'
def quiet_git(cmd: typing.List[str], workingdir: str) -> typing.Tuple[bool, str]:
@@ -60,26 +58,34 @@ def quiet_git(cmd: typing.List[str], workingdir: str) -> typing.Tuple[bool, str]
return False, pc.stderr
return True, pc.stdout
+def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
+ """ raises WrapException if not whitelisted subdomain """
+ url = urllib.parse.urlparse(urlstr)
+ if not url.hostname:
+ raise WrapException('{} is not a valid URL'.format(urlstr))
+ if not url.hostname.endswith(whitelist_subdomain):
+ raise WrapException('{} is not a whitelisted WrapDB URL'.format(urlstr))
+ if has_ssl and not url.scheme == 'https':
+ raise WrapException('WrapDB did not have expected SSL https url, instead got {}'.format(urlstr))
+ return url
+
def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
global ssl_warning_printed
+ url = whitelist_wrapdb(urlstring)
if has_ssl:
- if not urlstring.startswith(whitelist_domain):
- raise WrapException('{} is not a whitelisted URL'.format(urlstring))
try:
- return urllib.request.urlopen(urlstring, timeout=req_timeout)
+ return urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=req_timeout)
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
# following code is only for those without Python SSL
- nossl_urlstring = urlstring.replace('https://', 'http://')
- if not nossl_urlstring.startswith(whitelist_domain_nossl):
- raise WrapException('{} is not a whitelisted URL'.format(nossl_urlstring))
+ nossl_url = url._replace(scheme='http')
if not ssl_warning_printed:
mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable))
ssl_warning_printed = True
try:
- return urllib.request.urlopen(nossl_urlstring, timeout=req_timeout)
+ return urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=req_timeout)
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
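A hedged usage sketch of the new whitelist_wrapdb() helper added above (assumes a patched module and an SSL-enabled Python, i.e. has_ssl is True; the URLs are illustrative):

    from mesonbuild.wrap.wrap import WrapException, whitelist_wrapdb

    whitelist_wrapdb('https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip')          # ok
    whitelist_wrapdb('https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip')  # ok, subdomain
    for bad in ('https://example.com/zlib.zip',             # not a WrapDB host
                'http://wrapdb.mesonbuild.com/v1/query'):    # plain http while SSL is available
        try:
            whitelist_wrapdb(bad)
        except WrapException as e:
            print(e)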
@@ -320,20 +326,22 @@ class Resolver:
subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'),
self.directory], cwd=self.subdir_root)
- def get_data(self, url: str) -> typing.Tuple[str, str]:
+ def get_data(self, urlstring: str) -> typing.Tuple[str, str]:
blocksize = 10 * 1024
h = hashlib.sha256()
tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
- hostname = urllib.parse.urlparse(url).hostname
- if hostname == 'wrapdb.mesonbuild.com' or hostname.endswith('.wrapdb.mesonbuild.com'):
- resp = open_wrapdburl(url)
- elif masquerade_str in url:
- raise WrapException('{} may be a WrapDB-impersonating URL'.format(url))
+ url = urllib.parse.urlparse(urlstring)
+ if not url.hostname:
+ raise WrapException('{} is not a valid URL'.format(urlstring))
+ if url.hostname.endswith(whitelist_subdomain):
+ resp = open_wrapdburl(urlstring)
+ elif whitelist_subdomain in urlstring:
+ raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
else:
try:
- resp = urllib.request.urlopen(url, timeout=req_timeout)
+ resp = urllib.request.urlopen(urlstring, timeout=req_timeout)
except urllib.error.URLError:
- raise WrapException('could not get {} is the internet available?'.format(url))
+ raise WrapException('could not get {} is the internet available?'.format(urlstring))
with contextlib.closing(resp) as resp:
try:
dlsize = int(resp.info()['Content-Length'])
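For reference, a standalone sketch (again not part of the patch) of the three-way routing the reworked get_data() performs, using the same whitelist_subdomain constant:

    import urllib.parse

    whitelist_subdomain = 'wrapdb.mesonbuild.com'

    def route(urlstring: str) -> str:
        url = urllib.parse.urlparse(urlstring)
        if not url.hostname:
            raise ValueError('{} is not a valid URL'.format(urlstring))  # WrapException in Meson
        if url.hostname.endswith(whitelist_subdomain):
            return 'open_wrapdburl'   # whitelisted WrapDB (sub)domain, SSL enforced when available
        if whitelist_subdomain in urlstring:
            return 'reject'           # looks like an impersonation attempt
        return 'plain urlopen'        # ordinary download, e.g. source_url

    route('https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip')   # 'open_wrapdburl'
    route('https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip')   # 'reject'
    route('https://zlib.net/zlib-1.2.11.tar.gz')                                       # 'plain urlopen'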