+ h = rs.get_http_headers() if rs else None
+ ct = h.get_content_type()
+
+ if self.verbose:
+ print('[PAGE ] Finished loading page %s' % uri, file=stderr)
+
+ # convert to normal dict
+ d = {}
+ h.foreach(lambda k, v: setitem(d, k.lower(), v))
+ # filter to interesting headers
+ fd = {name: v for name, v in d.items() if name.startswith('saml-') or name in COOKIE_FIELDS}
+
+ if fd:
+ if self.verbose:
+ print("[SAML ] Got SAML result headers: %r" % fd, file=stderr)
+ if self.verbose > 1:
+ # display everything we found
+ mr.get_data(None, self.log_resource_text, ct[0], ct.params.get('charset'), d)
+ self.saml_result.update(fd, server=urlparse(uri).netloc)
+ self.check_done()
+
+ if not self.success:
+ if self.verbose > 1:
+ print("[SAML ] No headers in response, searching body for xml comments", file=stderr)
+ # asynchronous call to fetch body content, continue processing in callback:
+ mr.get_data(None, self.response_callback, ct)
+
+ def response_callback(self, resource, result, ct):
+ data = resource.get_data_finish(result)
+ content = data.decode(ct.params.get("charset") or "utf-8")
+
+ html_parser = CommentHtmlParser()
+ html_parser.feed(content)
+
+ fd = {}
+ for comment in html_parser.comments:
+ if self.verbose > 1:
+ print("[SAML ] Found comment in response body: '%s'" % comment, file=stderr)
+ try:
+ # xml parser requires valid xml with a single root tag, but our expected content
+ # is just a list of data tags, so we need to improvise
+ xmlroot = ET.fromstring("<fakexmlroot>%s</fakexmlroot>" % comment)
+ # search for any valid first level xml tags (inside our fake root) that could contain SAML data
+ for elem in xmlroot:
+ if elem.tag.startswith("saml-") or elem.tag in COOKIE_FIELDS:
+ fd[elem.tag] = elem.text
+ except ET.ParseError:
+ pass # silently ignore any comments that don't contain valid xml
+
+ if self.verbose > 1:
+ print("[SAML ] Finished parsing response body for %s" % resource.get_uri(), file=stderr)
+ if fd:
+ if self.verbose:
+ print("[SAML ] Got SAML result tags: %s" % fd, file=stderr)
+ self.saml_result.update(fd, server=urlparse(resource.get_uri()).netloc)
+
+ if not self.check_done():
+ # Work around timing/race condition by retrying check_done after 1 second
+ GLib.timeout_add(1000, self.check_done)
+
+ def check_done(self):
+ d = self.saml_result
+ if 'saml-username' in d and ('prelogin-cookie' in d or 'portal-userauthcookie' in d):
+ if self.verbose:
+ print("[SAML ] Got all required SAML headers, done.", file=stderr)
+ self.success = True
+ Gtk.main_quit()
+ return True
+
+
class TLSAdapter(requests.adapters.HTTPAdapter):
    '''Adapt to older TLS stacks that would raise errors otherwise.

    We try to work around different issues:
    * Enable weak ciphers such as 3DES or RC4, that have been disabled by default
      in OpenSSL 3.0 or recent Linux distributions.
    * Enable weak Diffie-Hellman key exchange sizes.
    * Enable unsafe legacy renegotiation for servers without RFC 5746 support.

    See Also
    --------
    https://github.com/psf/requests/issues/4775#issuecomment-478198879

    Notes
    -----
    Python before 3.12 is missing an ssl.OP_LEGACY_SERVER_CONNECT constant.
    We have extracted the relevant value from <openssl/ssl.h> as a fallback.

    '''

    # SSL_OP_LEGACY_SERVER_CONNECT from <openssl/ssl.h> (0x4); used as a
    # fallback where ssl.OP_LEGACY_SERVER_CONNECT (added in 3.12) is absent.
    _OP_LEGACY_SERVER_CONNECT = 1 << 2

    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        """Build the urllib3 pool manager with a permissive SSL context.

        Parameters mirror requests.adapters.HTTPAdapter.init_poolmanager;
        accepting **pool_kwargs keeps us signature-compatible with the base
        class (which forwards extra pool options), while our ssl_context
        always takes precedence.
        """
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        # SECLEVEL=1 re-enables weak ciphers and small DH keys that OpenSSL 3.0
        # and recent distros disable by default.
        ssl_context.set_ciphers('DEFAULT:@SECLEVEL=1')
        # Allow unsafe legacy renegotiation for pre-RFC-5746 servers.
        ssl_context.options |= getattr(ssl, 'OP_LEGACY_SERVER_CONNECT',
                                       self._OP_LEGACY_SERVER_CONNECT)
        pool_kwargs['ssl_context'] = ssl_context
        self.poolmanager = urllib3.PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            **pool_kwargs)