switch to modern urllib3 via a shim
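
In outline: the commit vendors a small httplib2shim module that monkey-patches httplib2.Http to send requests through urllib3, imports and activates it at GAM startup, reworks the TLS/connection checks around the new pool object, and adds urllib3 to the reported library versions. A minimal sketch of the intended effect (the URL is illustrative):

    import httplib2shim
    httplib2shim.patch()          # httplib2.Http is now the urllib3-backed shim

    import httplib2
    http = httplib2.Http()        # same httplib2 API as before
    # request() still returns (httplib2.Response, bytes), but the transfer is
    # performed by a shared urllib3 pool with certifi-based verification.
    resp, content = http.request('https://www.googleapis.com/', 'HEAD')
    print(resp.status)
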
@@ -108,6 +108,9 @@ from filelock import FileLock
 from pathvalidate import sanitize_filename, sanitize_filepath
 
+import httplib2shim
+httplib2shim.patch()
+
 import googleapiclient
 import googleapiclient.discovery
 import googleapiclient.errors
@@ -120,6 +123,7 @@ import google.oauth2.service_account
 import google_auth_oauthlib.flow
 import google_auth_httplib2
 import httplib2
+import urllib3.exceptions
 
 httplib2.RETRIES = 5
 
@@ -2401,6 +2405,7 @@ def entityDoesNotExistWarning(entityType, entityName, i=0, count=0):
 
 def entityUnknownWarning(entityType, entityName, i=0, count=0):
   domain = getEmailAddressDomain(entityName)
   if (domain.endswith(GC.Values[GC.DOMAIN])) or (domain.endswith('google.com')):
     entityDoesNotExistWarning(entityType, entityName, i, count)
   else:
@@ -8870,8 +8875,11 @@ def _getServerTLSUsed(location):
   retries = 5
   for n in range(1, retries+1):
     try:
-      httpObj.request(url, headers={'user-agent': GAM_USER_AGENT})
-      cipher_name, tls_ver, _ = httpObj.connections[conn].sock.cipher()
+      resp = httpObj.pool.request('GET',
+                                  url,
+                                  headers={'user-agent': GAM_USER_AGENT},
+                                  preload_content=False)
+      cipher_name, tls_ver, _ = resp.connection.sock.cipher()
       return tls_ver, cipher_name
     except (httplib2.HttpLib2Error, RuntimeError) as e:
       if n != retries:
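
For context on the new code path above: with preload_content=False, urllib3 keeps the connection attached to the response, which is why the negotiated TLS version and cipher can still be read from the raw socket. A standalone sketch of the same idea (the host is just an example):

    import urllib3

    pool = urllib3.PoolManager()
    # preload_content=False keeps the connection attached to the response,
    # so the socket and its negotiated TLS parameters are still reachable.
    resp = pool.request('GET', 'https://www.googleapis.com/',
                        preload_content=False)
    cipher_name, tls_ver, _ = resp.connection.sock.cipher()
    resp.release_conn()
    print(tls_ver, cipher_name)
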
@@ -8921,11 +8929,17 @@ def getOSPlatform():
 
 # gam checkconnection
 def doCheckConnection():
-  hosts = ['api.github.com', 'raw.githubusercontent.com',
-           'accounts.google.com', 'oauth2.googleapis.com', 'www.googleapis.com']
+  hosts = ['api.github.com',
+           'raw.githubusercontent.com',
+           'accounts.google.com',
+           'oauth2.googleapis.com',
+           'www.googleapis.com']
   fix_hosts = {'calendar-json.googleapis.com': 'www.googleapis.com',
                'storage-api.googleapis.com': 'storage.googleapis.com'}
-  api_hosts = ['apps-apis.google.com', 'sites.google.com', 'versionhistory.googleapis.com', 'www.google.com']
+  api_hosts = ['apps-apis.google.com',
+               'sites.google.com',
+               'versionhistory.googleapis.com',
+               'www.google.com']
   for host in API.PROJECT_APIS:
     host = fix_hosts.get(host, host)
     if host not in api_hosts and host not in hosts:
@@ -8941,13 +8955,27 @@ def doCheckConnection():
   success_count = 0
   for host in hosts:
     try_count += 1
-    ip = socket.getaddrinfo(host, None)[0][-1][0] # works with ipv6
+    dns_err = None
+    ip = 'unknown'
+    try:
+      ip = socket.getaddrinfo(host, None)[0][-1][0] # works with ipv6
+    except socket.gaierror as err:
+      dns_err = f'{not_okay}\n DNS failure: {err}\n'
+    except Exception as err:
+      dns_err = f'{not_okay}\n Unknown DNS failure: {err}\n'
     check_line = f'Checking {host} ({ip}) ({try_count}/{host_count})...'
     writeStdout(f'{check_line:<100}')
     flushStdout()
+    if dns_err:
+      writeStdout(dns_err)
+      continue
     gen_firewall = 'You probably have security software or a firewall on your machine or network that is preventing GAM from making Internet connections. Check your network configuration or try running GAM on a hotspot or home network to see if the problem exists only on your organization\'s network.'
     try:
-      httpObj.request(f'https://{host}/', 'HEAD', headers=headers)
+      if host.startswith('http'):
+        url = host
+      else:
+        url = f'https://{host}:443/'
+      httpObj.request(url, 'HEAD', headers=headers)
       success_count += 1
       writeStdout(f'{okay}\n')
     except ConnectionRefusedError:
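
The added pre-check resolves each host before the HTTP probe so that a resolver failure is reported separately from a firewall problem. A minimal standalone sketch of that resolution step (host name illustrative):

    import socket

    host = 'www.googleapis.com'
    try:
        # getaddrinfo returns (family, type, proto, canonname, sockaddr) tuples;
        # sockaddr[0] is the address and works for both IPv4 and IPv6.
        ip = socket.getaddrinfo(host, None)[0][-1][0]
        print(f'{host} resolves to {ip}')
    except socket.gaierror as err:
        print(f'DNS failure for {host}: {err}')
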
@@ -8956,15 +8984,16 @@ def doCheckConnection():
       writeStdout(f'{not_okay}\n Connection reset by peer. {gen_firewall}\n')
     except httplib2.error.ServerNotFoundError:
       writeStdout(f'{not_okay}\n Failed to find server. Your DNS is probably misconfigured.\n')
-    except ssl.SSLError as e:
-      if e.reason == 'SSLV3_ALERT_HANDSHAKE_FAILURE':
+    except ssl.SSLError as err:
+      err_type = type(err.args[0])
+      if err_type == ssl.SSLError:
         writeStdout(f'{not_okay}\n GAM expects to connect with TLS 1.3 or newer and that failed. If your firewall / proxy server is not compatible with TLS 1.3 then you can tell GAM to allow TLS 1.2 by setting tls_min_version = TLSv1.2 in gam.cfg.\n')
-      elif e.reason == 'CERTIFICATE_VERIFY_FAILED':
+      elif err_type == ssl.SSLCertVerificationError:
         writeStdout(f'{not_okay}\n Certificate verification failed. If you are behind a firewall / proxy server that does TLS / SSL inspection you may need to point GAM at your certificate authority file by setting cacerts_pem = /path/to/your/certauth.pem in gam.cfg.\n')
-      elif e.strerror.startswith('TLS/SSL connection has been closed\n'):
+      elif err.strerror and err.strerror.startswith('TLS/SSL connection has been closed\n'):
         writeStdout(f'{not_okay}\n TLS connection was closed. {gen_firewall}\n')
       else:
-        writeStdout(f'{not_okay}\n {str(e)}\n')
+        writeStdout(f'{not_okay}\n {str(err)}\n')
     except TimeoutError:
       writeStdout(f'{not_okay}\n Timed out trying to connect to host\n')
     except Exception as e:
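
Why type(err.args[0])? The shim's _map_exception (later in this commit) re-raises urllib3 TLS failures as ssl.SSLError(*e.args), so the original exception instance survives as the wrapper's first argument. A rough illustration, assuming the wrapper is built the same way the shim builds it:

    import ssl

    # Hypothetical stand-in for what the shim produces from a urllib3 SSLError:
    inner = ssl.SSLCertVerificationError(1, 'certificate verify failed')
    wrapped = ssl.SSLError(inner)          # mirrors ssl.SSLError(*e.args)

    err_type = type(wrapped.args[0])
    if err_type == ssl.SSLCertVerificationError:
        print('certificate verification problem')
    elif err_type == ssl.SSLError:
        print('generic TLS handshake problem')
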
@@ -29,5 +29,6 @@ GAM_VER_LIBS = ['cryptography',
                 'httplib2',
                 'passlib',
                 'python-dateutil',
+                'urllib3',
                 'yubikey-manager',
                 ]

src/httplib2shim/__init__.py (new file, 265 lines)
@@ -0,0 +1,265 @@
# Copyright (c) 2006 by Joe Gregorio, Google Inc.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

try:
    from collections.abc import Callable
except ImportError:
    from collections import Callable

import errno
import http.client
import socket
import ssl
import warnings

import certifi
import httplib2
import urllib3


def _default_make_pool(http, proxy_info, tls_maximum_version=None, tls_minimum_version=None):
    """Creates a urllib3.PoolManager object that has SSL verification enabled
    and uses the certifi certificates."""

    if not http.ca_certs:
        http.ca_certs = _certifi_where_for_ssl_version()

    ssl_disabled = http.disable_ssl_certificate_validation

    cert_reqs = 'CERT_REQUIRED' if http.ca_certs and not ssl_disabled else None

    ssl_minimum_version = ssl.TLSVersion[tls_minimum_version] if tls_minimum_version else None
    ssl_maximum_version = ssl.TLSVersion[tls_maximum_version] if tls_maximum_version else None

    if isinstance(proxy_info, Callable):
        proxy_info = proxy_info()
    if proxy_info:
        if proxy_info.proxy_user and proxy_info.proxy_pass:
            proxy_url = 'http://{}:{}@{}:{}/'.format(
                proxy_info.proxy_user, proxy_info.proxy_pass,
                proxy_info.proxy_host, proxy_info.proxy_port,
            )
            proxy_headers = urllib3.util.request.make_headers(
                proxy_basic_auth='{}:{}'.format(
                    proxy_info.proxy_user, proxy_info.proxy_pass,
                )
            )
        else:
            proxy_url = 'http://{}:{}/'.format(
                proxy_info.proxy_host, proxy_info.proxy_port,
            )
            proxy_headers = {}

        return urllib3.ProxyManager(
            proxy_url=proxy_url,
            proxy_headers=proxy_headers,
            ca_certs=http.ca_certs,
            cert_reqs=cert_reqs,
            ssl_minimum_version=ssl_minimum_version,
            ssl_maximum_version=ssl_maximum_version,
        )
    return urllib3.PoolManager(
        ca_certs=http.ca_certs,
        cert_reqs=cert_reqs,
        ssl_minimum_version=ssl_minimum_version,
        ssl_maximum_version=ssl_maximum_version,
    )
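
One way to see what this factory produces is to call it with a minimal stand-in for the http object; the _FakeHttp class below is purely illustrative and not part of the commit:

    import httplib2shim

    class _FakeHttp:
        # only the two attributes _default_make_pool reads
        ca_certs = None
        disable_ssl_certificate_validation = False

    # No proxy configured: a verifying urllib3.PoolManager backed by certifi.
    pool = httplib2shim._default_make_pool(_FakeHttp(), proxy_info=None)
    print(type(pool).__name__)    # PoolManager

    # tls_minimum_version='TLSv1_2' maps to ssl.TLSVersion.TLSv1_2 and is
    # handed to the pool as ssl_minimum_version.
    pool = httplib2shim._default_make_pool(_FakeHttp(), proxy_info=None,
                                           tls_minimum_version='TLSv1_2')
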


def patch(make_pool=_default_make_pool):
    """Monkey-patches httplib2.Http to be httplib2shim.Http.

    This effectively makes all clients of httplib2 use urllib3. It's preferable
    to specify httplib2shim.Http explicitly where you can, but this can be
    useful in situations where you do not control the construction of the http
    object.

    Args:
        make_pool: A function that returns a urllib3.Pool-like object. This
            allows you to specify special arguments to your connection pool if
            needed. By default, this will create a urllib3.PoolManager with
            SSL verification enabled using the certifi certificates.
    """
    setattr(httplib2, '_HttpOriginal', httplib2.Http)
    httplib2.Http = Http
    Http._make_pool = make_pool
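
Because patch() stores the factory on the class, callers can supply their own pool settings. A hedged sketch (make_bigger_pool is a hypothetical example, not something GAM ships):

    import certifi
    import urllib3
    import httplib2shim

    def make_bigger_pool(http, proxy_info,
                         tls_maximum_version=None, tls_minimum_version=None):
        # Same signature the shim expects; raises the per-host connection limit.
        return urllib3.PoolManager(ca_certs=certifi.where(),
                                   cert_reqs='CERT_REQUIRED',
                                   maxsize=10)

    httplib2shim.patch(make_pool=make_bigger_pool)
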


class Http(httplib2.Http):
    """A httplib2.Http subclass that uses urllib3 to perform requests.

    This allows full thread safety, connection pooling, and proper SSL
    verification support.
    """
    _make_pool = _default_make_pool

    def __init__(self, cache=None, timeout=None,
                 proxy_info=httplib2.proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False,
                 pool=None, tls_maximum_version=None, tls_minimum_version=None):
        disable_ssl = disable_ssl_certificate_validation

        super(Http, self).__init__(
            cache=cache,
            timeout=timeout,
            proxy_info=proxy_info,
            ca_certs=ca_certs,
            disable_ssl_certificate_validation=disable_ssl,
            tls_maximum_version=tls_maximum_version,
            tls_minimum_version=tls_minimum_version)

        if not pool:
            pool = self._make_pool(proxy_info=proxy_info,
                                   tls_maximum_version=tls_maximum_version,
                                   tls_minimum_version=tls_minimum_version)

        self.pool = pool

        if httplib2.debuglevel:
            http.client.HTTPConnection.debuglevel = 5

    def _conn_request(self, conn, request_uri, method, body, headers):
        # Reconstruct the full uri from the connection object.
        if isinstance(conn, httplib2.HTTPSConnectionWithTimeout):
            scheme = 'https'
        else:
            scheme = 'http'

        host = conn.host

        # Reformat IPv6 hosts.
        if _is_ipv6(host):
            host = '[{}]'.format(host)

        full_uri = '{}://{}:{}{}'.format(
            scheme, host, conn.port, request_uri)

        decode = True if method != 'HEAD' else False

        try:
            urllib3_response = self.pool.request(
                method,
                full_uri,
                body=body,
                headers=headers,
                redirect=False,
                retries=urllib3.Retry(total=False, redirect=0),
                timeout=urllib3.Timeout(total=self.timeout),
                decode_content=decode)

            response = _map_response(urllib3_response, decode=decode)
            content = urllib3_response.data

        except Exception as e:
            raise _map_exception(e)

        return response, content

    def add_certificate(self, *args, **kwargs):
        warnings.warn('httplib2shim does not support add_certificate.')
        return super(Http, self).add_certificate(*args, **kwargs)

    def __getstate__(self):
        dict = super(Http, self).__getstate__()
        del dict['pool']
        return dict

    def __setstate__(self, dict):
        super(Http, self).__setstate__(dict)
        self.pool = self._make_pool(proxy_info=self.proxy_info())
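
As the patch() docstring suggests, the class can also be used explicitly rather than via monkey-patching. A small sketch (URL illustrative):

    import httplib2shim

    http = httplib2shim.Http(timeout=10)
    resp, content = http.request('https://www.googleapis.com/', 'HEAD')
    # resp is the mapped httplib2.Response; status is available both as an
    # int attribute and as the usual string entry.
    print(resp.status, resp['status'])
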


def _is_ipv6(addr):
    """Checks if a given address is an IPv6 address."""
    try:
        socket.inet_pton(socket.AF_INET6, addr)
        return True
    except socket.error:
        return False


def _certifi_where_for_ssl_version():
    """Gets the right location for certifi certifications for the current SSL
    version.

    Older versions of SSL don't support the stronger set of root certificates.
    """
    if not ssl:
        return

    if ssl.OPENSSL_VERSION_INFO < (1, 0, 2):
        warnings.warn(
            'You are using an outdated version of OpenSSL that '
            'can\'t use stronger root certificates.')
        return certifi.old_where()

    return certifi.where()


def _map_response(response, decode=False):
    """Maps a urllib3 response to a httplib/httplib2 Response."""
    # This causes weird deepcopy errors, so it's commented out for now.
    # item._urllib3_response = response
    item = httplib2.Response(response.getheaders())
    item.status = response.status
    item['status'] = str(item.status)
    item.reason = response.reason
    item.version = response.version

    # httplib2 expects the content-encoding header to be stripped and the
    # content length to be the length of the uncompressed content.
    # This does not occur for 'HEAD' requests.
    if decode and item.get('content-encoding') in ['gzip', 'deflate']:
        item['content-length'] = str(len(response.data))
        item['-content-encoding'] = item.pop('content-encoding')

    return item


def _map_exception(e):
    """Maps an exception from urllib3 to httplib2."""
    if isinstance(e, urllib3.exceptions.MaxRetryError):
        if not e.reason:
            return e
        e = e.reason
    message = e.args[0] if e.args else ''
    if isinstance(e, urllib3.exceptions.ResponseError):
        if 'too many redirects' in message:
            return httplib2.RedirectLimit(message)
    if isinstance(e, urllib3.exceptions.NewConnectionError):
        if ('Name or service not known' in message or
                'nodename nor servname provided, or not known' in message):
            return httplib2.ServerNotFoundError(
                'Unable to find hostname.')
        if 'Connection refused' in message:
            return socket.error((errno.ECONNREFUSED, 'Connection refused'))
    if isinstance(e, urllib3.exceptions.DecodeError):
        return httplib2.FailedToDecompressContent(
            'Content purported as compressed but not uncompressable.',
            httplib2.Response({'status': 500}), '')
    if isinstance(e, urllib3.exceptions.TimeoutError):
        return socket.timeout('timed out')
    if isinstance(e, urllib3.exceptions.SSLError):
        return ssl.SSLError(*e.args)

    return e
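
Because of this mapping, code written against httplib2's exception surface keeps working after the switch: an unresolvable hostname still surfaces as httplib2.ServerNotFoundError, a timeout as socket.timeout, and anything unrecognized passes through unchanged. A hedged sketch (the hostname is a deliberately invalid placeholder):

    import socket
    import httplib2
    import httplib2shim

    httplib2shim.patch()
    http = httplib2.Http(timeout=5)
    try:
        http.request('https://no-such-host.invalid/', 'HEAD')
    except httplib2.ServerNotFoundError as err:
        print('DNS failure mapped to httplib2:', err)
    except socket.timeout:
        print('timeout mapped to socket.timeout')
    except Exception as err:
        # On platforms whose resolver message isn't recognized, the original
        # urllib3 error is passed through unchanged.
        print('unmapped error passed through:', type(err).__name__)
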

src/httplib2shim/test/__init__.py (new file, 0 lines)

src/httplib2shim/test/httplib2_test.py (new file, 1899 lines)
File diff suppressed because it is too large