Mirror of https://github.com/GAM-team/GAM.git (synced 2025-05-12 12:17:20 +00:00)

Commit db0dd231b1 (parent a2e8d17a69)

googleapiclient 1.4 and oauth2client 1.4.7 upgrades
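The diff below applies the same Python 2/3 compatibility idioms across googleapiclient and oauth2client: dict iteration goes through six.iteritems()/six.iterkeys(), basestring checks become six.string_types, xrange becomes range, and urllib/urlparse calls move to six.moves.urllib.parse. The short sketch that follows only illustrates those idioms; the dictionary, token string, and URL used here are made-up examples, not values taken from the diff.

import six
from six.moves.urllib.parse import urlencode, urlparse

params = {'pageToken': 'abc', 'alt': 'json'}   # hypothetical sample data

# Python 2's dict.iteritems() is replaced by six.iteritems(d).
for key, value in six.iteritems(params):
    print(key, value)

# Python 2's basestring check is replaced by six.string_types.
token = 'ya29.example'
if isinstance(token, six.string_types):
    print('token is a string')

# urllib.urlencode / urlparse.urlparse are reached via six.moves.
query = urlencode(params)
parts = urlparse('https://www.googleapis.com/discovery/v1/apis?' + query)
print(parts.netloc, parts.query)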
@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-__version__ = "1.3.1"
+__version__ = "1.4.0"
@ -55,12 +55,14 @@ Example of unsubscribing.

  service.channels().stop(channel.body())
"""
+from __future__ import absolute_import

import datetime
import uuid

from googleapiclient import errors
from oauth2client import util
+import six


# The unix time epoch starts at midnight 1970.

@ -88,7 +90,7 @@ X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'

def _upper_header_keys(headers):
  new_headers = {}
-  for k, v in headers.iteritems():
+  for k, v in six.iteritems(headers):
    new_headers[k.upper()] = v
  return new_headers

@ -218,7 +220,7 @@ class Channel(object):
    Args:
      resp: dict, The response from a watch() method.
    """
-    for json_name, param_name in CHANNEL_PARAMS.iteritems():
+    for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
      value = resp.get(json_name)
      if value is not None:
        setattr(self, param_name, value)
@ -16,6 +16,9 @@

A client library for Google's discovery based APIs.
"""
+from __future__ import absolute_import
+import six
+from six.moves import zip

__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = [
@ -25,9 +28,11 @@ __all__ = [
    'key2param',
    ]

+from six import StringIO
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
+  urlunparse, parse_qsl
+
# Standard library imports
-import StringIO
import copy
from email.generator import Generator
from email.mime.multipart import MIMEMultipart
@ -38,20 +43,13 @@ import logging
import mimetypes
import os
import re
-import urllib
-import urlparse
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl

# Third-party imports
import httplib2
-import mimeparse
import uritemplate

# Local imports
+from googleapiclient import mimeparse
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidJsonError
from googleapiclient.errors import MediaUploadSizeError
@ -203,9 +201,14 @@ def build(serviceName,
  if resp.status >= 400:
    raise HttpError(resp, content, uri=requested_url)

+  try:
+    content = content.decode('utf-8')
+  except AttributeError:
+    pass
+
  try:
    service = json.loads(content)
-  except ValueError, e:
+  except ValueError as e:
    logger.error('Failed to parse as JSON: ' + content)
    raise InvalidJsonError()
@ -253,9 +256,9 @@ def build_from_document(
  # future is no longer used.
  future = {}

-  if isinstance(service, basestring):
+  if isinstance(service, six.string_types):
    service = json.loads(service)
-  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
+  base = urljoin(service['rootUrl'], service['servicePath'])
  schema = Schemas(service)

  if credentials:
@ -271,7 +274,7 @@ def build_from_document(
      credentials.create_scoped_required()):
    scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
    if scopes:
-      credentials = credentials.create_scoped(scopes.keys())
+      credentials = credentials.create_scoped(list(scopes.keys()))
  else:
    # No need to authorize the http object
    # if the service does not require authentication.
@ -329,13 +332,13 @@ def _media_size_to_long(maxSize):
    The size as an integer value.
  """
  if len(maxSize) < 2:
-    return 0L
+    return 0
  units = maxSize[-2:].upper()
  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
  if bit_shift is not None:
-    return long(maxSize[:-2]) << bit_shift
+    return int(maxSize[:-2]) << bit_shift
  else:
-    return long(maxSize)
+    return int(maxSize)


def _media_path_url_from_info(root_desc, path_url):
@ -385,7 +388,7 @@ def _fix_up_parameters(method_desc, root_desc, http_method):
  parameters = method_desc.setdefault('parameters', {})

  # Add in the parameters common to all methods.
-  for name, description in root_desc.get('parameters', {}).iteritems():
+  for name, description in six.iteritems(root_desc.get('parameters', {})):
    parameters[name] = description

  # Add in undocumented query parameters.
@ -491,6 +494,23 @@ def _fix_up_method_description(method_desc, root_desc):
  return path_url, http_method, method_id, accept, max_size, media_path_url


+def _urljoin(base, url):
+  """Custom urljoin replacement supporting : before / in url."""
+  # In general, it's unsafe to simply join base and url. However, for
+  # the case of discovery documents, we know:
+  #   * base will never contain params, query, or fragment
+  #   * url will never contain a scheme or net_loc.
+  # In general, this means we can safely join on /; we just need to
+  # ensure we end up with precisely one / joining base and url. The
+  # exception here is the case of media uploads, where url will be an
+  # absolute url.
+  if url.startswith('http://') or url.startswith('https://'):
+    return urljoin(base, url)
+  new_base = base if base.endswith('/') else base + '/'
+  new_url = url[1:] if url.startswith('/') else url
+  return new_base + new_url
+
+
# TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object):
  """Represents the parameters associated with a method.
@ -551,7 +571,7 @@ class ResourceMethodParameters(object):
      comes from the dictionary of methods stored in the 'methods' key in
      the deserialized discovery document.
    """
-    for arg, desc in method_desc.get('parameters', {}).iteritems():
+    for arg, desc in six.iteritems(method_desc.get('parameters', {})):
      param = key2param(arg)
      self.argmap[param] = arg
@ -599,12 +619,12 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
  def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

-    for name in kwargs.iterkeys():
+    for name in six.iterkeys(kwargs):
      if name not in parameters.argmap:
        raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
-    keys = kwargs.keys()
+    keys = list(kwargs.keys())
    for name in keys:
      if kwargs[name] is None:
        del kwargs[name]
@ -613,9 +633,9 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
      if name not in kwargs:
        raise TypeError('Missing required parameter "%s"' % name)

-    for name, regex in parameters.pattern_params.iteritems():
+    for name, regex in six.iteritems(parameters.pattern_params):
      if name in kwargs:
-        if isinstance(kwargs[name], basestring):
+        if isinstance(kwargs[name], six.string_types):
          pvalues = [kwargs[name]]
        else:
          pvalues = kwargs[name]
@ -625,13 +645,13 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
            'Parameter "%s" value "%s" does not match the pattern "%s"' %
            (name, pvalue, regex))

-    for name, enums in parameters.enum_params.iteritems():
+    for name, enums in six.iteritems(parameters.enum_params):
      if name in kwargs:
        # We need to handle the case of a repeated enum
        # name differently, since we want to handle both
        # arg='value' and arg=['value1', 'value2']
        if (name in parameters.repeated_params and
-            not isinstance(kwargs[name], basestring)):
+            not isinstance(kwargs[name], six.string_types)):
          values = kwargs[name]
        else:
          values = [kwargs[name]]
@ -643,7 +663,7 @@ def createMethod(methodName, methodDesc, rootDesc, schema):

    actual_query_params = {}
    actual_path_params = {}
-    for key, value in kwargs.iteritems():
+    for key, value in six.iteritems(kwargs):
      to_type = parameters.param_types.get(key, 'string')
      # For repeated parameters we cast each member of the list.
      if key in parameters.repeated_params and type(value) == type([]):
@ -671,14 +691,14 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
        actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
-    url = urlparse.urljoin(self._baseUrl, expanded_url + query)
+    url = _urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
      # Ensure we end up with a valid MediaUpload object.
-      if isinstance(media_filename, basestring):
+      if isinstance(media_filename, six.string_types):
        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
        if media_mime_type is None:
          raise UnknownFileType(media_filename)
@ -692,12 +712,12 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
        raise TypeError('media_filename must be str or MediaUpload.')

      # Check the maxSize
-      if maxSize > 0 and media_upload.size() > maxSize:
+      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
        raise MediaUploadSizeError("Media larger than: %s" % maxSize)

      # Use the media path uri for media uploads
      expanded_url = uritemplate.expand(mediaPathUrl, params)
-      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
+      url = _urljoin(self._baseUrl, expanded_url + query)
      if media_upload.resumable():
        url = _add_query_parameter(url, 'uploadType', 'resumable')
@ -732,7 +752,7 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
          msgRoot.attach(msg)
          # encode the body: note that we can't use `as_string`, because
          # it plays games with `From ` lines.
-          fp = StringIO.StringIO()
+          fp = StringIO()
          g = Generator(fp, mangle_from_=False)
          g.flatten(msgRoot, unixfrom=False)
          body = fp.getvalue()
@ -757,10 +777,10 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
    docs.append('Args:\n')

  # Skip undocumented params and params common to all methods.
-  skip_parameters = rootDesc.get('parameters', {}).keys()
+  skip_parameters = list(rootDesc.get('parameters', {}).keys())
  skip_parameters.extend(STACK_QUERY_PARAMETERS)

-  all_args = parameters.argmap.keys()
+  all_args = list(parameters.argmap.keys())
  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]

  # Move body to the front of the line.
@ -839,14 +859,14 @@ Returns:
    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
-    parsed = list(urlparse.urlparse(request.uri))
+    parsed = list(urlparse(request.uri))
    q = parse_qsl(parsed[4])

    # Find and remove old 'pageToken' value from URI
    newq = [(key, value) for (key, value) in q if key != 'pageToken']
    newq.append(('pageToken', pageToken))
-    parsed[4] = urllib.urlencode(newq)
-    uri = urlparse.urlunparse(parsed)
+    parsed[4] = urlencode(newq)
+    uri = urlunparse(parsed)

    request.uri = uri
@ -932,7 +952,7 @@ class Resource(object):
  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
    # Add basic methods to Resource
    if 'methods' in resourceDesc:
-      for methodName, methodDesc in resourceDesc['methods'].iteritems():
+      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
        fixedMethodName, method = createMethod(
            methodName, methodDesc, rootDesc, schema)
        self._set_dynamic_attr(fixedMethodName,
@ -971,7 +991,7 @@ class Resource(object):

      return (methodName, methodResource)

-    for methodName, methodDesc in resourceDesc['resources'].iteritems():
+    for methodName, methodDesc in six.iteritems(resourceDesc['resources']):
      fixedMethodName, method = createResourceMethod(methodName, methodDesc)
      self._set_dynamic_attr(fixedMethodName,
                             method.__get__(self, self.__class__))
@ -981,7 +1001,7 @@ class Resource(object):
    # Look for response bodies in schema that contain nextPageToken, and methods
    # that take a pageToken parameter.
    if 'methods' in resourceDesc:
-      for methodName, methodDesc in resourceDesc['methods'].iteritems():
+      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
        if 'response' in methodDesc:
          responseSchema = methodDesc['response']
          if '$ref' in responseSchema:
@ -1,5 +1,3 @@
-#!/usr/bin/python2.4
-#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -19,6 +17,7 @@
All exceptions defined by the library
should be defined in this file.
"""
+from __future__ import absolute_import

__author__ = 'jcgregorio@google.com (Joe Gregorio)'
@ -18,37 +18,41 @@ The classes implement a command pattern, with every
object supporting an execute() method that does the
actuall HTTP request.
"""
+from __future__ import absolute_import
+import six
+from six.moves import range

__author__ = 'jcgregorio@google.com (Joe Gregorio)'

-import StringIO
+from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote

import base64
import copy
import gzip
import httplib2
import json
import logging
import mimeparse
import mimetypes
import os
import random
import sys
import time
-import urllib
-import urlparse
import uuid

from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser
-from errors import BatchError
-from errors import HttpError
-from errors import InvalidChunkSizeError
-from errors import ResumableUploadError
-from errors import UnexpectedBodyError
-from errors import UnexpectedMethodError
-from model import JsonModel

+from googleapiclient import mimeparse
+from googleapiclient.errors import BatchError
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidChunkSizeError
+from googleapiclient.errors import ResumableUploadError
+from googleapiclient.errors import UnexpectedBodyError
+from googleapiclient.errors import UnexpectedMethodError
+from googleapiclient.model import JsonModel
from oauth2client import util
@ -259,7 +263,7 @@ class MediaIoBaseUpload(MediaUpload):
  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

-    fh = io.BytesIO('...Some data to upload...')
+    fh = BytesIO('...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
@ -465,7 +469,7 @@ class MediaInMemoryUpload(MediaIoBaseUpload):
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
-    fd = StringIO.StringIO(body)
+    fd = BytesIO(body)
    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
                                              resumable=resumable)
@ -538,7 +542,7 @@ class MediaIoBaseDownload(object):
    }
    http = self._request.http

-    for retry_num in xrange(num_retries + 1):
+    for retry_num in range(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        logging.warning(
@ -559,6 +563,8 @@ class MediaIoBaseDownload(object):
        content_range = resp['content-range']
        length = content_range.rsplit('/', 1)[1]
        self._total_size = int(length)
+      elif 'content-length' in resp:
+        self._total_size = int(resp['content-length'])

      if self._progress == self._total_size:
        self._done = True
@ -697,8 +703,8 @@ class HttpRequest(object):
      self.method = 'POST'
      self.headers['x-http-method-override'] = 'GET'
      self.headers['content-type'] = 'application/x-www-form-urlencoded'
-      parsed = urlparse.urlparse(self.uri)
-      self.uri = urlparse.urlunparse(
+      parsed = urlparse(self.uri)
+      self.uri = urlunparse(
          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
           None)
          )
@ -706,7 +712,7 @@ class HttpRequest(object):
      self.headers['content-length'] = str(len(self.body))

    # Handle retries for server-side errors.
-    for retry_num in xrange(num_retries + 1):
+    for retry_num in range(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        logging.warning('Retry #%d for request: %s %s, following status: %d'
@ -789,7 +795,7 @@ class HttpRequest(object):
      start_headers['X-Upload-Content-Length'] = size
      start_headers['content-length'] = str(self.body_size)

-      for retry_num in xrange(num_retries + 1):
+      for retry_num in range(num_retries + 1):
        if retry_num > 0:
          self._sleep(self._rand() * 2**retry_num)
          logging.warning(
@ -854,7 +860,7 @@ class HttpRequest(object):
        'Content-Length': str(chunk_end - self.resumable_progress + 1)
        }

-    for retry_num in xrange(num_retries + 1):
+    for retry_num in range(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        logging.warning(
@ -1046,7 +1052,7 @@ class BatchHttpRequest(object):
    if self._base_id is None:
      self._base_id = uuid.uuid4()

-    return '<%s+%s>' % (self._base_id, urllib.quote(id_))
+    return '<%s+%s>' % (self._base_id, quote(id_))

  def _header_to_id(self, header):
    """Convert a Content-ID header value to an id.
@ -1069,7 +1075,7 @@ class BatchHttpRequest(object):
      raise BatchError("Invalid value for Content-ID: %s" % header)
    base, id_ = header[1:-1].rsplit('+', 1)

-    return urllib.unquote(id_)
+    return unquote(id_)

  def _serialize_request(self, request):
    """Convert an HttpRequest object into a string.
@ -1081,9 +1087,9 @@ class BatchHttpRequest(object):
      The request as a string in application/http format.
    """
    # Construct status line
-    parsed = urlparse.urlparse(request.uri)
-    request_line = urlparse.urlunparse(
-        (None, None, parsed.path, parsed.params, parsed.query, None)
+    parsed = urlparse(request.uri)
+    request_line = urlunparse(
+        ('', '', parsed.path, parsed.params, parsed.query, '')
        )
    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
    major, minor = request.headers.get('content-type', 'application/json').split('/')
@ -1098,7 +1104,7 @@ class BatchHttpRequest(object):
    if 'content-type' in headers:
      del headers['content-type']

-    for key, value in headers.iteritems():
+    for key, value in six.iteritems(headers):
      msg[key] = value
    msg['Host'] = parsed.netloc
    msg.set_unixfrom(None)
@ -1108,7 +1114,7 @@ class BatchHttpRequest(object):
      msg['content-length'] = str(len(request.body))

    # Serialize the mime message.
-    fp = StringIO.StringIO()
+    fp = StringIO()
    # maxheaderlen=0 means don't line wrap headers.
    g = Generator(fp, maxheaderlen=0)
    g.flatten(msg, unixfrom=False)
@ -1118,7 +1124,7 @@ class BatchHttpRequest(object):
    if request.body is None:
      body = body[:-2]

-    return status_line.encode('utf-8') + body
+    return status_line + body

  def _deserialize_response(self, payload):
    """Convert string into httplib2 response and content.
@ -1231,7 +1237,7 @@ class BatchHttpRequest(object):

    # encode the body: note that we can't use `as_string`, because
    # it plays games with `From ` lines.
-    fp = StringIO.StringIO()
+    fp = StringIO()
    g = Generator(fp, mangle_from_=False)
    g.flatten(message, unixfrom=False)
    body = fp.getvalue()
@ -1328,7 +1334,7 @@ class BatchHttpRequest(object):
        if resp.status >= 300:
          raise HttpError(resp, content, uri=request.uri)
        response = request.postproc(resp, content)
-      except HttpError, e:
+      except HttpError as e:
        exception = e

      if callback is not None:
@ -1454,7 +1460,7 @@ class HttpMock(object):
    if headers is None:
      headers = {'status': '200 OK'}
    if filename:
-      f = file(filename, 'r')
+      f = open(filename, 'r')
      self.data = f.read()
      f.close()
    else:
@ -21,6 +21,9 @@ Contents:
    - best_match(): Choose the mime-type with the highest quality ('q')
                    from a list of candidates.
"""
+from __future__ import absolute_import
+from functools import reduce
+import six

__version__ = '0.1.3'
__author__ = 'Joe Gregorio'
@ -68,7 +71,7 @@ def parse_media_range(range):
       necessary.
    """
    (type, subtype, params) = parse_mime_type(range)
-    if not params.has_key('q') or not params['q'] or \
+    if 'q' not in params or not params['q'] or \
            not float(params['q']) or float(params['q']) > 1\
            or float(params['q']) < 0:
        params['q'] = '1'
@ -98,8 +101,8 @@ def fitness_and_quality_parsed(mime_type, parsed_ranges):
                             target_subtype == '*')
            if type_match and subtype_match:
                param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
-                        target_params.iteritems() if key != 'q' and \
-                        params.has_key(key) and value == params[key]], 0)
+                        six.iteritems(target_params) if key != 'q' and \
+                        key in params and value == params[key]], 0)
                fitness = (type == target_type) and 100 or 0
                fitness += (subtype == target_subtype) and 10 or 0
                fitness += param_matches
@ -1,5 +1,3 @@
-#!/usr/bin/python2.4
-#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -21,15 +19,18 @@ as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python
object representation.
"""
+from __future__ import absolute_import
+import six

__author__ = 'jcgregorio@google.com (Joe Gregorio)'

import json
import logging
-import urllib
+
+from six.moves.urllib.parse import urlencode

from googleapiclient import __version__
-from errors import HttpError
+from googleapiclient.errors import HttpError


dump_request_response = False
@ -106,11 +107,11 @@ class BaseModel(Model):
    if dump_request_response:
      logging.info('--request-start--')
      logging.info('-headers-start-')
-      for h, v in headers.iteritems():
+      for h, v in six.iteritems(headers):
        logging.info('%s: %s', h, v)
      logging.info('-headers-end-')
      logging.info('-path-parameters-start-')
-      for h, v in path_params.iteritems():
+      for h, v in six.iteritems(path_params):
        logging.info('%s: %s', h, v)
      logging.info('-path-parameters-end-')
      logging.info('body: %s', body)
@ -161,22 +162,22 @@ class BaseModel(Model):
    if self.alt_param is not None:
      params.update({'alt': self.alt_param})
    astuples = []
-    for key, value in params.iteritems():
+    for key, value in six.iteritems(params):
      if type(value) == type([]):
        for x in value:
          x = x.encode('utf-8')
          astuples.append((key, x))
      else:
-        if getattr(value, 'encode', False) and callable(value.encode):
+        if isinstance(value, six.text_type) and callable(value.encode):
          value = value.encode('utf-8')
        astuples.append((key, value))
-    return '?' + urllib.urlencode(astuples)
+    return '?' + urlencode(astuples)

  def _log_response(self, resp, content):
    """Logs debugging information about the response if requested."""
    if dump_request_response:
      logging.info('--response-start--')
-      for h, v in resp.iteritems():
+      for h, v in six.iteritems(resp):
        logging.info('%s: %s', h, v)
      if content:
        logging.info(content)
@ -257,7 +258,10 @@ class JsonModel(BaseModel):
    return json.dumps(body_value)

  def deserialize(self, content):
-    content = content.decode('utf-8')
+    try:
+      content = content.decode('utf-8')
+    except AttributeError:
+      pass
    body = json.loads(content)
    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
      body = body['data']
@ -361,7 +365,7 @@ def makepatch(original, modified):
        body=makepatch(original, item)).execute()
  """
  patch = {}
-  for key, original_value in original.iteritems():
+  for key, original_value in six.iteritems(original):
    modified_value = modified.get(key, None)
    if modified_value is None:
      # Use None to signal that the element is deleted
@ -16,6 +16,7 @@

Consolidates a lot of code commonly repeated in sample applications.
"""
+from __future__ import absolute_import

__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['init']
@ -94,9 +95,9 @@ def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_f
    service = discovery.build(name, version, http=http)
  else:
    # Construct a service object using a local discovery document file.
-      with open(discovery_filename) as discovery_file:
-        service = discovery.build_from_document(
-            discovery_file.read(),
-            base='https://www.googleapis.com/',
-            http=http)
+    with open(discovery_filename) as discovery_file:
+      service = discovery.build_from_document(
+          discovery_file.read(),
+          base='https://www.googleapis.com/',
+          http=http)
  return (service, flags)
@ -56,6 +56,8 @@ For example, given the schema:

The constructor takes a discovery document in which to look up named schema.
"""
+from __future__ import absolute_import
+import six

# TODO(jcgregorio) support format, enum, minimum, maximum
@ -249,7 +251,7 @@ class _SchemaToStruct(object):
    self.emitEnd('{', schema.get('description', ''))
    self.indent()
    if 'properties' in schema:
-      for pname, pschema in schema.get('properties', {}).iteritems():
+      for pname, pschema in six.iteritems(schema.get('properties', {})):
        self.emitBegin('"%s": ' % pname)
        self._to_str_impl(pschema)
    elif 'additionalProperties' in schema:
@ -1,6 +1,6 @@
"""Client library for using OAuth2, especially with Google APIs."""

-__version__ = '1.3.1'
+__version__ = '1.4.7'

GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'
@ -571,16 +571,14 @@ class OAuth2Decorator(object):
  Instantiate and then use with oauth_required or oauth_aware
  as decorators on webapp.RequestHandler methods.

-  Example:
+  ::

    decorator = OAuth2Decorator(
        client_id='837...ent.com',
        client_secret='Qh...wwI',
        scope='https://www.googleapis.com/auth/plus')

    class MainHandler(webapp.RequestHandler):

      @decorator.oauth_required
      def get(self):
        http = decorator.http()
@ -847,7 +845,8 @@ class OAuth2Decorator(object):
  def callback_handler(self):
    """RequestHandler for the OAuth 2.0 redirect callback.

-    Usage:
+    Usage::
+
      app = webapp.WSGIApplication([
        ('/index', MyIndexHandler),
        ...,
@ -910,20 +909,19 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
  Uses a clientsecrets file as the source for all the information when
  constructing an OAuth2Decorator.

-  Example:
+  ::

    decorator = OAuth2DecoratorFromClientSecrets(
      os.path.join(os.path.dirname(__file__), 'client_secrets.json')
      scope='https://www.googleapis.com/auth/plus')

    class MainHandler(webapp.RequestHandler):

      @decorator.oauth_required
      def get(self):
        http = decorator.http()
        # http is authorized with the user's Credentials and can be used
        # in API calls

  """

  @util.positional(3)
@ -26,10 +26,11 @@ import datetime
import json
import logging
import os
import socket
import sys
import time
-import urllib
-import urlparse
+
+import six
+from six.moves import urllib

import httplib2
from oauth2client import clientsecrets
@ -90,6 +91,15 @@ ADC_HELP_MSG = (
AccessTokenInfo = collections.namedtuple(
    'AccessTokenInfo', ['access_token', 'expires_in'])

+DEFAULT_ENV_NAME = 'UNKNOWN'
+
+# If set to True _get_environment avoid GCE check (_detect_gce_environment)
+NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False')
+
+class SETTINGS(object):
+  """Settings namespace for globally defined values."""
+  env_name = None
+

class Error(Exception):
  """Base error for this module."""
@ -231,6 +241,9 @@ class Credentials(object):
    # Add in information we will need later to reconsistitue this instance.
    d['_class'] = t.__name__
    d['_module'] = t.__module__
+    for key, val in d.items():
+      if isinstance(val, bytes):
+        d[key] = val.decode('utf-8')
    return json.dumps(d)

  def to_json(self):
@ -254,6 +267,8 @@ class Credentials(object):
      An instance of the subclass of Credentials that was serialized with
      to_json().
    """
+    if six.PY3 and isinstance(s, bytes):
+      s = s.decode('utf-8')
    data = json.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
    module = data['_module']
@ -398,8 +413,10 @@ def clean_headers(headers):
  """
  clean = {}
  try:
-    for k, v in headers.iteritems():
-      clean[str(k)] = str(v)
+    for k, v in six.iteritems(headers):
+      clean_k = k if isinstance(k, bytes) else str(k).encode('ascii')
+      clean_v = v if isinstance(v, bytes) else str(v).encode('ascii')
+      clean[clean_k] = clean_v
  except UnicodeEncodeError:
    raise NonAsciiHeaderError(k + ': ' + v)
  return clean
@ -415,11 +432,11 @@ def _update_query_params(uri, params):
  Returns:
    The same URI but with the new query parameters added.
  """
-  parts = urlparse.urlparse(uri)
-  query_params = dict(urlparse.parse_qsl(parts.query))
+  parts = urllib.parse.urlparse(uri)
+  query_params = dict(urllib.parse.parse_qsl(parts.query))
  query_params.update(params)
-  new_parts = parts._replace(query=urllib.urlencode(query_params))
-  return urlparse.urlunparse(new_parts)
+  new_parts = parts._replace(query=urllib.parse.urlencode(query_params))
+  return urllib.parse.urlunparse(new_parts)


class OAuth2Credentials(Credentials):
@ -487,13 +504,13 @@ class OAuth2Credentials(Credentials):
    it.

    Args:
-      http: An instance of httplib2.Http
-          or something that acts like it.
+      http: An instance of ``httplib2.Http`` or something that acts
+          like it.

    Returns:
      A modified instance of http that was passed in.

-    Example:
+    Example::

      h = httplib2.Http()
      h = credentials.authorize(h)
@ -503,6 +520,7 @@ class OAuth2Credentials(Credentials):
    signing. So instead we have to overload 'request' with a closure
    that adds in the Authorization header and then calls the original
    version of 'request()'.
+
    """
    request_orig = http.request
@ -589,6 +607,8 @@ class OAuth2Credentials(Credentials):
    Returns:
      An instance of a Credentials subclass.
    """
+    if six.PY3 and isinstance(s, bytes):
+      s = s.decode('utf-8')
    data = json.loads(s)
    if (data.get('token_expiry') and
        not isinstance(data['token_expiry'], datetime.datetime)):
@ -691,7 +711,7 @@ class OAuth2Credentials(Credentials):

  def _generate_refresh_request_body(self):
    """Generate the body that will be used in the refresh request."""
-    body = urllib.urlencode({
+    body = urllib.parse.urlencode({
        'grant_type': 'refresh_token',
        'client_id': self.client_id,
        'client_secret': self.client_secret,
@ -755,8 +775,9 @@ class OAuth2Credentials(Credentials):
    logger.info('Refreshing access_token')
    resp, content = http_request(
        self.token_uri, method='POST', body=body, headers=headers)
+    if six.PY3 and isinstance(content, bytes):
+      content = content.decode('utf-8')
    if resp.status == 200:
      # TODO(jcgregorio) Raise an error if loads fails?
      d = json.loads(content)
      self.token_response = d
      self.access_token = d['access_token']
@ -785,21 +806,21 @@ class OAuth2Credentials(Credentials):
          self.invalid = True
          if self.store:
            self.store.locked_put(self)
-      except StandardError:
+      except (TypeError, ValueError):
        pass
      raise AccessTokenRefreshError(error_msg)

  def _revoke(self, http_request):
-    """Revokes the refresh_token and deletes the store if available.
+    """Revokes this credential and deletes the stored copy (if it exists).

    Args:
      http_request: callable, a callable that matches the method signature of
        httplib2.Http.request, used to make the revoke request.
    """
-    self._do_revoke(http_request, self.refresh_token)
+    self._do_revoke(http_request, self.refresh_token or self.access_token)

  def _do_revoke(self, http_request, token):
-    """Revokes the credentials and deletes the store if available.
+    """Revokes this credential and deletes the stored copy (if it exists).

    Args:
      http_request: callable, a callable that matches the method signature of
@ -822,7 +843,7 @@ class OAuth2Credentials(Credentials):
        d = json.loads(content)
        if 'error' in d:
          error_msg = d['error']
-      except StandardError:
+      except (TypeError, ValueError):
        pass
      raise TokenRevokeError(error_msg)
@ -844,7 +865,8 @@ class AccessTokenCredentials(OAuth2Credentials):

  AccessTokenCredentials objects may be safely pickled and unpickled.

-  Usage:
+  Usage::
+
    credentials = AccessTokenCredentials('<an access token>',
      'my-user-agent/1.0')
    http = httplib2.Http()
@ -880,10 +902,12 @@ class AccessTokenCredentials(OAuth2Credentials):

  @classmethod
  def from_json(cls, s):
+    if six.PY3 and isinstance(s, bytes):
+      s = s.decode('utf-8')
    data = json.loads(s)
    retval = AccessTokenCredentials(
-      data['access_token'],
-      data['user_agent'])
+        data['access_token'],
+        data['user_agent'])
    return retval

  def _refresh(self, http_request):
@ -900,36 +924,60 @@ class AccessTokenCredentials(OAuth2Credentials):
    self._do_revoke(http_request, self.access_token)


-_env_name = None
+def _detect_gce_environment(urlopen=None):
+  """Determine if the current environment is Compute Engine.
+
+  Args:
+      urlopen: Optional argument. Function used to open a connection to a URL.
+
+  Returns:
+      Boolean indicating whether or not the current environment is Google
+          Compute Engine.
+  """
+  urlopen = urlopen or urllib.request.urlopen
+  # Note: the explicit `timeout` below is a workaround. The underlying
+  # issue is that resolving an unknown host on some networks will take
+  # 20-30 seconds; making this timeout short fixes the issue, but
+  # could lead to false negatives in the event that we are on GCE, but
+  # the metadata resolution was particularly slow. The latter case is
+  # "unlikely".
+  try:
+    response = urlopen('http://169.254.169.254/', timeout=1)
+    return response.info().get('Metadata-Flavor', '') == 'Google'
+  except socket.timeout:
+    logger.info('Timeout attempting to reach GCE metadata service.')
+    return False
+  except urllib.error.URLError as e:
+    if isinstance(getattr(e, 'reason', None), socket.timeout):
+      logger.info('Timeout attempting to reach GCE metadata service.')
+    return False


-def _get_environment(urllib2_urlopen=None):
-  """Detect the environment the code is being run on."""
+def _get_environment(urlopen=None):
+  """Detect the environment the code is being run on.

-  global _env_name
+  Args:
+      urlopen: Optional argument. Function used to open a connection to a URL.

-  if _env_name:
-    return _env_name
+  Returns:
+      The value of SETTINGS.env_name after being set. If already
+          set, simply returns the value.
+  """
+  if SETTINGS.env_name is not None:
+    return SETTINGS.env_name
+
+  # None is an unset value, not the default.
+  SETTINGS.env_name = DEFAULT_ENV_NAME

  server_software = os.environ.get('SERVER_SOFTWARE', '')
  if server_software.startswith('Google App Engine/'):
-    _env_name = 'GAE_PRODUCTION'
+    SETTINGS.env_name = 'GAE_PRODUCTION'
  elif server_software.startswith('Development/'):
-    _env_name = 'GAE_LOCAL'
-  else:
-    import urllib2
-    try:
-      if urllib2_urlopen is None:
-        urllib2_urlopen = urllib2.urlopen
-      response = urllib2_urlopen('http://metadata.google.internal')
-      if any('Metadata-Flavor: Google' in h for h in response.info().headers):
-        _env_name = 'GCE_PRODUCTION'
-      else:
-        _env_name = 'UNKNOWN'
-    except urllib2.URLError:
-      _env_name = 'UNKNOWN'
+    SETTINGS.env_name = 'GAE_LOCAL'
+  elif NO_GCE_CHECK != 'True' and _detect_gce_environment(urlopen=urlopen):
+    SETTINGS.env_name = 'GCE_PRODUCTION'

-  return _env_name
+  return SETTINGS.env_name


class GoogleCredentials(OAuth2Credentials):
@ -943,36 +991,19 @@ class GoogleCredentials(OAuth2Credentials):
  Here is an example of how to use the Application Default Credentials for a
  service that requires authentication:

-  <code>
-  from googleapiclient.discovery import build
-  from oauth2client.client import GoogleCredentials
+      from googleapiclient.discovery import build
+      from oauth2client.client import GoogleCredentials

-  PROJECT = 'bamboo-machine-422'  # replace this with one of your projects
-  ZONE = 'us-central1-a'          # replace this with the zone you care about
-  credentials = GoogleCredentials.get_application_default()
-  service = build('compute', 'v1', credentials=credentials)
+      credentials = GoogleCredentials.get_application_default()
+      service = build('compute', 'v1', credentials=credentials)
+      PROJECT = 'bamboo-machine-422'
+      ZONE = 'us-central1-a'

-  request = service.instances().list(project=PROJECT, zone=ZONE)
-  response = request.execute()
+      request = service.instances().list(project=PROJECT, zone=ZONE)
+      response = request.execute()

-  print response
-  </code>
-
-  A service that does not require authentication does not need credentials
-  to be passed in:
-
-  <code>
-  from googleapiclient.discovery import build
-
-  service = build('discovery', 'v1')
-
-  request = service.apis().list()
-  response = request.execute()
-
-  print response
-  </code>
-  """
+      print(response)
+  """

  def __init__(self, access_token, client_id, client_secret, refresh_token,
               token_expiry, token_uri, user_agent,
@ -1024,6 +1055,116 @@ class GoogleCredentials(OAuth2Credentials):
        'refresh_token': self.refresh_token
    }

+  @staticmethod
+  def _implicit_credentials_from_gae(env_name=None):
+    """Attempts to get implicit credentials in Google App Engine env.
+
+    If the current environment is not detected as App Engine, returns None,
+    indicating no Google App Engine credentials can be detected from the
+    current environment.
+
+    Args:
+      env_name: String, indicating current environment.
+
+    Returns:
+      None, if not in GAE, else an appengine.AppAssertionCredentials object.
+    """
+    env_name = env_name or _get_environment()
+    if env_name not in ('GAE_PRODUCTION', 'GAE_LOCAL'):
+      return None
+
+    return _get_application_default_credential_GAE()
+
+  @staticmethod
+  def _implicit_credentials_from_gce(env_name=None):
+    """Attempts to get implicit credentials in Google Compute Engine env.
+
+    If the current environment is not detected as Compute Engine, returns None,
+    indicating no Google Compute Engine credentials can be detected from the
+    current environment.
+
+    Args:
+      env_name: String, indicating current environment.
+
+    Returns:
+      None, if not in GCE, else a gce.AppAssertionCredentials object.
+    """
+    env_name = env_name or _get_environment()
+    if env_name != 'GCE_PRODUCTION':
+      return None
+
+    return _get_application_default_credential_GCE()
+
+  @staticmethod
+  def _implicit_credentials_from_files(env_name=None):
+    """Attempts to get implicit credentials from local credential files.
+
+    First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
+    is set with a filename and then falls back to a configuration file (the
+    "well known" file) associated with the 'gcloud' command line tool.
+
+    Args:
+      env_name: Unused argument.
+
+    Returns:
+      Credentials object associated with the GOOGLE_APPLICATION_CREDENTIALS
+      file or the "well known" file if either exist. If neither file is
+      define, returns None, indicating no credentials from a file can
+      detected from the current environment.
+    """
+    credentials_filename = _get_environment_variable_file()
+    if not credentials_filename:
+      credentials_filename = _get_well_known_file()
+      if os.path.isfile(credentials_filename):
+        extra_help = (' (produced automatically when running'
+                      ' "gcloud auth login" command)')
+      else:
+        credentials_filename = None
+    else:
+      extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
+                    ' environment variable)')
+
+    if not credentials_filename:
+      return
+
+    try:
+      return _get_application_default_credential_from_file(credentials_filename)
+    except (ApplicationDefaultCredentialsError, ValueError) as error:
+      _raise_exception_for_reading_json(credentials_filename, extra_help, error)
+
+  @classmethod
+  def _get_implicit_credentials(cls):
+    """Gets credentials implicitly from the environment.
+
+    Checks environment in order of precedence:
+    - Google App Engine (production and testing)
+    - Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to
+      a file with stored credentials information.
+    - Stored "well known" file associated with `gcloud` command line tool.
+    - Google Compute Engine production environment.
+
+    Exceptions:
+      ApplicationDefaultCredentialsError: raised when the credentials fail
+          to be retrieved.
+    """
+    env_name = _get_environment()
+
+    # Environ checks (in order). Assumes each checker takes `env_name`
+    # as a kwarg.
+    environ_checkers = [
+      cls._implicit_credentials_from_gae,
+      cls._implicit_credentials_from_files,
+      cls._implicit_credentials_from_gce,
+    ]
+
+    for checker in environ_checkers:
+      credentials = checker(env_name=env_name)
+      if credentials is not None:
+        return credentials
+
+    # If no credentials, fail.
+    raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
+
  @staticmethod
  def get_application_default():
    """Get the Application Default Credentials for the current environment.
@ -1032,42 +1173,7 @@ class GoogleCredentials(OAuth2Credentials):
      ApplicationDefaultCredentialsError: raised when the credentials fail
          to be retrieved.
    """
-
-    env_name = _get_environment()
-
-    if env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'):
-      # if we are running inside Google App Engine
-      # there is no need to look for credentials in local files
-      application_default_credential_filename = None
-      well_known_file = None
-    else:
-      application_default_credential_filename = _get_environment_variable_file()
-      well_known_file = _get_well_known_file()
-      if not os.path.isfile(well_known_file):
-        well_known_file = None
-
-    if application_default_credential_filename:
-      try:
-        return _get_application_default_credential_from_file(
-            application_default_credential_filename)
-      except (ApplicationDefaultCredentialsError, ValueError) as error:
-        extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
-                      ' environment variable)')
-        _raise_exception_for_reading_json(
-            application_default_credential_filename, extra_help, error)
-    elif well_known_file:
-      try:
-        return _get_application_default_credential_from_file(well_known_file)
-      except (ApplicationDefaultCredentialsError, ValueError) as error:
-        extra_help = (' (produced automatically when running'
-                      ' "gcloud auth login" command)')
-        _raise_exception_for_reading_json(well_known_file, extra_help, error)
-    elif env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'):
-      return _get_application_default_credential_GAE()
-    elif env_name == 'GCE_PRODUCTION':
-      return _get_application_default_credential_GCE()
-    else:
-      raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
+    return GoogleCredentials._get_implicit_credentials()

  @staticmethod
  def from_stream(credential_filename):
@ -1164,16 +1270,14 @@ def _get_well_known_file():
  return default_config_path


-def _get_application_default_credential_from_file(
-    application_default_credential_filename):
+def _get_application_default_credential_from_file(filename):
  """Build the Application Default Credentials from file."""

-  import service_account
+  from oauth2client import service_account

  # read the credentials from the file
-  with open(application_default_credential_filename) as (
-      application_default_credential):
-    client_credentials = json.load(application_default_credential)
+  with open(filename) as file_obj:
+    client_credentials = json.load(file_obj)

  credentials_type = client_credentials.get('type')
  if credentials_type == AUTHORIZED_USER:
@ -1274,7 +1378,7 @@ class AssertionCredentials(GoogleCredentials):
  def _generate_refresh_request_body(self):
    assertion = self._generate_assertion()

-    body = urllib.urlencode({
+    body = urllib.parse.urlencode({
        'assertion': assertion,
        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
        })
@ -1363,6 +1467,8 @@ class SignedJwtAssertionCredentials(AssertionCredentials):

    # Keep base64 encoded so it can be stored in JSON.
    self.private_key = base64.b64encode(private_key)
+    if isinstance(self.private_key, six.text_type):
+      self.private_key = self.private_key.encode('utf-8')

    self.private_key_password = private_key_password
    self.service_account_name = service_account_name
@ -1386,7 +1492,7 @@ class SignedJwtAssertionCredentials(AssertionCredentials):

  def _generate_assertion(self):
    """Generate the assertion that will be used in the request."""
-    now = long(time.time())
+    now = int(time.time())
    payload = {
        'aud': self.token_uri,
        'scope': self.scope,
@ -1435,7 +1541,7 @@ def verify_id_token(id_token, audience, http=None,
  resp, content = http.request(cert_uri)

  if resp.status == 200:
-    certs = json.loads(content)
+    certs = json.loads(content.decode('utf-8'))
    return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
  else:
    raise VerifyJwtTokenError('Status code: %d' % resp.status)
@ -1443,8 +1549,9 @@ def verify_id_token(id_token, audience, http=None,

def _urlsafe_b64decode(b64string):
  # Guard against unicode strings, which base64 can't handle.
-  b64string = b64string.encode('ascii')
-  padded = b64string + '=' * (4 - len(b64string) % 4)
+  if isinstance(b64string, six.text_type):
+    b64string = b64string.encode('ascii')
+  padded = b64string + b'=' * (4 - len(b64string) % 4)
  return base64.urlsafe_b64decode(padded)
@ -1454,18 +1561,21 @@ def _extract_id_token(id_token):
  Does the extraction w/o checking the signature.

  Args:
-    id_token: string, OAuth 2.0 id_token.
+    id_token: string or bytestring, OAuth 2.0 id_token.

  Returns:
    object, The deserialized JSON payload.
  """
-  segments = id_token.split('.')
+  if type(id_token) == bytes:
+    segments = id_token.split(b'.')
+  else:
+    segments = id_token.split(u'.')

  if len(segments) != 3:
    raise VerifyJwtTokenError(
        'Wrong number of segments in token: %s' % id_token)

-  return json.loads(_urlsafe_b64decode(segments[1]))
+  return json.loads(_urlsafe_b64decode(segments[1]).decode('utf-8'))
def _parse_exchange_token_response(content):
|
||||
@ -1483,11 +1593,12 @@ def _parse_exchange_token_response(content):
|
||||
"""
|
||||
resp = {}
|
||||
try:
|
||||
resp = json.loads(content)
|
||||
except StandardError:
|
||||
resp = json.loads(content.decode('utf-8'))
|
||||
except Exception:
|
||||
# different JSON libs raise different exceptions,
|
||||
# so we just do a catch-all here
|
||||
resp = dict(urlparse.parse_qsl(content))
|
||||
content = content.decode('utf-8')
|
||||
resp = dict(urllib.parse.parse_qsl(content))
|
||||
|
||||
# some providers respond with 'expires', others with 'expires_in'
|
||||
if resp and 'expires' in resp:
|
||||
@ -1509,7 +1620,7 @@ def credentials_from_code(client_id, client_secret, scope, code,
    client_id: string, client identifier.
    client_secret: string, client secret.
    scope: string or iterable of strings, scope(s) to request.
-    code: string, An authroization code, most likely passed down from
+    code: string, An authorization code, most likely passed down from
      the client
    redirect_uri: string, this is generally set to 'postmessage' to match the
      redirect_uri that the client specified
@ -1593,8 +1704,8 @@ class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
  def FromResponse(cls, response):
    """Create a DeviceFlowInfo from a server response.

-    The response should be a dict containing entries as described
-    here:
+    The response should be a dict containing entries as described here:

      http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
    """
    # device_code, user_code, and verification_url are required.
@ -1726,7 +1837,7 @@ class OAuth2WebServerFlow(Flow):
    if self.device_uri is None:
      raise ValueError('The value of device_uri must not be None.')

-    body = urllib.urlencode({
+    body = urllib.parse.urlencode({
        'client_id': self.client_id,
        'scope': self.scope,
    })
@ -1767,10 +1878,10 @@ class OAuth2WebServerFlow(Flow):

    Args:

-      code: string, dict or None. For a non-device flow, this is
-          either the response code as a string, or a dictionary of
-          query parameters to the redirect_uri. For a device flow,
-          this should be None.
+      code: string, a dict-like object, or None. For a non-device
+          flow, this is either the response code as a string, or a
+          dictionary of query parameters to the redirect_uri. For a
+          device flow, this should be None.
      http: httplib2.Http, optional http instance to use when fetching
          credentials.
      device_flow_info: DeviceFlowInfo, return value from step1 in the
@ -1780,7 +1891,7 @@ class OAuth2WebServerFlow(Flow):
      An OAuth2Credentials object that can be used to authorize requests.

    Raises:
-      FlowExchangeError: if a problem occured exchanging the code for a
+      FlowExchangeError: if a problem occurred exchanging the code for a
          refresh_token.
      ValueError: if code and device_flow_info are both provided or both
          missing.
@ -1793,7 +1904,7 @@ class OAuth2WebServerFlow(Flow):

    if code is None:
      code = device_flow_info.device_code
-    elif isinstance(code, dict):
+    elif not isinstance(code, six.string_types):
      if 'code' not in code:
        raise FlowExchangeError(code.get(
            'error', 'No code was supplied in the query parameters.'))
@ -1803,14 +1914,14 @@ class OAuth2WebServerFlow(Flow):
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'code': code,
-        # 'scope': self.scope,
+        'scope': self.scope,
    }
    if device_flow_info is not None:
      post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0'
    else:
      post_data['grant_type'] = 'authorization_code'
      post_data['redirect_uri'] = self.redirect_uri
-    body = urllib.urlencode(post_data)
+    body = urllib.parse.urlencode(post_data)
    headers = {
        'content-type': 'application/x-www-form-urlencoded',
    }
@ -1836,21 +1947,22 @@ class OAuth2WebServerFlow(Flow):
|
||||
token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
|
||||
seconds=int(d['expires_in']))
|
||||
|
||||
extracted_id_token = None
|
||||
if 'id_token' in d:
|
||||
d['id_token'] = _extract_id_token(d['id_token'])
|
||||
extracted_id_token = _extract_id_token(d['id_token'])
|
||||
|
||||
logger.info('Successfully retrieved access token')
|
||||
return OAuth2Credentials(access_token, self.client_id,
|
||||
self.client_secret, refresh_token, token_expiry,
|
||||
self.token_uri, self.user_agent,
|
||||
revoke_uri=self.revoke_uri,
|
||||
id_token=d.get('id_token', None),
|
||||
id_token=extracted_id_token,
|
||||
token_response=d)
|
||||
else:
|
||||
logger.info('Failed to retrieve access token: %s', content)
|
||||
if 'error' in d:
|
||||
# you never know what those providers got to say
|
||||
error_msg = unicode(d['error'])
|
||||
error_msg = str(d['error']) + str(d.get('error_description', ''))
|
||||
else:
|
||||
error_msg = 'Invalid response: %s.' % str(resp.status)
|
||||
raise FlowExchangeError(error_msg)
|
||||
|
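For context, a minimal sketch of how the exchange above is driven from calling code; the client id, secret and scope below are placeholders rather than values taken from this change::

    from six.moves import input
    from oauth2client.client import OAuth2WebServerFlow, FlowExchangeError

    # Placeholder credentials; real applications load these from a
    # client_secrets.json file instead of hard-coding them.
    flow = OAuth2WebServerFlow(client_id='CLIENT_ID',
                               client_secret='CLIENT_SECRET',
                               scope='https://www.googleapis.com/auth/drive',
                               redirect_uri='urn:ietf:wg:oauth:2.0:oob')
    print('Visit: ' + flow.step1_get_authorize_url())
    code = input('Enter the authorization code: ').strip()
    try:
        credentials = flow.step2_exchange(code)
    except FlowExchangeError as e:
        print('Exchange failed: %s' % e)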
@ -21,6 +21,7 @@ an OAuth 2.0 protected service.
|
||||
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
|
||||
|
||||
import json
|
||||
import six
|
||||
|
||||
|
||||
# Properties that make a client_secrets.json file valid.
|
||||
@ -68,11 +69,21 @@ class InvalidClientSecretsError(Error):
|
||||
|
||||
|
||||
def _validate_clientsecrets(obj):
|
||||
if obj is None or len(obj) != 1:
|
||||
raise InvalidClientSecretsError('Invalid file format.')
|
||||
client_type = obj.keys()[0]
|
||||
if client_type not in VALID_CLIENT.keys():
|
||||
raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
|
||||
_INVALID_FILE_FORMAT_MSG = (
|
||||
'Invalid file format. See '
|
||||
'https://developers.google.com/api-client-library/'
|
||||
'python/guide/aaa_client_secrets')
|
||||
|
||||
if obj is None:
|
||||
raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG)
|
||||
if len(obj) != 1:
|
||||
raise InvalidClientSecretsError(
|
||||
_INVALID_FILE_FORMAT_MSG + ' '
|
||||
'Expected a JSON object with a single property for a "web" or '
|
||||
'"installed" application')
|
||||
client_type = tuple(obj)[0]
|
||||
if client_type not in VALID_CLIENT:
|
||||
raise InvalidClientSecretsError('Unknown client type: %s.' % (client_type,))
|
||||
client_info = obj[client_type]
|
||||
for prop_name in VALID_CLIENT[client_type]['required']:
|
||||
if prop_name not in client_info:
|
||||
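To make the validation above concrete, a sketch that feeds a minimal, hand-built "installed" client description through ``loads()``; every value is a placeholder::

    import json
    from oauth2client import clientsecrets

    secrets = {
        'installed': {
            'client_id': 'PLACEHOLDER.apps.googleusercontent.com',
            'client_secret': 'PLACEHOLDER',
            'redirect_uris': [],
            'auth_uri': 'https://accounts.google.com/o/oauth2/auth',
            'token_uri': 'https://accounts.google.com/o/oauth2/token',
        }
    }
    # loads() parses the JSON and applies _validate_clientsecrets().
    client_type, client_info = clientsecrets.loads(json.dumps(secrets))
    assert client_type == 'installed'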
@ -98,11 +109,8 @@ def loads(s):
|
||||
|
||||
def _loadfile(filename):
|
||||
try:
|
||||
fp = file(filename, 'r')
|
||||
try:
|
||||
with open(filename, 'r') as fp:
|
||||
obj = json.load(fp)
|
||||
finally:
|
||||
fp.close()
|
||||
except IOError:
|
||||
raise InvalidClientSecretsError('File not found: "%s"' % filename)
|
||||
return _validate_clientsecrets(obj)
|
||||
@ -114,10 +122,12 @@ def loadfile(filename, cache=None):
|
||||
Typical cache storage would be App Engine memcache service,
|
||||
but you can pass in any other cache client that implements
|
||||
these methods:
|
||||
- get(key, namespace=ns)
|
||||
- set(key, value, namespace=ns)
|
||||
|
||||
Usage:
|
||||
* ``get(key, namespace=ns)``
|
||||
* ``set(key, value, namespace=ns)``
|
||||
|
||||
Usage::
|
||||
|
||||
# without caching
|
||||
client_type, client_info = loadfile('secrets.json')
|
||||
# using App Engine memcache service
|
||||
@ -150,4 +160,4 @@ def loadfile(filename, cache=None):
|
||||
obj = {client_type: client_info}
|
||||
cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
|
||||
|
||||
return obj.iteritems().next()
|
||||
return next(six.iteritems(obj))
|
||||
|
@ -18,8 +18,11 @@
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
|
||||
CLOCK_SKEW_SECS = 300 # 5 minutes in seconds
|
||||
AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds
|
||||
@ -59,6 +62,8 @@ try:
|
||||
key that this object was constructed with.
|
||||
"""
|
||||
try:
|
||||
if isinstance(message, six.text_type):
|
||||
message = message.encode('utf-8')
|
||||
crypto.verify(self._pubkey, signature, message, 'sha256')
|
||||
return True
|
||||
except:
|
||||
@ -101,15 +106,17 @@ try:
|
||||
"""Signs a message.
|
||||
|
||||
Args:
|
||||
message: string, Message to be signed.
|
||||
message: bytes, Message to be signed.
|
||||
|
||||
Returns:
|
||||
string, The signature of the message for the given key.
|
||||
"""
|
||||
if isinstance(message, six.text_type):
|
||||
message = message.encode('utf-8')
|
||||
return crypto.sign(self._key, message, 'sha256')
|
||||
|
||||
@staticmethod
|
||||
def from_string(key, password='notasecret'):
|
||||
def from_string(key, password=b'notasecret'):
|
||||
"""Construct a Signer instance from a string.
|
||||
|
||||
Args:
|
||||
@ -126,12 +133,34 @@ try:
|
||||
if parsed_pem_key:
|
||||
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
|
||||
else:
|
||||
pkey = crypto.load_pkcs12(key, password.encode('utf8')).get_privatekey()
|
||||
if isinstance(password, six.text_type):
|
||||
password = password.encode('utf-8')
|
||||
pkey = crypto.load_pkcs12(key, password).get_privatekey()
|
||||
return OpenSSLSigner(pkey)
|
||||
|
||||
|
||||
def pkcs12_key_as_pem(private_key_text, private_key_password):
|
||||
"""Convert the contents of a PKCS12 key to PEM using OpenSSL.
|
||||
|
||||
Args:
|
||||
private_key_text: String. Private key.
|
||||
private_key_password: String. Password for PKCS12.
|
||||
|
||||
Returns:
|
||||
String. PEM contents of ``private_key_text``.
|
||||
"""
|
||||
decoded_body = base64.b64decode(private_key_text)
|
||||
if isinstance(private_key_password, six.string_types):
|
||||
private_key_password = private_key_password.encode('ascii')
|
||||
|
||||
pkcs12 = crypto.load_pkcs12(decoded_body, private_key_password)
|
||||
return crypto.dump_privatekey(crypto.FILETYPE_PEM,
|
||||
pkcs12.get_privatekey())
|
||||
except ImportError:
|
||||
OpenSSLVerifier = None
|
||||
OpenSSLSigner = None
|
||||
def pkcs12_key_as_pem(*args, **kwargs):
|
||||
raise NotImplementedError('pkcs12_key_as_pem requires OpenSSL.')
|
||||
|
||||
|
||||
try:
|
||||
@ -182,8 +211,10 @@ try:
|
||||
Verifier instance.
|
||||
"""
|
||||
if is_x509_cert:
|
||||
pemLines = key_pem.replace(' ', '').split()
|
||||
certDer = _urlsafe_b64decode(''.join(pemLines[1:-1]))
|
||||
if isinstance(key_pem, six.text_type):
|
||||
key_pem = key_pem.encode('ascii')
|
||||
pemLines = key_pem.replace(b' ', b'').split()
|
||||
certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1]))
|
||||
certSeq = DerSequence()
|
||||
certSeq.decode(certDer)
|
||||
tbsSeq = DerSequence()
|
||||
@ -214,6 +245,8 @@ try:
|
||||
Returns:
|
||||
string, The signature of the message for the given key.
|
||||
"""
|
||||
if isinstance(message, six.text_type):
|
||||
message = message.encode('utf-8')
|
||||
return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
|
||||
|
||||
@staticmethod
|
||||
@ -269,19 +302,22 @@ def _parse_pem_key(raw_key_input):
|
||||
Returns:
|
||||
string, The actual key if the contents are from a PEM file, or else None.
|
||||
"""
|
||||
offset = raw_key_input.find('-----BEGIN ')
|
||||
offset = raw_key_input.find(b'-----BEGIN ')
|
||||
if offset != -1:
|
||||
return raw_key_input[offset:]
|
||||
|
||||
|
||||
def _urlsafe_b64encode(raw_bytes):
|
||||
return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
|
||||
if isinstance(raw_bytes, six.text_type):
|
||||
raw_bytes = raw_bytes.encode('utf-8')
|
||||
return base64.urlsafe_b64encode(raw_bytes).decode('ascii').rstrip('=')
|
||||
|
||||
|
||||
def _urlsafe_b64decode(b64string):
|
||||
# Guard against unicode strings, which base64 can't handle.
|
||||
b64string = b64string.encode('ascii')
|
||||
padded = b64string + '=' * (4 - len(b64string) % 4)
|
||||
if isinstance(b64string, six.text_type):
|
||||
b64string = b64string.encode('ascii')
|
||||
padded = b64string + b'=' * (4 - len(b64string) % 4)
|
||||
return base64.urlsafe_b64decode(padded)
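A quick round-trip through these two module-internal helpers, illustrating the bytes handling introduced above (purely a sketch)::

    from oauth2client import crypt

    # Text input is encoded to UTF-8 before base64-encoding; the stripped
    # '=' padding is restored on decode, which always returns bytes.
    token = crypt._urlsafe_b64encode(u'{"alg":"RS256"}')
    assert crypt._urlsafe_b64decode(token) == b'{"alg":"RS256"}'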
|
||||
|
||||
|
||||
@ -345,13 +381,13 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
|
||||
# Parse token.
|
||||
json_body = _urlsafe_b64decode(segments[1])
|
||||
try:
|
||||
parsed = json.loads(json_body)
|
||||
parsed = json.loads(json_body.decode('utf-8'))
|
||||
except:
|
||||
raise AppIdentityError('Can\'t parse token: %s' % json_body)
|
||||
|
||||
# Check signature.
|
||||
verified = False
|
||||
for _, pem in certs.items():
|
||||
for pem in certs.values():
|
||||
verifier = Verifier.from_string(pem, True)
|
||||
if verifier.verify(signed, signature):
|
||||
verified = True
|
||||
@ -366,7 +402,7 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
|
||||
earliest = iat - CLOCK_SKEW_SECS
|
||||
|
||||
# Check expiration timestamp.
|
||||
now = long(time.time())
|
||||
now = int(time.time())
|
||||
exp = parsed.get('exp')
|
||||
if exp is None:
|
||||
raise AppIdentityError('No exp field in token: %s' % json_body)
|
||||
|
oauth2client/devshell.py (new file, 136 lines)
@ -0,0 +1,136 @@

|
||||
# Copyright 2015 Google Inc. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""OAuth 2.0 utitilies for Google Developer Shell environment."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from oauth2client import client
|
||||
|
||||
|
||||
DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
"""Errors for this module."""
|
||||
pass
|
||||
|
||||
|
||||
class CommunicationError(Error):
|
||||
"""Errors for communication with the Developer Shell server."""
|
||||
|
||||
|
||||
class NoDevshellServer(Error):
|
||||
"""Error when no Developer Shell server can be contacted."""
|
||||
|
||||
|
||||
# The request for credential information to the Developer Shell client socket is
|
||||
# always an empty PBLite-formatted JSON object, so just define it as a constant.
|
||||
CREDENTIAL_INFO_REQUEST_JSON = '[]'
|
||||
|
||||
|
||||
class CredentialInfoResponse(object):
|
||||
"""Credential information response from Developer Shell server.
|
||||
|
||||
The credential information response from Developer Shell socket is a
|
||||
PBLite-formatted JSON array with fields encoded by their index in the array:
|
||||
* Index 0 - user email
|
||||
* Index 1 - default project ID. None if the project context is not known.
|
||||
* Index 2 - OAuth2 access token. None if there is no valid auth context.
|
||||
"""
|
||||
|
||||
def __init__(self, json_string):
|
||||
"""Initialize the response data from JSON PBLite array."""
|
||||
pbl = json.loads(json_string)
|
||||
if not isinstance(pbl, list):
|
||||
raise ValueError('Not a list: ' + str(pbl))
|
||||
pbl_len = len(pbl)
|
||||
self.user_email = pbl[0] if pbl_len > 0 else None
|
||||
self.project_id = pbl[1] if pbl_len > 1 else None
|
||||
self.access_token = pbl[2] if pbl_len > 2 else None
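To make the PBLite wire format concrete, a response such as the following (all values invented) parses into the three attributes::

    from oauth2client.devshell import CredentialInfoResponse

    response = CredentialInfoResponse(
        '["user@example.com", "my-project", "ya29.placeholder-token"]')
    assert response.user_email == 'user@example.com'
    assert response.project_id == 'my-project'
    assert response.access_token == 'ya29.placeholder-token'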
|
||||
|
||||
|
||||
def _SendRecv():
|
||||
"""Communicate with the Developer Shell server socket."""
|
||||
|
||||
port = int(os.getenv(DEVSHELL_ENV, 0))
|
||||
if port == 0:
|
||||
raise NoDevshellServer()
|
||||
|
||||
import socket
|
||||
|
||||
sock = socket.socket()
|
||||
sock.connect(('localhost', port))
|
||||
|
||||
data = CREDENTIAL_INFO_REQUEST_JSON
|
||||
msg = '%s\n%s' % (len(data), data)
|
||||
sock.sendall(msg.encode())
|
||||
|
||||
header = sock.recv(6).decode()
|
||||
if '\n' not in header:
|
||||
raise CommunicationError('saw no newline in the first 6 bytes')
|
||||
len_str, json_str = header.split('\n', 1)
|
||||
to_read = int(len_str) - len(json_str)
|
||||
if to_read > 0:
|
||||
json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()
|
||||
|
||||
return CredentialInfoResponse(json_str)
|
||||
|
||||
|
||||
class DevshellCredentials(client.GoogleCredentials):
|
||||
"""Credentials object for Google Developer Shell environment.
|
||||
|
||||
This object will allow a Google Developer Shell session to identify its user
|
||||
to Google and other OAuth 2.0 servers that can verify assertions. It can be
|
||||
used for the purpose of accessing data stored under the user account.
|
||||
|
||||
This credential does not require a flow to instantiate because it represents
|
||||
a two legged flow, and therefore has all of the required information to
|
||||
generate and refresh its own access tokens.
|
||||
"""
|
||||
|
||||
def __init__(self, user_agent=None):
|
||||
super(DevshellCredentials, self).__init__(
|
||||
None, # access_token, initialized below
|
||||
None, # client_id
|
||||
None, # client_secret
|
||||
None, # refresh_token
|
||||
None, # token_expiry
|
||||
None, # token_uri
|
||||
user_agent)
|
||||
self._refresh(None)
|
||||
|
||||
def _refresh(self, http_request):
|
||||
self.devshell_response = _SendRecv()
|
||||
self.access_token = self.devshell_response.access_token
|
||||
|
||||
@property
|
||||
def user_email(self):
|
||||
return self.devshell_response.user_email
|
||||
|
||||
@property
|
||||
def project_id(self):
|
||||
return self.devshell_response.project_id
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_data):
|
||||
raise NotImplementedError(
|
||||
'Cannot load Developer Shell credentials from JSON.')
|
||||
|
||||
@property
|
||||
def serialization_data(self):
|
||||
raise NotImplementedError(
|
||||
'Cannot serialize Developer Shell credentials.')
|
||||
|
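A minimal sketch of how these credentials are typically picked up; it only succeeds inside a Developer Shell session where ``DEVSHELL_CLIENT_PORT`` is set::

    import httplib2
    from oauth2client import devshell

    try:
        credentials = devshell.DevshellCredentials()
        http = credentials.authorize(httplib2.Http())
        print('Authenticated as %s' % credentials.user_email)
    except devshell.NoDevshellServer:
        print('Not running inside Google Developer Shell.')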
@ -39,7 +39,6 @@ class Storage(BaseStorage):
|
||||
self._lock = threading.Lock()
|
||||
|
||||
def _validate_file(self):
|
||||
return
|
||||
if os.path.islink(self._filename):
|
||||
raise CredentialsFileSymbolicLinkError(
|
||||
'File: %s is a symbolic link.' % self._filename)
|
||||
@ -91,7 +90,7 @@ class Storage(BaseStorage):
|
||||
simple version of "touch" to ensure the file has been created.
|
||||
"""
|
||||
if not os.path.exists(self._filename):
|
||||
old_umask = os.umask(0177)
|
||||
old_umask = os.umask(0o177)
|
||||
try:
|
||||
open(self._filename, 'a+b').close()
|
||||
finally:
|
||||
@ -109,7 +108,7 @@ class Storage(BaseStorage):
|
||||
|
||||
self._create_file_if_needed()
|
||||
self._validate_file()
|
||||
f = open(self._filename, 'wb')
|
||||
f = open(self._filename, 'w')
|
||||
f.write(credentials.to_json())
|
||||
f.close()
|
||||
|
||||
|
@ -21,7 +21,7 @@ __author__ = 'jcgregorio@google.com (Joe Gregorio)'
|
||||
|
||||
import json
|
||||
import logging
|
||||
import urllib
|
||||
from six.moves import urllib
|
||||
|
||||
from oauth2client import util
|
||||
from oauth2client.client import AccessTokenRefreshError
|
||||
@ -78,13 +78,13 @@ class AppAssertionCredentials(AssertionCredentials):
|
||||
Raises:
|
||||
AccessTokenRefreshError: When the refresh fails.
|
||||
"""
|
||||
query = '?scope=%s' % urllib.quote(self.scope, '')
|
||||
query = '?scope=%s' % urllib.parse.quote(self.scope, '')
|
||||
uri = META.replace('{?scope}', query)
|
||||
response, content = http_request(uri)
|
||||
if response.status == 200:
|
||||
try:
|
||||
d = json.loads(content)
|
||||
except StandardError as e:
|
||||
except Exception as e:
|
||||
raise AccessTokenRefreshError(str(e))
|
||||
self.access_token = d['accessToken']
|
||||
else:
|
||||
|
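For reference, a minimal sketch of how these metadata-backed credentials are used; it only works on a Compute Engine instance, and the scope is a placeholder::

    import httplib2
    from oauth2client.gce import AppAssertionCredentials

    # The metadata server mints tokens for the instance's service account.
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.read_only')
    http = credentials.authorize(httplib2.Http())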
@ -17,17 +17,21 @@
|
||||
This module first tries to use fcntl locking to ensure serialized access
|
||||
to a file, then falls back on a lock file if that is unavailable.
|
||||
|
||||
Usage:
|
||||
Usage::
|
||||
|
||||
f = LockedFile('filename', 'r+b', 'rb')
|
||||
f.open_and_lock()
|
||||
if f.is_locked():
|
||||
print 'Acquired filename with r+b mode'
|
||||
print('Acquired filename with r+b mode')
|
||||
f.file_handle().write('locked data')
|
||||
else:
|
||||
print 'Aquired filename with rb mode'
|
||||
print('Acquired filename with rb mode')
|
||||
f.unlock_and_close()
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__author__ = 'cache@google.com (David T McWherter)'
|
||||
|
||||
import errno
|
||||
@ -208,9 +212,9 @@ try:
|
||||
except IOError as e:
|
||||
# If not retrying, then just pass on the error.
|
||||
if timeout == 0:
|
||||
raise e
|
||||
raise
|
||||
if e.errno != errno.EACCES:
|
||||
raise e
|
||||
raise
|
||||
# We could not acquire the lock. Try again.
|
||||
if (time.time() - start_time) >= timeout:
|
||||
logger.warn('Could not lock %s in %s seconds',
|
||||
@ -287,7 +291,7 @@ try:
|
||||
return
|
||||
except pywintypes.error as e:
|
||||
if timeout == 0:
|
||||
raise e
|
||||
raise
|
||||
|
||||
# If the error is not that the file is already in use, raise.
|
||||
if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
|
||||
|
@ -19,30 +19,34 @@ credentials can be stored in one file. That file supports locking
|
||||
both in a single process and across processes.
|
||||
|
||||
The credential themselves are keyed off of:
|
||||
|
||||
* client_id
|
||||
* user_agent
|
||||
* scope
|
||||
|
||||
The format of the stored data is like so:
|
||||
{
|
||||
'file_version': 1,
|
||||
'data': [
|
||||
{
|
||||
'key': {
|
||||
'clientId': '<client id>',
|
||||
'userAgent': '<user agent>',
|
||||
'scope': '<scope>'
|
||||
},
|
||||
'credential': {
|
||||
# JSON serialized Credentials.
|
||||
The format of the stored data is like so::
|
||||
|
||||
{
|
||||
'file_version': 1,
|
||||
'data': [
|
||||
{
|
||||
'key': {
|
||||
'clientId': '<client id>',
|
||||
'userAgent': '<user agent>',
|
||||
'scope': '<scope>'
|
||||
},
|
||||
'credential': {
|
||||
# JSON serialized Credentials.
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
__author__ = 'jbeda@google.com (Joe Beda)'
|
||||
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
@ -62,12 +66,10 @@ _multistores_lock = threading.Lock()
|
||||
|
||||
class Error(Exception):
|
||||
"""Base error for this module."""
|
||||
pass
|
||||
|
||||
|
||||
class NewerCredentialStoreError(Error):
|
||||
"""The credential store is a newer version that supported."""
|
||||
pass
|
||||
"""The credential store is a newer version than supported."""
|
||||
|
||||
|
||||
@util.positional(4)
|
||||
@ -191,7 +193,7 @@ class _MultiStore(object):
|
||||
|
||||
This will create the file if necessary.
|
||||
"""
|
||||
self._file = LockedFile(filename, 'r+b', 'rb')
|
||||
self._file = LockedFile(filename, 'r+', 'r')
|
||||
self._thread_lock = threading.Lock()
|
||||
self._read_only = False
|
||||
self._warn_on_readonly = warn_on_readonly
|
||||
@ -269,7 +271,7 @@ class _MultiStore(object):
|
||||
simple version of "touch" to ensure the file has been created.
|
||||
"""
|
||||
if not os.path.exists(self._file.filename()):
|
||||
old_umask = os.umask(0177)
|
||||
old_umask = os.umask(0o177)
|
||||
try:
|
||||
open(self._file.filename(), 'a+b').close()
|
||||
finally:
|
||||
@ -278,7 +280,17 @@ class _MultiStore(object):
|
||||
def _lock(self):
|
||||
"""Lock the entire multistore."""
|
||||
self._thread_lock.acquire()
|
||||
self._file.open_and_lock()
|
||||
try:
|
||||
self._file.open_and_lock()
|
||||
except IOError as e:
|
||||
if e.errno == errno.ENOSYS:
|
||||
logger.warn('File system does not support locking the credentials '
|
||||
'file.')
|
||||
elif e.errno == errno.ENOLCK:
|
||||
logger.warn('File system is out of resources for writing the '
|
||||
'credentials file (is your disk full?).')
|
||||
else:
|
||||
raise
|
||||
if not self._file.is_locked():
|
||||
self._read_only = True
|
||||
if self._warn_on_readonly:
|
||||
|
@ -15,6 +15,7 @@
|
||||
"""This module holds the old run() function which is deprecated, the
|
||||
tools.run_flow() function should be used in its place."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import logging
|
||||
import socket
|
||||
@ -22,11 +23,12 @@ import sys
|
||||
import webbrowser
|
||||
|
||||
import gflags
|
||||
from six.moves import input
|
||||
|
||||
from oauth2client import client
|
||||
from oauth2client import util
|
||||
from tools import ClientRedirectHandler
|
||||
from tools import ClientRedirectServer
|
||||
from oauth2client.tools import ClientRedirectHandler
|
||||
from oauth2client.tools import ClientRedirectServer
|
||||
|
||||
|
||||
FLAGS = gflags.FLAGS
|
||||
@ -48,39 +50,38 @@ gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
|
||||
def run(flow, storage, http=None):
|
||||
"""Core code for a command-line application.
|
||||
|
||||
The run() function is called from your application and runs through all
|
||||
the steps to obtain credentials. It takes a Flow argument and attempts to
|
||||
open an authorization server page in the user's default web browser. The
|
||||
server asks the user to grant your application access to the user's data.
|
||||
If the user grants access, the run() function returns new credentials. The
|
||||
new credentials are also stored in the Storage argument, which updates the
|
||||
file associated with the Storage object.
|
||||
The ``run()`` function is called from your application and runs
|
||||
through all the steps to obtain credentials. It takes a ``Flow``
|
||||
argument and attempts to open an authorization server page in the
|
||||
user's default web browser. The server asks the user to grant your
|
||||
application access to the user's data. If the user grants access,
|
||||
the ``run()`` function returns new credentials. The new credentials
|
||||
are also stored in the ``storage`` argument, which updates the file
|
||||
associated with the ``Storage`` object.
|
||||
|
||||
It presumes it is run from a command-line application and supports the
|
||||
following flags:
|
||||
|
||||
--auth_host_name: Host name to use when running a local web server
|
||||
to handle redirects during OAuth authorization.
|
||||
(default: 'localhost')
|
||||
``--auth_host_name`` (string, default: ``localhost``)
|
||||
Host name to use when running a local web server to handle
|
||||
redirects during OAuth authorization.
|
||||
|
||||
--auth_host_port: Port to use when running a local web server to handle
|
||||
redirects during OAuth authorization.;
|
||||
repeat this option to specify a list of values
|
||||
(default: '[8080, 8090]')
|
||||
(an integer)
|
||||
``--auth_host_port`` (integer, default: ``[8080, 8090]``)
|
||||
Port to use when running a local web server to handle redirects
|
||||
during OAuth authorization. Repeat this option to specify a list
|
||||
of values.
|
||||
|
||||
--[no]auth_local_webserver: Run a local web server to handle redirects
|
||||
during OAuth authorization.
|
||||
(default: 'true')
|
||||
``--[no]auth_local_webserver`` (boolean, default: ``True``)
|
||||
Run a local web server to handle redirects during OAuth authorization.
|
||||
|
||||
Since it uses flags make sure to initialize the gflags module before
|
||||
calling run().
|
||||
Since it uses flags make sure to initialize the ``gflags`` module before
|
||||
calling ``run()``.
|
||||
|
||||
Args:
|
||||
flow: Flow, an OAuth 2.0 Flow to step through.
|
||||
storage: Storage, a Storage to store the credential in.
|
||||
http: An instance of httplib2.Http.request
|
||||
or something that acts like it.
|
||||
storage: Storage, a ``Storage`` to store the credential in.
|
||||
http: An instance of ``httplib2.Http.request`` or something that acts
|
||||
like it.
|
||||
|
||||
Returns:
|
||||
Credentials, the obtained credential.
|
||||
@ -103,13 +104,13 @@ def run(flow, storage, http=None):
|
||||
break
|
||||
FLAGS.auth_local_webserver = success
|
||||
if not success:
|
||||
print 'Failed to start a local webserver listening on either port 8080'
|
||||
print 'or port 9090. Please check your firewall settings and locally'
|
||||
print 'running programs that may be blocking or using those ports.'
|
||||
print
|
||||
print 'Falling back to --noauth_local_webserver and continuing with',
|
||||
print 'authorization.'
|
||||
print
|
||||
print('Failed to start a local webserver listening on either port 8080')
|
||||
print('or port 9090. Please check your firewall settings and locally')
|
||||
print('running programs that may be blocking or using those ports.')
|
||||
print()
|
||||
print('Falling back to --noauth_local_webserver and continuing with')
|
||||
print('authorization.')
|
||||
print()
|
||||
|
||||
if FLAGS.auth_local_webserver:
|
||||
oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
|
||||
@ -120,20 +121,20 @@ def run(flow, storage, http=None):
|
||||
|
||||
if FLAGS.auth_local_webserver:
|
||||
webbrowser.open(authorize_url, new=1, autoraise=True)
|
||||
print 'Your browser has been opened to visit:'
|
||||
print
|
||||
print ' ' + authorize_url
|
||||
print
|
||||
print 'If your browser is on a different machine then exit and re-run'
|
||||
print 'this application with the command-line parameter '
|
||||
print
|
||||
print ' --noauth_local_webserver'
|
||||
print
|
||||
print('Your browser has been opened to visit:')
|
||||
print()
|
||||
print(' ' + authorize_url)
|
||||
print()
|
||||
print('If your browser is on a different machine then exit and re-run')
|
||||
print('this application with the command-line parameter ')
|
||||
print()
|
||||
print(' --noauth_local_webserver')
|
||||
print()
|
||||
else:
|
||||
print 'Go to the following link in your browser:'
|
||||
print
|
||||
print ' ' + authorize_url
|
||||
print
|
||||
print('Go to the following link in your browser:')
|
||||
print()
|
||||
print(' ' + authorize_url)
|
||||
print()
|
||||
|
||||
code = None
|
||||
if FLAGS.auth_local_webserver:
|
||||
@ -143,10 +144,10 @@ def run(flow, storage, http=None):
|
||||
if 'code' in httpd.query_params:
|
||||
code = httpd.query_params['code']
|
||||
else:
|
||||
print 'Failed to find "code" in the query parameters of the redirect.'
|
||||
print('Failed to find "code" in the query parameters of the redirect.')
|
||||
sys.exit('Try running with --noauth_local_webserver.')
|
||||
else:
|
||||
code = raw_input('Enter verification code: ').strip()
|
||||
code = input('Enter verification code: ').strip()
|
||||
|
||||
try:
|
||||
credential = flow.step2_exchange(code, http=http)
|
||||
@ -155,6 +156,6 @@ def run(flow, storage, http=None):
|
||||
|
||||
storage.put(credential)
|
||||
credential.set_store(storage)
|
||||
print 'Authentication successful.'
|
||||
print('Authentication successful.')
|
||||
|
||||
return credential
|
||||
|
@ -19,6 +19,7 @@ This credentials class is implemented on top of rsa library.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import six
|
||||
import time
|
||||
|
||||
from pyasn1.codec.ber import decoder
|
||||
@ -64,7 +65,7 @@ class _ServiceAccountCredentials(AssertionCredentials):
|
||||
'kid': self._private_key_id
|
||||
}
|
||||
|
||||
now = long(time.time())
|
||||
now = int(time.time())
|
||||
payload = {
|
||||
'aud': self._token_uri,
|
||||
'scope': self._scopes,
|
||||
@ -74,17 +75,21 @@ class _ServiceAccountCredentials(AssertionCredentials):
|
||||
}
|
||||
payload.update(self._kwargs)
|
||||
|
||||
assertion_input = '%s.%s' % (
|
||||
_urlsafe_b64encode(header),
|
||||
_urlsafe_b64encode(payload))
|
||||
assertion_input = (_urlsafe_b64encode(header) + b'.' +
|
||||
_urlsafe_b64encode(payload))
|
||||
|
||||
# Sign the assertion.
|
||||
signature = base64.urlsafe_b64encode(rsa.pkcs1.sign(
|
||||
assertion_input, self._private_key, 'SHA-256')).rstrip('=')
|
||||
rsa_bytes = rsa.pkcs1.sign(assertion_input, self._private_key, 'SHA-256')
|
||||
signature = base64.urlsafe_b64encode(rsa_bytes).rstrip(b'=')
|
||||
|
||||
return '%s.%s' % (assertion_input, signature)
|
||||
return assertion_input + b'.' + signature
|
||||
|
||||
def sign_blob(self, blob):
|
||||
# Ensure that it is bytes
|
||||
try:
|
||||
blob = blob.encode('utf-8')
|
||||
except AttributeError:
|
||||
pass
|
||||
return (self._private_key_id,
|
||||
rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))
|
||||
|
||||
@ -119,12 +124,14 @@ class _ServiceAccountCredentials(AssertionCredentials):
|
||||
|
||||
def _urlsafe_b64encode(data):
|
||||
return base64.urlsafe_b64encode(
|
||||
json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip('=')
|
||||
json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip(b'=')
|
||||
|
||||
|
||||
def _get_private_key(private_key_pkcs8_text):
|
||||
"""Get an RSA private key object from a pkcs8 representation."""
|
||||
|
||||
if not isinstance(private_key_pkcs8_text, six.binary_type):
|
||||
private_key_pkcs8_text = private_key_pkcs8_text.encode('ascii')
|
||||
der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
|
||||
asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo())
|
||||
return rsa.PrivateKey.load_pkcs1(
|
||||
|
@ -19,21 +19,23 @@ generated credentials in a common file that is used by other example apps in
|
||||
the same directory.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
|
||||
__all__ = ['argparser', 'run_flow', 'run', 'message_if_missing']
|
||||
|
||||
|
||||
#import argparse
|
||||
import BaseHTTPServer
|
||||
import logging
|
||||
import socket
|
||||
import sys
|
||||
import urlparse
|
||||
import webbrowser
|
||||
|
||||
from six.moves import BaseHTTPServer
|
||||
from six.moves import urllib
|
||||
from six.moves import input
|
||||
|
||||
from oauth2client import client
|
||||
from oauth2client import util
|
||||
|
||||
|
||||
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
|
||||
|
||||
To make this sample run you will need to populate the client_secrets.json file
|
||||
@ -45,20 +47,27 @@ with information from the APIs Console <https://code.google.com/apis/console>.
|
||||
|
||||
"""
|
||||
|
||||
def _CreateArgumentParser():
|
||||
try:
|
||||
import argparse
|
||||
except ImportError:
|
||||
return None
|
||||
parser = argparse.ArgumentParser(add_help=False)
|
||||
parser.add_argument('--auth_host_name', default='localhost',
|
||||
help='Hostname when running a local web server.')
|
||||
parser.add_argument('--noauth_local_webserver', action='store_true',
|
||||
default=False, help='Do not run a local web server.')
|
||||
parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
|
||||
nargs='*', help='Port web server should listen on.')
|
||||
parser.add_argument('--logging_level', default='ERROR',
|
||||
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
|
||||
help='Set the logging level of detail.')
|
||||
return parser
|
||||
|
||||
# argparser is an ArgumentParser that contains command-line options expected
|
||||
# by tools.run(). Pass it in as part of the 'parents' argument to your own
|
||||
# ArgumentParser.
|
||||
#argparser = argparse.ArgumentParser(add_help=False)
|
||||
#argparser.add_argument('--auth_host_name', default='localhost',
|
||||
# help='Hostname when running a local web server.')
|
||||
#argparser.add_argument('--noauth_local_webserver', action='store_true',
|
||||
# default=False, help='Do not run a local web server.')
|
||||
#argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
|
||||
# nargs='*', help='Port web server should listen on.')
|
||||
#argparser.add_argument('--logging_level', default='ERROR',
|
||||
# choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
|
||||
# 'CRITICAL'],
|
||||
# help='Set the logging level of detail.')
|
||||
argparser = _CreateArgumentParser()
|
||||
|
||||
|
||||
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
|
||||
@ -88,11 +97,11 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
query = self.path.split('?', 1)[-1]
|
||||
query = dict(urlparse.parse_qsl(query))
|
||||
query = dict(urllib.parse.parse_qsl(query))
|
||||
self.server.query_params = query
|
||||
self.wfile.write("<html><head><title>Authentication Status</title></head>")
|
||||
self.wfile.write("<body><p>The authentication flow has completed.</p>")
|
||||
self.wfile.write("</body></html>")
|
||||
self.wfile.write(b"<html><head><title>Authentication Status</title></head>")
|
||||
self.wfile.write(b"<body><p>The authentication flow has completed.</p>")
|
||||
self.wfile.write(b"</body></html>")
|
||||
|
||||
def log_message(self, format, *args):
|
||||
"""Do not log messages to stdout while running as command line program."""
|
||||
@ -102,46 +111,50 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
def run_flow(flow, storage, flags, http=None):
|
||||
"""Core code for a command-line application.
|
||||
|
||||
The run() function is called from your application and runs through all the
|
||||
steps to obtain credentials. It takes a Flow argument and attempts to open an
|
||||
authorization server page in the user's default web browser. The server asks
|
||||
the user to grant your application access to the user's data. If the user
|
||||
grants access, the run() function returns new credentials. The new credentials
|
||||
are also stored in the Storage argument, which updates the file associated
|
||||
with the Storage object.
|
||||
The ``run()`` function is called from your application and runs
|
||||
through all the steps to obtain credentials. It takes a ``Flow``
|
||||
argument and attempts to open an authorization server page in the
|
||||
user's default web browser. The server asks the user to grant your
|
||||
application access to the user's data. If the user grants access,
|
||||
the ``run()`` function returns new credentials. The new credentials
|
||||
are also stored in the ``storage`` argument, which updates the file
|
||||
associated with the ``Storage`` object.
|
||||
|
||||
It presumes it is run from a command-line application and supports the
|
||||
following flags:
|
||||
|
||||
--auth_host_name: Host name to use when running a local web server
|
||||
to handle redirects during OAuth authorization.
|
||||
(default: 'localhost')
|
||||
``--auth_host_name`` (string, default: ``localhost``)
|
||||
Host name to use when running a local web server to handle
|
||||
redirects during OAuth authorization.
|
||||
|
||||
--auth_host_port: Port to use when running a local web server to handle
|
||||
redirects during OAuth authorization.;
|
||||
repeat this option to specify a list of values
|
||||
(default: '[8080, 8090]')
|
||||
(an integer)
|
||||
``--auth_host_port`` (integer, default: ``[8080, 8090]``)
|
||||
Port to use when running a local web server to handle redirects
|
||||
during OAuth authorization. Repeat this option to specify a list
|
||||
of values.
|
||||
|
||||
--[no]auth_local_webserver: Run a local web server to handle redirects
|
||||
during OAuth authorization.
|
||||
(default: 'true')
|
||||
``--[no]auth_local_webserver`` (boolean, default: ``True``)
|
||||
Run a local web server to handle redirects during OAuth authorization.
|
||||
|
||||
The tools module defines an ArgumentParser the already contains the flag
|
||||
definitions that run() requires. You can pass that ArgumentParser to your
|
||||
ArgumentParser constructor:
|
||||
|
||||
parser = argparse.ArgumentParser(description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
parents=[tools.argparser])
|
||||
flags = parser.parse_args(argv)
|
||||
|
||||
|
||||
The tools module defines an ``ArgumentParser`` that already contains the flag
|
||||
definitions that ``run()`` requires. You can pass that ``ArgumentParser`` to your
|
||||
``ArgumentParser`` constructor::
|
||||
|
||||
parser = argparse.ArgumentParser(description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
parents=[tools.argparser])
|
||||
flags = parser.parse_args(argv)
|
||||
|
||||
Args:
|
||||
flow: Flow, an OAuth 2.0 Flow to step through.
|
||||
storage: Storage, a Storage to store the credential in.
|
||||
flags: argparse.ArgumentParser, the command-line flags.
|
||||
http: An instance of httplib2.Http.request
|
||||
or something that acts like it.
|
||||
storage: Storage, a ``Storage`` to store the credential in.
|
||||
flags: ``argparse.Namespace``, The command-line flags. This is the
|
||||
object returned from calling ``parse_args()`` on
|
||||
``argparse.ArgumentParser`` as described above.
|
||||
http: An instance of ``httplib2.Http.request`` or something that
|
||||
acts like it.
|
||||
|
||||
Returns:
|
||||
Credentials, the obtained credential.
|
||||
@ -155,20 +168,20 @@ def run_flow(flow, storage, flags, http=None):
|
||||
try:
|
||||
httpd = ClientRedirectServer((flags.auth_host_name, port),
|
||||
ClientRedirectHandler)
|
||||
except socket.error as e:
|
||||
except socket.error:
|
||||
pass
|
||||
else:
|
||||
success = True
|
||||
break
|
||||
flags.noauth_local_webserver = not success
|
||||
if not success:
|
||||
print 'Failed to start a local webserver listening on either port 8080'
|
||||
print 'or port 9090. Please check your firewall settings and locally'
|
||||
print 'running programs that may be blocking or using those ports.'
|
||||
print
|
||||
print 'Falling back to --noauth_local_webserver and continuing with',
|
||||
print 'authorization.'
|
||||
print
|
||||
print('Failed to start a local webserver listening on either port 8080')
|
||||
print('or port 9090. Please check your firewall settings and locally')
|
||||
print('running programs that may be blocking or using those ports.')
|
||||
print()
|
||||
print('Falling back to --noauth_local_webserver and continuing with')
|
||||
print('authorization.')
|
||||
print()
|
||||
|
||||
if not flags.noauth_local_webserver:
|
||||
oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
|
||||
@ -186,23 +199,22 @@ def run_flow(flow, storage, flags, http=None):
|
||||
authorize_url = url_result['id']
|
||||
except:
|
||||
pass
|
||||
|
||||
if not flags.noauth_local_webserver:
|
||||
import webbrowser
|
||||
webbrowser.open(authorize_url, new=1, autoraise=True)
|
||||
print 'Your browser has been opened to visit:'
|
||||
print
|
||||
print ' ' + authorize_url
|
||||
print
|
||||
print 'If your browser is on a different machine then exit and re-run this'
|
||||
print 'after creating a file called nobrowser.txt in the same path as GAM.'
|
||||
# print 'application with the command-line parameter '
|
||||
# print
|
||||
# print ' --noauth_local_webserver'
|
||||
# print
|
||||
print('Your browser has been opened to visit:')
|
||||
print()
|
||||
print(' ' + authorize_url)
|
||||
print()
|
||||
print('If your browser is on a different machine then exit and re-run this')
|
||||
print('after creating a file called nobrowser.txt in the same path as GAM.')
|
||||
print()
|
||||
else:
|
||||
print 'Go to the following link in your browser:'
|
||||
print
|
||||
print ' ' + authorize_url
|
||||
print
|
||||
print('Go to the following link in your browser:')
|
||||
print()
|
||||
print(' ' + authorize_url)
|
||||
print()
|
||||
|
||||
code = None
|
||||
if not flags.noauth_local_webserver:
|
||||
@ -212,10 +224,10 @@ def run_flow(flow, storage, flags, http=None):
|
||||
if 'code' in httpd.query_params:
|
||||
code = httpd.query_params['code']
|
||||
else:
|
||||
print 'Failed to find "code" in the query parameters of the redirect.'
|
||||
print('Failed to find "code" in the query parameters of the redirect.')
|
||||
sys.exit('Try running with --noauth_local_webserver.')
|
||||
else:
|
||||
code = raw_input('Enter verification code: ').strip()
|
||||
code = input('Enter verification code: ').strip()
|
||||
|
||||
try:
|
||||
credential = flow.step2_exchange(code, http=http)
|
||||
@ -224,7 +236,7 @@ def run_flow(flow, storage, flags, http=None):
|
||||
|
||||
storage.put(credential)
|
||||
credential.set_store(storage)
|
||||
print 'Authentication successful.'
|
||||
print('Authentication successful.')
|
||||
|
||||
return credential
|
||||
|
||||
|
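Putting the pieces together, a minimal command-line sketch that combines ``tools.argparser``, ``run_flow()`` and a file-backed ``Storage``; the file names and scope are placeholders::

    import argparse
    import httplib2
    from oauth2client import tools
    from oauth2client.client import flow_from_clientsecrets
    from oauth2client.file import Storage

    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
    flow = flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://www.googleapis.com/auth/calendar.readonly')
    storage = Storage('credentials.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(flow, storage, flags)
    http = credentials.authorize(httplib2.Http())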
@ -29,11 +29,15 @@ __all__ = [
|
||||
'POSITIONAL_IGNORE',
|
||||
]
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import logging
|
||||
import sys
|
||||
import types
|
||||
import urllib
|
||||
import urlparse
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -48,56 +52,58 @@ positional_parameters_enforcement = POSITIONAL_WARNING
|
||||
def positional(max_positional_args):
|
||||
"""A decorator to declare that only the first N arguments my be positional.
|
||||
|
||||
This decorator makes it easy to support Python 3 style key-word only
|
||||
parameters. For example, in Python 3 it is possible to write:
|
||||
This decorator makes it easy to support Python 3 style keyword-only
|
||||
parameters. For example, in Python 3 it is possible to write::
|
||||
|
||||
def fn(pos1, *, kwonly1=None, kwonly1=None):
|
||||
...
|
||||
|
||||
All named parameters after * must be a keyword:
|
||||
All named parameters after ``*`` must be a keyword::
|
||||
|
||||
fn(10, 'kw1', 'kw2') # Raises exception.
|
||||
fn(10, kwonly1='kw1') # Ok.
|
||||
|
||||
Example:
|
||||
To define a function like above, do:
|
||||
Example
|
||||
^^^^^^^
|
||||
|
||||
@positional(1)
|
||||
def fn(pos1, kwonly1=None, kwonly2=None):
|
||||
To define a function like above, do::
|
||||
|
||||
@positional(1)
|
||||
def fn(pos1, kwonly1=None, kwonly2=None):
|
||||
...
|
||||
|
||||
If no default value is provided to a keyword argument, it becomes a required
|
||||
keyword argument::
|
||||
|
||||
@positional(0)
|
||||
def fn(required_kw):
|
||||
...
|
||||
|
||||
This must be called with the keyword parameter::
|
||||
|
||||
fn() # Raises exception.
|
||||
fn(10) # Raises exception.
|
||||
fn(required_kw=10) # Ok.
|
||||
|
||||
When defining instance or class methods always remember to account for
|
||||
``self`` and ``cls``::
|
||||
|
||||
class MyClass(object):
|
||||
|
||||
@positional(2)
|
||||
def my_method(self, pos1, kwonly1=None):
|
||||
...
|
||||
|
||||
If no default value is provided to a keyword argument, it becomes a required
|
||||
keyword argument:
|
||||
|
||||
@positional(0)
|
||||
def fn(required_kw):
|
||||
@classmethod
|
||||
@positional(2)
|
||||
def my_method(cls, pos1, kwonly1=None):
|
||||
...
|
||||
|
||||
This must be called with the keyword parameter:
|
||||
|
||||
fn() # Raises exception.
|
||||
fn(10) # Raises exception.
|
||||
fn(required_kw=10) # Ok.
|
||||
|
||||
When defining instance or class methods always remember to account for
|
||||
'self' and 'cls':
|
||||
|
||||
class MyClass(object):
|
||||
|
||||
@positional(2)
|
||||
def my_method(self, pos1, kwonly1=None):
|
||||
...
|
||||
|
||||
@classmethod
|
||||
@positional(2)
|
||||
def my_method(cls, pos1, kwonly1=None):
|
||||
...
|
||||
|
||||
The positional decorator behavior is controlled by
|
||||
util.positional_parameters_enforcement, which may be set to
|
||||
POSITIONAL_EXCEPTION, POSITIONAL_WARNING or POSITIONAL_IGNORE to raise an
|
||||
exception, log a warning, or do nothing, respectively, if a declaration is
|
||||
violated.
|
||||
``util.positional_parameters_enforcement``, which may be set to
|
||||
``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
|
||||
``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
|
||||
nothing, respectively, if a declaration is violated.
|
||||
|
||||
Args:
|
||||
max_positional_arguments: Maximum number of positional arguments. All
|
||||
@ -111,8 +117,10 @@ def positional(max_positional_args):
|
||||
TypeError if a key-word only argument is provided as a positional
|
||||
parameter, but only if util.positional_parameters_enforcement is set to
|
||||
POSITIONAL_EXCEPTION.
|
||||
|
||||
"""
|
||||
def positional_decorator(wrapped):
|
||||
@functools.wraps(wrapped)
|
||||
def positional_wrapper(*args, **kwargs):
|
||||
if len(args) > max_positional_args:
|
||||
plural_s = ''
|
||||
@ -129,7 +137,7 @@ def positional(max_positional_args):
|
||||
return wrapped(*args, **kwargs)
|
||||
return positional_wrapper
|
||||
|
||||
if isinstance(max_positional_args, (int, long)):
|
||||
if isinstance(max_positional_args, six.integer_types):
|
||||
return positional_decorator
|
||||
else:
|
||||
args, _, _, defaults = inspect.getargspec(max_positional_args)
|
||||
@ -149,7 +157,7 @@ def scopes_to_string(scopes):
|
||||
Returns:
|
||||
The scopes formatted as a single string.
|
||||
"""
|
||||
if isinstance(scopes, types.StringTypes):
|
||||
if isinstance(scopes, six.string_types):
|
||||
return scopes
|
||||
else:
|
||||
return ' '.join(scopes)
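For example, both a single scope string and an iterable of scopes normalize to the same space-separated form::

    from oauth2client import util

    assert util.scopes_to_string('email profile') == 'email profile'
    assert util.scopes_to_string(['email', 'profile']) == 'email profile'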
|
||||
@ -186,8 +194,8 @@ def _add_query_parameter(url, name, value):
|
||||
if value is None:
|
||||
return url
|
||||
else:
|
||||
parsed = list(urlparse.urlparse(url))
|
||||
q = dict(urlparse.parse_qsl(parsed[4]))
|
||||
parsed = list(urllib.parse.urlparse(url))
|
||||
q = dict(urllib.parse.parse_qsl(parsed[4]))
|
||||
q[name] = value
|
||||
parsed[4] = urllib.urlencode(q)
|
||||
return urlparse.urlunparse(parsed)
|
||||
parsed[4] = urllib.parse.urlencode(q)
|
||||
return urllib.parse.urlunparse(parsed)
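A small sketch of this module-internal helper with a made-up URL; the new parameter is merged into whatever query string is already present::

    from oauth2client import util

    url = util._add_query_parameter('https://example.com/api?a=1',
                                    'userIp', '1.2.3.4')
    assert 'userIp=1.2.3.4' in url
    assert 'a=1' in url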
|
||||
|
@ -1,4 +1,3 @@
|
||||
#!/usr/bin/python2.5
|
||||
#
|
||||
# Copyright 2014 the Melange authors.
|
||||
#
|
||||
@ -26,15 +25,27 @@ import base64
|
||||
import hmac
|
||||
import time
|
||||
|
||||
import six
|
||||
from oauth2client import util
|
||||
|
||||
|
||||
# Delimiter character
|
||||
DELIMITER = ':'
|
||||
DELIMITER = b':'
|
||||
|
||||
|
||||
# 1 hour in seconds
|
||||
DEFAULT_TIMEOUT_SECS = 1*60*60
|
||||
|
||||
|
||||
def _force_bytes(s):
|
||||
if isinstance(s, bytes):
|
||||
return s
|
||||
s = str(s)
|
||||
if isinstance(s, six.text_type):
|
||||
return s.encode('utf-8')
|
||||
return s
|
||||
|
||||
|
||||
@util.positional(2)
|
||||
def generate_token(key, user_id, action_id="", when=None):
|
||||
"""Generates a URL-safe token for the given user, action, time tuple.
|
||||
@ -50,18 +61,16 @@ def generate_token(key, user_id, action_id="", when=None):
|
||||
Returns:
|
||||
A string XSRF protection token.
|
||||
"""
|
||||
when = when or int(time.time())
|
||||
digester = hmac.new(key)
|
||||
digester.update(str(user_id))
|
||||
when = _force_bytes(when or int(time.time()))
|
||||
digester = hmac.new(_force_bytes(key))
|
||||
digester.update(_force_bytes(user_id))
|
||||
digester.update(DELIMITER)
|
||||
digester.update(action_id)
|
||||
digester.update(_force_bytes(action_id))
|
||||
digester.update(DELIMITER)
|
||||
digester.update(str(when))
|
||||
digester.update(when)
|
||||
digest = digester.digest()
|
||||
|
||||
token = base64.urlsafe_b64encode('%s%s%d' % (digest,
|
||||
DELIMITER,
|
||||
when))
|
||||
token = base64.urlsafe_b64encode(digest + DELIMITER + when)
|
||||
return token
|
||||
|
||||
|
||||
@ -86,8 +95,8 @@ def validate_token(key, token, user_id, action_id="", current_time=None):
|
||||
if not token:
|
||||
return False
|
||||
try:
|
||||
decoded = base64.urlsafe_b64decode(str(token))
|
||||
token_time = long(decoded.split(DELIMITER)[-1])
|
||||
decoded = base64.urlsafe_b64decode(token)
|
||||
token_time = int(decoded.split(DELIMITER)[-1])
|
||||
except (TypeError, ValueError):
|
||||
return False
|
||||
if current_time is None:
|
||||
@ -104,9 +113,6 @@ def validate_token(key, token, user_id, action_id="", current_time=None):
|
||||
|
||||
# Perform constant time comparison to avoid timing attacks
|
||||
different = 0
|
||||
for x, y in zip(token, expected_token):
|
||||
different |= ord(x) ^ ord(y)
|
||||
if different:
|
||||
return False
|
||||
|
||||
return True
|
||||
for x, y in zip(bytearray(token), bytearray(expected_token)):
|
||||
different |= x ^ y
|
||||
return not different
|
||||
|
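Finally, a short sketch of the token helpers after the bytes changes above; the key, user id and action are placeholders::

    from oauth2client import xsrfutil

    key = b'server-side-secret'
    token = xsrfutil.generate_token(key, user_id=42, action_id='delete')
    assert xsrfutil.validate_token(key, token, user_id=42, action_id='delete')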