googleapiclient 1.4 and oauth2client 1.4.7 upgrades

This commit is contained in:
Jay Lee 2015-04-15 11:47:26 -04:00
parent a2e8d17a69
commit db0dd231b1
24 changed files with 867 additions and 489 deletions

View File

@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
__version__ = "1.3.1" __version__ = "1.4.0"

View File

@ -55,12 +55,14 @@ Example of unsubscribing.
service.channels().stop(channel.body()) service.channels().stop(channel.body())
""" """
from __future__ import absolute_import
import datetime import datetime
import uuid import uuid
from googleapiclient import errors from googleapiclient import errors
from oauth2client import util from oauth2client import util
import six
# The unix time epoch starts at midnight 1970. # The unix time epoch starts at midnight 1970.
@ -88,7 +90,7 @@ X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'
def _upper_header_keys(headers): def _upper_header_keys(headers):
new_headers = {} new_headers = {}
for k, v in headers.iteritems(): for k, v in six.iteritems(headers):
new_headers[k.upper()] = v new_headers[k.upper()] = v
return new_headers return new_headers
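
This hunk shows the pattern the commit applies throughout: Python 2-only dict.iteritems() calls become six.iteritems(), which runs unchanged on Python 2 and 3. A minimal self-contained sketch of that same pattern (the header dict is illustrative):

import six

headers = {'content-type': 'application/json', 'x-goog-channel-id': 'abc123'}

upper = {}
for k, v in six.iteritems(headers):   # dict.iteritems() on Python 2, dict.items() on Python 3
    upper[k.upper()] = v

print(upper)                           # both keys are now upper-cased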
@ -218,7 +220,7 @@ class Channel(object):
Args: Args:
resp: dict, The response from a watch() method. resp: dict, The response from a watch() method.
""" """
for json_name, param_name in CHANNEL_PARAMS.iteritems(): for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
value = resp.get(json_name) value = resp.get(json_name)
if value is not None: if value is not None:
setattr(self, param_name, value) setattr(self, param_name, value)

View File

@ -16,6 +16,9 @@
A client library for Google's discovery based APIs. A client library for Google's discovery based APIs.
""" """
from __future__ import absolute_import
import six
from six.moves import zip
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = [ __all__ = [
@ -25,9 +28,11 @@ __all__ = [
'key2param', 'key2param',
] ]
from six import StringIO
from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
urlunparse, parse_qsl
# Standard library imports # Standard library imports
import StringIO
import copy import copy
from email.generator import Generator from email.generator import Generator
from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEMultipart
@ -38,20 +43,13 @@ import logging
import mimetypes import mimetypes
import os import os
import re import re
import urllib
import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Third-party imports # Third-party imports
import httplib2 import httplib2
import mimeparse
import uritemplate import uritemplate
# Local imports # Local imports
from googleapiclient import mimeparse
from googleapiclient.errors import HttpError from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidJsonError from googleapiclient.errors import InvalidJsonError
from googleapiclient.errors import MediaUploadSizeError from googleapiclient.errors import MediaUploadSizeError
@ -203,9 +201,14 @@ def build(serviceName,
if resp.status >= 400: if resp.status >= 400:
raise HttpError(resp, content, uri=requested_url) raise HttpError(resp, content, uri=requested_url)
try:
content = content.decode('utf-8')
except AttributeError:
pass
try: try:
service = json.loads(content) service = json.loads(content)
except ValueError, e: except ValueError as e:
logger.error('Failed to parse as JSON: ' + content) logger.error('Failed to parse as JSON: ' + content)
raise InvalidJsonError() raise InvalidJsonError()
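
build() now decodes the discovery response before handing it to json.loads(), since httplib2 returns bytes on Python 3 while Python 2 already yields a str. A stand-alone sketch of that decode-if-needed idiom (the sample payload is made up):

import json

def parse_discovery(content):
    try:
        content = content.decode('utf-8')   # bytes (Python 3 httplib2) -> text
    except AttributeError:
        pass                                # Python 3 str has no decode(); already text
    return json.loads(content)

print(parse_discovery(b'{"rootUrl": "https://www.googleapis.com/"}')['rootUrl'])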
@ -253,9 +256,9 @@ def build_from_document(
# future is no longer used. # future is no longer used.
future = {} future = {}
if isinstance(service, basestring): if isinstance(service, six.string_types):
service = json.loads(service) service = json.loads(service)
base = urlparse.urljoin(service['rootUrl'], service['servicePath']) base = urljoin(service['rootUrl'], service['servicePath'])
schema = Schemas(service) schema = Schemas(service)
if credentials: if credentials:
@ -271,7 +274,7 @@ def build_from_document(
credentials.create_scoped_required()): credentials.create_scoped_required()):
scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {}) scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
if scopes: if scopes:
credentials = credentials.create_scoped(scopes.keys()) credentials = credentials.create_scoped(list(scopes.keys()))
else: else:
# No need to authorize the http object # No need to authorize the http object
# if the service does not require authentication. # if the service does not require authentication.
@ -329,13 +332,13 @@ def _media_size_to_long(maxSize):
The size as an integer value. The size as an integer value.
""" """
if len(maxSize) < 2: if len(maxSize) < 2:
return 0L return 0
units = maxSize[-2:].upper() units = maxSize[-2:].upper()
bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units) bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
if bit_shift is not None: if bit_shift is not None:
return long(maxSize[:-2]) << bit_shift return int(maxSize[:-2]) << bit_shift
else: else:
return long(maxSize) return int(maxSize)
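
Dropping the 0L literal and long() calls leaves the size parsing working on plain int under both interpreters; the arithmetic is unchanged. A hedged sketch of the same conversion, assuming the usual KB/MB/GB/TB bit shifts of 10/20/30/40 (the _SIZE_SHIFTS table below is a stand-in, not the library's _MEDIA_SIZE_BIT_SHIFTS):

# Stand-in shift table; the library keeps its own mapping (_MEDIA_SIZE_BIT_SHIFTS).
_SIZE_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}

def media_size_to_int(max_size):
    """Convert a discovery maxSize string such as '10MB' into a byte count."""
    if len(max_size) < 2:
        return 0
    shift = _SIZE_SHIFTS.get(max_size[-2:].upper())
    if shift is not None:
        return int(max_size[:-2]) << shift   # e.g. 10 << 20 == 10485760
    return int(max_size)

print(media_size_to_int('10MB'))   # 10485760
print(media_size_to_int('512'))    # 512  (no unit suffix)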
def _media_path_url_from_info(root_desc, path_url): def _media_path_url_from_info(root_desc, path_url):
@ -385,7 +388,7 @@ def _fix_up_parameters(method_desc, root_desc, http_method):
parameters = method_desc.setdefault('parameters', {}) parameters = method_desc.setdefault('parameters', {})
# Add in the parameters common to all methods. # Add in the parameters common to all methods.
for name, description in root_desc.get('parameters', {}).iteritems(): for name, description in six.iteritems(root_desc.get('parameters', {})):
parameters[name] = description parameters[name] = description
# Add in undocumented query parameters. # Add in undocumented query parameters.
@ -491,6 +494,23 @@ def _fix_up_method_description(method_desc, root_desc):
return path_url, http_method, method_id, accept, max_size, media_path_url return path_url, http_method, method_id, accept, max_size, media_path_url
def _urljoin(base, url):
"""Custom urljoin replacement supporting : before / in url."""
# In general, it's unsafe to simply join base and url. However, for
# the case of discovery documents, we know:
# * base will never contain params, query, or fragment
# * url will never contain a scheme or net_loc.
# In general, this means we can safely join on /; we just need to
# ensure we end up with precisely one / joining base and url. The
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith('http://') or url.startswith('https://'):
return urljoin(base, url)
new_base = base if base.endswith('/') else base + '/'
new_url = url[1:] if url.startswith('/') else url
return new_base + new_url
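
The comment spells out why a plain string join is safe for discovery URLs; a short sketch of how the helper defined above behaves under those assumptions (the URLs are illustrative):

# base never carries params/query/fragment and url never carries a scheme,
# except for absolute media-upload URLs, so exactly one '/' must join them.
print(_urljoin('https://www.googleapis.com/storage/v1/', 'b/bucket/o'))
# https://www.googleapis.com/storage/v1/b/bucket/o
print(_urljoin('https://www.googleapis.com/storage/v1', '/b/bucket/o'))
# https://www.googleapis.com/storage/v1/b/bucket/o
print(_urljoin('https://www.googleapis.com/storage/v1/', 'https://example.com/upload'))
# https://example.com/upload  (absolute upload URLs fall back to urljoin)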
# TODO(dhermes): Convert this class to ResourceMethod and make it callable # TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object): class ResourceMethodParameters(object):
"""Represents the parameters associated with a method. """Represents the parameters associated with a method.
@ -551,7 +571,7 @@ class ResourceMethodParameters(object):
comes from the dictionary of methods stored in the 'methods' key in comes from the dictionary of methods stored in the 'methods' key in
the deserialized discovery document. the deserialized discovery document.
""" """
for arg, desc in method_desc.get('parameters', {}).iteritems(): for arg, desc in six.iteritems(method_desc.get('parameters', {})):
param = key2param(arg) param = key2param(arg)
self.argmap[param] = arg self.argmap[param] = arg
@ -599,12 +619,12 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
def method(self, **kwargs): def method(self, **kwargs):
# Don't bother with doc string, it will be over-written by createMethod. # Don't bother with doc string, it will be over-written by createMethod.
for name in kwargs.iterkeys(): for name in six.iterkeys(kwargs):
if name not in parameters.argmap: if name not in parameters.argmap:
raise TypeError('Got an unexpected keyword argument "%s"' % name) raise TypeError('Got an unexpected keyword argument "%s"' % name)
# Remove args that have a value of None. # Remove args that have a value of None.
keys = kwargs.keys() keys = list(kwargs.keys())
for name in keys: for name in keys:
if kwargs[name] is None: if kwargs[name] is None:
del kwargs[name] del kwargs[name]
@ -613,9 +633,9 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
if name not in kwargs: if name not in kwargs:
raise TypeError('Missing required parameter "%s"' % name) raise TypeError('Missing required parameter "%s"' % name)
for name, regex in parameters.pattern_params.iteritems(): for name, regex in six.iteritems(parameters.pattern_params):
if name in kwargs: if name in kwargs:
if isinstance(kwargs[name], basestring): if isinstance(kwargs[name], six.string_types):
pvalues = [kwargs[name]] pvalues = [kwargs[name]]
else: else:
pvalues = kwargs[name] pvalues = kwargs[name]
@ -625,13 +645,13 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
'Parameter "%s" value "%s" does not match the pattern "%s"' % 'Parameter "%s" value "%s" does not match the pattern "%s"' %
(name, pvalue, regex)) (name, pvalue, regex))
for name, enums in parameters.enum_params.iteritems(): for name, enums in six.iteritems(parameters.enum_params):
if name in kwargs: if name in kwargs:
# We need to handle the case of a repeated enum # We need to handle the case of a repeated enum
# name differently, since we want to handle both # name differently, since we want to handle both
# arg='value' and arg=['value1', 'value2'] # arg='value' and arg=['value1', 'value2']
if (name in parameters.repeated_params and if (name in parameters.repeated_params and
not isinstance(kwargs[name], basestring)): not isinstance(kwargs[name], six.string_types)):
values = kwargs[name] values = kwargs[name]
else: else:
values = [kwargs[name]] values = [kwargs[name]]
@ -643,7 +663,7 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
actual_query_params = {} actual_query_params = {}
actual_path_params = {} actual_path_params = {}
for key, value in kwargs.iteritems(): for key, value in six.iteritems(kwargs):
to_type = parameters.param_types.get(key, 'string') to_type = parameters.param_types.get(key, 'string')
# For repeated parameters we cast each member of the list. # For repeated parameters we cast each member of the list.
if key in parameters.repeated_params and type(value) == type([]): if key in parameters.repeated_params and type(value) == type([]):
@ -671,14 +691,14 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
actual_path_params, actual_query_params, body_value) actual_path_params, actual_query_params, body_value)
expanded_url = uritemplate.expand(pathUrl, params) expanded_url = uritemplate.expand(pathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query) url = _urljoin(self._baseUrl, expanded_url + query)
resumable = None resumable = None
multipart_boundary = '' multipart_boundary = ''
if media_filename: if media_filename:
# Ensure we end up with a valid MediaUpload object. # Ensure we end up with a valid MediaUpload object.
if isinstance(media_filename, basestring): if isinstance(media_filename, six.string_types):
(media_mime_type, encoding) = mimetypes.guess_type(media_filename) (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
if media_mime_type is None: if media_mime_type is None:
raise UnknownFileType(media_filename) raise UnknownFileType(media_filename)
@ -692,12 +712,12 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
raise TypeError('media_filename must be str or MediaUpload.') raise TypeError('media_filename must be str or MediaUpload.')
# Check the maxSize # Check the maxSize
if maxSize > 0 and media_upload.size() > maxSize: if media_upload.size() is not None and media_upload.size() > maxSize > 0:
raise MediaUploadSizeError("Media larger than: %s" % maxSize) raise MediaUploadSizeError("Media larger than: %s" % maxSize)
# Use the media path uri for media uploads # Use the media path uri for media uploads
expanded_url = uritemplate.expand(mediaPathUrl, params) expanded_url = uritemplate.expand(mediaPathUrl, params)
url = urlparse.urljoin(self._baseUrl, expanded_url + query) url = _urljoin(self._baseUrl, expanded_url + query)
if media_upload.resumable(): if media_upload.resumable():
url = _add_query_parameter(url, 'uploadType', 'resumable') url = _add_query_parameter(url, 'uploadType', 'resumable')
@ -732,7 +752,7 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
msgRoot.attach(msg) msgRoot.attach(msg)
# encode the body: note that we can't use `as_string`, because # encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines. # it plays games with `From ` lines.
fp = StringIO.StringIO() fp = StringIO()
g = Generator(fp, mangle_from_=False) g = Generator(fp, mangle_from_=False)
g.flatten(msgRoot, unixfrom=False) g.flatten(msgRoot, unixfrom=False)
body = fp.getvalue() body = fp.getvalue()
@ -757,10 +777,10 @@ def createMethod(methodName, methodDesc, rootDesc, schema):
docs.append('Args:\n') docs.append('Args:\n')
# Skip undocumented params and params common to all methods. # Skip undocumented params and params common to all methods.
skip_parameters = rootDesc.get('parameters', {}).keys() skip_parameters = list(rootDesc.get('parameters', {}).keys())
skip_parameters.extend(STACK_QUERY_PARAMETERS) skip_parameters.extend(STACK_QUERY_PARAMETERS)
all_args = parameters.argmap.keys() all_args = list(parameters.argmap.keys())
args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])] args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
# Move body to the front of the line. # Move body to the front of the line.
@ -839,14 +859,14 @@ Returns:
request = copy.copy(previous_request) request = copy.copy(previous_request)
pageToken = previous_response['nextPageToken'] pageToken = previous_response['nextPageToken']
parsed = list(urlparse.urlparse(request.uri)) parsed = list(urlparse(request.uri))
q = parse_qsl(parsed[4]) q = parse_qsl(parsed[4])
# Find and remove old 'pageToken' value from URI # Find and remove old 'pageToken' value from URI
newq = [(key, value) for (key, value) in q if key != 'pageToken'] newq = [(key, value) for (key, value) in q if key != 'pageToken']
newq.append(('pageToken', pageToken)) newq.append(('pageToken', pageToken))
parsed[4] = urllib.urlencode(newq) parsed[4] = urlencode(newq)
uri = urlparse.urlunparse(parsed) uri = urlunparse(parsed)
request.uri = uri request.uri = uri
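
The generated *_next methods rebuild the request URI around the new nextPageToken, and with six.moves the same parse/filter/re-encode round trip runs on both Python versions. A self-contained sketch (URI and token are illustrative):

from six.moves.urllib.parse import urlparse, parse_qsl, urlencode, urlunparse

uri = 'https://www.googleapis.com/drive/v2/files?maxResults=10&pageToken=OLD'
page_token = 'NEW_TOKEN'

parsed = list(urlparse(uri))
query = parse_qsl(parsed[4])                               # [('maxResults', '10'), ('pageToken', 'OLD')]
query = [(k, v) for (k, v) in query if k != 'pageToken']   # drop the stale token
query.append(('pageToken', page_token))
parsed[4] = urlencode(query)
print(urlunparse(parsed))
# https://www.googleapis.com/drive/v2/files?maxResults=10&pageToken=NEW_TOKEN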
@ -932,7 +952,7 @@ class Resource(object):
def _add_basic_methods(self, resourceDesc, rootDesc, schema): def _add_basic_methods(self, resourceDesc, rootDesc, schema):
# Add basic methods to Resource # Add basic methods to Resource
if 'methods' in resourceDesc: if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems(): for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
fixedMethodName, method = createMethod( fixedMethodName, method = createMethod(
methodName, methodDesc, rootDesc, schema) methodName, methodDesc, rootDesc, schema)
self._set_dynamic_attr(fixedMethodName, self._set_dynamic_attr(fixedMethodName,
@ -971,7 +991,7 @@ class Resource(object):
return (methodName, methodResource) return (methodName, methodResource)
for methodName, methodDesc in resourceDesc['resources'].iteritems(): for methodName, methodDesc in six.iteritems(resourceDesc['resources']):
fixedMethodName, method = createResourceMethod(methodName, methodDesc) fixedMethodName, method = createResourceMethod(methodName, methodDesc)
self._set_dynamic_attr(fixedMethodName, self._set_dynamic_attr(fixedMethodName,
method.__get__(self, self.__class__)) method.__get__(self, self.__class__))
@ -981,7 +1001,7 @@ class Resource(object):
# Look for response bodies in schema that contain nextPageToken, and methods # Look for response bodies in schema that contain nextPageToken, and methods
# that take a pageToken parameter. # that take a pageToken parameter.
if 'methods' in resourceDesc: if 'methods' in resourceDesc:
for methodName, methodDesc in resourceDesc['methods'].iteritems(): for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
if 'response' in methodDesc: if 'response' in methodDesc:
responseSchema = methodDesc['response'] responseSchema = methodDesc['response']
if '$ref' in responseSchema: if '$ref' in responseSchema:

View File

@ -1,5 +1,3 @@
#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved. # Copyright 2014 Google Inc. All Rights Reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
@ -19,6 +17,7 @@
All exceptions defined by the library All exceptions defined by the library
should be defined in this file. should be defined in this file.
""" """
from __future__ import absolute_import
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'

View File

@ -18,37 +18,41 @@ The classes implement a command pattern, with every
object supporting an execute() method that does the object supporting an execute() method that does the
actual HTTP request. actual HTTP request.
""" """
from __future__ import absolute_import
import six
from six.moves import range
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import StringIO from six import BytesIO, StringIO
from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
import base64 import base64
import copy import copy
import gzip import gzip
import httplib2 import httplib2
import json import json
import logging import logging
import mimeparse
import mimetypes import mimetypes
import os import os
import random import random
import sys import sys
import time import time
import urllib
import urlparse
import uuid import uuid
from email.generator import Generator from email.generator import Generator
from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser from email.parser import FeedParser
from errors import BatchError
from errors import HttpError from googleapiclient import mimeparse
from errors import InvalidChunkSizeError from googleapiclient.errors import BatchError
from errors import ResumableUploadError from googleapiclient.errors import HttpError
from errors import UnexpectedBodyError from googleapiclient.errors import InvalidChunkSizeError
from errors import UnexpectedMethodError from googleapiclient.errors import ResumableUploadError
from model import JsonModel from googleapiclient.errors import UnexpectedBodyError
from googleapiclient.errors import UnexpectedMethodError
from googleapiclient.model import JsonModel
from oauth2client import util from oauth2client import util
@ -259,7 +263,7 @@ class MediaIoBaseUpload(MediaUpload):
Note that the Python file object is compatible with io.Base and can be used Note that the Python file object is compatible with io.Base and can be used
with this class also. with this class also.
fh = io.BytesIO('...Some data to upload...') fh = BytesIO('...Some data to upload...')
media = MediaIoBaseUpload(fh, mimetype='image/png', media = MediaIoBaseUpload(fh, mimetype='image/png',
chunksize=1024*1024, resumable=True) chunksize=1024*1024, resumable=True)
farm.animals().insert( farm.animals().insert(
@ -465,7 +469,7 @@ class MediaInMemoryUpload(MediaIoBaseUpload):
resumable: bool, True if this is a resumable upload. False means upload resumable: bool, True if this is a resumable upload. False means upload
in a single request. in a single request.
""" """
fd = StringIO.StringIO(body) fd = BytesIO(body)
super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize, super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
resumable=resumable) resumable=resumable)
@ -538,7 +542,7 @@ class MediaIoBaseDownload(object):
} }
http = self._request.http http = self._request.http
for retry_num in xrange(num_retries + 1): for retry_num in range(num_retries + 1):
if retry_num > 0: if retry_num > 0:
self._sleep(self._rand() * 2**retry_num) self._sleep(self._rand() * 2**retry_num)
logging.warning( logging.warning(
@ -559,6 +563,8 @@ class MediaIoBaseDownload(object):
content_range = resp['content-range'] content_range = resp['content-range']
length = content_range.rsplit('/', 1)[1] length = content_range.rsplit('/', 1)[1]
self._total_size = int(length) self._total_size = int(length)
elif 'content-length' in resp:
self._total_size = int(resp['content-length'])
if self._progress == self._total_size: if self._progress == self._total_size:
self._done = True self._done = True
@ -697,8 +703,8 @@ class HttpRequest(object):
self.method = 'POST' self.method = 'POST'
self.headers['x-http-method-override'] = 'GET' self.headers['x-http-method-override'] = 'GET'
self.headers['content-type'] = 'application/x-www-form-urlencoded' self.headers['content-type'] = 'application/x-www-form-urlencoded'
parsed = urlparse.urlparse(self.uri) parsed = urlparse(self.uri)
self.uri = urlparse.urlunparse( self.uri = urlunparse(
(parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
None) None)
) )
@ -706,7 +712,7 @@ class HttpRequest(object):
self.headers['content-length'] = str(len(self.body)) self.headers['content-length'] = str(len(self.body))
# Handle retries for server-side errors. # Handle retries for server-side errors.
for retry_num in xrange(num_retries + 1): for retry_num in range(num_retries + 1):
if retry_num > 0: if retry_num > 0:
self._sleep(self._rand() * 2**retry_num) self._sleep(self._rand() * 2**retry_num)
logging.warning('Retry #%d for request: %s %s, following status: %d' logging.warning('Retry #%d for request: %s %s, following status: %d'
@ -789,7 +795,7 @@ class HttpRequest(object):
start_headers['X-Upload-Content-Length'] = size start_headers['X-Upload-Content-Length'] = size
start_headers['content-length'] = str(self.body_size) start_headers['content-length'] = str(self.body_size)
for retry_num in xrange(num_retries + 1): for retry_num in range(num_retries + 1):
if retry_num > 0: if retry_num > 0:
self._sleep(self._rand() * 2**retry_num) self._sleep(self._rand() * 2**retry_num)
logging.warning( logging.warning(
@ -854,7 +860,7 @@ class HttpRequest(object):
'Content-Length': str(chunk_end - self.resumable_progress + 1) 'Content-Length': str(chunk_end - self.resumable_progress + 1)
} }
for retry_num in xrange(num_retries + 1): for retry_num in range(num_retries + 1):
if retry_num > 0: if retry_num > 0:
self._sleep(self._rand() * 2**retry_num) self._sleep(self._rand() * 2**retry_num)
logging.warning( logging.warning(
@ -1046,7 +1052,7 @@ class BatchHttpRequest(object):
if self._base_id is None: if self._base_id is None:
self._base_id = uuid.uuid4() self._base_id = uuid.uuid4()
return '<%s+%s>' % (self._base_id, urllib.quote(id_)) return '<%s+%s>' % (self._base_id, quote(id_))
def _header_to_id(self, header): def _header_to_id(self, header):
"""Convert a Content-ID header value to an id. """Convert a Content-ID header value to an id.
@ -1069,7 +1075,7 @@ class BatchHttpRequest(object):
raise BatchError("Invalid value for Content-ID: %s" % header) raise BatchError("Invalid value for Content-ID: %s" % header)
base, id_ = header[1:-1].rsplit('+', 1) base, id_ = header[1:-1].rsplit('+', 1)
return urllib.unquote(id_) return unquote(id_)
def _serialize_request(self, request): def _serialize_request(self, request):
"""Convert an HttpRequest object into a string. """Convert an HttpRequest object into a string.
@ -1081,9 +1087,9 @@ class BatchHttpRequest(object):
The request as a string in application/http format. The request as a string in application/http format.
""" """
# Construct status line # Construct status line
parsed = urlparse.urlparse(request.uri) parsed = urlparse(request.uri)
request_line = urlparse.urlunparse( request_line = urlunparse(
(None, None, parsed.path, parsed.params, parsed.query, None) ('', '', parsed.path, parsed.params, parsed.query, '')
) )
status_line = request.method + ' ' + request_line + ' HTTP/1.1\n' status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
major, minor = request.headers.get('content-type', 'application/json').split('/') major, minor = request.headers.get('content-type', 'application/json').split('/')
@ -1098,7 +1104,7 @@ class BatchHttpRequest(object):
if 'content-type' in headers: if 'content-type' in headers:
del headers['content-type'] del headers['content-type']
for key, value in headers.iteritems(): for key, value in six.iteritems(headers):
msg[key] = value msg[key] = value
msg['Host'] = parsed.netloc msg['Host'] = parsed.netloc
msg.set_unixfrom(None) msg.set_unixfrom(None)
@ -1108,7 +1114,7 @@ class BatchHttpRequest(object):
msg['content-length'] = str(len(request.body)) msg['content-length'] = str(len(request.body))
# Serialize the mime message. # Serialize the mime message.
fp = StringIO.StringIO() fp = StringIO()
# maxheaderlen=0 means don't line wrap headers. # maxheaderlen=0 means don't line wrap headers.
g = Generator(fp, maxheaderlen=0) g = Generator(fp, maxheaderlen=0)
g.flatten(msg, unixfrom=False) g.flatten(msg, unixfrom=False)
@ -1118,7 +1124,7 @@ class BatchHttpRequest(object):
if request.body is None: if request.body is None:
body = body[:-2] body = body[:-2]
return status_line.encode('utf-8') + body return status_line + body
def _deserialize_response(self, payload): def _deserialize_response(self, payload):
"""Convert string into httplib2 response and content. """Convert string into httplib2 response and content.
@ -1231,7 +1237,7 @@ class BatchHttpRequest(object):
# encode the body: note that we can't use `as_string`, because # encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines. # it plays games with `From ` lines.
fp = StringIO.StringIO() fp = StringIO()
g = Generator(fp, mangle_from_=False) g = Generator(fp, mangle_from_=False)
g.flatten(message, unixfrom=False) g.flatten(message, unixfrom=False)
body = fp.getvalue() body = fp.getvalue()
@ -1328,7 +1334,7 @@ class BatchHttpRequest(object):
if resp.status >= 300: if resp.status >= 300:
raise HttpError(resp, content, uri=request.uri) raise HttpError(resp, content, uri=request.uri)
response = request.postproc(resp, content) response = request.postproc(resp, content)
except HttpError, e: except HttpError as e:
exception = e exception = e
if callback is not None: if callback is not None:
@ -1454,7 +1460,7 @@ class HttpMock(object):
if headers is None: if headers is None:
headers = {'status': '200 OK'} headers = {'status': '200 OK'}
if filename: if filename:
f = file(filename, 'r') f = open(filename, 'r')
self.data = f.read() self.data = f.read()
f.close() f.close()
else: else:

View File

@ -21,6 +21,9 @@ Contents:
- best_match(): Choose the mime-type with the highest quality ('q') - best_match(): Choose the mime-type with the highest quality ('q')
from a list of candidates. from a list of candidates.
""" """
from __future__ import absolute_import
from functools import reduce
import six
__version__ = '0.1.3' __version__ = '0.1.3'
__author__ = 'Joe Gregorio' __author__ = 'Joe Gregorio'
@ -68,7 +71,7 @@ def parse_media_range(range):
necessary. necessary.
""" """
(type, subtype, params) = parse_mime_type(range) (type, subtype, params) = parse_mime_type(range)
if not params.has_key('q') or not params['q'] or \ if 'q' not in params or not params['q'] or \
not float(params['q']) or float(params['q']) > 1\ not float(params['q']) or float(params['q']) > 1\
or float(params['q']) < 0: or float(params['q']) < 0:
params['q'] = '1' params['q'] = '1'
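
Replacing has_key() with the in operator leaves the behavior unchanged: a missing, empty, or out-of-range q parameter is normalized to '1'. A hedged usage sketch, assuming mimeparse's (type, subtype, params) return shape:

from googleapiclient import mimeparse

print(mimeparse.parse_media_range('application/json;q=0.5'))
# ('application', 'json', {'q': '0.5'})
print(mimeparse.parse_media_range('application/xml'))
# ('application', 'xml', {'q': '1'})   # missing q filled in as '1'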
@ -98,8 +101,8 @@ def fitness_and_quality_parsed(mime_type, parsed_ranges):
target_subtype == '*') target_subtype == '*')
if type_match and subtype_match: if type_match and subtype_match:
param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \ param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
target_params.iteritems() if key != 'q' and \ six.iteritems(target_params) if key != 'q' and \
params.has_key(key) and value == params[key]], 0) key in params and value == params[key]], 0)
fitness = (type == target_type) and 100 or 0 fitness = (type == target_type) and 100 or 0
fitness += (subtype == target_subtype) and 10 or 0 fitness += (subtype == target_subtype) and 10 or 0
fitness += param_matches fitness += param_matches

View File

@ -1,5 +1,3 @@
#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved. # Copyright 2014 Google Inc. All Rights Reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
@ -21,15 +19,18 @@ as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python for converting between the wire format and the Python
object representation. object representation.
""" """
from __future__ import absolute_import
import six
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json import json
import logging import logging
import urllib
from six.moves.urllib.parse import urlencode
from googleapiclient import __version__ from googleapiclient import __version__
from errors import HttpError from googleapiclient.errors import HttpError
dump_request_response = False dump_request_response = False
@ -106,11 +107,11 @@ class BaseModel(Model):
if dump_request_response: if dump_request_response:
logging.info('--request-start--') logging.info('--request-start--')
logging.info('-headers-start-') logging.info('-headers-start-')
for h, v in headers.iteritems(): for h, v in six.iteritems(headers):
logging.info('%s: %s', h, v) logging.info('%s: %s', h, v)
logging.info('-headers-end-') logging.info('-headers-end-')
logging.info('-path-parameters-start-') logging.info('-path-parameters-start-')
for h, v in path_params.iteritems(): for h, v in six.iteritems(path_params):
logging.info('%s: %s', h, v) logging.info('%s: %s', h, v)
logging.info('-path-parameters-end-') logging.info('-path-parameters-end-')
logging.info('body: %s', body) logging.info('body: %s', body)
@ -161,22 +162,22 @@ class BaseModel(Model):
if self.alt_param is not None: if self.alt_param is not None:
params.update({'alt': self.alt_param}) params.update({'alt': self.alt_param})
astuples = [] astuples = []
for key, value in params.iteritems(): for key, value in six.iteritems(params):
if type(value) == type([]): if type(value) == type([]):
for x in value: for x in value:
x = x.encode('utf-8') x = x.encode('utf-8')
astuples.append((key, x)) astuples.append((key, x))
else: else:
if getattr(value, 'encode', False) and callable(value.encode): if isinstance(value, six.text_type) and callable(value.encode):
value = value.encode('utf-8') value = value.encode('utf-8')
astuples.append((key, value)) astuples.append((key, value))
return '?' + urllib.urlencode(astuples) return '?' + urlencode(astuples)
def _log_response(self, resp, content): def _log_response(self, resp, content):
"""Logs debugging information about the response if requested.""" """Logs debugging information about the response if requested."""
if dump_request_response: if dump_request_response:
logging.info('--response-start--') logging.info('--response-start--')
for h, v in resp.iteritems(): for h, v in six.iteritems(resp):
logging.info('%s: %s', h, v) logging.info('%s: %s', h, v)
if content: if content:
logging.info(content) logging.info(content)
@ -257,7 +258,10 @@ class JsonModel(BaseModel):
return json.dumps(body_value) return json.dumps(body_value)
def deserialize(self, content): def deserialize(self, content):
content = content.decode('utf-8') try:
content = content.decode('utf-8')
except AttributeError:
pass
body = json.loads(content) body = json.loads(content)
if self._data_wrapper and isinstance(body, dict) and 'data' in body: if self._data_wrapper and isinstance(body, dict) and 'data' in body:
body = body['data'] body = body['data']
@ -361,7 +365,7 @@ def makepatch(original, modified):
body=makepatch(original, item)).execute() body=makepatch(original, item)).execute()
""" """
patch = {} patch = {}
for key, original_value in original.iteritems(): for key, original_value in six.iteritems(original):
modified_value = modified.get(key, None) modified_value = modified.get(key, None)
if modified_value is None: if modified_value is None:
# Use None to signal that the element is deleted # Use None to signal that the element is deleted

View File

@ -16,6 +16,7 @@
Consolidates a lot of code commonly repeated in sample applications. Consolidates a lot of code commonly repeated in sample applications.
""" """
from __future__ import absolute_import
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['init'] __all__ = ['init']
@ -94,9 +95,9 @@ def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_f
service = discovery.build(name, version, http=http) service = discovery.build(name, version, http=http)
else: else:
# Construct a service object using a local discovery document file. # Construct a service object using a local discovery document file.
with open(discovery_filename) as discovery_file: with open(discovery_filename) as discovery_file:
service = discovery.build_from_document( service = discovery.build_from_document(
discovery_file.read(), discovery_file.read(),
base='https://www.googleapis.com/', base='https://www.googleapis.com/',
http=http) http=http)
return (service, flags) return (service, flags)

View File

@ -56,6 +56,8 @@ For example, given the schema:
The constructor takes a discovery document in which to look up named schema. The constructor takes a discovery document in which to look up named schema.
""" """
from __future__ import absolute_import
import six
# TODO(jcgregorio) support format, enum, minimum, maximum # TODO(jcgregorio) support format, enum, minimum, maximum
@ -249,7 +251,7 @@ class _SchemaToStruct(object):
self.emitEnd('{', schema.get('description', '')) self.emitEnd('{', schema.get('description', ''))
self.indent() self.indent()
if 'properties' in schema: if 'properties' in schema:
for pname, pschema in schema.get('properties', {}).iteritems(): for pname, pschema in six.iteritems(schema.get('properties', {})):
self.emitBegin('"%s": ' % pname) self.emitBegin('"%s": ' % pname)
self._to_str_impl(pschema) self._to_str_impl(pschema)
elif 'additionalProperties' in schema: elif 'additionalProperties' in schema:

View File

@ -1,6 +1,6 @@
"""Client library for using OAuth2, especially with Google APIs.""" """Client library for using OAuth2, especially with Google APIs."""
__version__ = '1.3.1' __version__ = '1.4.7'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code' GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'

View File

@ -571,16 +571,14 @@ class OAuth2Decorator(object):
Instantiate and then use with oauth_required or oauth_aware Instantiate and then use with oauth_required or oauth_aware
as decorators on webapp.RequestHandler methods. as decorators on webapp.RequestHandler methods.
Example: ::
decorator = OAuth2Decorator( decorator = OAuth2Decorator(
client_id='837...ent.com', client_id='837...ent.com',
client_secret='Qh...wwI', client_secret='Qh...wwI',
scope='https://www.googleapis.com/auth/plus') scope='https://www.googleapis.com/auth/plus')
class MainHandler(webapp.RequestHandler): class MainHandler(webapp.RequestHandler):
@decorator.oauth_required @decorator.oauth_required
def get(self): def get(self):
http = decorator.http() http = decorator.http()
@ -847,7 +845,8 @@ class OAuth2Decorator(object):
def callback_handler(self): def callback_handler(self):
"""RequestHandler for the OAuth 2.0 redirect callback. """RequestHandler for the OAuth 2.0 redirect callback.
Usage: Usage::
app = webapp.WSGIApplication([ app = webapp.WSGIApplication([
('/index', MyIndexHandler), ('/index', MyIndexHandler),
..., ...,
@ -910,20 +909,19 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
Uses a clientsecrets file as the source for all the information when Uses a clientsecrets file as the source for all the information when
constructing an OAuth2Decorator. constructing an OAuth2Decorator.
Example: ::
decorator = OAuth2DecoratorFromClientSecrets( decorator = OAuth2DecoratorFromClientSecrets(
os.path.join(os.path.dirname(__file__), 'client_secrets.json') os.path.join(os.path.dirname(__file__), 'client_secrets.json')
scope='https://www.googleapis.com/auth/plus') scope='https://www.googleapis.com/auth/plus')
class MainHandler(webapp.RequestHandler): class MainHandler(webapp.RequestHandler):
@decorator.oauth_required @decorator.oauth_required
def get(self): def get(self):
http = decorator.http() http = decorator.http()
# http is authorized with the user's Credentials and can be used # http is authorized with the user's Credentials and can be used
# in API calls # in API calls
""" """
@util.positional(3) @util.positional(3)

View File

@ -26,10 +26,11 @@ import datetime
import json import json
import logging import logging
import os import os
import socket
import sys import sys
import time import time
import urllib import six
import urlparse from six.moves import urllib
import httplib2 import httplib2
from oauth2client import clientsecrets from oauth2client import clientsecrets
@ -90,6 +91,15 @@ ADC_HELP_MSG = (
AccessTokenInfo = collections.namedtuple( AccessTokenInfo = collections.namedtuple(
'AccessTokenInfo', ['access_token', 'expires_in']) 'AccessTokenInfo', ['access_token', 'expires_in'])
DEFAULT_ENV_NAME = 'UNKNOWN'
# If set to True, _get_environment avoids the GCE check (_detect_gce_environment)
NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False')
class SETTINGS(object):
"""Settings namespace for globally defined values."""
env_name = None
class Error(Exception): class Error(Exception):
"""Base error for this module.""" """Base error for this module."""
@ -231,6 +241,9 @@ class Credentials(object):
# Add in information we will need later to reconstitute this instance. # Add in information we will need later to reconstitute this instance.
d['_class'] = t.__name__ d['_class'] = t.__name__
d['_module'] = t.__module__ d['_module'] = t.__module__
for key, val in d.items():
if isinstance(val, bytes):
d[key] = val.decode('utf-8')
return json.dumps(d) return json.dumps(d)
def to_json(self): def to_json(self):
@ -254,6 +267,8 @@ class Credentials(object):
An instance of the subclass of Credentials that was serialized with An instance of the subclass of Credentials that was serialized with
to_json(). to_json().
""" """
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s) data = json.loads(s)
# Find and call the right classmethod from_json() to restore the object. # Find and call the right classmethod from_json() to restore the object.
module = data['_module'] module = data['_module']
@ -398,8 +413,10 @@ def clean_headers(headers):
""" """
clean = {} clean = {}
try: try:
for k, v in headers.iteritems(): for k, v in six.iteritems(headers):
clean[str(k)] = str(v) clean_k = k if isinstance(k, bytes) else str(k).encode('ascii')
clean_v = v if isinstance(v, bytes) else str(v).encode('ascii')
clean[clean_k] = clean_v
except UnicodeEncodeError: except UnicodeEncodeError:
raise NonAsciiHeaderError(k + ': ' + v) raise NonAsciiHeaderError(k + ': ' + v)
return clean return clean
@ -415,11 +432,11 @@ def _update_query_params(uri, params):
Returns: Returns:
The same URI but with the new query parameters added. The same URI but with the new query parameters added.
""" """
parts = urlparse.urlparse(uri) parts = urllib.parse.urlparse(uri)
query_params = dict(urlparse.parse_qsl(parts.query)) query_params = dict(urllib.parse.parse_qsl(parts.query))
query_params.update(params) query_params.update(params)
new_parts = parts._replace(query=urllib.urlencode(query_params)) new_parts = parts._replace(query=urllib.parse.urlencode(query_params))
return urlparse.urlunparse(new_parts) return urllib.parse.urlunparse(new_parts)
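
_update_query_params() now reaches urllib through six.moves so the same code runs under both interpreters. A sketch of the parse, update, and re-encode flow it performs (URI and params are illustrative):

from six.moves import urllib

uri = 'https://accounts.google.com/o/oauth2/auth?response_type=code'
params = {'approval_prompt': 'force'}

parts = urllib.parse.urlparse(uri)
query = dict(urllib.parse.parse_qsl(parts.query))   # {'response_type': 'code'}
query.update(params)
new_parts = parts._replace(query=urllib.parse.urlencode(query))
print(urllib.parse.urlunparse(new_parts))
# e.g. https://accounts.google.com/o/oauth2/auth?response_type=code&approval_prompt=force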
class OAuth2Credentials(Credentials): class OAuth2Credentials(Credentials):
@ -487,13 +504,13 @@ class OAuth2Credentials(Credentials):
it. it.
Args: Args:
http: An instance of httplib2.Http http: An instance of ``httplib2.Http`` or something that acts
or something that acts like it. like it.
Returns: Returns:
A modified instance of http that was passed in. A modified instance of http that was passed in.
Example: Example::
h = httplib2.Http() h = httplib2.Http()
h = credentials.authorize(h) h = credentials.authorize(h)
@ -503,6 +520,7 @@ class OAuth2Credentials(Credentials):
signing. So instead we have to overload 'request' with a closure signing. So instead we have to overload 'request' with a closure
that adds in the Authorization header and then calls the original that adds in the Authorization header and then calls the original
version of 'request()'. version of 'request()'.
""" """
request_orig = http.request request_orig = http.request
@ -589,6 +607,8 @@ class OAuth2Credentials(Credentials):
Returns: Returns:
An instance of a Credentials subclass. An instance of a Credentials subclass.
""" """
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s) data = json.loads(s)
if (data.get('token_expiry') and if (data.get('token_expiry') and
not isinstance(data['token_expiry'], datetime.datetime)): not isinstance(data['token_expiry'], datetime.datetime)):
@ -691,7 +711,7 @@ class OAuth2Credentials(Credentials):
def _generate_refresh_request_body(self): def _generate_refresh_request_body(self):
"""Generate the body that will be used in the refresh request.""" """Generate the body that will be used in the refresh request."""
body = urllib.urlencode({ body = urllib.parse.urlencode({
'grant_type': 'refresh_token', 'grant_type': 'refresh_token',
'client_id': self.client_id, 'client_id': self.client_id,
'client_secret': self.client_secret, 'client_secret': self.client_secret,
@ -755,8 +775,9 @@ class OAuth2Credentials(Credentials):
logger.info('Refreshing access_token') logger.info('Refreshing access_token')
resp, content = http_request( resp, content = http_request(
self.token_uri, method='POST', body=body, headers=headers) self.token_uri, method='POST', body=body, headers=headers)
if six.PY3 and isinstance(content, bytes):
content = content.decode('utf-8')
if resp.status == 200: if resp.status == 200:
# TODO(jcgregorio) Raise an error if loads fails?
d = json.loads(content) d = json.loads(content)
self.token_response = d self.token_response = d
self.access_token = d['access_token'] self.access_token = d['access_token']
@ -785,21 +806,21 @@ class OAuth2Credentials(Credentials):
self.invalid = True self.invalid = True
if self.store: if self.store:
self.store.locked_put(self) self.store.locked_put(self)
except StandardError: except (TypeError, ValueError):
pass pass
raise AccessTokenRefreshError(error_msg) raise AccessTokenRefreshError(error_msg)
def _revoke(self, http_request): def _revoke(self, http_request):
"""Revokes the refresh_token and deletes the store if available. """Revokes this credential and deletes the stored copy (if it exists).
Args: Args:
http_request: callable, a callable that matches the method signature of http_request: callable, a callable that matches the method signature of
httplib2.Http.request, used to make the revoke request. httplib2.Http.request, used to make the revoke request.
""" """
self._do_revoke(http_request, self.refresh_token) self._do_revoke(http_request, self.refresh_token or self.access_token)
def _do_revoke(self, http_request, token): def _do_revoke(self, http_request, token):
"""Revokes the credentials and deletes the store if available. """Revokes this credential and deletes the stored copy (if it exists).
Args: Args:
http_request: callable, a callable that matches the method signature of http_request: callable, a callable that matches the method signature of
@ -822,7 +843,7 @@ class OAuth2Credentials(Credentials):
d = json.loads(content) d = json.loads(content)
if 'error' in d: if 'error' in d:
error_msg = d['error'] error_msg = d['error']
except StandardError: except (TypeError, ValueError):
pass pass
raise TokenRevokeError(error_msg) raise TokenRevokeError(error_msg)
@ -844,7 +865,8 @@ class AccessTokenCredentials(OAuth2Credentials):
AccessTokenCredentials objects may be safely pickled and unpickled. AccessTokenCredentials objects may be safely pickled and unpickled.
Usage: Usage::
credentials = AccessTokenCredentials('<an access token>', credentials = AccessTokenCredentials('<an access token>',
'my-user-agent/1.0') 'my-user-agent/1.0')
http = httplib2.Http() http = httplib2.Http()
@ -880,10 +902,12 @@ class AccessTokenCredentials(OAuth2Credentials):
@classmethod @classmethod
def from_json(cls, s): def from_json(cls, s):
if six.PY3 and isinstance(s, bytes):
s = s.decode('utf-8')
data = json.loads(s) data = json.loads(s)
retval = AccessTokenCredentials( retval = AccessTokenCredentials(
data['access_token'], data['access_token'],
data['user_agent']) data['user_agent'])
return retval return retval
def _refresh(self, http_request): def _refresh(self, http_request):
@ -900,36 +924,60 @@ class AccessTokenCredentials(OAuth2Credentials):
self._do_revoke(http_request, self.access_token) self._do_revoke(http_request, self.access_token)
_env_name = None def _detect_gce_environment(urlopen=None):
"""Determine if the current environment is Compute Engine.
Args:
urlopen: Optional argument. Function used to open a connection to a URL.
Returns:
Boolean indicating whether or not the current environment is Google
Compute Engine.
"""
urlopen = urlopen or urllib.request.urlopen
# Note: the explicit `timeout` below is a workaround. The underlying
# issue is that resolving an unknown host on some networks will take
# 20-30 seconds; making this timeout short fixes the issue, but
# could lead to false negatives in the event that we are on GCE, but
# the metadata resolution was particularly slow. The latter case is
# "unlikely".
try:
response = urlopen('http://169.254.169.254/', timeout=1)
return response.info().get('Metadata-Flavor', '') == 'Google'
except socket.timeout:
logger.info('Timeout attempting to reach GCE metadata service.')
return False
except urllib.error.URLError as e:
if isinstance(getattr(e, 'reason', None), socket.timeout):
logger.info('Timeout attempting to reach GCE metadata service.')
return False
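
Taking urlopen as a parameter keeps the metadata probe testable without network access. A hedged sketch of driving the check defined above with a stubbed opener (the stub classes are assumptions made for illustration, not part of the library):

# Hypothetical stub standing in for urllib's response object (illustration only).
class _FakeMetadataResponse(object):
    def __init__(self, headers):
        self._headers = headers

    def info(self):
        return self._headers          # a dict is enough: the probe only calls .get()

def _fake_urlopen(url, timeout=None):
    # Pretend the metadata server answered and identified itself as Google.
    return _FakeMetadataResponse({'Metadata-Flavor': 'Google'})

print(_detect_gce_environment(urlopen=_fake_urlopen))    # True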
def _get_environment(urllib2_urlopen=None): def _get_environment(urlopen=None):
"""Detect the environment the code is being run on.""" """Detect the environment the code is being run on.
global _env_name Args:
urlopen: Optional argument. Function used to open a connection to a URL.
if _env_name: Returns:
return _env_name The value of SETTINGS.env_name after being set. If already
set, simply returns the value.
"""
if SETTINGS.env_name is not None:
return SETTINGS.env_name
# None is an unset value, not the default.
SETTINGS.env_name = DEFAULT_ENV_NAME
server_software = os.environ.get('SERVER_SOFTWARE', '') server_software = os.environ.get('SERVER_SOFTWARE', '')
if server_software.startswith('Google App Engine/'): if server_software.startswith('Google App Engine/'):
_env_name = 'GAE_PRODUCTION' SETTINGS.env_name = 'GAE_PRODUCTION'
elif server_software.startswith('Development/'): elif server_software.startswith('Development/'):
_env_name = 'GAE_LOCAL' SETTINGS.env_name = 'GAE_LOCAL'
else: elif NO_GCE_CHECK != 'True' and _detect_gce_environment(urlopen=urlopen):
import urllib2 SETTINGS.env_name = 'GCE_PRODUCTION'
try:
if urllib2_urlopen is None:
urllib2_urlopen = urllib2.urlopen
response = urllib2_urlopen('http://metadata.google.internal')
if any('Metadata-Flavor: Google' in h for h in response.info().headers):
_env_name = 'GCE_PRODUCTION'
else:
_env_name = 'UNKNOWN'
except urllib2.URLError:
_env_name = 'UNKNOWN'
return _env_name return SETTINGS.env_name
class GoogleCredentials(OAuth2Credentials): class GoogleCredentials(OAuth2Credentials):
@ -943,36 +991,19 @@ class GoogleCredentials(OAuth2Credentials):
Here is an example of how to use the Application Default Credentials for a Here is an example of how to use the Application Default Credentials for a
service that requires authentication: service that requires authentication:
<code> from googleapiclient.discovery import build
from googleapiclient.discovery import build from oauth2client.client import GoogleCredentials
from oauth2client.client import GoogleCredentials
PROJECT = 'bamboo-machine-422' # replace this with one of your projects credentials = GoogleCredentials.get_application_default()
ZONE = 'us-central1-a' # replace this with the zone you care about service = build('compute', 'v1', credentials=credentials)
credentials = GoogleCredentials.get_application_default() PROJECT = 'bamboo-machine-422'
service = build('compute', 'v1', credentials=credentials) ZONE = 'us-central1-a'
request = service.instances().list(project=PROJECT, zone=ZONE)
response = request.execute()
request = service.instances().list(project=PROJECT, zone=ZONE) print(response)
response = request.execute() """
print response
</code>
A service that does not require authentication does not need credentials
to be passed in:
<code>
from googleapiclient.discovery import build
service = build('discovery', 'v1')
request = service.apis().list()
response = request.execute()
print response
</code>
"""
def __init__(self, access_token, client_id, client_secret, refresh_token, def __init__(self, access_token, client_id, client_secret, refresh_token,
token_expiry, token_uri, user_agent, token_expiry, token_uri, user_agent,
@ -1024,6 +1055,116 @@ class GoogleCredentials(OAuth2Credentials):
'refresh_token': self.refresh_token 'refresh_token': self.refresh_token
} }
@staticmethod
def _implicit_credentials_from_gae(env_name=None):
"""Attempts to get implicit credentials in Google App Engine env.
If the current environment is not detected as App Engine, returns None,
indicating no Google App Engine credentials can be detected from the
current environment.
Args:
env_name: String, indicating current environment.
Returns:
None, if not in GAE, else an appengine.AppAssertionCredentials object.
"""
env_name = env_name or _get_environment()
if env_name not in ('GAE_PRODUCTION', 'GAE_LOCAL'):
return None
return _get_application_default_credential_GAE()
@staticmethod
def _implicit_credentials_from_gce(env_name=None):
"""Attempts to get implicit credentials in Google Compute Engine env.
If the current environment is not detected as Compute Engine, returns None,
indicating no Google Compute Engine credentials can be detected from the
current environment.
Args:
env_name: String, indicating current environment.
Returns:
None, if not in GCE, else a gce.AppAssertionCredentials object.
"""
env_name = env_name or _get_environment()
if env_name != 'GCE_PRODUCTION':
return None
return _get_application_default_credential_GCE()
@staticmethod
def _implicit_credentials_from_files(env_name=None):
"""Attempts to get implicit credentials from local credential files.
First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
is set with a filename and then falls back to a configuration file (the
"well known" file) associated with the 'gcloud' command line tool.
Args:
env_name: Unused argument.
Returns:
Credentials object associated with the GOOGLE_APPLICATION_CREDENTIALS
file or the "well known" file if either exist. If neither file is
define, returns None, indicating no credentials from a file can
detected from the current environment.
"""
credentials_filename = _get_environment_variable_file()
if not credentials_filename:
credentials_filename = _get_well_known_file()
if os.path.isfile(credentials_filename):
extra_help = (' (produced automatically when running'
' "gcloud auth login" command)')
else:
credentials_filename = None
else:
extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
' environment variable)')
if not credentials_filename:
return
try:
return _get_application_default_credential_from_file(credentials_filename)
except (ApplicationDefaultCredentialsError, ValueError) as error:
_raise_exception_for_reading_json(credentials_filename, extra_help, error)
@classmethod
def _get_implicit_credentials(cls):
"""Gets credentials implicitly from the environment.
Checks environment in order of precedence:
- Google App Engine (production and testing)
- Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to
a file with stored credentials information.
- Stored "well known" file associated with `gcloud` command line tool.
- Google Compute Engine production environment.
Exceptions:
ApplicationDefaultCredentialsError: raised when the credentials fail
to be retrieved.
"""
env_name = _get_environment()
# Environ checks (in order). Assumes each checker takes `env_name`
# as a kwarg.
environ_checkers = [
cls._implicit_credentials_from_gae,
cls._implicit_credentials_from_files,
cls._implicit_credentials_from_gce,
]
for checker in environ_checkers:
credentials = checker(env_name=env_name)
if credentials is not None:
return credentials
# If no credentials, fail.
raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
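
_get_implicit_credentials() walks the checkers in the order its docstring lists, and GoogleCredentials.get_application_default() now simply delegates to it. A sketch of the common caller flows (the key-file path is a placeholder):

import os
from oauth2client.client import GoogleCredentials

# Outside App Engine / Compute Engine, point the environment variable at a
# stored credentials file before asking for Application Default Credentials.
# The path below is a placeholder; the call raises
# ApplicationDefaultCredentialsError if nothing can be resolved.
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '/path/to/stored_credentials.json'
credentials = GoogleCredentials.get_application_default()

# On GAE or GCE, or with the gcloud "well known" file present, the same call
# resolves credentials without any environment variable.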
@staticmethod @staticmethod
def get_application_default(): def get_application_default():
"""Get the Application Default Credentials for the current environment. """Get the Application Default Credentials for the current environment.
@ -1032,42 +1173,7 @@ class GoogleCredentials(OAuth2Credentials):
ApplicationDefaultCredentialsError: raised when the credentials fail ApplicationDefaultCredentialsError: raised when the credentials fail
to be retrieved. to be retrieved.
""" """
return GoogleCredentials._get_implicit_credentials()
env_name = _get_environment()
if env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'):
# if we are running inside Google App Engine
# there is no need to look for credentials in local files
application_default_credential_filename = None
well_known_file = None
else:
application_default_credential_filename = _get_environment_variable_file()
well_known_file = _get_well_known_file()
if not os.path.isfile(well_known_file):
well_known_file = None
if application_default_credential_filename:
try:
return _get_application_default_credential_from_file(
application_default_credential_filename)
except (ApplicationDefaultCredentialsError, ValueError) as error:
extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
' environment variable)')
_raise_exception_for_reading_json(
application_default_credential_filename, extra_help, error)
elif well_known_file:
try:
return _get_application_default_credential_from_file(well_known_file)
except (ApplicationDefaultCredentialsError, ValueError) as error:
extra_help = (' (produced automatically when running'
' "gcloud auth login" command)')
_raise_exception_for_reading_json(well_known_file, extra_help, error)
elif env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'):
return _get_application_default_credential_GAE()
elif env_name == 'GCE_PRODUCTION':
return _get_application_default_credential_GCE()
else:
raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
@staticmethod @staticmethod
def from_stream(credential_filename): def from_stream(credential_filename):
@ -1164,16 +1270,14 @@ def _get_well_known_file():
return default_config_path return default_config_path
def _get_application_default_credential_from_file( def _get_application_default_credential_from_file(filename):
application_default_credential_filename):
"""Build the Application Default Credentials from file.""" """Build the Application Default Credentials from file."""
import service_account from oauth2client import service_account
# read the credentials from the file # read the credentials from the file
with open(application_default_credential_filename) as ( with open(filename) as file_obj:
application_default_credential): client_credentials = json.load(file_obj)
client_credentials = json.load(application_default_credential)
credentials_type = client_credentials.get('type') credentials_type = client_credentials.get('type')
if credentials_type == AUTHORIZED_USER: if credentials_type == AUTHORIZED_USER:
@ -1274,7 +1378,7 @@ class AssertionCredentials(GoogleCredentials):
def _generate_refresh_request_body(self): def _generate_refresh_request_body(self):
assertion = self._generate_assertion() assertion = self._generate_assertion()
body = urllib.urlencode({ body = urllib.parse.urlencode({
'assertion': assertion, 'assertion': assertion,
'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer', 'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
}) })
@ -1363,6 +1467,8 @@ class SignedJwtAssertionCredentials(AssertionCredentials):
# Keep base64 encoded so it can be stored in JSON. # Keep base64 encoded so it can be stored in JSON.
self.private_key = base64.b64encode(private_key) self.private_key = base64.b64encode(private_key)
if isinstance(self.private_key, six.text_type):
self.private_key = self.private_key.encode('utf-8')
self.private_key_password = private_key_password self.private_key_password = private_key_password
self.service_account_name = service_account_name self.service_account_name = service_account_name
@ -1386,7 +1492,7 @@ class SignedJwtAssertionCredentials(AssertionCredentials):
def _generate_assertion(self): def _generate_assertion(self):
"""Generate the assertion that will be used in the request.""" """Generate the assertion that will be used in the request."""
now = long(time.time()) now = int(time.time())
payload = { payload = {
'aud': self.token_uri, 'aud': self.token_uri,
'scope': self.scope, 'scope': self.scope,
@ -1435,7 +1541,7 @@ def verify_id_token(id_token, audience, http=None,
resp, content = http.request(cert_uri) resp, content = http.request(cert_uri)
if resp.status == 200: if resp.status == 200:
certs = json.loads(content) certs = json.loads(content.decode('utf-8'))
return crypt.verify_signed_jwt_with_certs(id_token, certs, audience) return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
else: else:
raise VerifyJwtTokenError('Status code: %d' % resp.status) raise VerifyJwtTokenError('Status code: %d' % resp.status)
@ -1443,8 +1549,9 @@ def verify_id_token(id_token, audience, http=None,
def _urlsafe_b64decode(b64string): def _urlsafe_b64decode(b64string):
# Guard against unicode strings, which base64 can't handle. # Guard against unicode strings, which base64 can't handle.
b64string = b64string.encode('ascii') if isinstance(b64string, six.text_type):
padded = b64string + '=' * (4 - len(b64string) % 4) b64string = b64string.encode('ascii')
padded = b64string + b'=' * (4 - len(b64string) % 4)
return base64.urlsafe_b64decode(padded) return base64.urlsafe_b64decode(padded)
@ -1454,18 +1561,21 @@ def _extract_id_token(id_token):
Does the extraction w/o checking the signature. Does the extraction w/o checking the signature.
Args: Args:
id_token: string, OAuth 2.0 id_token. id_token: string or bytestring, OAuth 2.0 id_token.
Returns: Returns:
object, The deserialized JSON payload. object, The deserialized JSON payload.
""" """
segments = id_token.split('.') if type(id_token) == bytes:
segments = id_token.split(b'.')
else:
segments = id_token.split(u'.')
if len(segments) != 3: if len(segments) != 3:
raise VerifyJwtTokenError( raise VerifyJwtTokenError(
'Wrong number of segments in token: %s' % id_token) 'Wrong number of segments in token: %s' % id_token)
return json.loads(_urlsafe_b64decode(segments[1])) return json.loads(_urlsafe_b64decode(segments[1]).decode('utf-8'))
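A toy illustration of the segment split and the urlsafe-base64 padding used above (a hand-built, unsigned token, not a real Google id_token)::

  import base64
  import json

  def pad_and_decode(segment):
      # Mirrors _urlsafe_b64decode: work on bytes, restore stripped padding.
      if isinstance(segment, str):
          segment = segment.encode('ascii')
      return base64.urlsafe_b64decode(segment + b'=' * (4 - len(segment) % 4))

  claims = {'aud': 'my-client-id', 'email': 'user@example.com'}
  payload = base64.urlsafe_b64encode(
      json.dumps(claims).encode('utf-8')).rstrip(b'=')
  token = b'e30.' + payload + b'.signature'   # header '{}', payload, fake signature

  segments = token.split(b'.')
  assert len(segments) == 3
  print(json.loads(pad_and_decode(segments[1]).decode('utf-8'))['email'])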
def _parse_exchange_token_response(content): def _parse_exchange_token_response(content):
@ -1483,11 +1593,12 @@ def _parse_exchange_token_response(content):
""" """
resp = {} resp = {}
try: try:
resp = json.loads(content) resp = json.loads(content.decode('utf-8'))
except StandardError: except Exception:
# different JSON libs raise different exceptions, # different JSON libs raise different exceptions,
# so we just do a catch-all here # so we just do a catch-all here
resp = dict(urlparse.parse_qsl(content)) content = content.decode('utf-8')
resp = dict(urllib.parse.parse_qsl(content))
# some providers respond with 'expires', others with 'expires_in' # some providers respond with 'expires', others with 'expires_in'
if resp and 'expires' in resp: if resp and 'expires' in resp:
@ -1509,7 +1620,7 @@ def credentials_from_code(client_id, client_secret, scope, code,
client_id: string, client identifier. client_id: string, client identifier.
client_secret: string, client secret. client_secret: string, client secret.
scope: string or iterable of strings, scope(s) to request. scope: string or iterable of strings, scope(s) to request.
code: string, An authroization code, most likely passed down from code: string, An authorization code, most likely passed down from
the client the client
redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri: string, this is generally set to 'postmessage' to match the
redirect_uri that the client specified redirect_uri that the client specified
@ -1593,8 +1704,8 @@ class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
def FromResponse(cls, response): def FromResponse(cls, response):
"""Create a DeviceFlowInfo from a server response. """Create a DeviceFlowInfo from a server response.
The response should be a dict containing entries as described The response should be a dict containing entries as described here:
here:
http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1 http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
""" """
# device_code, user_code, and verification_url are required. # device_code, user_code, and verification_url are required.
@ -1726,7 +1837,7 @@ class OAuth2WebServerFlow(Flow):
if self.device_uri is None: if self.device_uri is None:
raise ValueError('The value of device_uri must not be None.') raise ValueError('The value of device_uri must not be None.')
body = urllib.urlencode({ body = urllib.parse.urlencode({
'client_id': self.client_id, 'client_id': self.client_id,
'scope': self.scope, 'scope': self.scope,
}) })
@ -1767,10 +1878,10 @@ class OAuth2WebServerFlow(Flow):
Args: Args:
code: string, dict or None. For a non-device flow, this is code: string, a dict-like object, or None. For a non-device
either the response code as a string, or a dictionary of flow, this is either the response code as a string, or a
query parameters to the redirect_uri. For a device flow, dictionary of query parameters to the redirect_uri. For a
this should be None. device flow, this should be None.
http: httplib2.Http, optional http instance to use when fetching http: httplib2.Http, optional http instance to use when fetching
credentials. credentials.
device_flow_info: DeviceFlowInfo, return value from step1 in the device_flow_info: DeviceFlowInfo, return value from step1 in the
@ -1780,7 +1891,7 @@ class OAuth2WebServerFlow(Flow):
An OAuth2Credentials object that can be used to authorize requests. An OAuth2Credentials object that can be used to authorize requests.
Raises: Raises:
FlowExchangeError: if a problem occured exchanging the code for a FlowExchangeError: if a problem occurred exchanging the code for a
refresh_token. refresh_token.
ValueError: if code and device_flow_info are both provided or both ValueError: if code and device_flow_info are both provided or both
missing. missing.
@ -1793,7 +1904,7 @@ class OAuth2WebServerFlow(Flow):
if code is None: if code is None:
code = device_flow_info.device_code code = device_flow_info.device_code
elif isinstance(code, dict): elif not isinstance(code, six.string_types):
if 'code' not in code: if 'code' not in code:
raise FlowExchangeError(code.get( raise FlowExchangeError(code.get(
'error', 'No code was supplied in the query parameters.')) 'error', 'No code was supplied in the query parameters.'))
@ -1803,14 +1914,14 @@ class OAuth2WebServerFlow(Flow):
'client_id': self.client_id, 'client_id': self.client_id,
'client_secret': self.client_secret, 'client_secret': self.client_secret,
'code': code, 'code': code,
# 'scope': self.scope, 'scope': self.scope,
} }
if device_flow_info is not None: if device_flow_info is not None:
post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0' post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0'
else: else:
post_data['grant_type'] = 'authorization_code' post_data['grant_type'] = 'authorization_code'
post_data['redirect_uri'] = self.redirect_uri post_data['redirect_uri'] = self.redirect_uri
body = urllib.urlencode(post_data) body = urllib.parse.urlencode(post_data)
headers = { headers = {
'content-type': 'application/x-www-form-urlencoded', 'content-type': 'application/x-www-form-urlencoded',
} }
@ -1836,21 +1947,22 @@ class OAuth2WebServerFlow(Flow):
token_expiry = datetime.datetime.utcnow() + datetime.timedelta( token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
seconds=int(d['expires_in'])) seconds=int(d['expires_in']))
extracted_id_token = None
if 'id_token' in d: if 'id_token' in d:
d['id_token'] = _extract_id_token(d['id_token']) extracted_id_token = _extract_id_token(d['id_token'])
logger.info('Successfully retrieved access token') logger.info('Successfully retrieved access token')
return OAuth2Credentials(access_token, self.client_id, return OAuth2Credentials(access_token, self.client_id,
self.client_secret, refresh_token, token_expiry, self.client_secret, refresh_token, token_expiry,
self.token_uri, self.user_agent, self.token_uri, self.user_agent,
revoke_uri=self.revoke_uri, revoke_uri=self.revoke_uri,
id_token=d.get('id_token', None), id_token=extracted_id_token,
token_response=d) token_response=d)
else: else:
logger.info('Failed to retrieve access token: %s', content) logger.info('Failed to retrieve access token: %s', content)
if 'error' in d: if 'error' in d:
# you never know what those providers got to say # you never know what those providers got to say
error_msg = unicode(d['error']) error_msg = str(d['error']) + str(d.get('error_description', ''))
else: else:
error_msg = 'Invalid response: %s.' % str(resp.status) error_msg = 'Invalid response: %s.' % str(resp.status)
raise FlowExchangeError(error_msg) raise FlowExchangeError(error_msg)
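For reference, a minimal sketch of the non-device flow that ends in the ``step2_exchange()`` call patched above (client id, secret and scope are placeholders)::

  import httplib2
  from six.moves import input
  from oauth2client.client import OAuth2WebServerFlow

  flow = OAuth2WebServerFlow(
      client_id='CLIENT_ID',
      client_secret='CLIENT_SECRET',
      scope='https://www.googleapis.com/auth/userinfo.email',
      redirect_uri='urn:ietf:wg:oauth:2.0:oob')

  # Step 1: send the user to the consent page and have them paste back a code.
  print('Open this URL in a browser: ' + flow.step1_get_authorize_url())
  code = input('Enter verification code: ').strip()

  # Step 2: exchange the code for OAuth2Credentials.
  credentials = flow.step2_exchange(code, http=httplib2.Http())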
View File
@ -21,6 +21,7 @@ an OAuth 2.0 protected service.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json import json
import six
# Properties that make a client_secrets.json file valid. # Properties that make a client_secrets.json file valid.
@ -68,11 +69,21 @@ class InvalidClientSecretsError(Error):
def _validate_clientsecrets(obj): def _validate_clientsecrets(obj):
if obj is None or len(obj) != 1: _INVALID_FILE_FORMAT_MSG = (
raise InvalidClientSecretsError('Invalid file format.') 'Invalid file format. See '
client_type = obj.keys()[0] 'https://developers.google.com/api-client-library/'
if client_type not in VALID_CLIENT.keys(): 'python/guide/aaa_client_secrets')
raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
if obj is None:
raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG)
if len(obj) != 1:
raise InvalidClientSecretsError(
_INVALID_FILE_FORMAT_MSG + ' '
'Expected a JSON object with a single property for a "web" or '
'"installed" application')
client_type = tuple(obj)[0]
if client_type not in VALID_CLIENT:
raise InvalidClientSecretsError('Unknown client type: %s.' % (client_type,))
client_info = obj[client_type] client_info = obj[client_type]
for prop_name in VALID_CLIENT[client_type]['required']: for prop_name in VALID_CLIENT[client_type]['required']:
if prop_name not in client_info: if prop_name not in client_info:
@ -98,11 +109,8 @@ def loads(s):
def _loadfile(filename): def _loadfile(filename):
try: try:
fp = file(filename, 'r') with open(filename, 'r') as fp:
try:
obj = json.load(fp) obj = json.load(fp)
finally:
fp.close()
except IOError: except IOError:
raise InvalidClientSecretsError('File not found: "%s"' % filename) raise InvalidClientSecretsError('File not found: "%s"' % filename)
return _validate_clientsecrets(obj) return _validate_clientsecrets(obj)
@ -114,10 +122,12 @@ def loadfile(filename, cache=None):
Typical cache storage would be App Engine memcache service, Typical cache storage would be App Engine memcache service,
but you can pass in any other cache client that implements but you can pass in any other cache client that implements
these methods: these methods:
- get(key, namespace=ns)
- set(key, value, namespace=ns)
Usage: * ``get(key, namespace=ns)``
* ``set(key, value, namespace=ns)``
Usage::
# without caching # without caching
client_type, client_info = loadfile('secrets.json') client_type, client_info = loadfile('secrets.json')
# using App Engine memcache service # using App Engine memcache service
@ -150,4 +160,4 @@ def loadfile(filename, cache=None):
obj = {client_type: client_info} obj = {client_type: client_info}
cache.set(filename, obj, namespace=_SECRET_NAMESPACE) cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
return obj.iteritems().next() return next(six.iteritems(obj))
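For reference, the JSON layout the validator above accepts: a single top-level ``web`` or ``installed`` property (all values below are placeholders)::

  import json
  from oauth2client import clientsecrets

  secrets = {
      'installed': {
          'client_id': 'CLIENT_ID',
          'client_secret': 'CLIENT_SECRET',
          'redirect_uris': ['urn:ietf:wg:oauth:2.0:oob'],
          'auth_uri': 'https://accounts.google.com/o/oauth2/auth',
          'token_uri': 'https://accounts.google.com/o/oauth2/token',
      }
  }

  client_type, client_info = clientsecrets.loads(json.dumps(secrets))
  print(client_type)               # installed
  print(client_info['client_id'])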
View File
@ -18,8 +18,11 @@
import base64 import base64
import json import json
import logging import logging
import sys
import time import time
import six
CLOCK_SKEW_SECS = 300 # 5 minutes in seconds CLOCK_SKEW_SECS = 300 # 5 minutes in seconds
AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds
@ -59,6 +62,8 @@ try:
key that this object was constructed with. key that this object was constructed with.
""" """
try: try:
if isinstance(message, six.text_type):
message = message.encode('utf-8')
crypto.verify(self._pubkey, signature, message, 'sha256') crypto.verify(self._pubkey, signature, message, 'sha256')
return True return True
except: except:
@ -101,15 +106,17 @@ try:
"""Signs a message. """Signs a message.
Args: Args:
message: string, Message to be signed. message: bytes, Message to be signed.
Returns: Returns:
string, The signature of the message for the given key. string, The signature of the message for the given key.
""" """
if isinstance(message, six.text_type):
message = message.encode('utf-8')
return crypto.sign(self._key, message, 'sha256') return crypto.sign(self._key, message, 'sha256')
@staticmethod @staticmethod
def from_string(key, password='notasecret'): def from_string(key, password=b'notasecret'):
"""Construct a Signer instance from a string. """Construct a Signer instance from a string.
Args: Args:
@ -126,12 +133,34 @@ try:
if parsed_pem_key: if parsed_pem_key:
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key) pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
else: else:
pkey = crypto.load_pkcs12(key, password.encode('utf8')).get_privatekey() if isinstance(password, six.text_type):
password = password.encode('utf-8')
pkey = crypto.load_pkcs12(key, password).get_privatekey()
return OpenSSLSigner(pkey) return OpenSSLSigner(pkey)
def pkcs12_key_as_pem(private_key_text, private_key_password):
"""Convert the contents of a PKCS12 key to PEM using OpenSSL.
Args:
private_key_text: String. Private key.
private_key_password: String. Password for PKCS12.
Returns:
String. PEM contents of ``private_key_text``.
"""
decoded_body = base64.b64decode(private_key_text)
if isinstance(private_key_password, six.string_types):
private_key_password = private_key_password.encode('ascii')
pkcs12 = crypto.load_pkcs12(decoded_body, private_key_password)
return crypto.dump_privatekey(crypto.FILETYPE_PEM,
pkcs12.get_privatekey())
except ImportError: except ImportError:
OpenSSLVerifier = None OpenSSLVerifier = None
OpenSSLSigner = None OpenSSLSigner = None
def pkcs12_key_as_pem(*args, **kwargs):
raise NotImplementedError('pkcs12_key_as_pem requires OpenSSL.')
try: try:
@ -182,8 +211,10 @@ try:
Verifier instance. Verifier instance.
""" """
if is_x509_cert: if is_x509_cert:
pemLines = key_pem.replace(' ', '').split() if isinstance(key_pem, six.text_type):
certDer = _urlsafe_b64decode(''.join(pemLines[1:-1])) key_pem = key_pem.encode('ascii')
pemLines = key_pem.replace(b' ', b'').split()
certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1]))
certSeq = DerSequence() certSeq = DerSequence()
certSeq.decode(certDer) certSeq.decode(certDer)
tbsSeq = DerSequence() tbsSeq = DerSequence()
@ -214,6 +245,8 @@ try:
Returns: Returns:
string, The signature of the message for the given key. string, The signature of the message for the given key.
""" """
if isinstance(message, six.text_type):
message = message.encode('utf-8')
return PKCS1_v1_5.new(self._key).sign(SHA256.new(message)) return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
@staticmethod @staticmethod
@ -269,19 +302,22 @@ def _parse_pem_key(raw_key_input):
Returns: Returns:
string, The actual key if the contents are from a PEM file, or else None. string, The actual key if the contents are from a PEM file, or else None.
""" """
offset = raw_key_input.find('-----BEGIN ') offset = raw_key_input.find(b'-----BEGIN ')
if offset != -1: if offset != -1:
return raw_key_input[offset:] return raw_key_input[offset:]
def _urlsafe_b64encode(raw_bytes): def _urlsafe_b64encode(raw_bytes):
return base64.urlsafe_b64encode(raw_bytes).rstrip('=') if isinstance(raw_bytes, six.text_type):
raw_bytes = raw_bytes.encode('utf-8')
return base64.urlsafe_b64encode(raw_bytes).decode('ascii').rstrip('=')
def _urlsafe_b64decode(b64string): def _urlsafe_b64decode(b64string):
# Guard against unicode strings, which base64 can't handle. # Guard against unicode strings, which base64 can't handle.
b64string = b64string.encode('ascii') if isinstance(b64string, six.text_type):
padded = b64string + '=' * (4 - len(b64string) % 4) b64string = b64string.encode('ascii')
padded = b64string + b'=' * (4 - len(b64string) % 4)
return base64.urlsafe_b64decode(padded) return base64.urlsafe_b64decode(padded)
@ -345,13 +381,13 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
# Parse token. # Parse token.
json_body = _urlsafe_b64decode(segments[1]) json_body = _urlsafe_b64decode(segments[1])
try: try:
parsed = json.loads(json_body) parsed = json.loads(json_body.decode('utf-8'))
except: except:
raise AppIdentityError('Can\'t parse token: %s' % json_body) raise AppIdentityError('Can\'t parse token: %s' % json_body)
# Check signature. # Check signature.
verified = False verified = False
for _, pem in certs.items(): for pem in certs.values():
verifier = Verifier.from_string(pem, True) verifier = Verifier.from_string(pem, True)
if verifier.verify(signed, signature): if verifier.verify(signed, signature):
verified = True verified = True
@ -366,7 +402,7 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
earliest = iat - CLOCK_SKEW_SECS earliest = iat - CLOCK_SKEW_SECS
# Check expiration timestamp. # Check expiration timestamp.
now = long(time.time()) now = int(time.time())
exp = parsed.get('exp') exp = parsed.get('exp')
if exp is None: if exp is None:
raise AppIdentityError('No exp field in token: %s' % json_body) raise AppIdentityError('No exp field in token: %s' % json_body)
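To make the clock-skew handling concrete, a small worked sketch of the acceptance window, using the module's 300-second constants and assuming the same skew allowance is applied on the expiry side::

  import time

  CLOCK_SKEW_SECS = 300            # 5 minutes
  AUTH_TOKEN_LIFETIME_SECS = 300   # 5 minutes

  now = int(time.time())
  iat = now - 10                           # issued 10 seconds ago
  exp = iat + AUTH_TOKEN_LIFETIME_SECS     # expires ~5 minutes after issue

  earliest = iat - CLOCK_SKEW_SECS
  latest = exp + CLOCK_SKEW_SECS
  assert earliest <= now <= latest         # accepted despite modest clock skew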
136
oauth2client/devshell.py Normal file
View File
@ -0,0 +1,136 @@
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utitilies for Google Developer Shell environment."""
import json
import os
from oauth2client import client
DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'
class Error(Exception):
"""Errors for this module."""
pass
class CommunicationError(Error):
"""Errors for communication with the Developer Shell server."""
class NoDevshellServer(Error):
"""Error when no Developer Shell server can be contacted."""
# The request for credential information to the Developer Shell client socket is
# always an empty PBLite-formatted JSON object, so just define it as a constant.
CREDENTIAL_INFO_REQUEST_JSON = '[]'
class CredentialInfoResponse(object):
"""Credential information response from Developer Shell server.
The credential information response from Developer Shell socket is a
PBLite-formatted JSON array with fields encoded by their index in the array:
* Index 0 - user email
* Index 1 - default project ID. None if the project context is not known.
* Index 2 - OAuth2 access token. None if there is no valid auth context.
"""
def __init__(self, json_string):
"""Initialize the response data from JSON PBLite array."""
pbl = json.loads(json_string)
if not isinstance(pbl, list):
raise ValueError('Not a list: ' + str(pbl))
pbl_len = len(pbl)
self.user_email = pbl[0] if pbl_len > 0 else None
self.project_id = pbl[1] if pbl_len > 1 else None
self.access_token = pbl[2] if pbl_len > 2 else None
def _SendRecv():
"""Communicate with the Developer Shell server socket."""
port = int(os.getenv(DEVSHELL_ENV, 0))
if port == 0:
raise NoDevshellServer()
import socket
sock = socket.socket()
sock.connect(('localhost', port))
data = CREDENTIAL_INFO_REQUEST_JSON
msg = '%s\n%s' % (len(data), data)
sock.sendall(msg.encode())
header = sock.recv(6).decode()
if '\n' not in header:
raise CommunicationError('saw no newline in the first 6 bytes')
len_str, json_str = header.split('\n', 1)
to_read = int(len_str) - len(json_str)
if to_read > 0:
json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()
return CredentialInfoResponse(json_str)
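The wire format here is a decimal byte length, a newline, then the PBLite JSON payload; a self-contained sketch of the framing with made-up credential values::

  import json

  # Request: the payload is always the empty PBLite array '[]'.
  request = '%s\n%s' % (len('[]'), '[]')   # '2\n[]'

  # Response: [user_email, default_project_id, access_token].
  payload = json.dumps(['user@example.com', 'my-project', 'ya29.TOKEN'])
  response = '%s\n%s' % (len(payload), payload)

  # Parse the response the way _SendRecv does.
  len_str, json_str = response.split('\n', 1)
  assert int(len_str) == len(json_str)
  email, project_id, access_token = json.loads(json_str)
  print(email)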
class DevshellCredentials(client.GoogleCredentials):
"""Credentials object for Google Developer Shell environment.
This object will allow a Google Developer Shell session to identify its user
to Google and other OAuth 2.0 servers that can verify assertions. It can be
used for the purpose of accessing data stored under the user account.
This credential does not require a flow to instantiate because it represents
a two legged flow, and therefore has all of the required information to
generate and refresh its own access tokens.
"""
def __init__(self, user_agent=None):
super(DevshellCredentials, self).__init__(
None, # access_token, initialized below
None, # client_id
None, # client_secret
None, # refresh_token
None, # token_expiry
None, # token_uri
user_agent)
self._refresh(None)
def _refresh(self, http_request):
self.devshell_response = _SendRecv()
self.access_token = self.devshell_response.access_token
@property
def user_email(self):
return self.devshell_response.user_email
@property
def project_id(self):
return self.devshell_response.project_id
@classmethod
def from_json(cls, json_data):
raise NotImplementedError(
'Cannot load Developer Shell credentials from JSON.')
@property
def serialization_data(self):
raise NotImplementedError(
'Cannot serialize Developer Shell credentials.')
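Inside Google Developer Shell (where ``DEVSHELL_CLIENT_PORT`` is set) usage is a one-liner; a minimal sketch::

  import httplib2
  from oauth2client import devshell

  credentials = devshell.DevshellCredentials()
  print(credentials.user_email)
  http = credentials.authorize(httplib2.Http())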
View File
@ -39,7 +39,6 @@ class Storage(BaseStorage):
self._lock = threading.Lock() self._lock = threading.Lock()
def _validate_file(self): def _validate_file(self):
return
if os.path.islink(self._filename): if os.path.islink(self._filename):
raise CredentialsFileSymbolicLinkError( raise CredentialsFileSymbolicLinkError(
'File: %s is a symbolic link.' % self._filename) 'File: %s is a symbolic link.' % self._filename)
@ -91,7 +90,7 @@ class Storage(BaseStorage):
simple version of "touch" to ensure the file has been created. simple version of "touch" to ensure the file has been created.
""" """
if not os.path.exists(self._filename): if not os.path.exists(self._filename):
old_umask = os.umask(0177) old_umask = os.umask(0o177)
try: try:
open(self._filename, 'a+b').close() open(self._filename, 'a+b').close()
finally: finally:
@ -109,7 +108,7 @@ class Storage(BaseStorage):
self._create_file_if_needed() self._create_file_if_needed()
self._validate_file() self._validate_file()
f = open(self._filename, 'wb') f = open(self._filename, 'w')
f.write(credentials.to_json()) f.write(credentials.to_json())
f.close() f.close()
View File
@ -21,7 +21,7 @@ __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json import json
import logging import logging
import urllib from six.moves import urllib
from oauth2client import util from oauth2client import util
from oauth2client.client import AccessTokenRefreshError from oauth2client.client import AccessTokenRefreshError
@ -78,13 +78,13 @@ class AppAssertionCredentials(AssertionCredentials):
Raises: Raises:
AccessTokenRefreshError: When the refresh fails. AccessTokenRefreshError: When the refresh fails.
""" """
query = '?scope=%s' % urllib.quote(self.scope, '') query = '?scope=%s' % urllib.parse.quote(self.scope, '')
uri = META.replace('{?scope}', query) uri = META.replace('{?scope}', query)
response, content = http_request(uri) response, content = http_request(uri)
if response.status == 200: if response.status == 200:
try: try:
d = json.loads(content) d = json.loads(content)
except StandardError as e: except Exception as e:
raise AccessTokenRefreshError(str(e)) raise AccessTokenRefreshError(str(e))
self.access_token = d['accessToken'] self.access_token = d['accessToken']
else: else:
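For completeness, a short sketch of how these metadata-server credentials are typically used on a Compute Engine instance (the scope value is an example)::

  import httplib2
  from oauth2client.gce import AppAssertionCredentials

  credentials = AppAssertionCredentials(
      scope='https://www.googleapis.com/auth/devstorage.read_only')
  http = credentials.authorize(httplib2.Http())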
View File
@ -17,17 +17,21 @@
This module first tries to use fcntl locking to ensure serialized access This module first tries to use fcntl locking to ensure serialized access
to a file, then falls back on a lock file if that is unavialable. to a file, then falls back on a lock file if that is unavialable.
Usage: Usage::
f = LockedFile('filename', 'r+b', 'rb') f = LockedFile('filename', 'r+b', 'rb')
f.open_and_lock() f.open_and_lock()
if f.is_locked(): if f.is_locked():
print 'Acquired filename with r+b mode' print('Acquired filename with r+b mode')
f.file_handle().write('locked data') f.file_handle().write('locked data')
else: else:
print 'Aquired filename with rb mode' print('Acquired filename with rb mode')
f.unlock_and_close() f.unlock_and_close()
""" """
from __future__ import print_function
__author__ = 'cache@google.com (David T McWherter)' __author__ = 'cache@google.com (David T McWherter)'
import errno import errno
@ -208,9 +212,9 @@ try:
except IOError as e: except IOError as e:
# If not retrying, then just pass on the error. # If not retrying, then just pass on the error.
if timeout == 0: if timeout == 0:
raise e raise
if e.errno != errno.EACCES: if e.errno != errno.EACCES:
raise e raise
# We could not acquire the lock. Try again. # We could not acquire the lock. Try again.
if (time.time() - start_time) >= timeout: if (time.time() - start_time) >= timeout:
logger.warn('Could not lock %s in %s seconds', logger.warn('Could not lock %s in %s seconds',
@ -287,7 +291,7 @@ try:
return return
except pywintypes.error as e: except pywintypes.error as e:
if timeout == 0: if timeout == 0:
raise e raise
# If the error is not that the file is already in use, raise. # If the error is not that the file is already in use, raise.
if e[0] != _Win32Opener.FILE_IN_USE_ERROR: if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
View File
@ -19,30 +19,34 @@ credentials can be stored in one file. That file supports locking
both in a single process and across processes. both in a single process and across processes.
The credential themselves are keyed off of: The credential themselves are keyed off of:
* client_id * client_id
* user_agent * user_agent
* scope * scope
The format of the stored data is like so: The format of the stored data is like so::
{
'file_version': 1, {
'data': [ 'file_version': 1,
{ 'data': [
'key': { {
'clientId': '<client id>', 'key': {
'userAgent': '<user agent>', 'clientId': '<client id>',
'scope': '<scope>' 'userAgent': '<user agent>',
}, 'scope': '<scope>'
'credential': { },
# JSON serialized Credentials. 'credential': {
# JSON serialized Credentials.
}
} }
} ]
] }
}
""" """
__author__ = 'jbeda@google.com (Joe Beda)' __author__ = 'jbeda@google.com (Joe Beda)'
import errno
import json import json
import logging import logging
import os import os
@ -62,12 +66,10 @@ _multistores_lock = threading.Lock()
class Error(Exception): class Error(Exception):
"""Base error for this module.""" """Base error for this module."""
pass
class NewerCredentialStoreError(Error): class NewerCredentialStoreError(Error):
"""The credential store is a newer version that supported.""" """The credential store is a newer version than supported."""
pass
@util.positional(4) @util.positional(4)
@ -191,7 +193,7 @@ class _MultiStore(object):
This will create the file if necessary. This will create the file if necessary.
""" """
self._file = LockedFile(filename, 'r+b', 'rb') self._file = LockedFile(filename, 'r+', 'r')
self._thread_lock = threading.Lock() self._thread_lock = threading.Lock()
self._read_only = False self._read_only = False
self._warn_on_readonly = warn_on_readonly self._warn_on_readonly = warn_on_readonly
@ -269,7 +271,7 @@ class _MultiStore(object):
simple version of "touch" to ensure the file has been created. simple version of "touch" to ensure the file has been created.
""" """
if not os.path.exists(self._file.filename()): if not os.path.exists(self._file.filename()):
old_umask = os.umask(0177) old_umask = os.umask(0o177)
try: try:
open(self._file.filename(), 'a+b').close() open(self._file.filename(), 'a+b').close()
finally: finally:
@ -278,7 +280,17 @@ class _MultiStore(object):
def _lock(self): def _lock(self):
"""Lock the entire multistore.""" """Lock the entire multistore."""
self._thread_lock.acquire() self._thread_lock.acquire()
self._file.open_and_lock() try:
self._file.open_and_lock()
except IOError as e:
if e.errno == errno.ENOSYS:
logger.warn('File system does not support locking the credentials '
'file.')
elif e.errno == errno.ENOLCK:
logger.warn('File system is out of resources for writing the '
'credentials file (is your disk full?).')
else:
raise
if not self._file.is_locked(): if not self._file.is_locked():
self._read_only = True self._read_only = True
if self._warn_on_readonly: if self._warn_on_readonly:
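For orientation, the usual entry point into this multistore module is ``get_credential_storage()``; a short usage sketch (filename, client id, user agent and scope are placeholders)::

  from oauth2client import multistore_file

  storage = multistore_file.get_credential_storage(
      'credentials.data',
      client_id='CLIENT_ID',
      user_agent='my-app/1.0',
      scope='https://www.googleapis.com/auth/userinfo.email')

  credentials = storage.get()   # None until something has been stored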
View File
@ -15,6 +15,7 @@
"""This module holds the old run() function which is deprecated, the """This module holds the old run() function which is deprecated, the
tools.run_flow() function should be used in its place.""" tools.run_flow() function should be used in its place."""
from __future__ import print_function
import logging import logging
import socket import socket
@ -22,11 +23,12 @@ import sys
import webbrowser import webbrowser
import gflags import gflags
from six.moves import input
from oauth2client import client from oauth2client import client
from oauth2client import util from oauth2client import util
from tools import ClientRedirectHandler from oauth2client.tools import ClientRedirectHandler
from tools import ClientRedirectServer from oauth2client.tools import ClientRedirectServer
FLAGS = gflags.FLAGS FLAGS = gflags.FLAGS
@ -48,39 +50,38 @@ gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
def run(flow, storage, http=None): def run(flow, storage, http=None):
"""Core code for a command-line application. """Core code for a command-line application.
The run() function is called from your application and runs through all The ``run()`` function is called from your application and runs
the steps to obtain credentials. It takes a Flow argument and attempts to through all the steps to obtain credentials. It takes a ``Flow``
open an authorization server page in the user's default web browser. The argument and attempts to open an authorization server page in the
server asks the user to grant your application access to the user's data. user's default web browser. The server asks the user to grant your
If the user grants access, the run() function returns new credentials. The application access to the user's data. If the user grants access,
new credentials are also stored in the Storage argument, which updates the the ``run()`` function returns new credentials. The new credentials
file associated with the Storage object. are also stored in the ``storage`` argument, which updates the file
associated with the ``Storage`` object.
It presumes it is run from a command-line application and supports the It presumes it is run from a command-line application and supports the
following flags: following flags:
--auth_host_name: Host name to use when running a local web server ``--auth_host_name`` (string, default: ``localhost``)
to handle redirects during OAuth authorization. Host name to use when running a local web server to handle
(default: 'localhost') redirects during OAuth authorization.
--auth_host_port: Port to use when running a local web server to handle ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
redirects during OAuth authorization.; Port to use when running a local web server to handle redirects
repeat this option to specify a list of values during OAuth authorization. Repeat this option to specify a list
(default: '[8080, 8090]') of values.
(an integer)
--[no]auth_local_webserver: Run a local web server to handle redirects ``--[no]auth_local_webserver`` (boolean, default: ``True``)
during OAuth authorization. Run a local web server to handle redirects during OAuth authorization.
(default: 'true')
Since it uses flags make sure to initialize the gflags module before Since it uses flags make sure to initialize the ``gflags`` module before
calling run(). calling ``run()``.
Args: Args:
flow: Flow, an OAuth 2.0 Flow to step through. flow: Flow, an OAuth 2.0 Flow to step through.
storage: Storage, a Storage to store the credential in. storage: Storage, a ``Storage`` to store the credential in.
http: An instance of httplib2.Http.request http: An instance of ``httplib2.Http.request`` or something that acts
or something that acts like it. like it.
Returns: Returns:
Credentials, the obtained credential. Credentials, the obtained credential.
@ -103,13 +104,13 @@ def run(flow, storage, http=None):
break break
FLAGS.auth_local_webserver = success FLAGS.auth_local_webserver = success
if not success: if not success:
print 'Failed to start a local webserver listening on either port 8080' print('Failed to start a local webserver listening on either port 8080')
print 'or port 9090. Please check your firewall settings and locally' print('or port 9090. Please check your firewall settings and locally')
print 'running programs that may be blocking or using those ports.' print('running programs that may be blocking or using those ports.')
print print()
print 'Falling back to --noauth_local_webserver and continuing with', print('Falling back to --noauth_local_webserver and continuing with')
print 'authorization.' print('authorization.')
print print()
if FLAGS.auth_local_webserver: if FLAGS.auth_local_webserver:
oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number) oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
@ -120,20 +121,20 @@ def run(flow, storage, http=None):
if FLAGS.auth_local_webserver: if FLAGS.auth_local_webserver:
webbrowser.open(authorize_url, new=1, autoraise=True) webbrowser.open(authorize_url, new=1, autoraise=True)
print 'Your browser has been opened to visit:' print('Your browser has been opened to visit:')
print print()
print ' ' + authorize_url print(' ' + authorize_url)
print print()
print 'If your browser is on a different machine then exit and re-run' print('If your browser is on a different machine then exit and re-run')
print 'this application with the command-line parameter ' print('this application with the command-line parameter ')
print print()
print ' --noauth_local_webserver' print(' --noauth_local_webserver')
print print()
else: else:
print 'Go to the following link in your browser:' print('Go to the following link in your browser:')
print print()
print ' ' + authorize_url print(' ' + authorize_url)
print print()
code = None code = None
if FLAGS.auth_local_webserver: if FLAGS.auth_local_webserver:
@ -143,10 +144,10 @@ def run(flow, storage, http=None):
if 'code' in httpd.query_params: if 'code' in httpd.query_params:
code = httpd.query_params['code'] code = httpd.query_params['code']
else: else:
print 'Failed to find "code" in the query parameters of the redirect.' print('Failed to find "code" in the query parameters of the redirect.')
sys.exit('Try running with --noauth_local_webserver.') sys.exit('Try running with --noauth_local_webserver.')
else: else:
code = raw_input('Enter verification code: ').strip() code = input('Enter verification code: ').strip()
try: try:
credential = flow.step2_exchange(code, http=http) credential = flow.step2_exchange(code, http=http)
@ -155,6 +156,6 @@ def run(flow, storage, http=None):
storage.put(credential) storage.put(credential)
credential.set_store(storage) credential.set_store(storage)
print 'Authentication successful.' print('Authentication successful.')
return credential return credential
View File
@ -19,6 +19,7 @@ This credentials class is implemented on top of rsa library.
import base64 import base64
import json import json
import six
import time import time
from pyasn1.codec.ber import decoder from pyasn1.codec.ber import decoder
@ -64,7 +65,7 @@ class _ServiceAccountCredentials(AssertionCredentials):
'kid': self._private_key_id 'kid': self._private_key_id
} }
now = long(time.time()) now = int(time.time())
payload = { payload = {
'aud': self._token_uri, 'aud': self._token_uri,
'scope': self._scopes, 'scope': self._scopes,
@ -74,17 +75,21 @@ class _ServiceAccountCredentials(AssertionCredentials):
} }
payload.update(self._kwargs) payload.update(self._kwargs)
assertion_input = '%s.%s' % ( assertion_input = (_urlsafe_b64encode(header) + b'.' +
_urlsafe_b64encode(header), _urlsafe_b64encode(payload))
_urlsafe_b64encode(payload))
# Sign the assertion. # Sign the assertion.
signature = base64.urlsafe_b64encode(rsa.pkcs1.sign( rsa_bytes = rsa.pkcs1.sign(assertion_input, self._private_key, 'SHA-256')
assertion_input, self._private_key, 'SHA-256')).rstrip('=') signature = base64.urlsafe_b64encode(rsa_bytes).rstrip(b'=')
return '%s.%s' % (assertion_input, signature) return assertion_input + b'.' + signature
def sign_blob(self, blob): def sign_blob(self, blob):
# Ensure that it is bytes
try:
blob = blob.encode('utf-8')
except AttributeError:
pass
return (self._private_key_id, return (self._private_key_id,
rsa.pkcs1.sign(blob, self._private_key, 'SHA-256')) rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))
@ -119,12 +124,14 @@ class _ServiceAccountCredentials(AssertionCredentials):
def _urlsafe_b64encode(data): def _urlsafe_b64encode(data):
return base64.urlsafe_b64encode( return base64.urlsafe_b64encode(
json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip('=') json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip(b'=')
def _get_private_key(private_key_pkcs8_text): def _get_private_key(private_key_pkcs8_text):
"""Get an RSA private key object from a pkcs8 representation.""" """Get an RSA private key object from a pkcs8 representation."""
if not isinstance(private_key_pkcs8_text, six.binary_type):
private_key_pkcs8_text = private_key_pkcs8_text.encode('ascii')
der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY') der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo()) asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo())
return rsa.PrivateKey.load_pkcs1( return rsa.PrivateKey.load_pkcs1(
View File
@ -19,21 +19,23 @@ generated credentials in a common file that is used by other example apps in
the same directory. the same directory.
""" """
from __future__ import print_function
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['argparser', 'run_flow', 'run', 'message_if_missing'] __all__ = ['argparser', 'run_flow', 'run', 'message_if_missing']
#import argparse
import BaseHTTPServer
import logging import logging
import socket import socket
import sys import sys
import urlparse
import webbrowser from six.moves import BaseHTTPServer
from six.moves import urllib
from six.moves import input
from oauth2client import client from oauth2client import client
from oauth2client import util from oauth2client import util
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0 _CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file To make this sample run you will need to populate the client_secrets.json file
@ -45,20 +47,27 @@ with information from the APIs Console <https://code.google.com/apis/console>.
""" """
def _CreateArgumentParser():
try:
import argparse
except ImportError:
return None
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--auth_host_name', default='localhost',
help='Hostname when running a local web server.')
parser.add_argument('--noauth_local_webserver', action='store_true',
default=False, help='Do not run a local web server.')
parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
nargs='*', help='Port web server should listen on.')
parser.add_argument('--logging_level', default='ERROR',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
help='Set the logging level of detail.')
return parser
# argparser is an ArgumentParser that contains command-line options expected # argparser is an ArgumentParser that contains command-line options expected
# by tools.run(). Pass it in as part of the 'parents' argument to your own # by tools.run(). Pass it in as part of the 'parents' argument to your own
# ArgumentParser. # ArgumentParser.
#argparser = argparse.ArgumentParser(add_help=False) argparser = _CreateArgumentParser()
#argparser.add_argument('--auth_host_name', default='localhost',
# help='Hostname when running a local web server.')
#argparser.add_argument('--noauth_local_webserver', action='store_true',
# default=False, help='Do not run a local web server.')
#argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
# nargs='*', help='Port web server should listen on.')
#argparser.add_argument('--logging_level', default='ERROR',
# choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
# 'CRITICAL'],
# help='Set the logging level of detail.')
class ClientRedirectServer(BaseHTTPServer.HTTPServer): class ClientRedirectServer(BaseHTTPServer.HTTPServer):
@ -88,11 +97,11 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
self.send_header("Content-type", "text/html") self.send_header("Content-type", "text/html")
self.end_headers() self.end_headers()
query = self.path.split('?', 1)[-1] query = self.path.split('?', 1)[-1]
query = dict(urlparse.parse_qsl(query)) query = dict(urllib.parse.parse_qsl(query))
self.server.query_params = query self.server.query_params = query
self.wfile.write("<html><head><title>Authentication Status</title></head>") self.wfile.write(b"<html><head><title>Authentication Status</title></head>")
self.wfile.write("<body><p>The authentication flow has completed.</p>") self.wfile.write(b"<body><p>The authentication flow has completed.</p>")
self.wfile.write("</body></html>") self.wfile.write(b"</body></html>")
def log_message(self, format, *args): def log_message(self, format, *args):
"""Do not log messages to stdout while running as command line program.""" """Do not log messages to stdout while running as command line program."""
@ -102,46 +111,50 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def run_flow(flow, storage, flags, http=None): def run_flow(flow, storage, flags, http=None):
"""Core code for a command-line application. """Core code for a command-line application.
The run() function is called from your application and runs through all the The ``run()`` function is called from your application and runs
steps to obtain credentials. It takes a Flow argument and attempts to open an through all the steps to obtain credentials. It takes a ``Flow``
authorization server page in the user's default web browser. The server asks argument and attempts to open an authorization server page in the
the user to grant your application access to the user's data. If the user user's default web browser. The server asks the user to grant your
grants access, the run() function returns new credentials. The new credentials application access to the user's data. If the user grants access,
are also stored in the Storage argument, which updates the file associated the ``run()`` function returns new credentials. The new credentials
with the Storage object. are also stored in the ``storage`` argument, which updates the file
associated with the ``Storage`` object.
It presumes it is run from a command-line application and supports the It presumes it is run from a command-line application and supports the
following flags: following flags:
--auth_host_name: Host name to use when running a local web server ``--auth_host_name`` (string, default: ``localhost``)
to handle redirects during OAuth authorization. Host name to use when running a local web server to handle
(default: 'localhost') redirects during OAuth authorization.
--auth_host_port: Port to use when running a local web server to handle ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
redirects during OAuth authorization.; Port to use when running a local web server to handle redirects
repeat this option to specify a list of values during OAuth authorization. Repeat this option to specify a list
(default: '[8080, 8090]') of values.
(an integer)
--[no]auth_local_webserver: Run a local web server to handle redirects ``--[no]auth_local_webserver`` (boolean, default: ``True``)
during OAuth authorization. Run a local web server to handle redirects during OAuth authorization.
(default: 'true')
The tools module defines an ArgumentParser the already contains the flag
definitions that run() requires. You can pass that ArgumentParser to your
ArgumentParser constructor:
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[tools.argparser]) The tools module defines an ``ArgumentParser`` the already contains the flag
flags = parser.parse_args(argv) definitions that ``run()`` requires. You can pass that ``ArgumentParser`` to your
``ArgumentParser`` constructor::
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[tools.argparser])
flags = parser.parse_args(argv)
Args: Args:
flow: Flow, an OAuth 2.0 Flow to step through. flow: Flow, an OAuth 2.0 Flow to step through.
storage: Storage, a Storage to store the credential in. storage: Storage, a ``Storage`` to store the credential in.
flags: argparse.ArgumentParser, the command-line flags. flags: ``argparse.Namespace``, The command-line flags. This is the
http: An instance of httplib2.Http.request object returned from calling ``parse_args()`` on
or something that acts like it. ``argparse.ArgumentParser`` as described above.
http: An instance of ``httplib2.Http.request`` or something that
acts like it.
Returns: Returns:
Credentials, the obtained credential. Credentials, the obtained credential.
@ -155,20 +168,20 @@ def run_flow(flow, storage, flags, http=None):
try: try:
httpd = ClientRedirectServer((flags.auth_host_name, port), httpd = ClientRedirectServer((flags.auth_host_name, port),
ClientRedirectHandler) ClientRedirectHandler)
except socket.error as e: except socket.error:
pass pass
else: else:
success = True success = True
break break
flags.noauth_local_webserver = not success flags.noauth_local_webserver = not success
if not success: if not success:
print 'Failed to start a local webserver listening on either port 8080' print('Failed to start a local webserver listening on either port 8080')
print 'or port 9090. Please check your firewall settings and locally' print('or port 9090. Please check your firewall settings and locally')
print 'running programs that may be blocking or using those ports.' print('running programs that may be blocking or using those ports.')
print print()
print 'Falling back to --noauth_local_webserver and continuing with', print('Falling back to --noauth_local_webserver and continuing with')
print 'authorization.' print('authorization.')
print print()
if not flags.noauth_local_webserver: if not flags.noauth_local_webserver:
oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number) oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
@ -186,23 +199,22 @@ def run_flow(flow, storage, flags, http=None):
authorize_url = url_result['id'] authorize_url = url_result['id']
except: except:
pass pass
if not flags.noauth_local_webserver: if not flags.noauth_local_webserver:
import webbrowser
webbrowser.open(authorize_url, new=1, autoraise=True) webbrowser.open(authorize_url, new=1, autoraise=True)
print 'Your browser has been opened to visit:' print('Your browser has been opened to visit:')
print print()
print ' ' + authorize_url print(' ' + authorize_url)
print print()
print 'If your browser is on a different machine then exit and re-run this' print('If your browser is on a different machine then exit and re-run this')
print 'after creating a file called nobrowser.txt in the same path as GAM.' print('after creating a file called nobrowser.txt in the same path as GAM.')
# print 'application with the command-line parameter ' print()
# print
# print ' --noauth_local_webserver'
# print
else: else:
print 'Go to the following link in your browser:' print('Go to the following link in your browser:')
print print()
print ' ' + authorize_url print(' ' + authorize_url)
print print()
code = None code = None
if not flags.noauth_local_webserver: if not flags.noauth_local_webserver:
@ -212,10 +224,10 @@ def run_flow(flow, storage, flags, http=None):
if 'code' in httpd.query_params: if 'code' in httpd.query_params:
code = httpd.query_params['code'] code = httpd.query_params['code']
else: else:
print 'Failed to find "code" in the query parameters of the redirect.' print('Failed to find "code" in the query parameters of the redirect.')
sys.exit('Try running with --noauth_local_webserver.') sys.exit('Try running with --noauth_local_webserver.')
else: else:
code = raw_input('Enter verification code: ').strip() code = input('Enter verification code: ').strip()
try: try:
credential = flow.step2_exchange(code, http=http) credential = flow.step2_exchange(code, http=http)
@ -224,7 +236,7 @@ def run_flow(flow, storage, flags, http=None):
storage.put(credential) storage.put(credential)
credential.set_store(storage) credential.set_store(storage)
print 'Authentication successful.' print('Authentication successful.')
return credential return credential
View File
@ -29,11 +29,15 @@ __all__ = [
'POSITIONAL_IGNORE', 'POSITIONAL_IGNORE',
] ]
import functools
import inspect import inspect
import logging import logging
import sys
import types import types
import urllib
import urlparse import six
from six.moves import urllib
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -48,56 +52,58 @@ positional_parameters_enforcement = POSITIONAL_WARNING
def positional(max_positional_args): def positional(max_positional_args):
"""A decorator to declare that only the first N arguments my be positional. """A decorator to declare that only the first N arguments my be positional.
This decorator makes it easy to support Python 3 style key-word only This decorator makes it easy to support Python 3 style keyword-only
parameters. For example, in Python 3 it is possible to write: parameters. For example, in Python 3 it is possible to write::
def fn(pos1, *, kwonly1=None, kwonly1=None): def fn(pos1, *, kwonly1=None, kwonly1=None):
... ...
All named parameters after * must be a keyword: All named parameters after ``*`` must be a keyword::
fn(10, 'kw1', 'kw2') # Raises exception. fn(10, 'kw1', 'kw2') # Raises exception.
fn(10, kwonly1='kw1') # Ok. fn(10, kwonly1='kw1') # Ok.
Example: Example
To define a function like above, do: ^^^^^^^
@positional(1) To define a function like above, do::
def fn(pos1, kwonly1=None, kwonly2=None):
@positional(1)
def fn(pos1, kwonly1=None, kwonly2=None):
...
If no default value is provided to a keyword argument, it becomes a required
keyword argument::
@positional(0)
def fn(required_kw):
...
This must be called with the keyword parameter::
fn() # Raises exception.
fn(10) # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
``self`` and ``cls``::
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
... ...
If no default value is provided to a keyword argument, it becomes a required @classmethod
keyword argument: @positional(2)
def my_method(cls, pos1, kwonly1=None):
@positional(0)
def fn(required_kw):
... ...
This must be called with the keyword parameter:
fn() # Raises exception.
fn(10) # Raises exception.
fn(required_kw=10) # Ok.
When defining instance or class methods always remember to account for
'self' and 'cls':
class MyClass(object):
@positional(2)
def my_method(self, pos1, kwonly1=None):
...
@classmethod
@positional(2)
def my_method(cls, pos1, kwonly1=None):
...
The positional decorator behavior is controlled by The positional decorator behavior is controlled by
util.positional_parameters_enforcement, which may be set to ``util.positional_parameters_enforcement``, which may be set to
POSITIONAL_EXCEPTION, POSITIONAL_WARNING or POSITIONAL_IGNORE to raise an ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
exception, log a warning, or do nothing, respectively, if a declaration is ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
violated. nothing, respectively, if a declaration is violated.
Args: Args:
max_positional_arguments: Maximum number of positional arguments. All max_positional_arguments: Maximum number of positional arguments. All
@ -111,8 +117,10 @@ def positional(max_positional_args):
TypeError if a key-word only argument is provided as a positional TypeError if a key-word only argument is provided as a positional
parameter, but only if util.positional_parameters_enforcement is set to parameter, but only if util.positional_parameters_enforcement is set to
POSITIONAL_EXCEPTION. POSITIONAL_EXCEPTION.
""" """
def positional_decorator(wrapped): def positional_decorator(wrapped):
@functools.wraps(wrapped)
def positional_wrapper(*args, **kwargs): def positional_wrapper(*args, **kwargs):
if len(args) > max_positional_args: if len(args) > max_positional_args:
plural_s = '' plural_s = ''
@ -129,7 +137,7 @@ def positional(max_positional_args):
return wrapped(*args, **kwargs) return wrapped(*args, **kwargs)
return positional_wrapper return positional_wrapper
if isinstance(max_positional_args, (int, long)): if isinstance(max_positional_args, six.integer_types):
return positional_decorator return positional_decorator
else: else:
args, _, _, defaults = inspect.getargspec(max_positional_args) args, _, _, defaults = inspect.getargspec(max_positional_args)
@ -149,7 +157,7 @@ def scopes_to_string(scopes):
Returns: Returns:
The scopes formatted as a single string. The scopes formatted as a single string.
""" """
if isinstance(scopes, types.StringTypes): if isinstance(scopes, six.string_types):
return scopes return scopes
else: else:
return ' '.join(scopes) return ' '.join(scopes)
@ -186,8 +194,8 @@ def _add_query_parameter(url, name, value):
if value is None: if value is None:
return url return url
else: else:
parsed = list(urlparse.urlparse(url)) parsed = list(urllib.parse.urlparse(url))
q = dict(urlparse.parse_qsl(parsed[4])) q = dict(urllib.parse.parse_qsl(parsed[4]))
q[name] = value q[name] = value
parsed[4] = urllib.urlencode(q) parsed[4] = urllib.parse.urlencode(q)
return urlparse.urlunparse(parsed) return urllib.parse.urlunparse(parsed)
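A quick example of what the rewritten helper does (the URL and parameter below are hypothetical)::

  from oauth2client import util

  url = util._add_query_parameter('https://example.com/path?a=1', 'user', 'me')
  print(url)   # e.g. https://example.com/path?a=1&user=me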
View File
@ -1,4 +1,3 @@
#!/usr/bin/python2.5
# #
# Copyright 2014 the Melange authors. # Copyright 2014 the Melange authors.
# #
@ -26,15 +25,27 @@ import base64
import hmac import hmac
import time import time
import six
from oauth2client import util from oauth2client import util
# Delimiter character # Delimiter character
DELIMITER = ':' DELIMITER = b':'
# 1 hour in seconds # 1 hour in seconds
DEFAULT_TIMEOUT_SECS = 1*60*60 DEFAULT_TIMEOUT_SECS = 1*60*60
def _force_bytes(s):
if isinstance(s, bytes):
return s
s = str(s)
if isinstance(s, six.text_type):
return s.encode('utf-8')
return s
@util.positional(2) @util.positional(2)
def generate_token(key, user_id, action_id="", when=None): def generate_token(key, user_id, action_id="", when=None):
"""Generates a URL-safe token for the given user, action, time tuple. """Generates a URL-safe token for the given user, action, time tuple.
@ -50,18 +61,16 @@ def generate_token(key, user_id, action_id="", when=None):
Returns: Returns:
A string XSRF protection token. A string XSRF protection token.
""" """
when = when or int(time.time()) when = _force_bytes(when or int(time.time()))
digester = hmac.new(key) digester = hmac.new(_force_bytes(key))
digester.update(str(user_id)) digester.update(_force_bytes(user_id))
digester.update(DELIMITER) digester.update(DELIMITER)
digester.update(action_id) digester.update(_force_bytes(action_id))
digester.update(DELIMITER) digester.update(DELIMITER)
digester.update(str(when)) digester.update(when)
digest = digester.digest() digest = digester.digest()
token = base64.urlsafe_b64encode('%s%s%d' % (digest, token = base64.urlsafe_b64encode(digest + DELIMITER + when)
DELIMITER,
when))
return token return token
@ -86,8 +95,8 @@ def validate_token(key, token, user_id, action_id="", current_time=None):
if not token: if not token:
return False return False
try: try:
decoded = base64.urlsafe_b64decode(str(token)) decoded = base64.urlsafe_b64decode(token)
token_time = long(decoded.split(DELIMITER)[-1]) token_time = int(decoded.split(DELIMITER)[-1])
except (TypeError, ValueError): except (TypeError, ValueError):
return False return False
if current_time is None: if current_time is None:
@ -104,9 +113,6 @@ def validate_token(key, token, user_id, action_id="", current_time=None):
# Perform constant time comparison to avoid timing attacks # Perform constant time comparison to avoid timing attacks
different = 0 different = 0
for x, y in zip(token, expected_token): for x, y in zip(bytearray(token), bytearray(expected_token)):
different |= ord(x) ^ ord(y) different |= x ^ y
if different: return not different
return False
return True
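End to end, token generation and validation pair up like this (the key below is a throwaway; in real use it is a server-side secret)::

  from oauth2client import xsrfutil

  key = b'server-side-secret'
  token = xsrfutil.generate_token(key, 42, action_id='delete')

  assert xsrfutil.validate_token(key, token, 42, action_id='delete')
  assert not xsrfutil.validate_token(key, token, 99, action_id='delete')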