diff --git a/gam.py b/gam.py index 870a0283..a6c075ac 100644 --- a/gam.py +++ b/gam.py @@ -31,10 +31,10 @@ import sys, os, time, datetime, random, socket, csv, platform, re, calendar, bas import json import httplib2 -import apiclient -import apiclient.discovery -import apiclient.errors -import apiclient.http +import googleapiclient +import googleapiclient.discovery +import googleapiclient.errors +import googleapiclient.http import oauth2client.client import oauth2client.file import oauth2client.tools @@ -148,7 +148,7 @@ def getGamPath(): def doGAMVersion(): import struct print u'Dito GAM %s - http://git.io/gam\n%s\nPython %s.%s.%s %s-bit %s\ngoogle-api-python-client %s\n%s %s\nPath: %s' % (__version__, __author__, - sys.version_info[0], sys.version_info[1], sys.version_info[2], struct.calcsize('P')*8, sys.version_info[3], apiclient.__version__, + sys.version_info[0], sys.version_info[1], sys.version_info[2], struct.calcsize('P')*8, sys.version_info[3], googleapiclient.__version__, platform.platform(), platform.machine(), getGamPath()) def doGAMCheckForUpdates(): @@ -368,7 +368,7 @@ def callGAPI(service, function, silent_errors=False, soft_errors=False, throw_re for n in range(1, retries+1): try: return method(prettyPrint=prettyPrint, **kwargs).execute() - except apiclient.errors.HttpError, e: + except googleapiclient.errors.HttpError, e: try: error = json.loads(e.content) except ValueError: @@ -542,14 +542,14 @@ def buildGAPIObject(api): if api in [u'directory', u'reports']: api = u'admin' try: - service = apiclient.discovery.build(api, version, http=http) - except apiclient.errors.UnknownApiNameOrVersion: + service = googleapiclient.discovery.build(api, version, http=http) + except googleapiclient.errors.UnknownApiNameOrVersion: disc_file = getGamPath()+u'%s-%s.json' % (api, version) if os.path.isfile(disc_file): f = file(disc_file, 'rb') discovery = f.read() f.close() - service = apiclient.discovery.build_from_document(discovery, 
base=u'https://www.googleapis.com', http=http) + service = googleapiclient.discovery.build_from_document(discovery, base=u'https://www.googleapis.com', http=http) else: raise except httplib2.CertificateValidationUnsupported: @@ -619,7 +619,7 @@ def buildGAPIServiceObject(api, act_as=None): http = credentials.authorize(http) version = getAPIVer(api) try: - return apiclient.discovery.build(api, version, http=http) + return googleapiclient.discovery.build(api, version, http=http) except oauth2client.client.AccessTokenRefreshError, e: if e.message == u'access_denied': print u'Error: Access Denied. Please make sure the Client Name:\n\n%s\n\nis authorized for the API Scope(s):\n\n%s\n\nThis can be configured in your Control Panel under:\n\nSecurity -->\nAdvanced Settings -->\nManage third party OAuth Client access' % (SERVICE_ACCOUNT_CLIENT_ID, ','.join(scope)) @@ -643,10 +643,10 @@ def buildDiscoveryObject(api): if not os.path.isfile(getGamPath()+u'nocache.txt'): cache = u'%sgamcache' % getGamPath() http = httplib2.Http(ca_certs=getGamPath()+u'cacert.pem', disable_ssl_certificate_validation=disable_ssl_certificate_validation, cache=cache) - requested_url = uritemplate.expand(apiclient.discovery.DISCOVERY_URI, params) + requested_url = uritemplate.expand(googleapiclient.discovery.DISCOVERY_URI, params) resp, content = http.request(requested_url) if resp.status == 404: - raise apiclient.errors.UnknownApiNameOrVersion("name: %s version: %s" % (api, version)) + raise googleapiclient.errors.UnknownApiNameOrVersion("name: %s version: %s" % (api, version)) if resp.status >= 400: raise HttpError(resp, content, uri=requested_url) try: @@ -766,7 +766,7 @@ def showReport(): page_message = u'Got %%num_items%% users\n' usage = callGAPIpages(service=rep.userUsageReport(), function=u'get', items=u'usageReports', page_message=page_message, throw_reasons=[u'invalid'], date=str(try_date), userKey=userKey, customerId=customerId, filters=filters, parameters=parameters) break - except 
apiclient.errors.HttpError, e: + except googleapiclient.errors.HttpError, e: error = json.loads(e.content) try: message = error[u'error'][u'errors'][0][u'message'] @@ -803,7 +803,7 @@ def showReport(): try: usage = callGAPIpages(service=rep.customerUsageReports(), function=u'get', items=u'usageReports', throw_reasons=[u'invalid'], customerId=customerId, date=str(try_date), parameters=parameters) break - except apiclient.errors.HttpError, e: + except googleapiclient.errors.HttpError, e: error = json.loads(e.content) try: message = error[u'error'][u'errors'][0][u'message'] @@ -1543,7 +1543,7 @@ def getPhoto(users): i += 1 try: photo = callGAPI(service=cd.users().photos(), function=u'get', throw_reasons=[u'notFound'], userKey=user) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: print u' no photo for %s' % user continue try: @@ -2128,7 +2128,7 @@ def doUpdateDriveFile(users): if drivefilename: fileIds = doDriveSearch(drive, query=u'"me" in owners and title = "%s"' % drivefilename) if local_filepath: - media_body = apiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) + media_body = googleapiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) for fileId in fileIds: if operation == u'update': if media_body: @@ -2243,7 +2243,7 @@ def createDriveFile(users): for a_parent in more_parents: body[u'parents'].append({u'id': a_parent}) if local_filepath: - media_body = apiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) + media_body = googleapiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) result = callGAPI(service=drive.files(), function=u'insert', convert=convert, ocr=ocr, ocrLanguage=ocrLanguage, media_body=media_body, body=body, fields='id') try: print u'Successfully uploaded %s to Drive file ID %s' % (local_filename, result[u'id']) @@ -2886,14 +2886,14 @@ def doDeleteLabel(users): continue del_me_count = len(del_labels) i = 1 
- dbatch = apiclient.http.BatchHttpRequest() + dbatch = googleapiclient.http.BatchHttpRequest() for del_me in del_labels: print u' deleting label %s (%s/%s)' % (del_me[u'name'], i, del_me_count) i += 1 dbatch.add(gmail.users().labels().delete(userId=user, id=del_me[u'id']), callback=label_del_result) if len(dbatch._order) == 25: dbatch.execute() - dbatch = apiclient.http.BatchHttpRequest() + dbatch = googleapiclient.http.BatchHttpRequest() if len(dbatch._order) > 0: dbatch.execute() @@ -2993,7 +2993,7 @@ def renameLabels(users): print u' Renaming "%s" to "%s"' % (label[u'name'], new_label_name) try: callGAPI(service=gmail.users().labels(), function=u'patch', soft_errors=True, throw_reasons=[u'aborted'], id=label[u'id'], userId=user, body={u'name': new_label_name}) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: if merge: print u' Merging %s label to existing %s label' % (label[u'name'], new_label_name) q = u'label:"%s"' % label[u'name'] @@ -3826,7 +3826,7 @@ def doCreateAlias(): elif target_type == u'target': try: callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=targetKey, body=body) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=targetKey, body=body) def doCreateOrg(): @@ -4310,7 +4310,7 @@ def doUpdateGroup(): print u'added %s to group' % result[u'email'] except TypeError: pass - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: pass elif sys.argv[4].lower() == u'sync': role = sys.argv[5].upper() @@ -4328,7 +4328,7 @@ def doUpdateGroup(): sys.stderr.write(u' adding %s %s\n' % (role, user_email)) try: result = callGAPI(service=cd.members(), function=u'insert', soft_errors=True, throw_reasons=[u'duplicate'], groupKey=group, body={u'email': user_email, u'role': role}) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: result = 
callGAPI(service=cd.members(), function=u'update', soft_errors=True, groupKey=group, memberKey=user_email, body={u'email': user_email, u'role': role}) for user_email in to_remove: sys.stderr.write(u' removing %s\n' % user_email) @@ -4435,7 +4435,7 @@ def doUpdateAlias(): target_email = u'%s@%s' % (target_email, domain) try: callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid'], userKey=alias, alias=alias) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: callGAPI(service=cd.groups().aliases(), function=u'delete', groupKey=alias, alias=alias) if target_type == u'user': callGAPI(service=cd.users().aliases(), function=u'insert', userKey=target_email, body={u'alias': alias}) @@ -4444,7 +4444,7 @@ def doUpdateAlias(): elif target_type == u'target': try: callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=target_email, body={u'alias': alias}) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=target_email, body={u'alias': alias}) print u'updated alias %s' % alias @@ -4579,7 +4579,7 @@ def doUpdateOrg(): sys.stderr.write(u' moving %s to %s (%s/%s)\n' % (user, orgUnitPath, current_user, user_count)) try: callGAPI(service=cd.users(), function=u'patch', throw_reasons=[u'conditionNotMet'], userKey=user, body={u'orgUnitPath': orgUnitPath}) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: pass current_user += 1 else: @@ -4622,12 +4622,12 @@ def doWhatIs(): sys.stderr.write(u'%s is a user alias\n\n' % email) doGetAliasInfo(alias_email=email) return - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: sys.stderr.write(u'%s is not a user...\n' % email) sys.stderr.write(u'%s is not a user alias...\n' % email) try: group = callGAPI(service=cd.groups(), function=u'get', throw_reasons=[u'notFound', u'badRequest'], 
groupKey=email, fields=u'email') - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: sys.stderr.write(u'%s is not a group either!\n\nDoesn\'t seem to exist!\n\n' % email) sys.exit(1) if group[u'email'].lower() == email.lower(): @@ -4826,7 +4826,7 @@ def doGetUserInfo(user_email=None): productId, skuId = getProductAndSKU(sku) try: result = callGAPI(service=lic.licenseAssignments(), function=u'get', throw_reasons=['notFound'], userId=user_email, productId=productId, skuId=skuId) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: continue print u' %s' % result[u'skuId'] @@ -4848,7 +4848,7 @@ def doGetGroupInfo(group_name=None): basic_info = callGAPI(service=cd.groups(), function=u'get', groupKey=group_name) try: settings = callGAPI(service=gs.groups(), function=u'get', retry_reasons=[u'serviceLimit'], groupUniqueId=basic_info[u'email'], throw_reasons=u'authError') # Use email address retrieved from cd since GS API doesn't support uid - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: pass print u'' print u'Group Settings:' @@ -4894,7 +4894,7 @@ def doGetAliasInfo(alias_email=None): alias_email = u'%s@%s' % (alias_email, domain) try: result = callGAPI(service=cd.users(), function=u'get', throw_reasons=[u'invalid', u'badRequest'], userKey=alias_email) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: result = callGAPI(service=cd.groups(), function=u'get', groupKey=alias_email) print u' Alias Email: %s' % alias_email try: @@ -5066,7 +5066,7 @@ def doSiteVerifyAttempt(): body = {u'site':{u'type':verify_type, u'identifier':identifier}, u'verificationMethod':verificationMethod} try: verify_result = callGAPI(service=verif.webResource(), function=u'insert', throw_reasons=[u'badRequest'], verificationMethod=verificationMethod, body=body) - except apiclient.errors.HttpError, e: + except googleapiclient.errors.HttpError, e: error = json.loads(e.content) message = 
error[u'error'][u'errors'][0][u'message'] print u'ERROR: %s' % message @@ -5223,7 +5223,7 @@ def doGetBackupCodes(users): for user in users: try: codes = callGAPI(service=cd.verificationCodes(), function=u'list', throw_reasons=[u'invalidArgument', u'invalid'], userKey=user) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: codes = dict() codes[u'items'] = list() print u'Backup verification codes for %s' % user @@ -5262,7 +5262,7 @@ def doDelBackupCodes(users): for user in users: try: codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid',], userKey=user) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: print u'No 2SV backup codes for %s' % user continue print u'2SV backup codes for %s invalidated' % user @@ -5288,7 +5288,7 @@ def doGetTokens(users): for user in users: try: token = callGAPI(service=cd.tokens(), function=u'get', throw_reasons=[u'notFound',], userKey=user, clientId=clientId, fields=u'clientId') - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: continue print u'%s has allowed this token' % user return @@ -5339,7 +5339,7 @@ def doDeprovUser(users): print u'Invalidating 2SV Backup Codes for %s' % user try: codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid'], userKey=user) - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: print u'No 2SV Backup Codes' print u'Getting tokens for %s...' 
% user tokens = callGAPI(service=cd.tokens(), function=u'list', userKey=user, fields=u'items/clientId') @@ -5673,7 +5673,7 @@ def doDeleteAlias(alias_email=None): try: callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid', u'badRequest', u'notFound'], userKey=alias_email, alias=alias_email) return - except apiclient.errors.HttpError, e: + except googleapiclient.errors.HttpError, e: error = json.loads(e.content) reason = error[u'error'][u'errors'][0][u'reason'] if reason == u'notFound': @@ -5715,7 +5715,7 @@ def output_csv(csv_list, titles, list_type, todrive): convert = False drive = buildGAPIObject(u'drive') string_data = string_file.getvalue() - media = apiclient.http.MediaInMemoryUpload(string_data, mimetype=u'text/csv') + media = googleapiclient.http.MediaInMemoryUpload(string_data, mimetype=u'text/csv') result = callGAPI(service=drive.files(), function=u'insert', convert=convert, body={u'description': u' '.join(sys.argv), u'title': u'%s - %s' % (domain, list_type), u'mimeType': u'text/csv'}, media_body=media) file_url = result[u'alternateLink'] if os.path.isfile(getGamPath()+u'nobrowser.txt'): @@ -6378,14 +6378,14 @@ def doPrintLicenses(return_list=False): page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % sku try: licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProductAndSku', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=product, skuId=sku, fields=u'items(productId,skuId,userId),nextPageToken') - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: licenses += [] else: for productId in products: page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % productId try: licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProduct', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=productId, 
fields=u'items(productId,skuId,userId),nextPageToken') - except apiclient.errors.HttpError: + except googleapiclient.errors.HttpError: licenses = +[] for license in licenses: a_license = dict() diff --git a/googleapiclient/__init__.py b/googleapiclient/__init__.py new file mode 100644 index 00000000..1e1a6cf6 --- /dev/null +++ b/googleapiclient/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +__version__ = "1.3.1" diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py new file mode 100644 index 00000000..265273ed --- /dev/null +++ b/googleapiclient/channel.py @@ -0,0 +1,285 @@ +"""Channel notifications support. + +Classes and functions to support channel subscriptions and notifications +on those channels. + +Notes: + - This code is based on experimental APIs and is subject to change. + - Notification does not do deduplication of notification ids, that's up to + the receiver. + - Storing the Channel between calls is up to the caller. + + +Example setting up a channel: + + # Create a new channel that gets notifications via webhook. + channel = new_webhook_channel("https://example.com/my_web_hook") + + # Store the channel, keyed by 'channel.id'. Store it before calling the + # watch method because notifications may start arriving before the watch + # method returns. + ... 
+ + resp = service.objects().watchAll( + bucket="some_bucket_id", body=channel.body()).execute() + channel.update(resp) + + # Store the channel, keyed by 'channel.id'. Store it after being updated + # since the resource_id value will now be correct, and that's needed to + # stop a subscription. + ... + + +An example Webhook implementation using webapp2. Note that webapp2 puts +headers in a case insensitive dictionary, as headers aren't guaranteed to +always be upper case. + + id = self.request.headers[X_GOOG_CHANNEL_ID] + + # Retrieve the channel by id. + channel = ... + + # Parse notification from the headers, including validating the id. + n = notification_from_headers(channel, self.request.headers) + + # Do app specific stuff with the notification here. + if n.resource_state == 'sync': + # Code to handle sync state. + elif n.resource_state == 'exists': + # Code to handle the exists state. + elif n.resource_state == 'not_exists': + # Code to handle the not exists state. + + +Example of unsubscribing. + + service.channels().stop(channel.body()) +""" + +import datetime +import uuid + +from googleapiclient import errors +from oauth2client import util + + +# The unix time epoch starts at midnight 1970. +EPOCH = datetime.datetime.utcfromtimestamp(0) + +# Map the names of the parameters in the JSON channel description to +# the parameter names we use in the Channel class. 
+CHANNEL_PARAMS = { + 'address': 'address', + 'id': 'id', + 'expiration': 'expiration', + 'params': 'params', + 'resourceId': 'resource_id', + 'resourceUri': 'resource_uri', + 'type': 'type', + 'token': 'token', + } + +X_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID' +X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER' +X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE' +X_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI' +X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID' + + +def _upper_header_keys(headers): + new_headers = {} + for k, v in headers.iteritems(): + new_headers[k.upper()] = v + return new_headers + + +class Notification(object): + """A Notification from a Channel. + + Notifications are not usually constructed directly, but are returned + from functions like notification_from_headers(). + + Attributes: + message_number: int, The unique id number of this notification. + state: str, The state of the resource being monitored. + uri: str, The address of the resource being monitored. + resource_id: str, The unique identifier of the version of the resource at + this event. + """ + @util.positional(5) + def __init__(self, message_number, state, resource_uri, resource_id): + """Notification constructor. + + Args: + message_number: int, The unique id number of this notification. + state: str, The state of the resource being monitored. Can be one + of "exists", "not_exists", or "sync". + resource_uri: str, The address of the resource being monitored. + resource_id: str, The identifier of the watched resource. + """ + self.message_number = message_number + self.state = state + self.resource_uri = resource_uri + self.resource_id = resource_id + + +class Channel(object): + """A Channel for notifications. + + Usually not constructed directly, instead it is returned from helper + functions like new_webhook_channel(). + + Attributes: + type: str, The type of delivery mechanism used by this channel. For + example, 'web_hook'. + id: str, A UUID for the channel. 
+ token: str, An arbitrary string associated with the channel that + is delivered to the target address with each event delivered + over this channel. + address: str, The address of the receiving entity where events are + delivered. Specific to the channel type. + expiration: int, The time, in milliseconds from the epoch, when this + channel will expire. + params: dict, A dictionary of string to string, with additional parameters + controlling delivery channel behavior. + resource_id: str, An opaque id that identifies the resource that is + being watched. Stable across different API versions. + resource_uri: str, The canonicalized ID of the watched resource. + """ + + @util.positional(5) + def __init__(self, type, id, token, address, expiration=None, + params=None, resource_id="", resource_uri=""): + """Create a new Channel. + + In user code, this Channel constructor will not typically be called + manually since there are functions for creating channels for each specific + type with a more customized set of arguments to pass. + + Args: + type: str, The type of delivery mechanism used by this channel. For + example, 'web_hook'. + id: str, A UUID for the channel. + token: str, An arbitrary string associated with the channel that + is delivered to the target address with each event delivered + over this channel. + address: str, The address of the receiving entity where events are + delivered. Specific to the channel type. + expiration: int, The time, in milliseconds from the epoch, when this + channel will expire. + params: dict, A dictionary of string to string, with additional parameters + controlling delivery channel behavior. + resource_id: str, An opaque id that identifies the resource that is + being watched. Stable across different API versions. + resource_uri: str, The canonicalized ID of the watched resource. 
+ """ + self.type = type + self.id = id + self.token = token + self.address = address + self.expiration = expiration + self.params = params + self.resource_id = resource_id + self.resource_uri = resource_uri + + def body(self): + """Build a body from the Channel. + + Constructs a dictionary that's appropriate for passing into watch() + methods as the value of body argument. + + Returns: + A dictionary representation of the channel. + """ + result = { + 'id': self.id, + 'token': self.token, + 'type': self.type, + 'address': self.address + } + if self.params: + result['params'] = self.params + if self.resource_id: + result['resourceId'] = self.resource_id + if self.resource_uri: + result['resourceUri'] = self.resource_uri + if self.expiration: + result['expiration'] = self.expiration + + return result + + def update(self, resp): + """Update a channel with information from the response of watch(). + + When a request is sent to watch() a resource, the response returned + from the watch() request is a dictionary with updated channel information, + such as the resource_id, which is needed when stopping a subscription. + + Args: + resp: dict, The response from a watch() method. + """ + for json_name, param_name in CHANNEL_PARAMS.iteritems(): + value = resp.get(json_name) + if value is not None: + setattr(self, param_name, value) + + +def notification_from_headers(channel, headers): + """Parse a notification from the webhook request headers, validate + the notification, and return a Notification object. + + Args: + channel: Channel, The channel that the notification is associated with. + headers: dict, A dictionary like object that contains the request headers + from the webhook HTTP request. + + Returns: + A Notification object. + + Raises: + errors.InvalidNotificationError if the notification is invalid. + ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int. 
+ """ + headers = _upper_header_keys(headers) + channel_id = headers[X_GOOG_CHANNEL_ID] + if channel.id != channel_id: + raise errors.InvalidNotificationError( + 'Channel id mismatch: %s != %s' % (channel.id, channel_id)) + else: + message_number = int(headers[X_GOOG_MESSAGE_NUMBER]) + state = headers[X_GOOG_RESOURCE_STATE] + resource_uri = headers[X_GOOG_RESOURCE_URI] + resource_id = headers[X_GOOG_RESOURCE_ID] + return Notification(message_number, state, resource_uri, resource_id) + + +@util.positional(2) +def new_webhook_channel(url, token=None, expiration=None, params=None): + """Create a new webhook Channel. + + Args: + url: str, URL to post notifications to. + token: str, An arbitrary string associated with the channel that + is delivered to the target address with each notification delivered + over this channel. + expiration: datetime.datetime, A time in the future when the channel + should expire. Can also be None if the subscription should use the + default expiration. Note that different services may have different + limits on how long a subscription lasts. Check the response from the + watch() method to see the value the service has set for an expiration + time. + params: dict, Extra parameters to pass on channel creation. Currently + not used for webhook channels. + """ + expiration_ms = 0 + if expiration: + delta = expiration - EPOCH + expiration_ms = delta.microseconds/1000 + ( + delta.seconds + delta.days*24*3600)*1000 + if expiration_ms < 0: + expiration_ms = 0 + + return Channel('web_hook', str(uuid.uuid4()), + token, url, expiration=expiration_ms, + params=params) + diff --git a/googleapiclient/discovery.py b/googleapiclient/discovery.py new file mode 100644 index 00000000..45ae80ab --- /dev/null +++ b/googleapiclient/discovery.py @@ -0,0 +1,995 @@ +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for discovery based APIs. + +A client library for Google's discovery based APIs. +""" + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' +__all__ = [ + 'build', + 'build_from_document', + 'fix_method_name', + 'key2param', + ] + + +# Standard library imports +import StringIO +import copy +from email.generator import Generator +from email.mime.multipart import MIMEMultipart +from email.mime.nonmultipart import MIMENonMultipart +import json +import keyword +import logging +import mimetypes +import os +import re +import urllib +import urlparse + +try: + from urlparse import parse_qsl +except ImportError: + from cgi import parse_qsl + +# Third-party imports +import httplib2 +import mimeparse +import uritemplate + +# Local imports +from googleapiclient.errors import HttpError +from googleapiclient.errors import InvalidJsonError +from googleapiclient.errors import MediaUploadSizeError +from googleapiclient.errors import UnacceptableMimeTypeError +from googleapiclient.errors import UnknownApiNameOrVersion +from googleapiclient.errors import UnknownFileType +from googleapiclient.http import HttpRequest +from googleapiclient.http import MediaFileUpload +from googleapiclient.http import MediaUpload +from googleapiclient.model import JsonModel +from googleapiclient.model import MediaModel +from googleapiclient.model import RawModel +from googleapiclient.schema import Schemas +from oauth2client.client import GoogleCredentials +from oauth2client.util import _add_query_parameter +from oauth2client.util import positional + + +# The client library requires 
a version of httplib2 that supports RETRIES. +httplib2.RETRIES = 1 + +logger = logging.getLogger(__name__) + +URITEMPLATE = re.compile('{[^}]*}') +VARNAME = re.compile('[a-zA-Z0-9_-]+') +DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/' + '{api}/{apiVersion}/rest') +DEFAULT_METHOD_DOC = 'A description of how to use this function' +HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH']) +_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40} +BODY_PARAMETER_DEFAULT_VALUE = { + 'description': 'The request body.', + 'type': 'object', + 'required': True, +} +MEDIA_BODY_PARAMETER_DEFAULT_VALUE = { + 'description': ('The filename of the media request body, or an instance ' + 'of a MediaUpload object.'), + 'type': 'string', + 'required': False, +} + +# Parameters accepted by the stack, but not visible via discovery. +# TODO(dhermes): Remove 'userip' in 'v2'. +STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict']) +STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'} + +# Library-specific reserved words beyond Python keywords. +RESERVED_WORDS = frozenset(['body']) + + +def fix_method_name(name): + """Fix method names to avoid reserved word conflicts. + + Args: + name: string, method name. + + Returns: + The name with a '_' prefixed if the name is a reserved word. + """ + if keyword.iskeyword(name) or name in RESERVED_WORDS: + return name + '_' + else: + return name + + +def key2param(key): + """Converts key names into parameter names. + + For example, converting "max-results" -> "max_results" + + Args: + key: string, the method key name. + + Returns: + A safe method name based on the key name. 
+ """ + result = [] + key = list(key) + if not key[0].isalpha(): + result.append('x') + for c in key: + if c.isalnum(): + result.append(c) + else: + result.append('_') + + return ''.join(result) + + +@positional(2) +def build(serviceName, + version, + http=None, + discoveryServiceUrl=DISCOVERY_URI, + developerKey=None, + model=None, + requestBuilder=HttpRequest, + credentials=None): + """Construct a Resource for interacting with an API. + + Construct a Resource object for interacting with an API. The serviceName and + version are the names from the Discovery service. + + Args: + serviceName: string, name of the service. + version: string, the version of the service. + http: httplib2.Http, An instance of httplib2.Http or something that acts + like it that HTTP requests will be made through. + discoveryServiceUrl: string, a URI Template that points to the location of + the discovery service. It should have two parameters {api} and + {apiVersion} that when filled in produce an absolute URI to the discovery + document for that service. + developerKey: string, key obtained from + https://code.google.com/apis/console. + model: googleapiclient.Model, converts to and from the wire format. + requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP + request. + credentials: oauth2client.Credentials, credentials to be used for + authentication. + + Returns: + A Resource object with methods for interacting with the service. + """ + params = { + 'api': serviceName, + 'apiVersion': version + } + + if http is None: + http = httplib2.Http() + + requested_url = uritemplate.expand(discoveryServiceUrl, params) + + # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment + # variable that contains the network address of the client sending the + # request. If it exists then add that to the request for the discovery + # document to avoid exceeding the quota on discovery requests. 
+ if 'REMOTE_ADDR' in os.environ: + requested_url = _add_query_parameter(requested_url, 'userIp', + os.environ['REMOTE_ADDR']) + logger.info('URL being requested: GET %s' % requested_url) + + resp, content = http.request(requested_url) + + if resp.status == 404: + raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName, + version)) + if resp.status >= 400: + raise HttpError(resp, content, uri=requested_url) + + try: + service = json.loads(content) + except ValueError, e: + logger.error('Failed to parse as JSON: ' + content) + raise InvalidJsonError() + + return build_from_document(content, base=discoveryServiceUrl, http=http, + developerKey=developerKey, model=model, requestBuilder=requestBuilder, + credentials=credentials) + + +@positional(1) +def build_from_document( + service, + base=None, + future=None, + http=None, + developerKey=None, + model=None, + requestBuilder=HttpRequest, + credentials=None): + """Create a Resource for interacting with an API. + + Same as `build()`, but constructs the Resource object from a discovery + document that is it given, as opposed to retrieving one over HTTP. + + Args: + service: string or object, the JSON discovery document describing the API. + The value passed in may either be the JSON string or the deserialized + JSON. + base: string, base URI for all HTTP requests, usually the discovery URI. + This parameter is no longer used as rootUrl and servicePath are included + within the discovery document. (deprecated) + future: string, discovery document with future capabilities (deprecated). + http: httplib2.Http, An instance of httplib2.Http or something that acts + like it that HTTP requests will be made through. + developerKey: string, Key for controlling API usage, generated + from the API Console. + model: Model class instance that serializes and de-serializes requests and + responses. + requestBuilder: Takes an http request and packages it up to be executed. 
+ credentials: object, credentials to be used for authentication. + + Returns: + A Resource object with methods for interacting with the service. + """ + + # future is no longer used. + future = {} + + if isinstance(service, basestring): + service = json.loads(service) + base = urlparse.urljoin(service['rootUrl'], service['servicePath']) + schema = Schemas(service) + + if credentials: + # If credentials were passed in, we could have two cases: + # 1. the scopes were specified, in which case the given credentials + # are used for authorizing the http; + # 2. the scopes were not provided (meaning the Application Default + # Credentials are to be used). In this case, the Application Default + # Credentials are built and used instead of the original credentials. + # If there are no scopes found (meaning the given service requires no + # authentication), there is no authorization of the http. + if (isinstance(credentials, GoogleCredentials) and + credentials.create_scoped_required()): + scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {}) + if scopes: + credentials = credentials.create_scoped(scopes.keys()) + else: + # No need to authorize the http object + # if the service does not require authentication. + credentials = None + + if credentials: + http = credentials.authorize(http) + + if model is None: + features = service.get('features', []) + model = JsonModel('dataWrapper' in features) + return Resource(http=http, baseUrl=base, model=model, + developerKey=developerKey, requestBuilder=requestBuilder, + resourceDesc=service, rootDesc=service, schema=schema) + + +def _cast(value, schema_type): + """Convert value to a string based on JSON Schema type. + + See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on + JSON Schema. + + Args: + value: any, the value to convert + schema_type: string, the type that value should be interpreted as + + Returns: + A string representation of 'value' based on the schema_type. 
+ """ + if schema_type == 'string': + if type(value) == type('') or type(value) == type(u''): + return value + else: + return str(value) + elif schema_type == 'integer': + return str(int(value)) + elif schema_type == 'number': + return str(float(value)) + elif schema_type == 'boolean': + return str(bool(value)).lower() + else: + if type(value) == type('') or type(value) == type(u''): + return value + else: + return str(value) + + +def _media_size_to_long(maxSize): + """Convert a string media size, such as 10GB or 3TB into an integer. + + Args: + maxSize: string, size as a string, such as 2MB or 7GB. + + Returns: + The size as an integer value. + """ + if len(maxSize) < 2: + return 0L + units = maxSize[-2:].upper() + bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units) + if bit_shift is not None: + return long(maxSize[:-2]) << bit_shift + else: + return long(maxSize) + + +def _media_path_url_from_info(root_desc, path_url): + """Creates an absolute media path URL. + + Constructed using the API root URI and service path from the discovery + document and the relative path for the API method. + + Args: + root_desc: Dictionary; the entire original deserialized discovery document. + path_url: String; the relative URL for the API method. Relative to the API + root, which is specified in the discovery document. + + Returns: + String; the absolute URI for media upload for the API method. + """ + return '%(root)supload/%(service_path)s%(path)s' % { + 'root': root_desc['rootUrl'], + 'service_path': root_desc['servicePath'], + 'path': path_url, + } + + +def _fix_up_parameters(method_desc, root_desc, http_method): + """Updates parameters of an API method with values specific to this library. + + Specifically, adds whatever global parameters are specified by the API to the + parameters for the individual method. Also adds parameters which don't + appear in the discovery document, but are available to all discovery based + APIs (these are listed in STACK_QUERY_PARAMETERS). 
+ + SIDE EFFECTS: This updates the parameters dictionary object in the method + description. + + Args: + method_desc: Dictionary with metadata describing an API method. Value comes + from the dictionary of methods stored in the 'methods' key in the + deserialized discovery document. + root_desc: Dictionary; the entire original deserialized discovery document. + http_method: String; the HTTP method used to call the API method described + in method_desc. + + Returns: + The updated Dictionary stored in the 'parameters' key of the method + description dictionary. + """ + parameters = method_desc.setdefault('parameters', {}) + + # Add in the parameters common to all methods. + for name, description in root_desc.get('parameters', {}).iteritems(): + parameters[name] = description + + # Add in undocumented query parameters. + for name in STACK_QUERY_PARAMETERS: + parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy() + + # Add 'body' (our own reserved word) to parameters if the method supports + # a request payload. + if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc: + body = BODY_PARAMETER_DEFAULT_VALUE.copy() + body.update(method_desc['request']) + parameters['body'] = body + + return parameters + + +def _fix_up_media_upload(method_desc, root_desc, path_url, parameters): + """Updates parameters of API by adding 'media_body' if supported by method. + + SIDE EFFECTS: If the method supports media upload and has a required body, + sets body to be optional (required=False) instead. Also, if there is a + 'mediaUpload' in the method description, adds 'media_upload' key to + parameters. + + Args: + method_desc: Dictionary with metadata describing an API method. Value comes + from the dictionary of methods stored in the 'methods' key in the + deserialized discovery document. + root_desc: Dictionary; the entire original deserialized discovery document. + path_url: String; the relative URL for the API method. 
Relative to the API + root, which is specified in the discovery document. + parameters: A dictionary describing method parameters for method described + in method_desc. + + Returns: + Triple (accept, max_size, media_path_url) where: + - accept is a list of strings representing what content types are + accepted for media upload. Defaults to empty list if not in the + discovery document. + - max_size is a long representing the max size in bytes allowed for a + media upload. Defaults to 0L if not in the discovery document. + - media_path_url is a String; the absolute URI for media upload for the + API method. Constructed using the API root URI and service path from + the discovery document and the relative path for the API method. If + media upload is not supported, this is None. + """ + media_upload = method_desc.get('mediaUpload', {}) + accept = media_upload.get('accept', []) + max_size = _media_size_to_long(media_upload.get('maxSize', '')) + media_path_url = None + + if media_upload: + media_path_url = _media_path_url_from_info(root_desc, path_url) + parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy() + if 'body' in parameters: + parameters['body']['required'] = False + + return accept, max_size, media_path_url + + +def _fix_up_method_description(method_desc, root_desc): + """Updates a method description in a discovery document. + + SIDE EFFECTS: Changes the parameters dictionary in the method description with + extra parameters which are used locally. + + Args: + method_desc: Dictionary with metadata describing an API method. Value comes + from the dictionary of methods stored in the 'methods' key in the + deserialized discovery document. + root_desc: Dictionary; the entire original deserialized discovery document. + + Returns: + Tuple (path_url, http_method, method_id, accept, max_size, media_path_url) + where: + - path_url is a String; the relative URL for the API method. Relative to + the API root, which is specified in the discovery document. 
+ - http_method is a String; the HTTP method used to call the API method + described in the method description. + - method_id is a String; the name of the RPC method associated with the + API method, and is in the method description in the 'id' key. + - accept is a list of strings representing what content types are + accepted for media upload. Defaults to empty list if not in the + discovery document. + - max_size is a long representing the max size in bytes allowed for a + media upload. Defaults to 0L if not in the discovery document. + - media_path_url is a String; the absolute URI for media upload for the + API method. Constructed using the API root URI and service path from + the discovery document and the relative path for the API method. If + media upload is not supported, this is None. + """ + path_url = method_desc['path'] + http_method = method_desc['httpMethod'] + method_id = method_desc['id'] + + parameters = _fix_up_parameters(method_desc, root_desc, http_method) + # Order is important. `_fix_up_media_upload` needs `method_desc` to have a + # 'parameters' key and needs to know if there is a 'body' parameter because it + # also sets a 'media_body' parameter. + accept, max_size, media_path_url = _fix_up_media_upload( + method_desc, root_desc, path_url, parameters) + + return path_url, http_method, method_id, accept, max_size, media_path_url + + +# TODO(dhermes): Convert this class to ResourceMethod and make it callable +class ResourceMethodParameters(object): + """Represents the parameters associated with a method. + + Attributes: + argmap: Map from method parameter name (string) to query parameter name + (string). + required_params: List of required parameters (represented by parameter + name as string). + repeated_params: List of repeated parameters (represented by parameter + name as string). + pattern_params: Map from method parameter name (string) to regular + expression (as a string). 
If the pattern is set for a parameter, the + value for that parameter must match the regular expression. + query_params: List of parameters (represented by parameter name as string) + that will be used in the query string. + path_params: Set of parameters (represented by parameter name as string) + that will be used in the base URL path. + param_types: Map from method parameter name (string) to parameter type. Type + can be any valid JSON schema type; valid values are 'any', 'array', + 'boolean', 'integer', 'number', 'object', or 'string'. Reference: + http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1 + enum_params: Map from method parameter name (string) to list of strings, + where each list of strings is the list of acceptable enum values. + """ + + def __init__(self, method_desc): + """Constructor for ResourceMethodParameters. + + Sets default values and defers to set_parameters to populate. + + Args: + method_desc: Dictionary with metadata describing an API method. Value + comes from the dictionary of methods stored in the 'methods' key in + the deserialized discovery document. + """ + self.argmap = {} + self.required_params = [] + self.repeated_params = [] + self.pattern_params = {} + self.query_params = [] + # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE + # parsing is gotten rid of. + self.path_params = set() + self.param_types = {} + self.enum_params = {} + + self.set_parameters(method_desc) + + def set_parameters(self, method_desc): + """Populates maps and lists based on method description. + + Iterates through each parameter for the method and parses the values from + the parameter dictionary. + + Args: + method_desc: Dictionary with metadata describing an API method. Value + comes from the dictionary of methods stored in the 'methods' key in + the deserialized discovery document. 
+ """ + for arg, desc in method_desc.get('parameters', {}).iteritems(): + param = key2param(arg) + self.argmap[param] = arg + + if desc.get('pattern'): + self.pattern_params[param] = desc['pattern'] + if desc.get('enum'): + self.enum_params[param] = desc['enum'] + if desc.get('required'): + self.required_params.append(param) + if desc.get('repeated'): + self.repeated_params.append(param) + if desc.get('location') == 'query': + self.query_params.append(param) + if desc.get('location') == 'path': + self.path_params.add(param) + self.param_types[param] = desc.get('type', 'string') + + # TODO(dhermes): Determine if this is still necessary. Discovery based APIs + # should have all path parameters already marked with + # 'location: path'. + for match in URITEMPLATE.finditer(method_desc['path']): + for namematch in VARNAME.finditer(match.group(0)): + name = key2param(namematch.group(0)) + self.path_params.add(name) + if name in self.query_params: + self.query_params.remove(name) + + +def createMethod(methodName, methodDesc, rootDesc, schema): + """Creates a method for attaching to a Resource. + + Args: + methodName: string, name of the method to use. + methodDesc: object, fragment of deserialized discovery document that + describes the method. + rootDesc: object, the entire deserialized discovery document. + schema: object, mapping of schema names to schema descriptions. + """ + methodName = fix_method_name(methodName) + (pathUrl, httpMethod, methodId, accept, + maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc) + + parameters = ResourceMethodParameters(methodDesc) + + def method(self, **kwargs): + # Don't bother with doc string, it will be over-written by createMethod. + + for name in kwargs.iterkeys(): + if name not in parameters.argmap: + raise TypeError('Got an unexpected keyword argument "%s"' % name) + + # Remove args that have a value of None. 
+ keys = kwargs.keys() + for name in keys: + if kwargs[name] is None: + del kwargs[name] + + for name in parameters.required_params: + if name not in kwargs: + raise TypeError('Missing required parameter "%s"' % name) + + for name, regex in parameters.pattern_params.iteritems(): + if name in kwargs: + if isinstance(kwargs[name], basestring): + pvalues = [kwargs[name]] + else: + pvalues = kwargs[name] + for pvalue in pvalues: + if re.match(regex, pvalue) is None: + raise TypeError( + 'Parameter "%s" value "%s" does not match the pattern "%s"' % + (name, pvalue, regex)) + + for name, enums in parameters.enum_params.iteritems(): + if name in kwargs: + # We need to handle the case of a repeated enum + # name differently, since we want to handle both + # arg='value' and arg=['value1', 'value2'] + if (name in parameters.repeated_params and + not isinstance(kwargs[name], basestring)): + values = kwargs[name] + else: + values = [kwargs[name]] + for value in values: + if value not in enums: + raise TypeError( + 'Parameter "%s" value "%s" is not an allowed value in "%s"' % + (name, value, str(enums))) + + actual_query_params = {} + actual_path_params = {} + for key, value in kwargs.iteritems(): + to_type = parameters.param_types.get(key, 'string') + # For repeated parameters we cast each member of the list. 
+ if key in parameters.repeated_params and type(value) == type([]): + cast_value = [_cast(x, to_type) for x in value] + else: + cast_value = _cast(value, to_type) + if key in parameters.query_params: + actual_query_params[parameters.argmap[key]] = cast_value + if key in parameters.path_params: + actual_path_params[parameters.argmap[key]] = cast_value + body_value = kwargs.get('body', None) + media_filename = kwargs.get('media_body', None) + + if self._developerKey: + actual_query_params['key'] = self._developerKey + + model = self._model + if methodName.endswith('_media'): + model = MediaModel() + elif 'response' not in methodDesc: + model = RawModel() + + headers = {} + headers, params, query, body = model.request(headers, + actual_path_params, actual_query_params, body_value) + + expanded_url = uritemplate.expand(pathUrl, params) + url = urlparse.urljoin(self._baseUrl, expanded_url + query) + + resumable = None + multipart_boundary = '' + + if media_filename: + # Ensure we end up with a valid MediaUpload object. 
+ if isinstance(media_filename, basestring): + (media_mime_type, encoding) = mimetypes.guess_type(media_filename) + if media_mime_type is None: + raise UnknownFileType(media_filename) + if not mimeparse.best_match([media_mime_type], ','.join(accept)): + raise UnacceptableMimeTypeError(media_mime_type) + media_upload = MediaFileUpload(media_filename, + mimetype=media_mime_type) + elif isinstance(media_filename, MediaUpload): + media_upload = media_filename + else: + raise TypeError('media_filename must be str or MediaUpload.') + + # Check the maxSize + if maxSize > 0 and media_upload.size() > maxSize: + raise MediaUploadSizeError("Media larger than: %s" % maxSize) + + # Use the media path uri for media uploads + expanded_url = uritemplate.expand(mediaPathUrl, params) + url = urlparse.urljoin(self._baseUrl, expanded_url + query) + if media_upload.resumable(): + url = _add_query_parameter(url, 'uploadType', 'resumable') + + if media_upload.resumable(): + # This is all we need to do for resumable, if the body exists it gets + # sent in the first request, otherwise an empty body is sent. + resumable = media_upload + else: + # A non-resumable upload + if body is None: + # This is a simple media upload + headers['content-type'] = media_upload.mimetype() + body = media_upload.getbytes(0, media_upload.size()) + url = _add_query_parameter(url, 'uploadType', 'media') + else: + # This is a multipart/related upload. 
+ msgRoot = MIMEMultipart('related') + # msgRoot should not write out it's own headers + setattr(msgRoot, '_write_headers', lambda self: None) + + # attach the body as one part + msg = MIMENonMultipart(*headers['content-type'].split('/')) + msg.set_payload(body) + msgRoot.attach(msg) + + # attach the media as the second part + msg = MIMENonMultipart(*media_upload.mimetype().split('/')) + msg['Content-Transfer-Encoding'] = 'binary' + + payload = media_upload.getbytes(0, media_upload.size()) + msg.set_payload(payload) + msgRoot.attach(msg) + # encode the body: note that we can't use `as_string`, because + # it plays games with `From ` lines. + fp = StringIO.StringIO() + g = Generator(fp, mangle_from_=False) + g.flatten(msgRoot, unixfrom=False) + body = fp.getvalue() + + multipart_boundary = msgRoot.get_boundary() + headers['content-type'] = ('multipart/related; ' + 'boundary="%s"') % multipart_boundary + url = _add_query_parameter(url, 'uploadType', 'multipart') + + logger.info('URL being requested: %s %s' % (httpMethod,url)) + return self._requestBuilder(self._http, + model.response, + url, + method=httpMethod, + body=body, + headers=headers, + methodId=methodId, + resumable=resumable) + + docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n'] + if len(parameters.argmap) > 0: + docs.append('Args:\n') + + # Skip undocumented params and params common to all methods. + skip_parameters = rootDesc.get('parameters', {}).keys() + skip_parameters.extend(STACK_QUERY_PARAMETERS) + + all_args = parameters.argmap.keys() + args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])] + + # Move body to the front of the line. 
+ if 'body' in all_args: + args_ordered.append('body') + + for name in all_args: + if name not in args_ordered: + args_ordered.append(name) + + for arg in args_ordered: + if arg in skip_parameters: + continue + + repeated = '' + if arg in parameters.repeated_params: + repeated = ' (repeated)' + required = '' + if arg in parameters.required_params: + required = ' (required)' + paramdesc = methodDesc['parameters'][parameters.argmap[arg]] + paramdoc = paramdesc.get('description', 'A parameter') + if '$ref' in paramdesc: + docs.append( + (' %s: object, %s%s%s\n The object takes the' + ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated, + schema.prettyPrintByName(paramdesc['$ref']))) + else: + paramtype = paramdesc.get('type', 'string') + docs.append(' %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required, + repeated)) + enum = paramdesc.get('enum', []) + enumDesc = paramdesc.get('enumDescriptions', []) + if enum and enumDesc: + docs.append(' Allowed values\n') + for (name, desc) in zip(enum, enumDesc): + docs.append(' %s - %s\n' % (name, desc)) + if 'response' in methodDesc: + if methodName.endswith('_media'): + docs.append('\nReturns:\n The media object as a string.\n\n ') + else: + docs.append('\nReturns:\n An object of the form:\n\n ') + docs.append(schema.prettyPrintSchema(methodDesc['response'])) + + setattr(method, '__doc__', ''.join(docs)) + return (methodName, method) + + +def createNextMethod(methodName): + """Creates any _next methods for attaching to a Resource. + + The _next methods allow for easy iteration through list() responses. + + Args: + methodName: string, name of the method to use. + """ + methodName = fix_method_name(methodName) + + def methodNext(self, previous_request, previous_response): + """Retrieves the next page of results. + +Args: + previous_request: The request for the previous page. (required) + previous_response: The response from the request for the previous page. 
(required) + +Returns: + A request object that you can call 'execute()' on to request the next + page. Returns None if there are no more items in the collection. + """ + # Retrieve nextPageToken from previous_response + # Use as pageToken in previous_request to create new request. + + if 'nextPageToken' not in previous_response: + return None + + request = copy.copy(previous_request) + + pageToken = previous_response['nextPageToken'] + parsed = list(urlparse.urlparse(request.uri)) + q = parse_qsl(parsed[4]) + + # Find and remove old 'pageToken' value from URI + newq = [(key, value) for (key, value) in q if key != 'pageToken'] + newq.append(('pageToken', pageToken)) + parsed[4] = urllib.urlencode(newq) + uri = urlparse.urlunparse(parsed) + + request.uri = uri + + logger.info('URL being requested: %s %s' % (methodName,uri)) + + return request + + return (methodName, methodNext) + + +class Resource(object): + """A class for interacting with a resource.""" + + def __init__(self, http, baseUrl, model, requestBuilder, developerKey, + resourceDesc, rootDesc, schema): + """Build a Resource from the API description. + + Args: + http: httplib2.Http, Object to make http requests with. + baseUrl: string, base URL for the API. All requests are relative to this + URI. + model: googleapiclient.Model, converts to and from the wire format. + requestBuilder: class or callable that instantiates an + googleapiclient.HttpRequest object. + developerKey: string, key obtained from + https://code.google.com/apis/console + resourceDesc: object, section of deserialized discovery document that + describes a resource. Note that the top level discovery document + is considered a resource. + rootDesc: object, the entire deserialized discovery document. + schema: object, mapping of schema names to schema descriptions. 
+ """ + self._dynamic_attrs = [] + + self._http = http + self._baseUrl = baseUrl + self._model = model + self._developerKey = developerKey + self._requestBuilder = requestBuilder + self._resourceDesc = resourceDesc + self._rootDesc = rootDesc + self._schema = schema + + self._set_service_methods() + + def _set_dynamic_attr(self, attr_name, value): + """Sets an instance attribute and tracks it in a list of dynamic attributes. + + Args: + attr_name: string; The name of the attribute to be set + value: The value being set on the object and tracked in the dynamic cache. + """ + self._dynamic_attrs.append(attr_name) + self.__dict__[attr_name] = value + + def __getstate__(self): + """Trim the state down to something that can be pickled. + + Uses the fact that the instance variable _dynamic_attrs holds attrs that + will be wiped and restored on pickle serialization. + """ + state_dict = copy.copy(self.__dict__) + for dynamic_attr in self._dynamic_attrs: + del state_dict[dynamic_attr] + del state_dict['_dynamic_attrs'] + return state_dict + + def __setstate__(self, state): + """Reconstitute the state of the object from being pickled. + + Uses the fact that the instance variable _dynamic_attrs holds attrs that + will be wiped and restored on pickle serialization. 
+ """ + self.__dict__.update(state) + self._dynamic_attrs = [] + self._set_service_methods() + + def _set_service_methods(self): + self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema) + self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema) + self._add_next_methods(self._resourceDesc, self._schema) + + def _add_basic_methods(self, resourceDesc, rootDesc, schema): + # Add basic methods to Resource + if 'methods' in resourceDesc: + for methodName, methodDesc in resourceDesc['methods'].iteritems(): + fixedMethodName, method = createMethod( + methodName, methodDesc, rootDesc, schema) + self._set_dynamic_attr(fixedMethodName, + method.__get__(self, self.__class__)) + # Add in _media methods. The functionality of the attached method will + # change when it sees that the method name ends in _media. + if methodDesc.get('supportsMediaDownload', False): + fixedMethodName, method = createMethod( + methodName + '_media', methodDesc, rootDesc, schema) + self._set_dynamic_attr(fixedMethodName, + method.__get__(self, self.__class__)) + + def _add_nested_resources(self, resourceDesc, rootDesc, schema): + # Add in nested resources + if 'resources' in resourceDesc: + + def createResourceMethod(methodName, methodDesc): + """Create a method on the Resource to access a nested Resource. + + Args: + methodName: string, name of the method to use. + methodDesc: object, fragment of deserialized discovery document that + describes the method. 
+ """ + methodName = fix_method_name(methodName) + + def methodResource(self): + return Resource(http=self._http, baseUrl=self._baseUrl, + model=self._model, developerKey=self._developerKey, + requestBuilder=self._requestBuilder, + resourceDesc=methodDesc, rootDesc=rootDesc, + schema=schema) + + setattr(methodResource, '__doc__', 'A collection resource.') + setattr(methodResource, '__is_resource__', True) + + return (methodName, methodResource) + + for methodName, methodDesc in resourceDesc['resources'].iteritems(): + fixedMethodName, method = createResourceMethod(methodName, methodDesc) + self._set_dynamic_attr(fixedMethodName, + method.__get__(self, self.__class__)) + + def _add_next_methods(self, resourceDesc, schema): + # Add _next() methods + # Look for response bodies in schema that contain nextPageToken, and methods + # that take a pageToken parameter. + if 'methods' in resourceDesc: + for methodName, methodDesc in resourceDesc['methods'].iteritems(): + if 'response' in methodDesc: + responseSchema = methodDesc['response'] + if '$ref' in responseSchema: + responseSchema = schema.get(responseSchema['$ref']) + hasNextPageToken = 'nextPageToken' in responseSchema.get('properties', + {}) + hasPageToken = 'pageToken' in methodDesc.get('parameters', {}) + if hasNextPageToken and hasPageToken: + fixedMethodName, method = createNextMethod(methodName + '_next') + self._set_dynamic_attr(fixedMethodName, + method.__get__(self, self.__class__)) diff --git a/googleapiclient/errors.py b/googleapiclient/errors.py new file mode 100644 index 00000000..f832d627 --- /dev/null +++ b/googleapiclient/errors.py @@ -0,0 +1,140 @@ +#!/usr/bin/python2.4 +# +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Errors for the library.
+
+All exceptions defined by the library
+should be defined in this file.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import json
+
+from oauth2client import util
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class HttpError(Error):
+  """HTTP data was invalid or unexpected."""
+
+  @util.positional(3)
+  def __init__(self, resp, content, uri=None):
+    self.resp = resp
+    self.content = content
+    self.uri = uri
+
+  def _get_reason(self):
+    """Calculate the reason for the error from the response content."""
+    reason = self.resp.reason
+    try:
+      data = json.loads(self.content)
+      reason = data['error']['message']
+    except (ValueError, KeyError):
+      pass
+    if reason is None:
+      reason = ''
+    return reason
+
+  def __repr__(self):
+    if self.uri:
+      return '<HttpError %s when requesting %s returned "%s">' % (
+          self.resp.status, self.uri, self._get_reason().strip())
+    else:
+      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
+
+  __str__ = __repr__
+
+
+class InvalidJsonError(Error):
+  """The JSON returned could not be parsed."""
+  pass
+
+
+class UnknownFileType(Error):
+  """File type unknown or unexpected."""
+  pass
+
+
+class UnknownLinkType(Error):
+  """Link type unknown or unexpected."""
+  pass
+
+
+class UnknownApiNameOrVersion(Error):
+  """No API with that name and version exists."""
+  pass
+
+
+class UnacceptableMimeTypeError(Error):
+  """That is an unacceptable mimetype for this operation."""
+  pass
+
+
+class MediaUploadSizeError(Error):
+  """Media is larger than the method can accept."""
+  pass
+
+
+class 
ResumableUploadError(HttpError):
+  """Error occurred during resumable upload."""
+  pass
+
+
+class InvalidChunkSizeError(Error):
+  """The given chunksize is not valid."""
+  pass
+
+class InvalidNotificationError(Error):
+  """The channel Notification is invalid."""
+  pass
+
+class BatchError(HttpError):
+  """Error occurred during batch operations."""
+
+  @util.positional(2)
+  def __init__(self, reason, resp=None, content=None):
+    self.resp = resp
+    self.content = content
+    self.reason = reason
+
+  def __repr__(self):
+      return '<BatchError %s "%s">' % (self.resp.status, self.reason)
+
+  __str__ = __repr__
+
+
+class UnexpectedMethodError(Error):
+  """Exception raised by RequestMockBuilder on unexpected calls."""
+
+  @util.positional(1)
+  def __init__(self, methodId=None):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedMethodError, self).__init__(
+        'Received unexpected call %s' % methodId)
+
+
+class UnexpectedBodyError(Error):
+  """Exception raised by RequestMockBuilder on unexpected bodies."""
+
+  def __init__(self, expected, provided):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedBodyError, self).__init__(
+        'Expected: [%s] - Provided: [%s]' % (expected, provided))
diff --git a/googleapiclient/http.py b/googleapiclient/http.py
new file mode 100644
index 00000000..3959d813
--- /dev/null
+++ b/googleapiclient/http.py
@@ -0,0 +1,1614 @@
+# Copyright 2014 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +"""Classes to encapsulate a single HTTP request. + +The classes implement a command pattern, with every +object supporting an execute() method that does the +actuall HTTP request. +""" + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +import StringIO +import base64 +import copy +import gzip +import httplib2 +import json +import logging +import mimeparse +import mimetypes +import os +import random +import sys +import time +import urllib +import urlparse +import uuid + +from email.generator import Generator +from email.mime.multipart import MIMEMultipart +from email.mime.nonmultipart import MIMENonMultipart +from email.parser import FeedParser +from errors import BatchError +from errors import HttpError +from errors import InvalidChunkSizeError +from errors import ResumableUploadError +from errors import UnexpectedBodyError +from errors import UnexpectedMethodError +from model import JsonModel +from oauth2client import util + + +DEFAULT_CHUNK_SIZE = 512*1024 + +MAX_URI_LENGTH = 2048 + + +class MediaUploadProgress(object): + """Status of a resumable upload.""" + + def __init__(self, resumable_progress, total_size): + """Constructor. + + Args: + resumable_progress: int, bytes sent so far. + total_size: int, total bytes in complete upload, or None if the total + upload size isn't known ahead of time. + """ + self.resumable_progress = resumable_progress + self.total_size = total_size + + def progress(self): + """Percent of upload completed, as a float. + + Returns: + the percentage complete as a float, returning 0.0 if the total size of + the upload is unknown. + """ + if self.total_size is not None: + return float(self.resumable_progress) / float(self.total_size) + else: + return 0.0 + + +class MediaDownloadProgress(object): + """Status of a resumable download.""" + + def __init__(self, resumable_progress, total_size): + """Constructor. + + Args: + resumable_progress: int, bytes received so far. + total_size: int, total bytes in complete download. 
+ """ + self.resumable_progress = resumable_progress + self.total_size = total_size + + def progress(self): + """Percent of download completed, as a float. + + Returns: + the percentage complete as a float, returning 0.0 if the total size of + the download is unknown. + """ + if self.total_size is not None: + return float(self.resumable_progress) / float(self.total_size) + else: + return 0.0 + + +class MediaUpload(object): + """Describes a media object to upload. + + Base class that defines the interface of MediaUpload subclasses. + + Note that subclasses of MediaUpload may allow you to control the chunksize + when uploading a media object. It is important to keep the size of the chunk + as large as possible to keep the upload efficient. Other factors may influence + the size of the chunk you use, particularly if you are working in an + environment where individual HTTP requests may have a hardcoded time limit, + such as under certain classes of requests under Google App Engine. + + Streams are io.Base compatible objects that support seek(). Some MediaUpload + subclasses support using streams directly to upload data. Support for + streaming may be indicated by a MediaUpload sub-class and if appropriate for a + platform that stream will be used for uploading the media object. The support + for streaming is indicated by has_stream() returning True. The stream() method + should return an io.Base object that supports seek(). On platforms where the + underlying httplib module supports streaming, for example Python 2.6 and + later, the stream will be passed into the http library which will result in + less memory being used and possibly faster uploads. + + If you need to upload media that can't be uploaded using any of the existing + MediaUpload sub-class then you can sub-class MediaUpload for your particular + needs. + """ + + def chunksize(self): + """Chunk size for resumable uploads. + + Returns: + Chunk size in bytes. 
+ """ + raise NotImplementedError() + + def mimetype(self): + """Mime type of the body. + + Returns: + Mime type. + """ + return 'application/octet-stream' + + def size(self): + """Size of upload. + + Returns: + Size of the body, or None of the size is unknown. + """ + return None + + def resumable(self): + """Whether this upload is resumable. + + Returns: + True if resumable upload or False. + """ + return False + + def getbytes(self, begin, end): + """Get bytes from the media. + + Args: + begin: int, offset from beginning of file. + length: int, number of bytes to read, starting at begin. + + Returns: + A string of bytes read. May be shorter than length if EOF was reached + first. + """ + raise NotImplementedError() + + def has_stream(self): + """Does the underlying upload support a streaming interface. + + Streaming means it is an io.IOBase subclass that supports seek, i.e. + seekable() returns True. + + Returns: + True if the call to stream() will return an instance of a seekable io.Base + subclass. + """ + return False + + def stream(self): + """A stream interface to the data being uploaded. + + Returns: + The returned value is an io.IOBase subclass that supports seek, i.e. + seekable() returns True. + """ + raise NotImplementedError() + + @util.positional(1) + def _to_json(self, strip=None): + """Utility function for creating a JSON representation of a MediaUpload. + + Args: + strip: array, An array of names of members to not include in the JSON. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + t = type(self) + d = copy.copy(self.__dict__) + if strip is not None: + for member in strip: + del d[member] + d['_class'] = t.__name__ + d['_module'] = t.__module__ + return json.dumps(d) + + def to_json(self): + """Create a JSON representation of an instance of MediaUpload. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). 
+ """ + return self._to_json() + + @classmethod + def new_from_json(cls, s): + """Utility class method to instantiate a MediaUpload subclass from a JSON + representation produced by to_json(). + + Args: + s: string, JSON from to_json(). + + Returns: + An instance of the subclass of MediaUpload that was serialized with + to_json(). + """ + data = json.loads(s) + # Find and call the right classmethod from_json() to restore the object. + module = data['_module'] + m = __import__(module, fromlist=module.split('.')[:-1]) + kls = getattr(m, data['_class']) + from_json = getattr(kls, 'from_json') + return from_json(s) + + +class MediaIoBaseUpload(MediaUpload): + """A MediaUpload for a io.Base objects. + + Note that the Python file object is compatible with io.Base and can be used + with this class also. + + fh = io.BytesIO('...Some data to upload...') + media = MediaIoBaseUpload(fh, mimetype='image/png', + chunksize=1024*1024, resumable=True) + farm.animals().insert( + id='cow', + name='cow.png', + media_body=media).execute() + + Depending on the platform you are working on, you may pass -1 as the + chunksize, which indicates that the entire file should be uploaded in a single + request. If the underlying platform supports streams, such as Python 2.6 or + later, then this can be very efficient as it avoids multiple connections, and + also avoids loading the entire file into memory before sending it. Note that + Google App Engine has a 5MB limit on request size, so you should never set + your chunksize larger than 5MB, or to -1. + """ + + @util.positional(3) + def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, + resumable=False): + """Constructor. + + Args: + fd: io.Base or file object, The source of the bytes to upload. MUST be + opened in blocking mode, do not use streams opened in non-blocking mode. + The given stream must be seekable, that is, it must be able to call + seek() on fd. + mimetype: string, Mime-type of the file. 
+ chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. Pass in a value of -1 if the file is to be + uploaded as a single chunk. Note that Google App Engine has a 5MB limit + on request size, so you should never set your chunksize larger than 5MB, + or to -1. + resumable: bool, True if this is a resumable upload. False means upload + in a single request. + """ + super(MediaIoBaseUpload, self).__init__() + self._fd = fd + self._mimetype = mimetype + if not (chunksize == -1 or chunksize > 0): + raise InvalidChunkSizeError() + self._chunksize = chunksize + self._resumable = resumable + + self._fd.seek(0, os.SEEK_END) + self._size = self._fd.tell() + + def chunksize(self): + """Chunk size for resumable uploads. + + Returns: + Chunk size in bytes. + """ + return self._chunksize + + def mimetype(self): + """Mime type of the body. + + Returns: + Mime type. + """ + return self._mimetype + + def size(self): + """Size of upload. + + Returns: + Size of the body, or None of the size is unknown. + """ + return self._size + + def resumable(self): + """Whether this upload is resumable. + + Returns: + True if resumable upload or False. + """ + return self._resumable + + def getbytes(self, begin, length): + """Get bytes from the media. + + Args: + begin: int, offset from beginning of file. + length: int, number of bytes to read, starting at begin. + + Returns: + A string of bytes read. May be shorted than length if EOF was reached + first. + """ + self._fd.seek(begin) + return self._fd.read(length) + + def has_stream(self): + """Does the underlying upload support a streaming interface. + + Streaming means it is an io.IOBase subclass that supports seek, i.e. + seekable() returns True. + + Returns: + True if the call to stream() will return an instance of a seekable io.Base + subclass. + """ + return True + + def stream(self): + """A stream interface to the data being uploaded. 
+ + Returns: + The returned value is an io.IOBase subclass that supports seek, i.e. + seekable() returns True. + """ + return self._fd + + def to_json(self): + """This upload type is not serializable.""" + raise NotImplementedError('MediaIoBaseUpload is not serializable.') + + +class MediaFileUpload(MediaIoBaseUpload): + """A MediaUpload for a file. + + Construct a MediaFileUpload and pass as the media_body parameter of the + method. For example, if we had a service that allowed uploading images: + + + media = MediaFileUpload('cow.png', mimetype='image/png', + chunksize=1024*1024, resumable=True) + farm.animals().insert( + id='cow', + name='cow.png', + media_body=media).execute() + + Depending on the platform you are working on, you may pass -1 as the + chunksize, which indicates that the entire file should be uploaded in a single + request. If the underlying platform supports streams, such as Python 2.6 or + later, then this can be very efficient as it avoids multiple connections, and + also avoids loading the entire file into memory before sending it. Note that + Google App Engine has a 5MB limit on request size, so you should never set + your chunksize larger than 5MB, or to -1. + """ + + @util.positional(2) + def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, + resumable=False): + """Constructor. + + Args: + filename: string, Name of the file. + mimetype: string, Mime-type of the file. If None then a mime-type will be + guessed from the file extension. + chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. Pass in a value of -1 if the file is to be + uploaded in a single chunk. Note that Google App Engine has a 5MB limit + on request size, so you should never set your chunksize larger than 5MB, + or to -1. + resumable: bool, True if this is a resumable upload. False means upload + in a single request. 
+ """ + self._filename = filename + fd = open(self._filename, 'rb') + if mimetype is None: + (mimetype, encoding) = mimetypes.guess_type(filename) + super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize, + resumable=resumable) + + def to_json(self): + """Creating a JSON representation of an instance of MediaFileUpload. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + return self._to_json(strip=['_fd']) + + @staticmethod + def from_json(s): + d = json.loads(s) + return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'], + chunksize=d['_chunksize'], resumable=d['_resumable']) + + +class MediaInMemoryUpload(MediaIoBaseUpload): + """MediaUpload for a chunk of bytes. + + DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for + the stream. + """ + + @util.positional(2) + def __init__(self, body, mimetype='application/octet-stream', + chunksize=DEFAULT_CHUNK_SIZE, resumable=False): + """Create a new MediaInMemoryUpload. + + DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for + the stream. + + Args: + body: string, Bytes of body content. + mimetype: string, Mime-type of the file or default of + 'application/octet-stream'. + chunksize: int, File will be uploaded in chunks of this many bytes. Only + used if resumable=True. + resumable: bool, True if this is a resumable upload. False means upload + in a single request. + """ + fd = StringIO.StringIO(body) + super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize, + resumable=resumable) + + +class MediaIoBaseDownload(object): + """"Download media resources. + + Note that the Python file object is compatible with io.Base and can be used + with this class also. 
+ + + Example: + request = farms.animals().get_media(id='cow') + fh = io.FileIO('cow.png', mode='wb') + downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) + + done = False + while done is False: + status, done = downloader.next_chunk() + if status: + print "Download %d%%." % int(status.progress() * 100) + print "Download Complete!" + """ + + @util.positional(3) + def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE): + """Constructor. + + Args: + fd: io.Base or file object, The stream in which to write the downloaded + bytes. + request: googleapiclient.http.HttpRequest, the media request to perform in + chunks. + chunksize: int, File will be downloaded in chunks of this many bytes. + """ + self._fd = fd + self._request = request + self._uri = request.uri + self._chunksize = chunksize + self._progress = 0 + self._total_size = None + self._done = False + + # Stubs for testing. + self._sleep = time.sleep + self._rand = random.random + + @util.positional(1) + def next_chunk(self, num_retries=0): + """Get the next chunk of the download. + + Args: + num_retries: Integer, number of times to retry 500's with randomized + exponential backoff. If all retries fail, the raised HttpError + represents the last request. If zero (default), we attempt the + request only once. + + Returns: + (status, done): (MediaDownloadStatus, boolean) + The value of 'done' will be True when the media has been fully + downloaded. + + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx. + httplib2.HttpLib2Error if a transport error has occured. 
+ """ + headers = { + 'range': 'bytes=%d-%d' % ( + self._progress, self._progress + self._chunksize) + } + http = self._request.http + + for retry_num in xrange(num_retries + 1): + if retry_num > 0: + self._sleep(self._rand() * 2**retry_num) + logging.warning( + 'Retry #%d for media download: GET %s, following status: %d' + % (retry_num, self._uri, resp.status)) + + resp, content = http.request(self._uri, headers=headers) + if resp.status < 500: + break + + if resp.status in [200, 206]: + if 'content-location' in resp and resp['content-location'] != self._uri: + self._uri = resp['content-location'] + self._progress += len(content) + self._fd.write(content) + + if 'content-range' in resp: + content_range = resp['content-range'] + length = content_range.rsplit('/', 1)[1] + self._total_size = int(length) + + if self._progress == self._total_size: + self._done = True + return MediaDownloadProgress(self._progress, self._total_size), self._done + else: + raise HttpError(resp, content, uri=self._uri) + + +class _StreamSlice(object): + """Truncated stream. + + Takes a stream and presents a stream that is a slice of the original stream. + This is used when uploading media in chunks. In later versions of Python a + stream can be passed to httplib in place of the string of data to send. The + problem is that httplib just blindly reads to the end of the stream. This + wrapper presents a virtual stream that only reads to the end of the chunk. + """ + + def __init__(self, stream, begin, chunksize): + """Constructor. + + Args: + stream: (io.Base, file object), the stream to wrap. + begin: int, the seek position the chunk begins at. + chunksize: int, the size of the chunk. + """ + self._stream = stream + self._begin = begin + self._chunksize = chunksize + self._stream.seek(begin) + + def read(self, n=-1): + """Read n bytes. + + Args: + n, int, the number of bytes to read. + + Returns: + A string of length 'n', or less if EOF is reached. 
+ """ + # The data left available to read sits in [cur, end) + cur = self._stream.tell() + end = self._begin + self._chunksize + if n == -1 or cur + n > end: + n = end - cur + return self._stream.read(n) + + +class HttpRequest(object): + """Encapsulates a single HTTP request.""" + + @util.positional(4) + def __init__(self, http, postproc, uri, + method='GET', + body=None, + headers=None, + methodId=None, + resumable=None): + """Constructor for an HttpRequest. + + Args: + http: httplib2.Http, the transport object to use to make a request + postproc: callable, called on the HTTP response and content to transform + it into a data object before returning, or raising an exception + on an error. + uri: string, the absolute URI to send the request to + method: string, the HTTP method to use + body: string, the request body of the HTTP request, + headers: dict, the HTTP request headers + methodId: string, a unique identifier for the API method being called. + resumable: MediaUpload, None if this is not a resumbale request. + """ + self.uri = uri + self.method = method + self.body = body + self.headers = headers or {} + self.methodId = methodId + self.http = http + self.postproc = postproc + self.resumable = resumable + self.response_callbacks = [] + self._in_error_state = False + + # Pull the multipart boundary out of the content-type header. + major, minor, params = mimeparse.parse_mime_type( + headers.get('content-type', 'application/json')) + + # The size of the non-media part of the request. + self.body_size = len(self.body or '') + + # The resumable URI to send chunks to. + self.resumable_uri = None + + # The bytes that have been uploaded. + self.resumable_progress = 0 + + # Stubs for testing. + self._rand = random.random + self._sleep = time.sleep + + @util.positional(1) + def execute(self, http=None, num_retries=0): + """Execute the request. 
+ + Args: + http: httplib2.Http, an http object to be used in place of the + one the HttpRequest request object was constructed with. + num_retries: Integer, number of times to retry 500's with randomized + exponential backoff. If all retries fail, the raised HttpError + represents the last request. If zero (default), we attempt the + request only once. + + Returns: + A deserialized object model of the response body as determined + by the postproc. + + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx. + httplib2.HttpLib2Error if a transport error has occured. + """ + if http is None: + http = self.http + + if self.resumable: + body = None + while body is None: + _, body = self.next_chunk(http=http, num_retries=num_retries) + return body + + # Non-resumable case. + + if 'content-length' not in self.headers: + self.headers['content-length'] = str(self.body_size) + # If the request URI is too long then turn it into a POST request. + if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET': + self.method = 'POST' + self.headers['x-http-method-override'] = 'GET' + self.headers['content-type'] = 'application/x-www-form-urlencoded' + parsed = urlparse.urlparse(self.uri) + self.uri = urlparse.urlunparse( + (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, + None) + ) + self.body = parsed.query + self.headers['content-length'] = str(len(self.body)) + + # Handle retries for server-side errors. 
+ for retry_num in xrange(num_retries + 1): + if retry_num > 0: + self._sleep(self._rand() * 2**retry_num) + logging.warning('Retry #%d for request: %s %s, following status: %d' + % (retry_num, self.method, self.uri, resp.status)) + + resp, content = http.request(str(self.uri), method=str(self.method), + body=self.body, headers=self.headers) + if resp.status < 500: + break + + for callback in self.response_callbacks: + callback(resp) + if resp.status >= 300: + raise HttpError(resp, content, uri=self.uri) + return self.postproc(resp, content) + + @util.positional(2) + def add_response_callback(self, cb): + """add_response_headers_callback + + Args: + cb: Callback to be called on receiving the response headers, of signature: + + def cb(resp): + # Where resp is an instance of httplib2.Response + """ + self.response_callbacks.append(cb) + + @util.positional(1) + def next_chunk(self, http=None, num_retries=0): + """Execute the next step of a resumable upload. + + Can only be used if the method being executed supports media uploads and + the MediaUpload object passed in was flagged as using resumable upload. + + Example: + + media = MediaFileUpload('cow.png', mimetype='image/png', + chunksize=1000, resumable=True) + request = farm.animals().insert( + id='cow', + name='cow.png', + media_body=media) + + response = None + while response is None: + status, response = request.next_chunk() + if status: + print "Upload %d%% complete." % int(status.progress() * 100) + + + Args: + http: httplib2.Http, an http object to be used in place of the + one the HttpRequest request object was constructed with. + num_retries: Integer, number of times to retry 500's with randomized + exponential backoff. If all retries fail, the raised HttpError + represents the last request. If zero (default), we attempt the + request only once. + + Returns: + (status, body): (ResumableMediaStatus, object) + The body will be None until the resumable media is fully uploaded. 
+ + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx. + httplib2.HttpLib2Error if a transport error has occured. + """ + if http is None: + http = self.http + + if self.resumable.size() is None: + size = '*' + else: + size = str(self.resumable.size()) + + if self.resumable_uri is None: + start_headers = copy.copy(self.headers) + start_headers['X-Upload-Content-Type'] = self.resumable.mimetype() + if size != '*': + start_headers['X-Upload-Content-Length'] = size + start_headers['content-length'] = str(self.body_size) + + for retry_num in xrange(num_retries + 1): + if retry_num > 0: + self._sleep(self._rand() * 2**retry_num) + logging.warning( + 'Retry #%d for resumable URI request: %s %s, following status: %d' + % (retry_num, self.method, self.uri, resp.status)) + + resp, content = http.request(self.uri, method=self.method, + body=self.body, + headers=start_headers) + if resp.status < 500: + break + + if resp.status == 200 and 'location' in resp: + self.resumable_uri = resp['location'] + else: + raise ResumableUploadError(resp, content) + elif self._in_error_state: + # If we are in an error state then query the server for current state of + # the upload by sending an empty PUT and reading the 'range' header in + # the response. + headers = { + 'Content-Range': 'bytes */%s' % size, + 'content-length': '0' + } + resp, content = http.request(self.resumable_uri, 'PUT', + headers=headers) + status, body = self._process_response(resp, content) + if body: + # The upload was complete. + return (status, body) + + # The httplib.request method can take streams for the body parameter, but + # only in Python 2.6 or later. If a stream is available under those + # conditions then use it as the body argument. 
+ if self.resumable.has_stream() and sys.version_info[1] >= 6: + data = self.resumable.stream() + if self.resumable.chunksize() == -1: + data.seek(self.resumable_progress) + chunk_end = self.resumable.size() - self.resumable_progress - 1 + else: + # Doing chunking with a stream, so wrap a slice of the stream. + data = _StreamSlice(data, self.resumable_progress, + self.resumable.chunksize()) + chunk_end = min( + self.resumable_progress + self.resumable.chunksize() - 1, + self.resumable.size() - 1) + else: + data = self.resumable.getbytes( + self.resumable_progress, self.resumable.chunksize()) + + # A short read implies that we are at EOF, so finish the upload. + if len(data) < self.resumable.chunksize(): + size = str(self.resumable_progress + len(data)) + + chunk_end = self.resumable_progress + len(data) - 1 + + headers = { + 'Content-Range': 'bytes %d-%d/%s' % ( + self.resumable_progress, chunk_end, size), + # Must set the content-length header here because httplib can't + # calculate the size when working with _StreamSlice. + 'Content-Length': str(chunk_end - self.resumable_progress + 1) + } + + for retry_num in xrange(num_retries + 1): + if retry_num > 0: + self._sleep(self._rand() * 2**retry_num) + logging.warning( + 'Retry #%d for media upload: %s %s, following status: %d' + % (retry_num, self.method, self.uri, resp.status)) + + try: + resp, content = http.request(self.resumable_uri, method='PUT', + body=data, + headers=headers) + except: + self._in_error_state = True + raise + if resp.status < 500: + break + + return self._process_response(resp, content) + + def _process_response(self, resp, content): + """Process the response from a single chunk upload. + + Args: + resp: httplib2.Response, the response object. + content: string, the content of the response. + + Returns: + (status, body): (ResumableMediaStatus, object) + The body will be None until the resumable media is fully uploaded. 
+ + Raises: + googleapiclient.errors.HttpError if the response was not a 2xx or a 308. + """ + if resp.status in [200, 201]: + self._in_error_state = False + return None, self.postproc(resp, content) + elif resp.status == 308: + self._in_error_state = False + # A "308 Resume Incomplete" indicates we are not done. + self.resumable_progress = int(resp['range'].split('-')[1]) + 1 + if 'location' in resp: + self.resumable_uri = resp['location'] + else: + self._in_error_state = True + raise HttpError(resp, content, uri=self.uri) + + return (MediaUploadProgress(self.resumable_progress, self.resumable.size()), + None) + + def to_json(self): + """Returns a JSON representation of the HttpRequest.""" + d = copy.copy(self.__dict__) + if d['resumable'] is not None: + d['resumable'] = self.resumable.to_json() + del d['http'] + del d['postproc'] + del d['_sleep'] + del d['_rand'] + + return json.dumps(d) + + @staticmethod + def from_json(s, http, postproc): + """Returns an HttpRequest populated with info from a JSON object.""" + d = json.loads(s) + if d['resumable'] is not None: + d['resumable'] = MediaUpload.new_from_json(d['resumable']) + return HttpRequest( + http, + postproc, + uri=d['uri'], + method=d['method'], + body=d['body'], + headers=d['headers'], + methodId=d['methodId'], + resumable=d['resumable']) + + +class BatchHttpRequest(object): + """Batches multiple HttpRequest objects into a single HTTP request. + + Example: + from googleapiclient.http import BatchHttpRequest + + def list_animals(request_id, response, exception): + \"\"\"Do something with the animals list response.\"\"\" + if exception is not None: + # Do something with the exception. + pass + else: + # Do something with the response. + pass + + def list_farmers(request_id, response, exception): + \"\"\"Do something with the farmers list response.\"\"\" + if exception is not None: + # Do something with the exception. + pass + else: + # Do something with the response. 
+ pass + + service = build('farm', 'v2') + + batch = BatchHttpRequest() + + batch.add(service.animals().list(), list_animals) + batch.add(service.farmers().list(), list_farmers) + batch.execute(http=http) + """ + + @util.positional(1) + def __init__(self, callback=None, batch_uri=None): + """Constructor for a BatchHttpRequest. + + Args: + callback: callable, A callback to be called for each response, of the + form callback(id, response, exception). The first parameter is the + request id, and the second is the deserialized response object. The + third is an googleapiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no error occurred. + batch_uri: string, URI to send batch requests to. + """ + if batch_uri is None: + batch_uri = 'https://www.googleapis.com/batch' + self._batch_uri = batch_uri + + # Global callback to be called for each individual response in the batch. + self._callback = callback + + # A map from id to request. + self._requests = {} + + # A map from id to callback. + self._callbacks = {} + + # List of request ids, in the order in which they were added. + self._order = [] + + # The last auto generated id. + self._last_auto_id = 0 + + # Unique ID on which to base the Content-ID headers. + self._base_id = None + + # A map from request id to (httplib2.Response, content) response pairs + self._responses = {} + + # A map of id(Credentials) that have been refreshed. + self._refreshed_credentials = {} + + def _refresh_and_apply_credentials(self, request, http): + """Refresh the credentials and apply to the request. + + Args: + request: HttpRequest, the request. + http: httplib2.Http, the global http object for the batch. 
+ """ + # For the credentials to refresh, but only once per refresh_token + # If there is no http per the request then refresh the http passed in + # via execute() + creds = None + if request.http is not None and hasattr(request.http.request, + 'credentials'): + creds = request.http.request.credentials + elif http is not None and hasattr(http.request, 'credentials'): + creds = http.request.credentials + if creds is not None: + if id(creds) not in self._refreshed_credentials: + creds.refresh(http) + self._refreshed_credentials[id(creds)] = 1 + + # Only apply the credentials if we are using the http object passed in, + # otherwise apply() will get called during _serialize_request(). + if request.http is None or not hasattr(request.http.request, + 'credentials'): + creds.apply(request.headers) + + def _id_to_header(self, id_): + """Convert an id to a Content-ID header value. + + Args: + id_: string, identifier of individual request. + + Returns: + A Content-ID header with the id_ encoded into it. A UUID is prepended to + the value because Content-ID headers are supposed to be universally + unique. + """ + if self._base_id is None: + self._base_id = uuid.uuid4() + + return '<%s+%s>' % (self._base_id, urllib.quote(id_)) + + def _header_to_id(self, header): + """Convert a Content-ID header value to an id. + + Presumes the Content-ID header conforms to the format that _id_to_header() + returns. + + Args: + header: string, Content-ID header value. + + Returns: + The extracted id value. + + Raises: + BatchError if the header is not in the expected format. + """ + if header[0] != '<' or header[-1] != '>': + raise BatchError("Invalid value for Content-ID: %s" % header) + if '+' not in header: + raise BatchError("Invalid value for Content-ID: %s" % header) + base, id_ = header[1:-1].rsplit('+', 1) + + return urllib.unquote(id_) + + def _serialize_request(self, request): + """Convert an HttpRequest object into a string. 
+ + Args: + request: HttpRequest, the request to serialize. + + Returns: + The request as a string in application/http format. + """ + # Construct status line + parsed = urlparse.urlparse(request.uri) + request_line = urlparse.urlunparse( + (None, None, parsed.path, parsed.params, parsed.query, None) + ) + status_line = request.method + ' ' + request_line + ' HTTP/1.1\n' + major, minor = request.headers.get('content-type', 'application/json').split('/') + msg = MIMENonMultipart(major, minor) + headers = request.headers.copy() + + if request.http is not None and hasattr(request.http.request, + 'credentials'): + request.http.request.credentials.apply(headers) + + # MIMENonMultipart adds its own Content-Type header. + if 'content-type' in headers: + del headers['content-type'] + + for key, value in headers.iteritems(): + msg[key] = value + msg['Host'] = parsed.netloc + msg.set_unixfrom(None) + + if request.body is not None: + msg.set_payload(request.body) + msg['content-length'] = str(len(request.body)) + + # Serialize the mime message. + fp = StringIO.StringIO() + # maxheaderlen=0 means don't line wrap headers. + g = Generator(fp, maxheaderlen=0) + g.flatten(msg, unixfrom=False) + body = fp.getvalue() + + # Strip off the \n\n that the MIME lib tacks onto the end of the payload. + if request.body is None: + body = body[:-2] + + return status_line.encode('utf-8') + body + + def _deserialize_response(self, payload): + """Convert string into httplib2 response and content. + + Args: + payload: string, headers and body as a string. + + Returns: + A pair (resp, content), such as would be returned from httplib2.request. + """ + # Strip off the status line + status_line, payload = payload.split('\n', 1) + protocol, status, reason = status_line.split(' ', 2) + + # Parse the rest of the response + parser = FeedParser() + parser.feed(payload) + msg = parser.close() + msg['status'] = status + + # Create httplib2.Response from the parsed headers. 
+ resp = httplib2.Response(msg) + resp.reason = reason + resp.version = int(protocol.split('/', 1)[1].replace('.', '')) + + content = payload.split('\r\n\r\n', 1)[1] + + return resp, content + + def _new_id(self): + """Create a new id. + + Auto incrementing number that avoids conflicts with ids already used. + + Returns: + string, a new unique id. + """ + self._last_auto_id += 1 + while str(self._last_auto_id) in self._requests: + self._last_auto_id += 1 + return str(self._last_auto_id) + + @util.positional(2) + def add(self, request, callback=None, request_id=None): + """Add a new request. + + Every callback added will be paired with a unique id, the request_id. That + unique id will be passed back to the callback when the response comes back + from the server. The default behavior is to have the library generate it's + own unique id. If the caller passes in a request_id then they must ensure + uniqueness for each request_id, and if they are not an exception is + raised. Callers should either supply all request_ids or nevery supply a + request id, to avoid such an error. + + Args: + request: HttpRequest, Request to add to the batch. + callback: callable, A callback to be called for this response, of the + form callback(id, response, exception). The first parameter is the + request id, and the second is the deserialized response object. The + third is an googleapiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no errors occurred. + request_id: string, A unique id for the request. The id will be passed to + the callback with the response. + + Returns: + None + + Raises: + BatchError if a media request is added to a batch. + KeyError is the request_id is not unique. 
+ """ + if request_id is None: + request_id = self._new_id() + if request.resumable is not None: + raise BatchError("Media requests cannot be used in a batch request.") + if request_id in self._requests: + raise KeyError("A request with this ID already exists: %s" % request_id) + self._requests[request_id] = request + self._callbacks[request_id] = callback + self._order.append(request_id) + + def _execute(self, http, order, requests): + """Serialize batch request, send to server, process response. + + Args: + http: httplib2.Http, an http object to be used to make the request with. + order: list, list of request ids in the order they were added to the + batch. + request: list, list of request objects to send. + + Raises: + httplib2.HttpLib2Error if a transport error has occured. + googleapiclient.errors.BatchError if the response is the wrong format. + """ + message = MIMEMultipart('mixed') + # Message should not write out it's own headers. + setattr(message, '_write_headers', lambda self: None) + + # Add all the individual requests. + for request_id in order: + request = requests[request_id] + + msg = MIMENonMultipart('application', 'http') + msg['Content-Transfer-Encoding'] = 'binary' + msg['Content-ID'] = self._id_to_header(request_id) + + body = self._serialize_request(request) + msg.set_payload(body) + message.attach(msg) + + # encode the body: note that we can't use `as_string`, because + # it plays games with `From ` lines. + fp = StringIO.StringIO() + g = Generator(fp, mangle_from_=False) + g.flatten(message, unixfrom=False) + body = fp.getvalue() + + headers = {} + headers['content-type'] = ('multipart/mixed; ' + 'boundary="%s"') % message.get_boundary() + + resp, content = http.request(self._batch_uri, method='POST', body=body, + headers=headers) + + if resp.status >= 300: + raise HttpError(resp, content, uri=self._batch_uri) + + # Now break out the individual responses and store each one. 
+ boundary, _ = content.split(None, 1) + + # Prepend with a content-type header so FeedParser can handle it. + header = 'content-type: %s\r\n\r\n' % resp['content-type'] + for_parser = header + content + + parser = FeedParser() + parser.feed(for_parser) + mime_response = parser.close() + + if not mime_response.is_multipart(): + raise BatchError("Response not in multipart/mixed format.", resp=resp, + content=content) + + for part in mime_response.get_payload(): + request_id = self._header_to_id(part['Content-ID']) + response, content = self._deserialize_response(part.get_payload()) + self._responses[request_id] = (response, content) + + @util.positional(1) + def execute(self, http=None): + """Execute all the requests as a single batched HTTP request. + + Args: + http: httplib2.Http, an http object to be used in place of the one the + HttpRequest request object was constructed with. If one isn't supplied + then use a http object from the requests in this batch. + + Returns: + None + + Raises: + httplib2.HttpLib2Error if a transport error has occured. + googleapiclient.errors.BatchError if the response is the wrong format. + """ + + # If http is not supplied use the first valid one given in the requests. + if http is None: + for request_id in self._order: + request = self._requests[request_id] + if request is not None: + http = request.http + break + + if http is None: + raise ValueError("Missing a valid http object.") + + self._execute(http, self._order, self._requests) + + # Loop over all the requests and check for 401s. For each 401 request the + # credentials should be refreshed and then sent again in a separate batch. 
+ redo_requests = {} + redo_order = [] + + for request_id in self._order: + resp, content = self._responses[request_id] + if resp['status'] == '401': + redo_order.append(request_id) + request = self._requests[request_id] + self._refresh_and_apply_credentials(request, http) + redo_requests[request_id] = request + + if redo_requests: + self._execute(http, redo_order, redo_requests) + + # Now process all callbacks that are erroring, and raise an exception for + # ones that return a non-2xx response? Or add extra parameter to callback + # that contains an HttpError? + + for request_id in self._order: + resp, content = self._responses[request_id] + + request = self._requests[request_id] + callback = self._callbacks[request_id] + + response = None + exception = None + try: + if resp.status >= 300: + raise HttpError(resp, content, uri=request.uri) + response = request.postproc(resp, content) + except HttpError, e: + exception = e + + if callback is not None: + callback(request_id, response, exception) + if self._callback is not None: + self._callback(request_id, response, exception) + + +class HttpRequestMock(object): + """Mock of HttpRequest. + + Do not construct directly, instead use RequestMockBuilder. + """ + + def __init__(self, resp, content, postproc): + """Constructor for HttpRequestMock + + Args: + resp: httplib2.Response, the response to emulate coming from the request + content: string, the response body + postproc: callable, the post processing function usually supplied by + the model class. See model.JsonModel.response() as an example. + """ + self.resp = resp + self.content = content + self.postproc = postproc + if resp is None: + self.resp = httplib2.Response({'status': 200, 'reason': 'OK'}) + if 'reason' in self.resp: + self.resp.reason = self.resp['reason'] + + def execute(self, http=None): + """Execute the request. + + Same behavior as HttpRequest.execute(), but the response is + mocked and not really from an HTTP request/response. 
+ """ + return self.postproc(self.resp, self.content) + + +class RequestMockBuilder(object): + """A simple mock of HttpRequest + + Pass in a dictionary to the constructor that maps request methodIds to + tuples of (httplib2.Response, content, opt_expected_body) that should be + returned when that method is called. None may also be passed in for the + httplib2.Response, in which case a 200 OK response will be generated. + If an opt_expected_body (str or dict) is provided, it will be compared to + the body and UnexpectedBodyError will be raised on inequality. + + Example: + response = '{"data": {"id": "tag:google.c...' + requestBuilder = RequestMockBuilder( + { + 'plus.activities.get': (None, response), + } + ) + googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder) + + Methods that you do not supply a response for will return a + 200 OK with an empty string as the response content or raise an excpetion + if check_unexpected is set to True. The methodId is taken from the rpcName + in the discovery document. + + For more details see the project wiki. + """ + + def __init__(self, responses, check_unexpected=False): + """Constructor for RequestMockBuilder + + The constructed object should be a callable object + that can replace the class HttpResponse. + + responses - A dictionary that maps methodIds into tuples + of (httplib2.Response, content). The methodId + comes from the 'rpcName' field in the discovery + document. + check_unexpected - A boolean setting whether or not UnexpectedMethodError + should be raised on unsupplied method. + """ + self.responses = responses + self.check_unexpected = check_unexpected + + def __call__(self, http, postproc, uri, method='GET', body=None, + headers=None, methodId=None, resumable=None): + """Implements the callable interface that discovery.build() expects + of requestBuilder, which is to build an object compatible with + HttpRequest.execute(). 
See that method for the description of the + parameters and the expected response. + """ + if methodId in self.responses: + response = self.responses[methodId] + resp, content = response[:2] + if len(response) > 2: + # Test the body against the supplied expected_body. + expected_body = response[2] + if bool(expected_body) != bool(body): + # Not expecting a body and provided one + # or expecting a body and not provided one. + raise UnexpectedBodyError(expected_body, body) + if isinstance(expected_body, str): + expected_body = json.loads(expected_body) + body = json.loads(body) + if body != expected_body: + raise UnexpectedBodyError(expected_body, body) + return HttpRequestMock(resp, content, postproc) + elif self.check_unexpected: + raise UnexpectedMethodError(methodId=methodId) + else: + model = JsonModel(False) + return HttpRequestMock(None, '{}', model.response) + + +class HttpMock(object): + """Mock of httplib2.Http""" + + def __init__(self, filename=None, headers=None): + """ + Args: + filename: string, absolute filename to read response from + headers: dict, header to return with response + """ + if headers is None: + headers = {'status': '200 OK'} + if filename: + f = file(filename, 'r') + self.data = f.read() + f.close() + else: + self.data = None + self.response_headers = headers + self.headers = None + self.uri = None + self.method = None + self.body = None + self.headers = None + + + def request(self, uri, + method='GET', + body=None, + headers=None, + redirections=1, + connection_type=None): + self.uri = uri + self.method = method + self.body = body + self.headers = headers + return httplib2.Response(self.response_headers), self.data + + +class HttpMockSequence(object): + """Mock of httplib2.Http + + Mocks a sequence of calls to request returning different responses for each + call. Create an instance initialized with the desired response headers + and content and then use as if an httplib2.Http instance. 
+ + http = HttpMockSequence([ + ({'status': '401'}, ''), + ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'), + ({'status': '200'}, 'echo_request_headers'), + ]) + resp, content = http.request("http://examples.com") + + There are special values you can pass in for content to trigger + behavours that are helpful in testing. + + 'echo_request_headers' means return the request headers in the response body + 'echo_request_headers_as_json' means return the request headers in + the response body + 'echo_request_body' means return the request body in the response body + 'echo_request_uri' means return the request uri in the response body + """ + + def __init__(self, iterable): + """ + Args: + iterable: iterable, a sequence of pairs of (headers, body) + """ + self._iterable = iterable + self.follow_redirects = True + + def request(self, uri, + method='GET', + body=None, + headers=None, + redirections=1, + connection_type=None): + resp, content = self._iterable.pop(0) + if content == 'echo_request_headers': + content = headers + elif content == 'echo_request_headers_as_json': + content = json.dumps(headers) + elif content == 'echo_request_body': + if hasattr(body, 'read'): + content = body.read() + else: + content = body + elif content == 'echo_request_uri': + content = uri + return httplib2.Response(resp), content + + +def set_user_agent(http, user_agent): + """Set the user-agent on every request. + + Args: + http - An instance of httplib2.Http + or something that acts like it. + user_agent: string, the value for the user-agent header. + + Returns: + A modified instance of http that was passed in. + + Example: + + h = httplib2.Http() + h = set_user_agent(h, "my-app-name/6.0") + + Most of the time the user-agent will be set doing auth, this is for the rare + cases where you are accessing an unauthenticated endpoint. + """ + request_orig = http.request + + # The closure that will replace 'httplib2.Http.request'. 
+ def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + """Modify the request headers to add the user-agent.""" + if headers is None: + headers = {} + if 'user-agent' in headers: + headers['user-agent'] = user_agent + ' ' + headers['user-agent'] + else: + headers['user-agent'] = user_agent + resp, content = request_orig(uri, method, body, headers, + redirections, connection_type) + return resp, content + + http.request = new_request + return http + + +def tunnel_patch(http): + """Tunnel PATCH requests over POST. + Args: + http - An instance of httplib2.Http + or something that acts like it. + + Returns: + A modified instance of http that was passed in. + + Example: + + h = httplib2.Http() + h = tunnel_patch(h, "my-app-name/6.0") + + Useful if you are running on a platform that doesn't support PATCH. + Apply this last if you are using OAuth 1.0, as changing the method + will result in a different signature. + """ + request_orig = http.request + + # The closure that will replace 'httplib2.Http.request'. 
+ def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + """Modify the request headers to add the user-agent.""" + if headers is None: + headers = {} + if method == 'PATCH': + if 'oauth_token' in headers.get('authorization', ''): + logging.warning( + 'OAuth 1.0 request made with Credentials after tunnel_patch.') + headers['x-http-method-override'] = "PATCH" + method = 'POST' + resp, content = request_orig(uri, method, body, headers, + redirections, connection_type) + return resp, content + + http.request = new_request + return http diff --git a/googleapiclient/mimeparse.py b/googleapiclient/mimeparse.py new file mode 100644 index 00000000..8038af18 --- /dev/null +++ b/googleapiclient/mimeparse.py @@ -0,0 +1,172 @@ +# Copyright 2014 Joe Gregorio +# +# Licensed under the MIT License + +"""MIME-Type Parser + +This module provides basic functions for handling mime-types. It can handle +matching mime-types against a list of media-ranges. See section 14.1 of the +HTTP specification [RFC 2616] for a complete explanation. + + http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 + +Contents: + - parse_mime_type(): Parses a mime-type into its component parts. + - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q' + quality parameter. + - quality(): Determines the quality ('q') of a mime-type when + compared against a list of media-ranges. + - quality_parsed(): Just like quality() except the second parameter must be + pre-parsed. + - best_match(): Choose the mime-type with the highest quality ('q') + from a list of candidates. +""" + +__version__ = '0.1.3' +__author__ = 'Joe Gregorio' +__email__ = 'joe@bitworking.org' +__license__ = 'MIT License' +__credits__ = '' + + +def parse_mime_type(mime_type): + """Parses a mime-type into its component parts. 
+ + Carves up a mime-type and returns a tuple of the (type, subtype, params) + where 'params' is a dictionary of all the parameters for the media range. + For example, the media range 'application/xhtml;q=0.5' would get parsed + into: + + ('application', 'xhtml', {'q', '0.5'}) + """ + parts = mime_type.split(';') + params = dict([tuple([s.strip() for s in param.split('=', 1)])\ + for param in parts[1:] + ]) + full_type = parts[0].strip() + # Java URLConnection class sends an Accept header that includes a + # single '*'. Turn it into a legal wildcard. + if full_type == '*': + full_type = '*/*' + (type, subtype) = full_type.split('/') + + return (type.strip(), subtype.strip(), params) + + +def parse_media_range(range): + """Parse a media-range into its component parts. + + Carves up a media range and returns a tuple of the (type, subtype, + params) where 'params' is a dictionary of all the parameters for the media + range. For example, the media range 'application/*;q=0.5' would get parsed + into: + + ('application', '*', {'q', '0.5'}) + + In addition this function also guarantees that there is a value for 'q' + in the params dictionary, filling it in with a proper default if + necessary. + """ + (type, subtype, params) = parse_mime_type(range) + if not params.has_key('q') or not params['q'] or \ + not float(params['q']) or float(params['q']) > 1\ + or float(params['q']) < 0: + params['q'] = '1' + + return (type, subtype, params) + + +def fitness_and_quality_parsed(mime_type, parsed_ranges): + """Find the best match for a mime-type amongst parsed media-ranges. + + Find the best match for a given mime-type against a list of media_ranges + that have already been parsed by parse_media_range(). Returns a tuple of + the fitness value and the value of the 'q' quality parameter of the best + match, or (-1, 0) if no match was found. Just as for quality_parsed(), + 'parsed_ranges' must be a list of parsed media ranges. 
+ """ + best_fitness = -1 + best_fit_q = 0 + (target_type, target_subtype, target_params) =\ + parse_media_range(mime_type) + for (type, subtype, params) in parsed_ranges: + type_match = (type == target_type or\ + type == '*' or\ + target_type == '*') + subtype_match = (subtype == target_subtype or\ + subtype == '*' or\ + target_subtype == '*') + if type_match and subtype_match: + param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \ + target_params.iteritems() if key != 'q' and \ + params.has_key(key) and value == params[key]], 0) + fitness = (type == target_type) and 100 or 0 + fitness += (subtype == target_subtype) and 10 or 0 + fitness += param_matches + if fitness > best_fitness: + best_fitness = fitness + best_fit_q = params['q'] + + return best_fitness, float(best_fit_q) + + +def quality_parsed(mime_type, parsed_ranges): + """Find the best match for a mime-type amongst parsed media-ranges. + + Find the best match for a given mime-type against a list of media_ranges + that have already been parsed by parse_media_range(). Returns the 'q' + quality parameter of the best match, 0 if no match was found. This function + bahaves the same as quality() except that 'parsed_ranges' must be a list of + parsed media ranges. + """ + + return fitness_and_quality_parsed(mime_type, parsed_ranges)[1] + + +def quality(mime_type, ranges): + """Return the quality ('q') of a mime-type against a list of media-ranges. + + Returns the quality 'q' of a mime-type when compared against the + media-ranges in ranges. For example: + + >>> quality('text/html','text/*;q=0.3, text/html;q=0.7, + text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5') + 0.7 + + """ + parsed_ranges = [parse_media_range(r) for r in ranges.split(',')] + + return quality_parsed(mime_type, parsed_ranges) + + +def best_match(supported, header): + """Return mime-type with the highest quality ('q') from list of candidates. 
+ + Takes a list of supported mime-types and finds the best match for all the + media-ranges listed in header. The value of header must be a string that + conforms to the format of the HTTP Accept: header. The value of 'supported' + is a list of mime-types. The list of supported mime-types should be sorted + in order of increasing desirability, in case of a situation where there is + a tie. + + >>> best_match(['application/xbel+xml', 'text/xml'], + 'text/*;q=0.5,*/*; q=0.1') + 'text/xml' + """ + split_header = _filter_blank(header.split(',')) + parsed_header = [parse_media_range(r) for r in split_header] + weighted_matches = [] + pos = 0 + for mime_type in supported: + weighted_matches.append((fitness_and_quality_parsed(mime_type, + parsed_header), pos, mime_type)) + pos += 1 + weighted_matches.sort() + + return weighted_matches[-1][0][1] and weighted_matches[-1][2] or '' + + +def _filter_blank(i): + for s in i: + if s.strip(): + yield s diff --git a/googleapiclient/model.py b/googleapiclient/model.py new file mode 100644 index 00000000..0f0172ca --- /dev/null +++ b/googleapiclient/model.py @@ -0,0 +1,383 @@ +#!/usr/bin/python2.4 +# +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Model objects for requests and responses. + +Each API may support one or more serializations, such +as JSON, Atom, etc. The model classes are responsible +for converting between the wire format and the Python +object representation. 
+""" + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +import json +import logging +import urllib + +from googleapiclient import __version__ +from errors import HttpError + + +dump_request_response = False + + +def _abstract(): + raise NotImplementedError('You need to override this function') + + +class Model(object): + """Model base class. + + All Model classes should implement this interface. + The Model serializes and de-serializes between a wire + format such as JSON and a Python object representation. + """ + + def request(self, headers, path_params, query_params, body_value): + """Updates outgoing requests with a serialized body. + + Args: + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query_params: dict, parameters that appear in the query + body_value: object, the request body as a Python object, which must be + serializable. + Returns: + A tuple of (headers, path_params, query, body) + + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query: string, query part of the request URI + body: string, the body serialized in the desired wire format. + """ + _abstract() + + def response(self, resp, content): + """Convert the response wire format into a Python object. + + Args: + resp: httplib2.Response, the HTTP response headers and status + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. + + Raises: + googleapiclient.errors.HttpError if a non 2xx response is received. + """ + _abstract() + + +class BaseModel(Model): + """Base model class. + + Subclasses should provide implementations for the "serialize" and + "deserialize" methods, as well as values for the following class attributes. + + Attributes: + accept: The value to use for the HTTP Accept header. + content_type: The value to use for the HTTP Content-type header. 
+ no_content_response: The value to return when deserializing a 204 "No + Content" response. + alt_param: The value to supply as the "alt" query parameter for requests. + """ + + accept = None + content_type = None + no_content_response = None + alt_param = None + + def _log_request(self, headers, path_params, query, body): + """Logs debugging information about the request if requested.""" + if dump_request_response: + logging.info('--request-start--') + logging.info('-headers-start-') + for h, v in headers.iteritems(): + logging.info('%s: %s', h, v) + logging.info('-headers-end-') + logging.info('-path-parameters-start-') + for h, v in path_params.iteritems(): + logging.info('%s: %s', h, v) + logging.info('-path-parameters-end-') + logging.info('body: %s', body) + logging.info('query: %s', query) + logging.info('--request-end--') + + def request(self, headers, path_params, query_params, body_value): + """Updates outgoing requests with a serialized body. + + Args: + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query_params: dict, parameters that appear in the query + body_value: object, the request body as a Python object, which must be + serializable by json. 
+ Returns: + A tuple of (headers, path_params, query, body) + + headers: dict, request headers + path_params: dict, parameters that appear in the request path + query: string, query part of the request URI + body: string, the body serialized as JSON + """ + query = self._build_query(query_params) + headers['accept'] = self.accept + headers['accept-encoding'] = 'gzip, deflate' + if 'user-agent' in headers: + headers['user-agent'] += ' ' + else: + headers['user-agent'] = '' + headers['user-agent'] += 'google-api-python-client/%s (gzip)' % __version__ + + if body_value is not None: + headers['content-type'] = self.content_type + body_value = self.serialize(body_value) + self._log_request(headers, path_params, query, body_value) + return (headers, path_params, query, body_value) + + def _build_query(self, params): + """Builds a query string. + + Args: + params: dict, the query parameters + + Returns: + The query parameters properly encoded into an HTTP URI query string. + """ + if self.alt_param is not None: + params.update({'alt': self.alt_param}) + astuples = [] + for key, value in params.iteritems(): + if type(value) == type([]): + for x in value: + x = x.encode('utf-8') + astuples.append((key, x)) + else: + if getattr(value, 'encode', False) and callable(value.encode): + value = value.encode('utf-8') + astuples.append((key, value)) + return '?' + urllib.urlencode(astuples) + + def _log_response(self, resp, content): + """Logs debugging information about the response if requested.""" + if dump_request_response: + logging.info('--response-start--') + for h, v in resp.iteritems(): + logging.info('%s: %s', h, v) + if content: + logging.info(content) + logging.info('--response-end--') + + def response(self, resp, content): + """Convert the response wire format into a Python object. + + Args: + resp: httplib2.Response, the HTTP response headers and status + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. 
+ + Raises: + googleapiclient.errors.HttpError if a non 2xx response is received. + """ + self._log_response(resp, content) + # Error handling is TBD, for example, do we retry + # for some operation/error combinations? + if resp.status < 300: + if resp.status == 204: + # A 204: No Content response should be treated differently + # to all the other success states + return self.no_content_response + return self.deserialize(content) + else: + logging.debug('Content from bad request was: %s' % content) + raise HttpError(resp, content) + + def serialize(self, body_value): + """Perform the actual Python object serialization. + + Args: + body_value: object, the request body as a Python object. + + Returns: + string, the body in serialized form. + """ + _abstract() + + def deserialize(self, content): + """Perform the actual deserialization from response string to Python + object. + + Args: + content: string, the body of the HTTP response + + Returns: + The body de-serialized as a Python object. + """ + _abstract() + + +class JsonModel(BaseModel): + """Model class for JSON. + + Serializes and de-serializes between JSON and the Python + object representation of HTTP request and response bodies. + """ + accept = 'application/json' + content_type = 'application/json' + alt_param = 'json' + + def __init__(self, data_wrapper=False): + """Construct a JsonModel. 
+ + Args: + data_wrapper: boolean, wrap requests and responses in a data wrapper + """ + self._data_wrapper = data_wrapper + + def serialize(self, body_value): + if (isinstance(body_value, dict) and 'data' not in body_value and + self._data_wrapper): + body_value = {'data': body_value} + return json.dumps(body_value) + + def deserialize(self, content): + content = content.decode('utf-8') + body = json.loads(content) + if self._data_wrapper and isinstance(body, dict) and 'data' in body: + body = body['data'] + return body + + @property + def no_content_response(self): + return {} + + +class RawModel(JsonModel): + """Model class for requests that don't return JSON. + + Serializes and de-serializes between JSON and the Python + object representation of HTTP request, and returns the raw bytes + of the response body. + """ + accept = '*/*' + content_type = 'application/json' + alt_param = None + + def deserialize(self, content): + return content + + @property + def no_content_response(self): + return '' + + +class MediaModel(JsonModel): + """Model class for requests that return Media. + + Serializes and de-serializes between JSON and the Python + object representation of HTTP request, and returns the raw bytes + of the response body. + """ + accept = '*/*' + content_type = 'application/json' + alt_param = 'media' + + def deserialize(self, content): + return content + + @property + def no_content_response(self): + return '' + + +class ProtocolBufferModel(BaseModel): + """Model class for protocol buffers. + + Serializes and de-serializes the binary protocol buffer sent in the HTTP + request and response bodies. + """ + accept = 'application/x-protobuf' + content_type = 'application/x-protobuf' + alt_param = 'proto' + + def __init__(self, protocol_buffer): + """Constructs a ProtocolBufferModel. + + The serialzed protocol buffer returned in an HTTP response will be + de-serialized using the given protocol buffer class. 
+ + Args: + protocol_buffer: The protocol buffer class used to de-serialize a + response from the API. + """ + self._protocol_buffer = protocol_buffer + + def serialize(self, body_value): + return body_value.SerializeToString() + + def deserialize(self, content): + return self._protocol_buffer.FromString(content) + + @property + def no_content_response(self): + return self._protocol_buffer() + + +def makepatch(original, modified): + """Create a patch object. + + Some methods support PATCH, an efficient way to send updates to a resource. + This method allows the easy construction of patch bodies by looking at the + differences between a resource before and after it was modified. + + Args: + original: object, the original deserialized resource + modified: object, the modified deserialized resource + Returns: + An object that contains only the changes from original to modified, in a + form suitable to pass to a PATCH method. + + Example usage: + item = service.activities().get(postid=postid, userid=userid).execute() + original = copy.deepcopy(item) + item['object']['content'] = 'This is updated.' 
+ service.activities.patch(postid=postid, userid=userid, + body=makepatch(original, item)).execute() + """ + patch = {} + for key, original_value in original.iteritems(): + modified_value = modified.get(key, None) + if modified_value is None: + # Use None to signal that the element is deleted + patch[key] = None + elif original_value != modified_value: + if type(original_value) == type({}): + # Recursively descend objects + patch[key] = makepatch(original_value, modified_value) + else: + # In the case of simple types or arrays we just replace + patch[key] = modified_value + else: + # Don't add anything to patch if there's no change + pass + for key in modified: + if key not in original: + patch[key] = modified[key] + + return patch diff --git a/googleapiclient/sample_tools.py b/googleapiclient/sample_tools.py new file mode 100644 index 00000000..69f698e9 --- /dev/null +++ b/googleapiclient/sample_tools.py @@ -0,0 +1,102 @@ +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for making samples. + +Consolidates a lot of code commonly repeated in sample applications. 
+""" + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' +__all__ = ['init'] + + +import argparse +import httplib2 +import os + +from googleapiclient import discovery +from oauth2client import client +from oauth2client import file +from oauth2client import tools + + +def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None): + """A common initialization routine for samples. + + Many of the sample applications do the same initialization, which has now + been consolidated into this function. This function uses common idioms found + in almost all the samples, i.e. for an API with name 'apiname', the + credentials are stored in a file named apiname.dat, and the + client_secrets.json file is stored in the same directory as the application + main file. + + Args: + argv: list of string, the command-line parameters of the application. + name: string, name of the API. + version: string, version of the API. + doc: string, description of the application. Usually set to __doc__. + file: string, filename of the application. Usually set to __file__. + parents: list of argparse.ArgumentParser, additional command-line flags. + scope: string, The OAuth scope used. + discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL. + + Returns: + A tuple of (service, flags), where service is the service object and flags + is the parsed command-line flags. + """ + if scope is None: + scope = 'https://www.googleapis.com/auth/' + name + + # Parser command-line arguments. 
+ parent_parsers = [tools.argparser] + parent_parsers.extend(parents) + parser = argparse.ArgumentParser( + description=doc, + formatter_class=argparse.RawDescriptionHelpFormatter, + parents=parent_parsers) + flags = parser.parse_args(argv[1:]) + + # Name of a file containing the OAuth 2.0 information for this + # application, including client_id and client_secret, which are found + # on the API Access tab on the Google APIs + # Console . + client_secrets = os.path.join(os.path.dirname(filename), + 'client_secrets.json') + + # Set up a Flow object to be used if we need to authenticate. + flow = client.flow_from_clientsecrets(client_secrets, + scope=scope, + message=tools.message_if_missing(client_secrets)) + + # Prepare credentials, and authorize HTTP object with them. + # If the credentials don't exist or are invalid run through the native client + # flow. The Storage object will ensure that if successful the good + # credentials will get written back to a file. + storage = file.Storage(name + '.dat') + credentials = storage.get() + if credentials is None or credentials.invalid: + credentials = tools.run_flow(flow, storage, flags) + http = credentials.authorize(http = httplib2.Http()) + + if discovery_filename is None: + # Construct a service object via the discovery service. + service = discovery.build(name, version, http=http) + else: + # Construct a service object using a local discovery document file. + with open(discovery_filename) as discovery_file: + service = discovery.build_from_document( + discovery_file.read(), + base='https://www.googleapis.com/', + http=http) + return (service, flags) diff --git a/googleapiclient/schema.py b/googleapiclient/schema.py new file mode 100644 index 00000000..af413177 --- /dev/null +++ b/googleapiclient/schema.py @@ -0,0 +1,311 @@ +# Copyright 2014 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Schema processing for discovery based APIs + +Schemas holds an APIs discovery schemas. It can return those schema as +deserialized JSON objects, or pretty print them as prototype objects that +conform to the schema. + +For example, given the schema: + + schema = \"\"\"{ + "Foo": { + "type": "object", + "properties": { + "etag": { + "type": "string", + "description": "ETag of the collection." + }, + "kind": { + "type": "string", + "description": "Type of the collection ('calendar#acl').", + "default": "calendar#acl" + }, + "nextPageToken": { + "type": "string", + "description": "Token used to access the next + page of this result. Omitted if no further results are available." + } + } + } + }\"\"\" + + s = Schemas(schema) + print s.prettyPrintByName('Foo') + + Produces the following output: + + { + "nextPageToken": "A String", # Token used to access the + # next page of this result. Omitted if no further results are available. + "kind": "A String", # Type of the collection ('calendar#acl'). + "etag": "A String", # ETag of the collection. + }, + +The constructor takes a discovery document in which to look up named schema. +""" + +# TODO(jcgregorio) support format, enum, minimum, maximum + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +import copy + +from oauth2client import util + + +class Schemas(object): + """Schemas for an API.""" + + def __init__(self, discovery): + """Constructor. + + Args: + discovery: object, Deserialized discovery document from which we pull + out the named schema. 
+ """ + self.schemas = discovery.get('schemas', {}) + + # Cache of pretty printed schemas. + self.pretty = {} + + @util.positional(2) + def _prettyPrintByName(self, name, seen=None, dent=0): + """Get pretty printed object prototype from the schema name. + + Args: + name: string, Name of schema in the discovery document. + seen: list of string, Names of schema already seen. Used to handle + recursive definitions. + + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + if seen is None: + seen = [] + + if name in seen: + # Do not fall into an infinite loop over recursive definitions. + return '# Object with schema name: %s' % name + seen.append(name) + + if name not in self.pretty: + self.pretty[name] = _SchemaToStruct(self.schemas[name], + seen, dent=dent).to_str(self._prettyPrintByName) + + seen.pop() + + return self.pretty[name] + + def prettyPrintByName(self, name): + """Get pretty printed object prototype from the schema name. + + Args: + name: string, Name of schema in the discovery document. + + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + # Return with trailing comma and newline removed. + return self._prettyPrintByName(name, seen=[], dent=1)[:-2] + + @util.positional(2) + def _prettyPrintSchema(self, schema, seen=None, dent=0): + """Get pretty printed object prototype of schema. + + Args: + schema: object, Parsed JSON schema. + seen: list of string, Names of schema already seen. Used to handle + recursive definitions. + + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + if seen is None: + seen = [] + + return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName) + + def prettyPrintSchema(self, schema): + """Get pretty printed object prototype of schema. + + Args: + schema: object, Parsed JSON schema. 
+ + Returns: + string, A string that contains a prototype object with + comments that conforms to the given schema. + """ + # Return with trailing comma and newline removed. + return self._prettyPrintSchema(schema, dent=1)[:-2] + + def get(self, name): + """Get deserialized JSON schema from the schema name. + + Args: + name: string, Schema name. + """ + return self.schemas[name] + + +class _SchemaToStruct(object): + """Convert schema to a prototype object.""" + + @util.positional(3) + def __init__(self, schema, seen, dent=0): + """Constructor. + + Args: + schema: object, Parsed JSON schema. + seen: list, List of names of schema already seen while parsing. Used to + handle recursive definitions. + dent: int, Initial indentation depth. + """ + # The result of this parsing kept as list of strings. + self.value = [] + + # The final value of the parsing. + self.string = None + + # The parsed JSON schema. + self.schema = schema + + # Indentation level. + self.dent = dent + + # Method that when called returns a prototype object for the schema with + # the given name. + self.from_cache = None + + # List of names of schema already seen while parsing. + self.seen = seen + + def emit(self, text): + """Add text as a line to the output. + + Args: + text: string, Text to output. + """ + self.value.extend([" " * self.dent, text, '\n']) + + def emitBegin(self, text): + """Add text to the output, but with no line terminator. + + Args: + text: string, Text to output. + """ + self.value.extend([" " * self.dent, text]) + + def emitEnd(self, text, comment): + """Add text and comment to the output with line terminator. + + Args: + text: string, Text to output. + comment: string, Python comment. 
+ """ + if comment: + divider = '\n' + ' ' * (self.dent + 2) + '# ' + lines = comment.splitlines() + lines = [x.rstrip() for x in lines] + comment = divider.join(lines) + self.value.extend([text, ' # ', comment, '\n']) + else: + self.value.extend([text, '\n']) + + def indent(self): + """Increase indentation level.""" + self.dent += 1 + + def undent(self): + """Decrease indentation level.""" + self.dent -= 1 + + def _to_str_impl(self, schema): + """Prototype object based on the schema, in Python code with comments. + + Args: + schema: object, Parsed JSON schema file. + + Returns: + Prototype object based on the schema, in Python code with comments. + """ + stype = schema.get('type') + if stype == 'object': + self.emitEnd('{', schema.get('description', '')) + self.indent() + if 'properties' in schema: + for pname, pschema in schema.get('properties', {}).iteritems(): + self.emitBegin('"%s": ' % pname) + self._to_str_impl(pschema) + elif 'additionalProperties' in schema: + self.emitBegin('"a_key": ') + self._to_str_impl(schema['additionalProperties']) + self.undent() + self.emit('},') + elif '$ref' in schema: + schemaName = schema['$ref'] + description = schema.get('description', '') + s = self.from_cache(schemaName, seen=self.seen) + parts = s.splitlines() + self.emitEnd(parts[0], description) + for line in parts[1:]: + self.emit(line.rstrip()) + elif stype == 'boolean': + value = schema.get('default', 'True or False') + self.emitEnd('%s,' % str(value), schema.get('description', '')) + elif stype == 'string': + value = schema.get('default', 'A String') + self.emitEnd('"%s",' % str(value), schema.get('description', '')) + elif stype == 'integer': + value = schema.get('default', '42') + self.emitEnd('%s,' % str(value), schema.get('description', '')) + elif stype == 'number': + value = schema.get('default', '3.14') + self.emitEnd('%s,' % str(value), schema.get('description', '')) + elif stype == 'null': + self.emitEnd('None,', schema.get('description', '')) + elif stype 
== 'any': + self.emitEnd('"",', schema.get('description', '')) + elif stype == 'array': + self.emitEnd('[', schema.get('description')) + self.indent() + self.emitBegin('') + self._to_str_impl(schema['items']) + self.undent() + self.emit('],') + else: + self.emit('Unknown type! %s' % stype) + self.emitEnd('', '') + + self.string = ''.join(self.value) + return self.string + + def to_str(self, from_cache): + """Prototype object based on the schema, in Python code with comments. + + Args: + from_cache: callable(name, seen), Callable that retrieves an object + prototype for a schema with the given name. Seen is a list of schema + names already seen as we recursively descend the schema definition. + + Returns: + Prototype object based on the schema, in Python code with comments. + The lines of the code will all be properly indented. + """ + self.from_cache = from_cache + return self._to_str_impl(self.schema) diff --git a/oauth2client/__init__.py b/oauth2client/__init__.py index ac847483..7e4673d4 100644 --- a/oauth2client/__init__.py +++ b/oauth2client/__init__.py @@ -1,5 +1,8 @@ -__version__ = "1.2" +"""Client library for using OAuth2, especially with Google APIs.""" + +__version__ = '1.3.1' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth' +GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code' GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token' diff --git a/oauth2client/appengine.py b/oauth2client/appengine.py index 5cd3f4ba..7321dcb9 100644 --- a/oauth2client/appengine.py +++ b/oauth2client/appengine.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,14 +19,14 @@ Utilities for making it easier to use OAuth 2.0 on Google App Engine. 
__author__ = 'jcgregorio@google.com (Joe Gregorio)' -import base64 import cgi -import httplib2 +import json import logging import os import pickle import threading -import time + +import httplib2 from google.appengine.api import app_identity from google.appengine.api import memcache @@ -41,7 +41,6 @@ from oauth2client import GOOGLE_TOKEN_URI from oauth2client import clientsecrets from oauth2client import util from oauth2client import xsrfutil -from oauth2client.anyjson import simplejson from oauth2client.client import AccessTokenRefreshError from oauth2client.client import AssertionCredentials from oauth2client.client import Credentials @@ -159,15 +158,20 @@ class AppAssertionCredentials(AssertionCredentials): Args: scope: string or iterable of strings, scope(s) of the credentials being requested. + **kwargs: optional keyword args, including: + service_account_id: service account id of the application. If None or + unspecified, the default service account for the app is used. """ self.scope = util.scopes_to_string(scope) + self._kwargs = kwargs + self.service_account_id = kwargs.get('service_account_id', None) # Assertion type is no longer used, but still in the parent class signature. 
super(AppAssertionCredentials, self).__init__(None) @classmethod - def from_json(cls, json): - data = simplejson.loads(json) + def from_json(cls, json_data): + data = json.loads(json_data) return AppAssertionCredentials(data['scope']) def _refresh(self, http_request): @@ -186,11 +190,22 @@ class AppAssertionCredentials(AssertionCredentials): """ try: scopes = self.scope.split() - (token, _) = app_identity.get_access_token(scopes) - except app_identity.Error, e: + (token, _) = app_identity.get_access_token( + scopes, service_account_id=self.service_account_id) + except app_identity.Error as e: raise AccessTokenRefreshError(str(e)) self.access_token = token + @property + def serialization_data(self): + raise NotImplementedError('Cannot serialize credentials for AppEngine.') + + def create_scoped_required(self): + return not self.scope + + def create_scoped(self, scopes): + return AppAssertionCredentials(scopes, **self._kwargs) + class FlowProperty(db.Property): """App Engine datastore Property for Flow. @@ -434,6 +449,7 @@ class StorageByKeyName(Storage): entity_key = db.Key.from_path(self._model.kind(), self._key_name) db.delete(entity_key) + @db.non_transactional(allow_existing=True) def locked_get(self): """Retrieve Credential from datastore. @@ -456,6 +472,7 @@ class StorageByKeyName(Storage): credentials.set_store(self) return credentials + @db.non_transactional(allow_existing=True) def locked_put(self, credentials): """Write a Credentials to the datastore. @@ -468,6 +485,7 @@ class StorageByKeyName(Storage): if self._cache: self._cache.set(self._key_name, credentials.to_json()) + @db.non_transactional(allow_existing=True) def locked_delete(self): """Delete Credential from datastore.""" @@ -650,8 +668,9 @@ class OAuth2Decorator(object): provided to this constructor. A string indicating the name of the field on the _credentials_class where a Credentials object will be stored. Defaults to 'credentials'. 
- **kwargs: dict, Keyword arguments are be passed along as kwargs to the - OAuth2WebServerFlow constructor. + **kwargs: dict, Keyword arguments are passed along as kwargs to + the OAuth2WebServerFlow constructor. + """ self._tls = threading.local() self.flow = None @@ -798,14 +817,18 @@ class OAuth2Decorator(object): url = self.flow.step1_get_authorize_url() return str(url) - def http(self): + def http(self, *args, **kwargs): """Returns an authorized http instance. Must only be called from within an @oauth_required decorated method, or from within an @oauth_aware decorated method where has_credentials() returns True. + + Args: + *args: Positional arguments passed to httplib2.Http constructor. + **kwargs: Positional arguments passed to httplib2.Http constructor. """ - return self.credentials.authorize(httplib2.Http()) + return self.credentials.authorize(httplib2.Http(*args, **kwargs)) @property def callback_path(self): @@ -858,7 +881,7 @@ class OAuth2Decorator(object): user) if decorator._token_response_param and credentials.token_response: - resp_json = simplejson.dumps(credentials.token_response) + resp_json = json.dumps(credentials.token_response) redirect_uri = util._add_query_parameter( redirect_uri, decorator._token_response_param, resp_json) @@ -904,7 +927,7 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator): """ @util.positional(3) - def __init__(self, filename, scope, message=None, cache=None): + def __init__(self, filename, scope, message=None, cache=None, **kwargs): """Constructor Args: @@ -917,17 +940,20 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator): decorator. cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. + **kwargs: dict, Keyword arguments are passed along as kwargs to + the OAuth2WebServerFlow constructor. 
""" client_type, client_info = clientsecrets.loadfile(filename, cache=cache) if client_type not in [ clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]: raise InvalidClientSecretsError( - 'OAuth2Decorator doesn\'t support this OAuth 2.0 flow.') - constructor_kwargs = { - 'auth_uri': client_info['auth_uri'], - 'token_uri': client_info['token_uri'], - 'message': message, - } + "OAuth2Decorator doesn't support this OAuth 2.0 flow.") + constructor_kwargs = dict(kwargs) + constructor_kwargs.update({ + 'auth_uri': client_info['auth_uri'], + 'token_uri': client_info['token_uri'], + 'message': message, + }) revoke_uri = client_info.get('revoke_uri') if revoke_uri is not None: constructor_kwargs['revoke_uri'] = revoke_uri diff --git a/oauth2client/client.py b/oauth2client/client.py index 4f16c720..2a77e302 100644 --- a/oauth2client/client.py +++ b/oauth2client/client.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,10 +20,10 @@ Tools for interacting with OAuth 2.0 protected resources. 
__author__ = 'jcgregorio@google.com (Joe Gregorio)' import base64 -import clientsecrets +import collections import copy import datetime -import httplib2 +import json import logging import os import sys @@ -31,11 +31,13 @@ import time import urllib import urlparse +import httplib2 +from oauth2client import clientsecrets from oauth2client import GOOGLE_AUTH_URI +from oauth2client import GOOGLE_DEVICE_URI from oauth2client import GOOGLE_REVOKE_URI from oauth2client import GOOGLE_TOKEN_URI from oauth2client import util -from oauth2client.anyjson import simplejson HAS_OPENSSL = False HAS_CRYPTO = False @@ -47,18 +49,16 @@ try: except ImportError: pass -try: - from urlparse import parse_qsl -except ImportError: - from cgi import parse_qsl - logger = logging.getLogger(__name__) # Expiry is stored in RFC3339 UTC format EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # Which certs to use to validate id_tokens received. -ID_TOKEN_VERIFICATON_CERTS = 'https://www.googleapis.com/oauth2/v1/certs' +ID_TOKEN_VERIFICATION_CERTS = 'https://www.googleapis.com/oauth2/v1/certs' +# This symbol previously had a typo in the name; we keep the old name +# around for now, but will remove it in the future. +ID_TOKEN_VERIFICATON_CERTS = ID_TOKEN_VERIFICATION_CERTS # Constant to use for the out of band OAuth 2.0 flow. OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob' @@ -66,6 +66,30 @@ OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob' # Google Data client libraries may need to set this to [401, 403]. REFRESH_STATUS_CODES = [401] +# The value representing user credentials. +AUTHORIZED_USER = 'authorized_user' + +# The value representing service account credentials. +SERVICE_ACCOUNT = 'service_account' + +# The environment variable pointing the file with local +# Application Default Credentials. +GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' + +# The error message we show users when we can't find the Application +# Default Credentials. 
+ADC_HELP_MSG = ( + 'The Application Default Credentials are not available. They are available ' + 'if running in Google Compute Engine. Otherwise, the environment variable ' + + GOOGLE_APPLICATION_CREDENTIALS + + ' must be defined pointing to a file defining the credentials. See ' + 'https://developers.google.com/accounts/docs/application-default-credentials' # pylint:disable=line-too-long + ' for more information.') + +# The access token along with the seconds in which it expires. +AccessTokenInfo = collections.namedtuple( + 'AccessTokenInfo', ['access_token', 'expires_in']) + class Error(Exception): """Base error for this module.""" @@ -92,13 +116,25 @@ class AccessTokenCredentialsError(Error): class VerifyJwtTokenError(Error): - """Could on retrieve certificates for validation.""" + """Could not retrieve certificates for validation.""" class NonAsciiHeaderError(Error): """Header names and values must be ASCII strings.""" +class ApplicationDefaultCredentialsError(Error): + """Error retrieving the Application Default Credentials.""" + + +class OAuth2DeviceCodeError(Error): + """Error trying to retrieve a device code.""" + + +class CryptoUnavailableError(Error, NotImplementedError): + """Raised when a crypto library is required, but none is available.""" + + def _abstract(): raise NotImplementedError('You need to override this function') @@ -126,11 +162,12 @@ class Credentials(object): an HTTP transport. Subclasses must also specify a classmethod named 'from_json' that takes a JSON - string as input and returns an instaniated Credentials object. + string as input and returns an instantiated Credentials object. """ NON_SERIALIZED_MEMBERS = ['store'] + def authorize(self, http): """Take an httplib2.Http instance (or equivalent) and authorizes it. @@ -144,6 +181,7 @@ class Credentials(object): """ _abstract() + def refresh(self, http): """Forces a refresh of the access_token. 
@@ -153,6 +191,7 @@ class Credentials(object): """ _abstract() + def revoke(self, http): """Revokes a refresh_token and makes the credentials void. @@ -162,6 +201,7 @@ class Credentials(object): """ _abstract() + def apply(self, headers): """Add the authorization to the headers. @@ -185,12 +225,13 @@ class Credentials(object): for member in strip: if member in d: del d[member] - if 'token_expiry' in d and isinstance(d['token_expiry'], datetime.datetime): + if (d.get('token_expiry') and + isinstance(d['token_expiry'], datetime.datetime)): d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT) # Add in information we will need later to reconsistitue this instance. d['_class'] = t.__name__ d['_module'] = t.__module__ - return simplejson.dumps(d) + return json.dumps(d) def to_json(self): """Creating a JSON representation of an instance of Credentials. @@ -213,14 +254,14 @@ class Credentials(object): An instance of the subclass of Credentials that was serialized with to_json(). """ - data = simplejson.loads(s) + data = json.loads(s) # Find and call the right classmethod from_json() to restore the object. module = data['_module'] try: m = __import__(module) except ImportError: # In case there's an object from the old package structure, update it - module = module.replace('.apiclient', '') + module = module.replace('.googleapiclient', '') m = __import__(module) m = __import__(module, fromlist=module.split('.')[:-1]) @@ -229,13 +270,13 @@ class Credentials(object): return from_json(s) @classmethod - def from_json(cls, s): + def from_json(cls, unused_data): """Instantiate a Credentials object from a JSON description of it. The JSON should have been produced by calling .to_json() on the object. Args: - data: dict, A deserialized JSON object. + unused_data: dict, A deserialized JSON object. Returns: An instance of a Credentials subclass. @@ -374,11 +415,11 @@ def _update_query_params(uri, params): Returns: The same URI but with the new query parameters added. 
""" - parts = list(urlparse.urlparse(uri)) - query_params = dict(parse_qsl(parts[4])) # 4 is the index of the query part + parts = urlparse.urlparse(uri) + query_params = dict(urlparse.parse_qsl(parts.query)) query_params.update(params) - parts[4] = urllib.urlencode(query_params) - return urlparse.urlunparse(parts) + new_parts = parts._replace(query=urllib.urlencode(query_params)) + return urlparse.urlunparse(new_parts) class OAuth2Credentials(Credentials): @@ -457,7 +498,7 @@ class OAuth2Credentials(Credentials): h = httplib2.Http() h = credentials.authorize(h) - You can't create a new OAuth subclass of httplib2.Authenication + You can't create a new OAuth subclass of httplib2.Authentication because it never gets passed the absolute URI, which is needed for signing. So instead we have to overload 'request' with a closure that adds in the Authorization header and then calls the original @@ -474,10 +515,12 @@ class OAuth2Credentials(Credentials): logger.info('Attempting refresh to obtain initial access_token') self._refresh(request_orig) - # Modify the request headers to add the appropriate + # Clone and modify the request headers to add the appropriate # Authorization header. if headers is None: headers = {} + else: + headers = dict(headers) self.apply(headers) if self.user_agent is not None: @@ -490,7 +533,7 @@ class OAuth2Credentials(Credentials): redirections, connection_type) if resp.status in REFRESH_STATUS_CODES: - logger.info('Refreshing due to a %s' % str(resp.status)) + logger.info('Refreshing due to a %s', resp.status) self._refresh(request_orig) self.apply(headers) return request_orig(uri, method, body, clean_headers(headers), @@ -546,13 +589,13 @@ class OAuth2Credentials(Credentials): Returns: An instance of a Credentials subclass. 
""" - data = simplejson.loads(s) - if 'token_expiry' in data and not isinstance(data['token_expiry'], - datetime.datetime): + data = json.loads(s) + if (data.get('token_expiry') and + not isinstance(data['token_expiry'], datetime.datetime)): try: data['token_expiry'] = datetime.datetime.strptime( data['token_expiry'], EXPIRY_FORMAT) - except: + except ValueError: data['token_expiry'] = None retval = cls( data['access_token'], @@ -587,11 +630,24 @@ class OAuth2Credentials(Credentials): return True return False + def get_access_token(self, http=None): + """Return the access token and its expiration information. + + If the token does not exist, get one. + If the token expired, refresh it. + """ + if not self.access_token or self.access_token_expired: + if not http: + http = httplib2.Http() + self.refresh(http) + return AccessTokenInfo(access_token=self.access_token, + expires_in=self._expires_in()) + def set_store(self, store): """Set the Storage for the credential. Args: - store: Storage, an implementation of Stroage object. + store: Storage, an implementation of Storage object. This is needed to store the latest access_token if it has expired and been refreshed. This implementation uses locking to check for updates before updating the @@ -599,6 +655,25 @@ class OAuth2Credentials(Credentials): """ self.store = store + def _expires_in(self): + """Return the number of seconds until this token expires. + + If token_expiry is in the past, this method will return 0, meaning the + token has already expired. + If token_expiry is None, this method will return None. Note that returning + 0 in such a case would not be fair: the token may still be valid; + we just don't know anything about it. 
+ """ + if self.token_expiry: + now = datetime.datetime.utcnow() + if self.token_expiry > now: + time_delta = self.token_expiry - now + # TODO(orestica): return time_delta.total_seconds() + # once dropping support for Python 2.6 + return time_delta.days * 86400 + time_delta.seconds + else: + return 0 + def _updateFromCredential(self, other): """Update this Credential from another instance.""" self.__dict__.update(other.__getstate__()) @@ -682,7 +757,7 @@ class OAuth2Credentials(Credentials): self.token_uri, method='POST', body=body, headers=headers) if resp.status == 200: # TODO(jcgregorio) Raise an error if loads fails? - d = simplejson.loads(content) + d = json.loads(content) self.token_response = d self.access_token = d['access_token'] self.refresh_token = d.get('refresh_token', self.refresh_token) @@ -691,17 +766,22 @@ class OAuth2Credentials(Credentials): seconds=int(d['expires_in'])) + datetime.datetime.utcnow() else: self.token_expiry = None + # On temporary refresh errors, the user does not actually have to + # re-authorize, so we unflag here. + self.invalid = False if self.store: self.store.locked_put(self) else: # An {'error':...} response body means the token is expired or revoked, # so we flag the credentials as such. - logger.info('Failed to retrieve access token: %s' % content) + logger.info('Failed to retrieve access token: %s', content) error_msg = 'Invalid response %s.' % resp['status'] try: - d = simplejson.loads(content) + d = json.loads(content) if 'error' in d: error_msg = d['error'] + if 'error_description' in d: + error_msg += ': ' + d['error_description'] self.invalid = True if self.store: self.store.locked_put(self) @@ -739,7 +819,7 @@ class OAuth2Credentials(Credentials): else: error_msg = 'Invalid response %s.' 
% resp.status try: - d = simplejson.loads(content) + d = json.loads(content) if 'error' in d: error_msg = d['error'] except StandardError: @@ -800,7 +880,7 @@ class AccessTokenCredentials(OAuth2Credentials): @classmethod def from_json(cls, s): - data = simplejson.loads(s) + data = json.loads(s) retval = AccessTokenCredentials( data['access_token'], data['user_agent']) @@ -820,7 +900,341 @@ class AccessTokenCredentials(OAuth2Credentials): self._do_revoke(http_request, self.access_token) -class AssertionCredentials(OAuth2Credentials): +_env_name = None + + +def _get_environment(urllib2_urlopen=None): + """Detect the environment the code is being run on.""" + + global _env_name + + if _env_name: + return _env_name + + server_software = os.environ.get('SERVER_SOFTWARE', '') + if server_software.startswith('Google App Engine/'): + _env_name = 'GAE_PRODUCTION' + elif server_software.startswith('Development/'): + _env_name = 'GAE_LOCAL' + else: + import urllib2 + try: + if urllib2_urlopen is None: + urllib2_urlopen = urllib2.urlopen + response = urllib2_urlopen('http://metadata.google.internal') + if any('Metadata-Flavor: Google' in h for h in response.info().headers): + _env_name = 'GCE_PRODUCTION' + else: + _env_name = 'UNKNOWN' + except urllib2.URLError: + _env_name = 'UNKNOWN' + + return _env_name + + +class GoogleCredentials(OAuth2Credentials): + """Application Default Credentials for use in calling Google APIs. + + The Application Default Credentials are being constructed as a function of + the environment where the code is being run. 
+ More details can be found on this page: + https://developers.google.com/accounts/docs/application-default-credentials + + Here is an example of how to use the Application Default Credentials for a + service that requires authentication: + + + from googleapiclient.discovery import build + from oauth2client.client import GoogleCredentials + + PROJECT = 'bamboo-machine-422' # replace this with one of your projects + ZONE = 'us-central1-a' # replace this with the zone you care about + + credentials = GoogleCredentials.get_application_default() + service = build('compute', 'v1', credentials=credentials) + + request = service.instances().list(project=PROJECT, zone=ZONE) + response = request.execute() + + print response + + + A service that does not require authentication does not need credentials + to be passed in: + + + from googleapiclient.discovery import build + + service = build('discovery', 'v1') + + request = service.apis().list() + response = request.execute() + + print response + + """ + + def __init__(self, access_token, client_id, client_secret, refresh_token, + token_expiry, token_uri, user_agent, + revoke_uri=GOOGLE_REVOKE_URI): + """Create an instance of GoogleCredentials. + + This constructor is not usually called by the user, instead + GoogleCredentials objects are instantiated by + GoogleCredentials.from_stream() or + GoogleCredentials.get_application_default(). + + Args: + access_token: string, access token. + client_id: string, client identifier. + client_secret: string, client secret. + refresh_token: string, refresh token. + token_expiry: datetime, when the access_token expires. + token_uri: string, URI of token endpoint. + user_agent: string, The HTTP User-Agent to provide for this application. + revoke_uri: string, URI for revoke endpoint. + Defaults to GOOGLE_REVOKE_URI; a token can't be revoked if this is None. 
+ """ + super(GoogleCredentials, self).__init__( + access_token, client_id, client_secret, refresh_token, token_expiry, + token_uri, user_agent, revoke_uri=revoke_uri) + + def create_scoped_required(self): + """Whether this Credentials object is scopeless. + + create_scoped(scopes) method needs to be called in order to create + a Credentials object for API calls. + """ + return False + + def create_scoped(self, scopes): + """Create a Credentials object for the given scopes. + + The Credentials type is preserved. + """ + return self + + @property + def serialization_data(self): + """Get the fields and their values identifying the current credentials.""" + return { + 'type': 'authorized_user', + 'client_id': self.client_id, + 'client_secret': self.client_secret, + 'refresh_token': self.refresh_token + } + + @staticmethod + def get_application_default(): + """Get the Application Default Credentials for the current environment. + + Exceptions: + ApplicationDefaultCredentialsError: raised when the credentials fail + to be retrieved. 
+ """ + + env_name = _get_environment() + + if env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'): + # if we are running inside Google App Engine + # there is no need to look for credentials in local files + application_default_credential_filename = None + well_known_file = None + else: + application_default_credential_filename = _get_environment_variable_file() + well_known_file = _get_well_known_file() + if not os.path.isfile(well_known_file): + well_known_file = None + + if application_default_credential_filename: + try: + return _get_application_default_credential_from_file( + application_default_credential_filename) + except (ApplicationDefaultCredentialsError, ValueError) as error: + extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS + + ' environment variable)') + _raise_exception_for_reading_json( + application_default_credential_filename, extra_help, error) + elif well_known_file: + try: + return _get_application_default_credential_from_file(well_known_file) + except (ApplicationDefaultCredentialsError, ValueError) as error: + extra_help = (' (produced automatically when running' + ' "gcloud auth login" command)') + _raise_exception_for_reading_json(well_known_file, extra_help, error) + elif env_name in ('GAE_PRODUCTION', 'GAE_LOCAL'): + return _get_application_default_credential_GAE() + elif env_name == 'GCE_PRODUCTION': + return _get_application_default_credential_GCE() + else: + raise ApplicationDefaultCredentialsError(ADC_HELP_MSG) + + @staticmethod + def from_stream(credential_filename): + """Create a Credentials object by reading the information from a given file. + + It returns an object of type GoogleCredentials. + + Args: + credential_filename: the path to the file from where the credentials + are to be read + + Exceptions: + ApplicationDefaultCredentialsError: raised when the credentials fail + to be retrieved. 
+ """ + + if credential_filename and os.path.isfile(credential_filename): + try: + return _get_application_default_credential_from_file( + credential_filename) + except (ApplicationDefaultCredentialsError, ValueError) as error: + extra_help = ' (provided as parameter to the from_stream() method)' + _raise_exception_for_reading_json(credential_filename, + extra_help, + error) + else: + raise ApplicationDefaultCredentialsError( + 'The parameter passed to the from_stream() ' + 'method should point to a file.') + + +def save_to_well_known_file(credentials, well_known_file=None): + """Save the provided GoogleCredentials to the well known file. + + Args: + credentials: + the credentials to be saved to the well known file; + it should be an instance of GoogleCredentials + well_known_file: + the name of the file where the credentials are to be saved; + this parameter is supposed to be used for testing only + """ + # TODO(orestica): move this method to tools.py + # once the argparse import gets fixed (it is not present in Python 2.6) + + if well_known_file is None: + well_known_file = _get_well_known_file() + + credentials_data = credentials.serialization_data + + with open(well_known_file, 'w') as f: + json.dump(credentials_data, f, sort_keys=True, indent=2, separators=(',', ': ')) + + +def _get_environment_variable_file(): + application_default_credential_filename = ( + os.environ.get(GOOGLE_APPLICATION_CREDENTIALS, + None)) + + if application_default_credential_filename: + if os.path.isfile(application_default_credential_filename): + return application_default_credential_filename + else: + raise ApplicationDefaultCredentialsError( + 'File ' + application_default_credential_filename + ' (pointed by ' + + GOOGLE_APPLICATION_CREDENTIALS + + ' environment variable) does not exist!') + + +def _get_well_known_file(): + """Get the well known file produced by command 'gcloud auth login'.""" + # TODO(orestica): Revisit this method once gcloud provides a better way + # of 
pinpointing the exact location of the file. + + WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json' + CLOUDSDK_CONFIG_DIRECTORY = 'gcloud' + + if os.name == 'nt': + try: + default_config_path = os.path.join(os.environ['APPDATA'], + CLOUDSDK_CONFIG_DIRECTORY) + except KeyError: + # This should never happen unless someone is really messing with things. + drive = os.environ.get('SystemDrive', 'C:') + default_config_path = os.path.join(drive, '\\', CLOUDSDK_CONFIG_DIRECTORY) + else: + default_config_path = os.path.join(os.path.expanduser('~'), + '.config', + CLOUDSDK_CONFIG_DIRECTORY) + + default_config_path = os.path.join(default_config_path, + WELL_KNOWN_CREDENTIALS_FILE) + + return default_config_path + + +def _get_application_default_credential_from_file( + application_default_credential_filename): + """Build the Application Default Credentials from file.""" + + import service_account + + # read the credentials from the file + with open(application_default_credential_filename) as ( + application_default_credential): + client_credentials = json.load(application_default_credential) + + credentials_type = client_credentials.get('type') + if credentials_type == AUTHORIZED_USER: + required_fields = set(['client_id', 'client_secret', 'refresh_token']) + elif credentials_type == SERVICE_ACCOUNT: + required_fields = set(['client_id', 'client_email', 'private_key_id', + 'private_key']) + else: + raise ApplicationDefaultCredentialsError( + "'type' field should be defined (and have one of the '" + + AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)") + + missing_fields = required_fields.difference(client_credentials.keys()) + + if missing_fields: + _raise_exception_for_missing_fields(missing_fields) + + if client_credentials['type'] == AUTHORIZED_USER: + return GoogleCredentials( + access_token=None, + client_id=client_credentials['client_id'], + client_secret=client_credentials['client_secret'], + refresh_token=client_credentials['refresh_token'], + 
token_expiry=None, + token_uri=GOOGLE_TOKEN_URI, + user_agent='Python client library') + else: # client_credentials['type'] == SERVICE_ACCOUNT + return service_account._ServiceAccountCredentials( + service_account_id=client_credentials['client_id'], + service_account_email=client_credentials['client_email'], + private_key_id=client_credentials['private_key_id'], + private_key_pkcs8_text=client_credentials['private_key'], + scopes=[]) + + +def _raise_exception_for_missing_fields(missing_fields): + raise ApplicationDefaultCredentialsError( + 'The following field(s) must be defined: ' + ', '.join(missing_fields)) + + +def _raise_exception_for_reading_json(credential_file, + extra_help, + error): + raise ApplicationDefaultCredentialsError( + 'An error was encountered while reading json file: '+ + credential_file + extra_help + ': ' + str(error)) + + +def _get_application_default_credential_GAE(): + from oauth2client.appengine import AppAssertionCredentials + + return AppAssertionCredentials([]) + + +def _get_application_default_credential_GCE(): + from oauth2client.gce import AppAssertionCredentials + + return AppAssertionCredentials([]) + + +class AssertionCredentials(GoogleCredentials): """Abstract Credentials object used for OAuth 2.0 assertion grants. This credential does not require a flow to instantiate because it @@ -883,135 +1297,148 @@ class AssertionCredentials(OAuth2Credentials): self._do_revoke(http_request, self.access_token) -if HAS_CRYPTO: - # PyOpenSSL and PyCrypto are not prerequisites for oauth2client, so if it is - # missing then don't create the SignedJwtAssertionCredentials or the - # verify_id_token() method. +def _RequireCryptoOrDie(): + """Ensure we have a crypto library, or throw CryptoUnavailableError. - class SignedJwtAssertionCredentials(AssertionCredentials): - """Credentials object used for OAuth 2.0 Signed JWT assertion grants. 
+ The oauth2client.crypt module requires either PyCrypto or PyOpenSSL + to be available in order to function, but these are optional + dependencies. + """ + if not HAS_CRYPTO: + raise CryptoUnavailableError('No crypto library available') - This credential does not require a flow to instantiate because it represents - a two legged flow, and therefore has all of the required information to - generate and refresh its own access tokens. - SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto 2.6 or - later. For App Engine you may also consider using AppAssertionCredentials. - """ +class SignedJwtAssertionCredentials(AssertionCredentials): + """Credentials object used for OAuth 2.0 Signed JWT assertion grants. - MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds + This credential does not require a flow to instantiate because it + represents a two legged flow, and therefore has all of the required + information to generate and refresh its own access tokens. - @util.positional(4) - def __init__(self, - service_account_name, - private_key, - scope, - private_key_password='notasecret', - user_agent=None, - token_uri=GOOGLE_TOKEN_URI, - revoke_uri=GOOGLE_REVOKE_URI, - **kwargs): - """Constructor for SignedJwtAssertionCredentials. + SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto + 2.6 or later. For App Engine you may also consider using + AppAssertionCredentials. + """ - Args: - service_account_name: string, id for account, usually an email address. - private_key: string, private key in PKCS12 or PEM format. - scope: string or iterable of strings, scope(s) of the credentials being - requested. - private_key_password: string, password for private_key, unused if - private_key is in PEM format. - user_agent: string, HTTP User-Agent to provide for this application. - token_uri: string, URI for token endpoint. For convenience - defaults to Google's endpoints but any OAuth 2.0 provider can be used. - revoke_uri: string, URI for revoke endpoint. 
- kwargs: kwargs, Additional parameters to add to the JWT token, for - example sub=joe@xample.org.""" + MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds - super(SignedJwtAssertionCredentials, self).__init__( - None, - user_agent=user_agent, - token_uri=token_uri, - revoke_uri=revoke_uri, - ) - - self.scope = util.scopes_to_string(scope) - - # Keep base64 encoded so it can be stored in JSON. - self.private_key = base64.b64encode(private_key) - - self.private_key_password = private_key_password - self.service_account_name = service_account_name - self.kwargs = kwargs - - @classmethod - def from_json(cls, s): - data = simplejson.loads(s) - retval = SignedJwtAssertionCredentials( - data['service_account_name'], - base64.b64decode(data['private_key']), - data['scope'], - private_key_password=data['private_key_password'], - user_agent=data['user_agent'], - token_uri=data['token_uri'], - **data['kwargs'] - ) - retval.invalid = data['invalid'] - retval.access_token = data['access_token'] - return retval - - def _generate_assertion(self): - """Generate the assertion that will be used in the request.""" - now = long(time.time()) - payload = { - 'aud': self.token_uri, - 'scope': self.scope, - 'iat': now, - 'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS, - 'iss': self.service_account_name - } - payload.update(self.kwargs) - logger.debug(str(payload)) - - private_key = base64.b64decode(self.private_key) - return crypt.make_signed_jwt(crypt.Signer.from_string( - private_key, self.private_key_password), payload) - - # Only used in verify_id_token(), which is always calling to the same URI - # for the certs. - _cached_http = httplib2.Http(MemoryCache()) - - @util.positional(2) - def verify_id_token(id_token, audience, http=None, - cert_uri=ID_TOKEN_VERIFICATON_CERTS): - """Verifies a signed JWT id_token. - - This function requires PyOpenSSL and because of that it does not work on - App Engine. 
+ @util.positional(4) + def __init__(self, + service_account_name, + private_key, + scope, + private_key_password='notasecret', + user_agent=None, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + **kwargs): + """Constructor for SignedJwtAssertionCredentials. Args: - id_token: string, A Signed JWT. - audience: string, The audience 'aud' that the token should be for. - http: httplib2.Http, instance to use to make the HTTP request. Callers - should supply an instance that has caching enabled. - cert_uri: string, URI of the certificates in JSON format to - verify the JWT against. - - Returns: - The deserialized JSON in the JWT. + service_account_name: string, id for account, usually an email address. + private_key: string, private key in PKCS12 or PEM format. + scope: string or iterable of strings, scope(s) of the credentials being + requested. + private_key_password: string, password for private_key, unused if + private_key is in PEM format. + user_agent: string, HTTP User-Agent to provide for this application. + token_uri: string, URI for token endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider can be used. + revoke_uri: string, URI for revoke endpoint. + kwargs: kwargs, Additional parameters to add to the JWT token, for + example sub=joe@xample.org. Raises: - oauth2client.crypt.AppIdentityError if the JWT fails to verify. + CryptoUnavailableError if no crypto library is available. 
""" - if http is None: - http = _cached_http + _RequireCryptoOrDie() + super(SignedJwtAssertionCredentials, self).__init__( + None, + user_agent=user_agent, + token_uri=token_uri, + revoke_uri=revoke_uri, + ) - resp, content = http.request(cert_uri) + self.scope = util.scopes_to_string(scope) - if resp.status == 200: - certs = simplejson.loads(content) - return crypt.verify_signed_jwt_with_certs(id_token, certs, audience) - else: - raise VerifyJwtTokenError('Status code: %d' % resp.status) + # Keep base64 encoded so it can be stored in JSON. + self.private_key = base64.b64encode(private_key) + + self.private_key_password = private_key_password + self.service_account_name = service_account_name + self.kwargs = kwargs + + @classmethod + def from_json(cls, s): + data = json.loads(s) + retval = SignedJwtAssertionCredentials( + data['service_account_name'], + base64.b64decode(data['private_key']), + data['scope'], + private_key_password=data['private_key_password'], + user_agent=data['user_agent'], + token_uri=data['token_uri'], + **data['kwargs'] + ) + retval.invalid = data['invalid'] + retval.access_token = data['access_token'] + return retval + + def _generate_assertion(self): + """Generate the assertion that will be used in the request.""" + now = long(time.time()) + payload = { + 'aud': self.token_uri, + 'scope': self.scope, + 'iat': now, + 'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS, + 'iss': self.service_account_name + } + payload.update(self.kwargs) + logger.debug(str(payload)) + + private_key = base64.b64decode(self.private_key) + return crypt.make_signed_jwt(crypt.Signer.from_string( + private_key, self.private_key_password), payload) + +# Only used in verify_id_token(), which is always calling to the same URI +# for the certs. +_cached_http = httplib2.Http(MemoryCache()) + +@util.positional(2) +def verify_id_token(id_token, audience, http=None, + cert_uri=ID_TOKEN_VERIFICATION_CERTS): + """Verifies a signed JWT id_token. 
+ + This function requires PyOpenSSL and because of that it does not work on + App Engine. + + Args: + id_token: string, A Signed JWT. + audience: string, The audience 'aud' that the token should be for. + http: httplib2.Http, instance to use to make the HTTP request. Callers + should supply an instance that has caching enabled. + cert_uri: string, URI of the certificates in JSON format to + verify the JWT against. + + Returns: + The deserialized JSON in the JWT. + + Raises: + oauth2client.crypt.AppIdentityError: if the JWT fails to verify. + CryptoUnavailableError: if no crypto library is available. + """ + _RequireCryptoOrDie() + if http is None: + http = _cached_http + + resp, content = http.request(cert_uri) + + if resp.status == 200: + certs = json.loads(content) + return crypt.verify_signed_jwt_with_certs(id_token, certs, audience) + else: + raise VerifyJwtTokenError('Status code: %d' % resp.status) def _urlsafe_b64decode(b64string): @@ -1034,11 +1461,11 @@ def _extract_id_token(id_token): """ segments = id_token.split('.') - if (len(segments) != 3): + if len(segments) != 3: raise VerifyJwtTokenError( - 'Wrong number of segments in token: %s' % id_token) + 'Wrong number of segments in token: %s' % id_token) - return simplejson.loads(_urlsafe_b64decode(segments[1])) + return json.loads(_urlsafe_b64decode(segments[1])) def _parse_exchange_token_response(content): @@ -1056,11 +1483,11 @@ def _parse_exchange_token_response(content): """ resp = {} try: - resp = simplejson.loads(content) + resp = json.loads(content) except StandardError: # different JSON libs raise different exceptions, # so we just do a catch-all here - resp = dict(parse_qsl(content)) + resp = dict(urlparse.parse_qsl(content)) # some providers respond with 'expires', others with 'expires_in' if resp and 'expires' in resp: @@ -1074,7 +1501,8 @@ def credentials_from_code(client_id, client_secret, scope, code, redirect_uri='postmessage', http=None, user_agent=None, token_uri=GOOGLE_TOKEN_URI, 
auth_uri=GOOGLE_AUTH_URI, - revoke_uri=GOOGLE_REVOKE_URI): + revoke_uri=GOOGLE_REVOKE_URI, + device_uri=GOOGLE_DEVICE_URI): """Exchanges an authorization code for an OAuth2Credentials object. Args: @@ -1092,6 +1520,8 @@ def credentials_from_code(client_id, client_secret, scope, code, defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. + device_uri: string, URI for device authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: An OAuth2Credentials object. @@ -1103,7 +1533,7 @@ def credentials_from_code(client_id, client_secret, scope, code, flow = OAuth2WebServerFlow(client_id, client_secret, scope, redirect_uri=redirect_uri, user_agent=user_agent, auth_uri=auth_uri, token_uri=token_uri, - revoke_uri=revoke_uri) + revoke_uri=revoke_uri, device_uri=device_uri) credentials = flow.step2_exchange(code, http=http) return credentials @@ -1114,7 +1544,8 @@ def credentials_from_clientsecrets_and_code(filename, scope, code, message = None, redirect_uri='postmessage', http=None, - cache=None): + cache=None, + device_uri=None): """Returns OAuth2Credentials from a clientsecrets file and an auth code. Will create the right kind of Flow based on the contents of the clientsecrets @@ -1134,6 +1565,7 @@ def credentials_from_clientsecrets_and_code(filename, scope, code, http: httplib2.Http, optional http instance to use to do the fetch cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. + device_uri: string, OAuth 2.0 device authorization endpoint Returns: An OAuth2Credentials object. @@ -1146,11 +1578,49 @@ def credentials_from_clientsecrets_and_code(filename, scope, code, invalid. 
""" flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache, - redirect_uri=redirect_uri) + redirect_uri=redirect_uri, + device_uri=device_uri) credentials = flow.step2_exchange(code, http=http) return credentials +class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', ( + 'device_code', 'user_code', 'interval', 'verification_url', + 'user_code_expiry'))): + """Intermediate information the OAuth2 for devices flow.""" + + @classmethod + def FromResponse(cls, response): + """Create a DeviceFlowInfo from a server response. + + The response should be a dict containing entries as described + here: + http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1 + """ + # device_code, user_code, and verification_url are required. + kwargs = { + 'device_code': response['device_code'], + 'user_code': response['user_code'], + } + # The response may list the verification address as either + # verification_url or verification_uri, so we check for both. + verification_url = response.get( + 'verification_url', response.get('verification_uri')) + if verification_url is None: + raise OAuth2DeviceCodeError( + 'No verification_url provided in server response') + kwargs['verification_url'] = verification_url + # expires_in and interval are optional. + kwargs.update({ + 'interval': response.get('interval'), + 'user_code_expiry': None, + }) + if 'expires_in' in response: + kwargs['user_code_expiry'] = datetime.datetime.now() + datetime.timedelta( + seconds=int(response['expires_in'])) + + return cls(**kwargs) + class OAuth2WebServerFlow(Flow): """Does the Web Server Flow for OAuth 2.0. @@ -1164,6 +1634,8 @@ class OAuth2WebServerFlow(Flow): auth_uri=GOOGLE_AUTH_URI, token_uri=GOOGLE_TOKEN_URI, revoke_uri=GOOGLE_REVOKE_URI, + login_hint=None, + device_uri=GOOGLE_DEVICE_URI, **kwargs): """Constructor for OAuth2WebServerFlow. @@ -1186,6 +1658,11 @@ class OAuth2WebServerFlow(Flow): defaults to Google's endpoints but any OAuth 2.0 provider can be used. 
revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. + login_hint: string, Either an email address or domain. Passing this hint + will either pre-fill the email box on the sign-in form or select the + proper multi-login session, thereby simplifying the login flow. + device_uri: string, URI for device authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider can be used. **kwargs: dict, The keyword arguments are all optional and required parameters for the OAuth calls. """ @@ -1193,10 +1670,12 @@ class OAuth2WebServerFlow(Flow): self.client_secret = client_secret self.scope = util.scopes_to_string(scope) self.redirect_uri = redirect_uri + self.login_hint = login_hint self.user_agent = user_agent self.auth_uri = auth_uri self.token_uri = token_uri self.revoke_uri = revoke_uri + self.device_uri = device_uri self.params = { 'access_type': 'offline', 'response_type': 'code', @@ -1217,8 +1696,9 @@ class OAuth2WebServerFlow(Flow): A URI as a string to redirect the user to begin the authorization flow. """ if redirect_uri is not None: - logger.warning(('The redirect_uri parameter for' - 'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. Please' + logger.warning(( + 'The redirect_uri parameter for ' + 'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. 
Please ' 'move to passing the redirect_uri in via the constructor.')) self.redirect_uri = redirect_uri @@ -1229,48 +1709,108 @@ class OAuth2WebServerFlow(Flow): 'client_id': self.client_id, 'redirect_uri': self.redirect_uri, 'scope': self.scope, - 'include_granted_scopes': 'true', - 'approval_prompt': 'force', } + if self.login_hint is not None: + query_params['login_hint'] = self.login_hint query_params.update(self.params) return _update_query_params(self.auth_uri, query_params) + @util.positional(1) + def step1_get_device_and_user_codes(self, http=None): + """Returns a user code and the verification URL where to enter it + + Returns: + A user code as a string for the user to authorize the application + An URL as a string where the user has to enter the code + """ + if self.device_uri is None: + raise ValueError('The value of device_uri must not be None.') + + body = urllib.urlencode({ + 'client_id': self.client_id, + 'scope': self.scope, + }) + headers = { + 'content-type': 'application/x-www-form-urlencoded', + } + + if self.user_agent is not None: + headers['user-agent'] = self.user_agent + + if http is None: + http = httplib2.Http() + + resp, content = http.request(self.device_uri, method='POST', body=body, + headers=headers) + if resp.status == 200: + try: + flow_info = json.loads(content) + except ValueError as e: + raise OAuth2DeviceCodeError( + 'Could not parse server response as JSON: "%s", error: "%s"' % ( + content, e)) + return DeviceFlowInfo.FromResponse(flow_info) + else: + error_msg = 'Invalid response %s.' % resp.status + try: + d = json.loads(content) + if 'error' in d: + error_msg += ' Error: %s' % d['error'] + except ValueError: + # Couldn't decode a JSON response, stick with the default message. + pass + raise OAuth2DeviceCodeError(error_msg) + @util.positional(2) - def step2_exchange(self, code, http=None): - """Exhanges a code for OAuth2Credentials. 
+ def step2_exchange(self, code=None, http=None, device_flow_info=None): + """Exchanges a code for OAuth2Credentials. Args: - code: string or dict, either the code as a string, or a dictionary - of the query parameters to the redirect_uri, which contains - the code. - http: httplib2.Http, optional http instance to use to do the fetch + + code: string, dict or None. For a non-device flow, this is + either the response code as a string, or a dictionary of + query parameters to the redirect_uri. For a device flow, + this should be None. + http: httplib2.Http, optional http instance to use when fetching + credentials. + device_flow_info: DeviceFlowInfo, return value from step1 in the + case of a device flow. Returns: An OAuth2Credentials object that can be used to authorize requests. Raises: - FlowExchangeError if a problem occured exchanging the code for a - refresh_token. + FlowExchangeError: if a problem occured exchanging the code for a + refresh_token. + ValueError: if code and device_flow_info are both provided or both + missing. + """ + if code is None and device_flow_info is None: + raise ValueError('No code or device_flow_info provided.') + if code is not None and device_flow_info is not None: + raise ValueError('Cannot provide both code and device_flow_info.') - if not (isinstance(code, str) or isinstance(code, unicode)): + if code is None: + code = device_flow_info.device_code + elif isinstance(code, dict): if 'code' not in code: - if 'error' in code: - error_msg = code['error'] - else: - error_msg = 'No code was supplied in the query parameters.' 
- raise FlowExchangeError(error_msg) - else: - code = code['code'] + raise FlowExchangeError(code.get( + 'error', 'No code was supplied in the query parameters.')) + code = code['code'] - body = urllib.urlencode({ - 'grant_type': 'authorization_code', + post_data = { 'client_id': self.client_id, 'client_secret': self.client_secret, 'code': code, - 'redirect_uri': self.redirect_uri, # 'scope': self.scope, - }) + } + if device_flow_info is not None: + post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0' + else: + post_data['grant_type'] = 'authorization_code' + post_data['redirect_uri'] = self.redirect_uri + body = urllib.urlencode(post_data) headers = { 'content-type': 'application/x-www-form-urlencoded', } @@ -1287,6 +1827,10 @@ class OAuth2WebServerFlow(Flow): if resp.status == 200 and 'access_token' in d: access_token = d['access_token'] refresh_token = d.get('refresh_token', None) + if not refresh_token: + logger.info( + 'Received token response with no refresh_token. Consider ' + "reauthenticating with approval_prompt='force'.") token_expiry = None if 'expires_in' in d: token_expiry = datetime.datetime.utcnow() + datetime.timedelta( @@ -1303,7 +1847,7 @@ class OAuth2WebServerFlow(Flow): id_token=d.get('id_token', None), token_response=d) else: - logger.info('Failed to retrieve access token: %s' % content) + logger.info('Failed to retrieve access token: %s', content) if 'error' in d: # you never know what those providers got to say error_msg = unicode(d['error']) @@ -1314,7 +1858,8 @@ class OAuth2WebServerFlow(Flow): @util.positional(2) def flow_from_clientsecrets(filename, scope, redirect_uri=None, - message=None, cache=None): + message=None, cache=None, login_hint=None, + device_uri=None): """Create a Flow from a clientsecrets file. 
Will create the right kind of Flow based on the contents of the clientsecrets @@ -1332,6 +1877,11 @@ def flow_from_clientsecrets(filename, scope, redirect_uri=None, provided then clientsecrets.InvalidClientSecretsError will be raised. cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. + login_hint: string, Either an email address or domain. Passing this hint + will either pre-fill the email box on the sign-in form or select the + proper multi-login session, thereby simplifying the login flow. + device_uri: string, URI for device authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: A Flow object. @@ -1348,10 +1898,13 @@ def flow_from_clientsecrets(filename, scope, redirect_uri=None, 'redirect_uri': redirect_uri, 'auth_uri': client_info['auth_uri'], 'token_uri': client_info['token_uri'], + 'login_hint': login_hint, } revoke_uri = client_info.get('revoke_uri') if revoke_uri is not None: constructor_kwargs['revoke_uri'] = revoke_uri + if device_uri is not None: + constructor_kwargs['device_uri'] = device_uri return OAuth2WebServerFlow( client_info['client_id'], client_info['client_secret'], scope, **constructor_kwargs) diff --git a/oauth2client/clientsecrets.py b/oauth2client/clientsecrets.py index ac99aae9..bfe51a65 100644 --- a/oauth2client/clientsecrets.py +++ b/oauth2client/clientsecrets.py @@ -1,4 +1,4 @@ -# Copyright (C) 2011 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,8 +20,8 @@ an OAuth 2.0 protected service. __author__ = 'jcgregorio@google.com (Joe Gregorio)' +import json -from anyjson import simplejson # Properties that make a client_secrets.json file valid. 
TYPE_WEB = 'web' @@ -87,12 +87,12 @@ def _validate_clientsecrets(obj): def load(fp): - obj = simplejson.load(fp) + obj = json.load(fp) return _validate_clientsecrets(obj) def loads(s): - obj = simplejson.loads(s) + obj = json.loads(s) return _validate_clientsecrets(obj) @@ -100,7 +100,7 @@ def _loadfile(filename): try: fp = file(filename, 'r') try: - obj = simplejson.load(fp) + obj = json.load(fp) finally: fp.close() except IOError: diff --git a/oauth2client/crypt.py b/oauth2client/crypt.py index 2d31815d..78112661 100644 --- a/oauth2client/crypt.py +++ b/oauth2client/crypt.py @@ -1,7 +1,6 @@ -#!/usr/bin/python2.4 # -*- coding: utf-8 -*- # -# Copyright (C) 2011 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,14 +13,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +"""Crypto-related routines for oauth2client.""" import base64 -import hashlib +import json import logging import time -from anyjson import simplejson - CLOCK_SKEW_SECS = 300 # 5 minutes in seconds AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds @@ -38,7 +36,6 @@ class AppIdentityError(Exception): try: from OpenSSL import crypto - class OpenSSLVerifier(object): """Verifies the signature on a message.""" @@ -125,10 +122,11 @@ try: Raises: OpenSSL.crypto.Error if the key can't be parsed. 
""" - if key.startswith('-----BEGIN '): - pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key) + parsed_pem_key = _parse_pem_key(key) + if parsed_pem_key: + pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key) else: - pkey = crypto.load_pkcs12(key, password).get_privatekey() + pkey = crypto.load_pkcs12(key, password.encode('utf8')).get_privatekey() return OpenSSLSigner(pkey) except ImportError: @@ -140,6 +138,7 @@ try: from Crypto.PublicKey import RSA from Crypto.Hash import SHA256 from Crypto.Signature import PKCS1_v1_5 + from Crypto.Util.asn1 import DerSequence class PyCryptoVerifier(object): @@ -181,14 +180,15 @@ try: Returns: Verifier instance. - - Raises: - NotImplementedError if is_x509_cert is true. """ if is_x509_cert: - raise NotImplementedError( - 'X509 certs are not supported by the PyCrypto library. ' - 'Try using PyOpenSSL if native code is an option.') + pemLines = key_pem.replace(' ', '').split() + certDer = _urlsafe_b64decode(''.join(pemLines[1:-1])) + certSeq = DerSequence() + certSeq.decode(certDer) + tbsSeq = DerSequence() + tbsSeq.decode(certSeq[0]) + pubkey = RSA.importKey(tbsSeq[6]) else: pubkey = RSA.importKey(key_pem) return PyCryptoVerifier(pubkey) @@ -230,11 +230,12 @@ try: Raises: NotImplementedError if they key isn't in PEM format. """ - if key.startswith('-----BEGIN '): - pkey = RSA.importKey(key) + parsed_pem_key = _parse_pem_key(key) + if parsed_pem_key: + pkey = RSA.importKey(parsed_pem_key) else: raise NotImplementedError( - 'PKCS12 format is not supported by the PyCrpto library. ' + 'PKCS12 format is not supported by the PyCrypto library. ' 'Try converting to a "PEM" ' '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) ' 'or using PyOpenSSL if native code is an option.') @@ -256,6 +257,23 @@ else: 'PyOpenSSL, or PyCrypto 2.6 or later') +def _parse_pem_key(raw_key_input): + """Identify and extract PEM keys. 
+ + Determines whether the given key is in the format of PEM key, and extracts + the relevant part of the key if it is. + + Args: + raw_key_input: The contents of a private key file (either PEM or PKCS12). + + Returns: + string, The actual key if the contents are from a PEM file, or else None. + """ + offset = raw_key_input.find('-----BEGIN ') + if offset != -1: + return raw_key_input[offset:] + + def _urlsafe_b64encode(raw_bytes): return base64.urlsafe_b64encode(raw_bytes).rstrip('=') @@ -268,7 +286,7 @@ def _urlsafe_b64decode(b64string): def _json_encode(data): - return simplejson.dumps(data, separators = (',', ':')) + return json.dumps(data, separators=(',', ':')) def make_signed_jwt(signer, payload): @@ -286,8 +304,8 @@ def make_signed_jwt(signer, payload): header = {'typ': 'JWT', 'alg': 'RS256'} segments = [ - _urlsafe_b64encode(_json_encode(header)), - _urlsafe_b64encode(_json_encode(payload)), + _urlsafe_b64encode(_json_encode(header)), + _urlsafe_b64encode(_json_encode(payload)), ] signing_input = '.'.join(segments) @@ -318,9 +336,8 @@ def verify_signed_jwt_with_certs(jwt, certs, audience): """ segments = jwt.split('.') - if (len(segments) != 3): - raise AppIdentityError( - 'Wrong number of segments in token: %s' % jwt) + if len(segments) != 3: + raise AppIdentityError('Wrong number of segments in token: %s' % jwt) signed = '%s.%s' % (segments[0], segments[1]) signature = _urlsafe_b64decode(segments[2]) @@ -328,15 +345,15 @@ def verify_signed_jwt_with_certs(jwt, certs, audience): # Parse token. json_body = _urlsafe_b64decode(segments[1]) try: - parsed = simplejson.loads(json_body) + parsed = json.loads(json_body) except: raise AppIdentityError('Can\'t parse token: %s' % json_body) # Check signature. 
verified = False - for (keyname, pem) in certs.items(): + for _, pem in certs.items(): verifier = Verifier.from_string(pem, True) - if (verifier.verify(signed, signature)): + if verifier.verify(signed, signature): verified = True break if not verified: @@ -354,16 +371,15 @@ def verify_signed_jwt_with_certs(jwt, certs, audience): if exp is None: raise AppIdentityError('No exp field in token: %s' % json_body) if exp >= now + MAX_TOKEN_LIFETIME_SECS: - raise AppIdentityError( - 'exp field too far in future: %s' % json_body) + raise AppIdentityError('exp field too far in future: %s' % json_body) latest = exp + CLOCK_SKEW_SECS if now < earliest: raise AppIdentityError('Token used too early, %d < %d: %s' % - (now, earliest, json_body)) + (now, earliest, json_body)) if now > latest: raise AppIdentityError('Token used too late, %d > %d: %s' % - (now, latest, json_body)) + (now, latest, json_body)) # Check audience. if audience is not None: @@ -372,6 +388,6 @@ def verify_signed_jwt_with_certs(jwt, certs, audience): raise AppIdentityError('No aud field in token: %s' % json_body) if aud != audience: raise AppIdentityError('Wrong recipient, %s != %s: %s' % - (aud, audience, json_body)) + (aud, audience, json_body)) return parsed diff --git a/oauth2client/django_orm.py b/oauth2client/django_orm.py index d54d20c2..65c5d201 100644 --- a/oauth2client/django_orm.py +++ b/oauth2client/django_orm.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -116,14 +116,21 @@ class Storage(BaseStorage): credential.set_store(self) return credential - def locked_put(self, credentials): + def locked_put(self, credentials, overwrite=False): """Write a Credentials to the datastore. Args: credentials: Credentials, the credentials to store. 
+ overwrite: Boolean, indicates whether you would like these credentials to + overwrite any existing stored credentials. """ args = {self.key_name: self.key_value} - entity = self.model_class(**args) + + if overwrite: + entity, unused_is_new = self.model_class.objects.get_or_create(**args) + else: + entity = self.model_class(**args) + setattr(entity, self.property_name, credentials) entity.save() diff --git a/oauth2client/file.py b/oauth2client/file.py index 7667f260..914c5090 100644 --- a/oauth2client/file.py +++ b/oauth2client/file.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,12 +21,10 @@ credentials. __author__ = 'jcgregorio@google.com (Joe Gregorio)' import os -import stat import threading -from anyjson import simplejson -from client import Storage as BaseStorage -from client import Credentials +from oauth2client.client import Credentials +from oauth2client.client import Storage as BaseStorage class CredentialsFileSymbolicLinkError(Exception): diff --git a/oauth2client/gce.py b/oauth2client/gce.py index c7fd7c18..5a3e8249 100644 --- a/oauth2client/gce.py +++ b/oauth2client/gce.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,12 +19,11 @@ Utilities for making it easier to use OAuth 2.0 on Google Compute Engine. 
__author__ = 'jcgregorio@google.com (Joe Gregorio)' -import httplib2 +import json import logging -import uritemplate +import urllib from oauth2client import util -from oauth2client.anyjson import simplejson from oauth2client.client import AccessTokenRefreshError from oauth2client.client import AssertionCredentials @@ -57,13 +56,14 @@ class AppAssertionCredentials(AssertionCredentials): requested. """ self.scope = util.scopes_to_string(scope) + self.kwargs = kwargs # Assertion type is no longer used, but still in the parent class signature. super(AppAssertionCredentials, self).__init__(None) @classmethod - def from_json(cls, json): - data = simplejson.loads(json) + def from_json(cls, json_data): + data = json.loads(json_data) return AppAssertionCredentials(data['scope']) def _refresh(self, http_request): @@ -78,13 +78,28 @@ class AppAssertionCredentials(AssertionCredentials): Raises: AccessTokenRefreshError: When the refresh fails. """ - uri = uritemplate.expand(META, {'scope': self.scope}) + query = '?scope=%s' % urllib.quote(self.scope, '') + uri = META.replace('{?scope}', query) response, content = http_request(uri) if response.status == 200: try: - d = simplejson.loads(content) - except StandardError, e: + d = json.loads(content) + except StandardError as e: raise AccessTokenRefreshError(str(e)) self.access_token = d['accessToken'] else: + if response.status == 404: + content += (' This can occur if a VM was created' + ' with no service account or scopes.') raise AccessTokenRefreshError(content) + + @property + def serialization_data(self): + raise NotImplementedError( + 'Cannot serialize credentials for GCE service accounts.') + + def create_scoped_required(self): + return not self.scope + + def create_scoped(self, scopes): + return AppAssertionCredentials(scopes, **self.kwargs) diff --git a/oauth2client/keyring_storage.py b/oauth2client/keyring_storage.py index efe2949c..cda1d9a3 100644 --- a/oauth2client/keyring_storage.py +++ 
b/oauth2client/keyring_storage.py @@ -1,4 +1,4 @@ -# Copyright (C) 2012 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,11 +19,12 @@ A Storage for Credentials that uses the keyring module. __author__ = 'jcgregorio@google.com (Joe Gregorio)' -import keyring import threading -from client import Storage as BaseStorage -from client import Credentials +import keyring + +from oauth2client.client import Credentials +from oauth2client.client import Storage as BaseStorage class Storage(BaseStorage): diff --git a/oauth2client/locked_file.py b/oauth2client/locked_file.py index 31514dcf..27fc1ea8 100644 --- a/oauth2client/locked_file.py +++ b/oauth2client/locked_file.py @@ -1,4 +1,4 @@ -# Copyright 2011 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -70,6 +70,7 @@ class _Opener(object): self._mode = mode self._fallback_mode = fallback_mode self._fh = None + self._lock_fd = None def is_locked(self): """Was the file locked.""" @@ -122,7 +123,7 @@ class _PosixOpener(_Opener): validate_file(self._filename) try: self._fh = open(self._filename, self._mode) - except IOError, e: + except IOError as e: # If we can't access with _mode, try _fallback_mode and don't lock. if e.errno == errno.EACCES: self._fh = open(self._filename, self._fallback_mode) @@ -137,12 +138,12 @@ class _PosixOpener(_Opener): self._locked = True break - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise if (time.time() - start_time) >= timeout: - logger.warn('Could not acquire lock %s in %s seconds' % ( - lock_filename, timeout)) + logger.warn('Could not acquire lock %s in %s seconds', + lock_filename, timeout) # Close the file and open in fallback_mode. 
if self._fh: self._fh.close() @@ -192,9 +193,9 @@ try: validate_file(self._filename) try: self._fh = open(self._filename, self._mode) - except IOError, e: + except IOError as e: # If we can't access with _mode, try _fallback_mode and don't lock. - if e.errno == errno.EACCES: + if e.errno in (errno.EPERM, errno.EACCES): self._fh = open(self._filename, self._fallback_mode) return @@ -204,7 +205,7 @@ try: fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX) self._locked = True return - except IOError, e: + except IOError as e: # If not retrying, then just pass on the error. if timeout == 0: raise e @@ -212,8 +213,8 @@ try: raise e # We could not acquire the lock. Try again. if (time.time() - start_time) >= timeout: - logger.warn('Could not lock %s in %s seconds' % ( - self._filename, timeout)) + logger.warn('Could not lock %s in %s seconds', + self._filename, timeout) if self._fh: self._fh.close() self._fh = open(self._filename, self._fallback_mode) @@ -267,7 +268,7 @@ try: validate_file(self._filename) try: self._fh = open(self._filename, self._mode) - except IOError, e: + except IOError as e: # If we can't access with _mode, try _fallback_mode and don't lock. if e.errno == errno.EACCES: self._fh = open(self._filename, self._fallback_mode) @@ -284,7 +285,7 @@ try: pywintypes.OVERLAPPED()) self._locked = True return - except pywintypes.error, e: + except pywintypes.error as e: if timeout == 0: raise e @@ -308,7 +309,7 @@ try: try: hfile = win32file._get_osfhandle(self._fh.fileno()) win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED()) - except pywintypes.error, e: + except pywintypes.error as e: if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR: raise self._locked = False diff --git a/oauth2client/multistore_file.py b/oauth2client/multistore_file.py index ce7a5194..6d2ef577 100644 --- a/oauth2client/multistore_file.py +++ b/oauth2client/multistore_file.py @@ -1,4 +1,4 @@ -# Copyright 2011 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -43,17 +43,15 @@ The format of the stored data is like so: __author__ = 'jbeda@google.com (Joe Beda)' -import base64 -import errno +import json import logging import os import threading -from anyjson import simplejson -from oauth2client.client import Storage as BaseStorage from oauth2client.client import Credentials +from oauth2client.client import Storage as BaseStorage from oauth2client import util -from locked_file import LockedFile +from oauth2client.locked_file import LockedFile logger = logging.getLogger(__name__) @@ -286,7 +284,7 @@ class _MultiStore(object): if self._warn_on_readonly: logger.warn('The credentials file (%s) is not writable. Opening in ' 'read-only mode. Any refreshed credentials will only be ' - 'valid for this run.' % self._file.filename()) + 'valid for this run.', self._file.filename()) if os.path.getsize(self._file.filename()) == 0: logger.debug('Initializing empty multistore file') # The multistore is empty so write out an empty file. @@ -315,7 +313,7 @@ class _MultiStore(object): """ assert self._thread_lock.locked() self._file.file_handle().seek(0) - return simplejson.load(self._file.file_handle()) + return json.load(self._file.file_handle()) def _locked_json_write(self, data): """Write a JSON serializable data structure to the multistore. 
@@ -329,7 +327,7 @@ class _MultiStore(object): if self._read_only: return self._file.file_handle().seek(0) - simplejson.dump(data, self._file.file_handle(), sort_keys=True, indent=2) + json.dump(data, self._file.file_handle(), sort_keys=True, indent=2, separators=(',', ': ')) self._file.file_handle().truncate() def _refresh_data_cache(self): @@ -387,7 +385,7 @@ class _MultiStore(object): raw_key = cred_entry['key'] key = util.dict_to_tuple_key(raw_key) credential = None - credential = Credentials.new_from_json(simplejson.dumps(cred_entry['credential'])) + credential = Credentials.new_from_json(json.dumps(cred_entry['credential'])) return (key, credential) def _write(self): @@ -400,7 +398,7 @@ class _MultiStore(object): raw_data['data'] = raw_creds for (cred_key, cred) in self._data.items(): raw_key = dict(cred_key) - raw_cred = simplejson.loads(cred.to_json()) + raw_cred = json.loads(cred.to_json()) raw_creds.append({'key': raw_key, 'credential': raw_cred}) self._locked_json_write(raw_data) diff --git a/oauth2client/old_run.py b/oauth2client/old_run.py index da233584..c7383c37 100644 --- a/oauth2client/old_run.py +++ b/oauth2client/old_run.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -96,7 +96,7 @@ def run(flow, storage, http=None): try: httpd = ClientRedirectServer((FLAGS.auth_host_name, port), ClientRedirectHandler) - except socket.error, e: + except socket.error as e: pass else: success = True @@ -150,7 +150,7 @@ def run(flow, storage, http=None): try: credential = flow.step2_exchange(code, http=http) - except client.FlowExchangeError, e: + except client.FlowExchangeError as e: sys.exit('Authentication has failed: %s' % e) storage.put(credential) diff --git a/oauth2client/service_account.py b/oauth2client/service_account.py new file mode 100644 index 00000000..45d955b4 --- /dev/null +++ b/oauth2client/service_account.py @@ -0,0 +1,132 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A service account credentials class. + +This credentials class is implemented on top of rsa library. 
+""" + +import base64 +import json +import time + +from pyasn1.codec.ber import decoder +from pyasn1_modules.rfc5208 import PrivateKeyInfo +import rsa + +from oauth2client import GOOGLE_REVOKE_URI +from oauth2client import GOOGLE_TOKEN_URI +from oauth2client import util +from oauth2client.client import AssertionCredentials + + +class _ServiceAccountCredentials(AssertionCredentials): + """Class representing a service account (signed JWT) credential.""" + + MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds + + def __init__(self, service_account_id, service_account_email, private_key_id, + private_key_pkcs8_text, scopes, user_agent=None, + token_uri=GOOGLE_TOKEN_URI, revoke_uri=GOOGLE_REVOKE_URI, + **kwargs): + + super(_ServiceAccountCredentials, self).__init__( + None, user_agent=user_agent, token_uri=token_uri, revoke_uri=revoke_uri) + + self._service_account_id = service_account_id + self._service_account_email = service_account_email + self._private_key_id = private_key_id + self._private_key = _get_private_key(private_key_pkcs8_text) + self._private_key_pkcs8_text = private_key_pkcs8_text + self._scopes = util.scopes_to_string(scopes) + self._user_agent = user_agent + self._token_uri = token_uri + self._revoke_uri = revoke_uri + self._kwargs = kwargs + + def _generate_assertion(self): + """Generate the assertion that will be used in the request.""" + + header = { + 'alg': 'RS256', + 'typ': 'JWT', + 'kid': self._private_key_id + } + + now = long(time.time()) + payload = { + 'aud': self._token_uri, + 'scope': self._scopes, + 'iat': now, + 'exp': now + _ServiceAccountCredentials.MAX_TOKEN_LIFETIME_SECS, + 'iss': self._service_account_email + } + payload.update(self._kwargs) + + assertion_input = '%s.%s' % ( + _urlsafe_b64encode(header), + _urlsafe_b64encode(payload)) + + # Sign the assertion. 
+ signature = base64.urlsafe_b64encode(rsa.pkcs1.sign( + assertion_input, self._private_key, 'SHA-256')).rstrip('=') + + return '%s.%s' % (assertion_input, signature) + + def sign_blob(self, blob): + return (self._private_key_id, + rsa.pkcs1.sign(blob, self._private_key, 'SHA-256')) + + @property + def service_account_email(self): + return self._service_account_email + + @property + def serialization_data(self): + return { + 'type': 'service_account', + 'client_id': self._service_account_id, + 'client_email': self._service_account_email, + 'private_key_id': self._private_key_id, + 'private_key': self._private_key_pkcs8_text + } + + def create_scoped_required(self): + return not self._scopes + + def create_scoped(self, scopes): + return _ServiceAccountCredentials(self._service_account_id, + self._service_account_email, + self._private_key_id, + self._private_key_pkcs8_text, + scopes, + user_agent=self._user_agent, + token_uri=self._token_uri, + revoke_uri=self._revoke_uri, + **self._kwargs) + + +def _urlsafe_b64encode(data): + return base64.urlsafe_b64encode( + json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip('=') + + +def _get_private_key(private_key_pkcs8_text): + """Get an RSA private key object from a pkcs8 representation.""" + + der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY') + asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo()) + return rsa.PrivateKey.load_pkcs1( + asn1_private_key.getComponentByName('privateKey').asOctets(), + format='DER') diff --git a/oauth2client/tools.py b/oauth2client/tools.py index 4bca775c..f87e4df3 100644 --- a/oauth2client/tools.py +++ b/oauth2client/tools.py @@ -1,4 +1,4 @@ -# Copyright (C) 2013 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -23,24 +23,17 @@ __author__ = 'jcgregorio@google.com (Joe Gregorio)' __all__ = ['argparser', 'run_flow', 'run', 'message_if_missing'] -import BaseHTTPServer #import argparse -import httplib2 +import BaseHTTPServer import logging -import os import socket import sys +import urlparse import webbrowser from oauth2client import client -from oauth2client import file from oauth2client import util -try: - from urlparse import parse_qsl -except ImportError: - from cgi import parse_qsl - _CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0 To make this sample run you will need to populate the client_secrets.json file @@ -52,20 +45,20 @@ with information from the APIs Console . """ -# run_parser is an ArgumentParser that contains command-line options expected +# argparser is an ArgumentParser that contains command-line options expected # by tools.run(). Pass it in as part of the 'parents' argument to your own # ArgumentParser. #argparser = argparse.ArgumentParser(add_help=False) #argparser.add_argument('--auth_host_name', default='localhost', -# help='Hostname when running a local web server.') +# help='Hostname when running a local web server.') #argparser.add_argument('--noauth_local_webserver', action='store_true', -# default=False, help='Do not run a local web server.') +# default=False, help='Do not run a local web server.') #argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int, -# nargs='*', help='Port web server should listen on.') +# nargs='*', help='Port web server should listen on.') #argparser.add_argument('--logging_level', default='ERROR', -# choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', -# 'CRITICAL'], -# help='Set the logging level of detail.') +# choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', +# 'CRITICAL'], +# help='Set the logging level of detail.') class ClientRedirectServer(BaseHTTPServer.HTTPServer): @@ -84,26 +77,25 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler): into the servers query_params and 
then stops serving. """ - def do_GET(s): + def do_GET(self): """Handle a GET request. Parses the query parameters and prints a message if the flow has completed. Note that we can't detect if an error occurred. """ - s.send_response(200) - s.send_header("Content-type", "text/html") - s.end_headers() - query = s.path.split('?', 1)[-1] - query = dict(parse_qsl(query)) - s.server.query_params = query - s.wfile.write("<html><head><title>Authentication Status</title></head>") - s.wfile.write("<body><p>The authentication flow has completed.</p>") - s.wfile.write("</body></html>") + self.send_response(200) + self.send_header("Content-type", "text/html") + self.end_headers() + query = self.path.split('?', 1)[-1] + query = dict(urlparse.parse_qsl(query)) + self.server.query_params = query + self.wfile.write("<html><head><title>Authentication Status</title></head>") + self.wfile.write("<body><p>The authentication flow has completed.</p>") + self.wfile.write("</body></html>") def log_message(self, format, *args): """Do not log messages to stdout while running as command line program.""" - pass @util.positional(3) @@ -141,7 +133,7 @@ def run_flow(flow, storage, flags, http=None): parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - parents=[tools.run_parser]) + parents=[tools.argparser]) flags = parser.parse_args(argv) Args: @@ -163,7 +155,7 @@ def run_flow(flow, storage, flags, http=None): try: httpd = ClientRedirectServer((flags.auth_host_name, port), ClientRedirectHandler) - except socket.error, e: + except socket.error as e: pass else: success = True @@ -186,7 +178,7 @@ def run_flow(flow, storage, flags, http=None): authorize_url = flow.step1_get_authorize_url() if flags.short_url: - from apiclient.discovery import build + from googleapiclient.discovery import build service = build('urlshortener', 'v1', http=http) url_result = service.url().insert(body={'longUrl': authorize_url}).execute() authorize_url = url_result['id'] @@ -199,7 +191,6 @@ def run_flow(flow, storage, flags, http=None): print print 'If your browser is on a different machine then exit and re-run this' print 'after creating a file called nobrowser.txt in the same path as GAM.'
-# print 'If your browser is on a different machine then exit and re-run this' # print 'application with the command-line parameter ' # print # print ' --noauth_local_webserver' @@ -225,7 +216,7 @@ def run_flow(flow, storage, flags, http=None): try: credential = flow.step2_exchange(code, http=http) - except client.FlowExchangeError, e: + except client.FlowExchangeError as e: sys.exit('Authentication has failed: %s' % e) storage.put(credential) @@ -241,8 +232,8 @@ def message_if_missing(filename): return _CLIENT_SECRETS_MESSAGE % filename try: - from old_run import run - from old_run import FLAGS + from oauth2client.old_run import run + from oauth2client.old_run import FLAGS except ImportError: def run(*args, **kwargs): raise NotImplementedError( diff --git a/oauth2client/util.py b/oauth2client/util.py index 90dff155..292f1df7 100644 --- a/oauth2client/util.py +++ b/oauth2client/util.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2010 Google Inc. +# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,14 +17,16 @@ """Common utility library.""" -__author__ = ['rafek@google.com (Rafe Kaplan)', - 'guido@google.com (Guido van Rossum)', +__author__ = [ + 'rafek@google.com (Rafe Kaplan)', + 'guido@google.com (Guido van Rossum)', ] + __all__ = [ - 'positional', - 'POSITIONAL_WARNING', - 'POSITIONAL_EXCEPTION', - 'POSITIONAL_IGNORE', + 'positional', + 'POSITIONAL_WARNING', + 'POSITIONAL_EXCEPTION', + 'POSITIONAL_IGNORE', ] import inspect @@ -33,11 +35,6 @@ import types import urllib import urlparse -try: - from urlparse import parse_qsl -except ImportError: - from cgi import parse_qsl - logger = logging.getLogger(__name__) POSITIONAL_WARNING = 'WARNING' @@ -190,7 +187,7 @@ def _add_query_parameter(url, name, value): return url else: parsed = list(urlparse.urlparse(url)) - q = dict(parse_qsl(parsed[4])) + q = dict(urlparse.parse_qsl(parsed[4])) q[name] = value parsed[4] = urllib.urlencode(q) return urlparse.urlunparse(parsed) diff --git a/oauth2client/xsrfutil.py b/oauth2client/xsrfutil.py index 7e1fe5c8..1dc5b5c6 100644 --- a/oauth2client/xsrfutil.py +++ b/oauth2client/xsrfutil.py @@ -1,6 +1,6 @@ #!/usr/bin/python2.5 # -# Copyright 2010 the Melange authors. +# Copyright 2014 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,14 +17,13 @@ """Helper methods for creating & verifying XSRF tokens.""" __authors__ = [ - '"Doug Coker" ', - '"Joe Gregorio" ', + '"Doug Coker" ', + '"Joe Gregorio" ', ] import base64 import hmac -import os # for urandom import time from oauth2client import util