upgrade googleapiclient and oauth2client versions

This commit is contained in:
Jay Lee
2014-11-19 09:22:13 -05:00
parent 71ade81064
commit 0ca14a918b
26 changed files with 5139 additions and 385 deletions

80
gam.py
View File

@ -31,10 +31,10 @@ import sys, os, time, datetime, random, socket, csv, platform, re, calendar, bas
import json import json
import httplib2 import httplib2
import apiclient import googleapiclient
import apiclient.discovery import googleapiclient.discovery
import apiclient.errors import googleapiclient.errors
import apiclient.http import googleapiclient.http
import oauth2client.client import oauth2client.client
import oauth2client.file import oauth2client.file
import oauth2client.tools import oauth2client.tools
@ -148,7 +148,7 @@ def getGamPath():
def doGAMVersion(): def doGAMVersion():
import struct import struct
print u'Dito GAM %s - http://git.io/gam\n%s\nPython %s.%s.%s %s-bit %s\ngoogle-api-python-client %s\n%s %s\nPath: %s' % (__version__, __author__, print u'Dito GAM %s - http://git.io/gam\n%s\nPython %s.%s.%s %s-bit %s\ngoogle-api-python-client %s\n%s %s\nPath: %s' % (__version__, __author__,
sys.version_info[0], sys.version_info[1], sys.version_info[2], struct.calcsize('P')*8, sys.version_info[3], apiclient.__version__, sys.version_info[0], sys.version_info[1], sys.version_info[2], struct.calcsize('P')*8, sys.version_info[3], googleapiclient.__version__,
platform.platform(), platform.machine(), getGamPath()) platform.platform(), platform.machine(), getGamPath())
def doGAMCheckForUpdates(): def doGAMCheckForUpdates():
@ -368,7 +368,7 @@ def callGAPI(service, function, silent_errors=False, soft_errors=False, throw_re
for n in range(1, retries+1): for n in range(1, retries+1):
try: try:
return method(prettyPrint=prettyPrint, **kwargs).execute() return method(prettyPrint=prettyPrint, **kwargs).execute()
except apiclient.errors.HttpError, e: except googleapiclient.errors.HttpError, e:
try: try:
error = json.loads(e.content) error = json.loads(e.content)
except ValueError: except ValueError:
@ -542,14 +542,14 @@ def buildGAPIObject(api):
if api in [u'directory', u'reports']: if api in [u'directory', u'reports']:
api = u'admin' api = u'admin'
try: try:
service = apiclient.discovery.build(api, version, http=http) service = googleapiclient.discovery.build(api, version, http=http)
except apiclient.errors.UnknownApiNameOrVersion: except googleapiclient.errors.UnknownApiNameOrVersion:
disc_file = getGamPath()+u'%s-%s.json' % (api, version) disc_file = getGamPath()+u'%s-%s.json' % (api, version)
if os.path.isfile(disc_file): if os.path.isfile(disc_file):
f = file(disc_file, 'rb') f = file(disc_file, 'rb')
discovery = f.read() discovery = f.read()
f.close() f.close()
service = apiclient.discovery.build_from_document(discovery, base=u'https://www.googleapis.com', http=http) service = googleapiclient.discovery.build_from_document(discovery, base=u'https://www.googleapis.com', http=http)
else: else:
raise raise
except httplib2.CertificateValidationUnsupported: except httplib2.CertificateValidationUnsupported:
@ -619,7 +619,7 @@ def buildGAPIServiceObject(api, act_as=None):
http = credentials.authorize(http) http = credentials.authorize(http)
version = getAPIVer(api) version = getAPIVer(api)
try: try:
return apiclient.discovery.build(api, version, http=http) return googleapiclient.discovery.build(api, version, http=http)
except oauth2client.client.AccessTokenRefreshError, e: except oauth2client.client.AccessTokenRefreshError, e:
if e.message == u'access_denied': if e.message == u'access_denied':
print u'Error: Access Denied. Please make sure the Client Name:\n\n%s\n\nis authorized for the API Scope(s):\n\n%s\n\nThis can be configured in your Control Panel under:\n\nSecurity -->\nAdvanced Settings -->\nManage third party OAuth Client access' % (SERVICE_ACCOUNT_CLIENT_ID, ','.join(scope)) print u'Error: Access Denied. Please make sure the Client Name:\n\n%s\n\nis authorized for the API Scope(s):\n\n%s\n\nThis can be configured in your Control Panel under:\n\nSecurity -->\nAdvanced Settings -->\nManage third party OAuth Client access' % (SERVICE_ACCOUNT_CLIENT_ID, ','.join(scope))
@ -643,10 +643,10 @@ def buildDiscoveryObject(api):
if not os.path.isfile(getGamPath()+u'nocache.txt'): if not os.path.isfile(getGamPath()+u'nocache.txt'):
cache = u'%sgamcache' % getGamPath() cache = u'%sgamcache' % getGamPath()
http = httplib2.Http(ca_certs=getGamPath()+u'cacert.pem', disable_ssl_certificate_validation=disable_ssl_certificate_validation, cache=cache) http = httplib2.Http(ca_certs=getGamPath()+u'cacert.pem', disable_ssl_certificate_validation=disable_ssl_certificate_validation, cache=cache)
requested_url = uritemplate.expand(apiclient.discovery.DISCOVERY_URI, params) requested_url = uritemplate.expand(googleapiclient.discovery.DISCOVERY_URI, params)
resp, content = http.request(requested_url) resp, content = http.request(requested_url)
if resp.status == 404: if resp.status == 404:
raise apiclient.errors.UnknownApiNameOrVersion("name: %s version: %s" % (api, version)) raise googleapiclient.errors.UnknownApiNameOrVersion("name: %s version: %s" % (api, version))
if resp.status >= 400: if resp.status >= 400:
raise HttpError(resp, content, uri=requested_url) raise HttpError(resp, content, uri=requested_url)
try: try:
@ -766,7 +766,7 @@ def showReport():
page_message = u'Got %%num_items%% users\n' page_message = u'Got %%num_items%% users\n'
usage = callGAPIpages(service=rep.userUsageReport(), function=u'get', items=u'usageReports', page_message=page_message, throw_reasons=[u'invalid'], date=str(try_date), userKey=userKey, customerId=customerId, filters=filters, parameters=parameters) usage = callGAPIpages(service=rep.userUsageReport(), function=u'get', items=u'usageReports', page_message=page_message, throw_reasons=[u'invalid'], date=str(try_date), userKey=userKey, customerId=customerId, filters=filters, parameters=parameters)
break break
except apiclient.errors.HttpError, e: except googleapiclient.errors.HttpError, e:
error = json.loads(e.content) error = json.loads(e.content)
try: try:
message = error[u'error'][u'errors'][0][u'message'] message = error[u'error'][u'errors'][0][u'message']
@ -803,7 +803,7 @@ def showReport():
try: try:
usage = callGAPIpages(service=rep.customerUsageReports(), function=u'get', items=u'usageReports', throw_reasons=[u'invalid'], customerId=customerId, date=str(try_date), parameters=parameters) usage = callGAPIpages(service=rep.customerUsageReports(), function=u'get', items=u'usageReports', throw_reasons=[u'invalid'], customerId=customerId, date=str(try_date), parameters=parameters)
break break
except apiclient.errors.HttpError, e: except googleapiclient.errors.HttpError, e:
error = json.loads(e.content) error = json.loads(e.content)
try: try:
message = error[u'error'][u'errors'][0][u'message'] message = error[u'error'][u'errors'][0][u'message']
@ -1543,7 +1543,7 @@ def getPhoto(users):
i += 1 i += 1
try: try:
photo = callGAPI(service=cd.users().photos(), function=u'get', throw_reasons=[u'notFound'], userKey=user) photo = callGAPI(service=cd.users().photos(), function=u'get', throw_reasons=[u'notFound'], userKey=user)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
print u' no photo for %s' % user print u' no photo for %s' % user
continue continue
try: try:
@ -2128,7 +2128,7 @@ def doUpdateDriveFile(users):
if drivefilename: if drivefilename:
fileIds = doDriveSearch(drive, query=u'"me" in owners and title = "%s"' % drivefilename) fileIds = doDriveSearch(drive, query=u'"me" in owners and title = "%s"' % drivefilename)
if local_filepath: if local_filepath:
media_body = apiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) media_body = googleapiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True)
for fileId in fileIds: for fileId in fileIds:
if operation == u'update': if operation == u'update':
if media_body: if media_body:
@ -2243,7 +2243,7 @@ def createDriveFile(users):
for a_parent in more_parents: for a_parent in more_parents:
body[u'parents'].append({u'id': a_parent}) body[u'parents'].append({u'id': a_parent})
if local_filepath: if local_filepath:
media_body = apiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True) media_body = googleapiclient.http.MediaFileUpload(local_filepath, mimetype=mimetype, resumable=True)
result = callGAPI(service=drive.files(), function=u'insert', convert=convert, ocr=ocr, ocrLanguage=ocrLanguage, media_body=media_body, body=body, fields='id') result = callGAPI(service=drive.files(), function=u'insert', convert=convert, ocr=ocr, ocrLanguage=ocrLanguage, media_body=media_body, body=body, fields='id')
try: try:
print u'Successfully uploaded %s to Drive file ID %s' % (local_filename, result[u'id']) print u'Successfully uploaded %s to Drive file ID %s' % (local_filename, result[u'id'])
@ -2886,14 +2886,14 @@ def doDeleteLabel(users):
continue continue
del_me_count = len(del_labels) del_me_count = len(del_labels)
i = 1 i = 1
dbatch = apiclient.http.BatchHttpRequest() dbatch = googleapiclient.http.BatchHttpRequest()
for del_me in del_labels: for del_me in del_labels:
print u' deleting label %s (%s/%s)' % (del_me[u'name'], i, del_me_count) print u' deleting label %s (%s/%s)' % (del_me[u'name'], i, del_me_count)
i += 1 i += 1
dbatch.add(gmail.users().labels().delete(userId=user, id=del_me[u'id']), callback=label_del_result) dbatch.add(gmail.users().labels().delete(userId=user, id=del_me[u'id']), callback=label_del_result)
if len(dbatch._order) == 25: if len(dbatch._order) == 25:
dbatch.execute() dbatch.execute()
dbatch = apiclient.http.BatchHttpRequest() dbatch = googleapiclient.http.BatchHttpRequest()
if len(dbatch._order) > 0: if len(dbatch._order) > 0:
dbatch.execute() dbatch.execute()
@ -2993,7 +2993,7 @@ def renameLabels(users):
print u' Renaming "%s" to "%s"' % (label[u'name'], new_label_name) print u' Renaming "%s" to "%s"' % (label[u'name'], new_label_name)
try: try:
callGAPI(service=gmail.users().labels(), function=u'patch', soft_errors=True, throw_reasons=[u'aborted'], id=label[u'id'], userId=user, body={u'name': new_label_name}) callGAPI(service=gmail.users().labels(), function=u'patch', soft_errors=True, throw_reasons=[u'aborted'], id=label[u'id'], userId=user, body={u'name': new_label_name})
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
if merge: if merge:
print u' Merging %s label to existing %s label' % (label[u'name'], new_label_name) print u' Merging %s label to existing %s label' % (label[u'name'], new_label_name)
q = u'label:"%s"' % label[u'name'] q = u'label:"%s"' % label[u'name']
@ -3826,7 +3826,7 @@ def doCreateAlias():
elif target_type == u'target': elif target_type == u'target':
try: try:
callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=targetKey, body=body) callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=targetKey, body=body)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=targetKey, body=body) callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=targetKey, body=body)
def doCreateOrg(): def doCreateOrg():
@ -4310,7 +4310,7 @@ def doUpdateGroup():
print u'added %s to group' % result[u'email'] print u'added %s to group' % result[u'email']
except TypeError: except TypeError:
pass pass
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
pass pass
elif sys.argv[4].lower() == u'sync': elif sys.argv[4].lower() == u'sync':
role = sys.argv[5].upper() role = sys.argv[5].upper()
@ -4328,7 +4328,7 @@ def doUpdateGroup():
sys.stderr.write(u' adding %s %s\n' % (role, user_email)) sys.stderr.write(u' adding %s %s\n' % (role, user_email))
try: try:
result = callGAPI(service=cd.members(), function=u'insert', soft_errors=True, throw_reasons=[u'duplicate'], groupKey=group, body={u'email': user_email, u'role': role}) result = callGAPI(service=cd.members(), function=u'insert', soft_errors=True, throw_reasons=[u'duplicate'], groupKey=group, body={u'email': user_email, u'role': role})
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
result = callGAPI(service=cd.members(), function=u'update', soft_errors=True, groupKey=group, memberKey=user_email, body={u'email': user_email, u'role': role}) result = callGAPI(service=cd.members(), function=u'update', soft_errors=True, groupKey=group, memberKey=user_email, body={u'email': user_email, u'role': role})
for user_email in to_remove: for user_email in to_remove:
sys.stderr.write(u' removing %s\n' % user_email) sys.stderr.write(u' removing %s\n' % user_email)
@ -4435,7 +4435,7 @@ def doUpdateAlias():
target_email = u'%s@%s' % (target_email, domain) target_email = u'%s@%s' % (target_email, domain)
try: try:
callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid'], userKey=alias, alias=alias) callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid'], userKey=alias, alias=alias)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
callGAPI(service=cd.groups().aliases(), function=u'delete', groupKey=alias, alias=alias) callGAPI(service=cd.groups().aliases(), function=u'delete', groupKey=alias, alias=alias)
if target_type == u'user': if target_type == u'user':
callGAPI(service=cd.users().aliases(), function=u'insert', userKey=target_email, body={u'alias': alias}) callGAPI(service=cd.users().aliases(), function=u'insert', userKey=target_email, body={u'alias': alias})
@ -4444,7 +4444,7 @@ def doUpdateAlias():
elif target_type == u'target': elif target_type == u'target':
try: try:
callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=target_email, body={u'alias': alias}) callGAPI(service=cd.users().aliases(), function=u'insert', throw_reasons=[u'invalid'], userKey=target_email, body={u'alias': alias})
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=target_email, body={u'alias': alias}) callGAPI(service=cd.groups().aliases(), function=u'insert', groupKey=target_email, body={u'alias': alias})
print u'updated alias %s' % alias print u'updated alias %s' % alias
@ -4579,7 +4579,7 @@ def doUpdateOrg():
sys.stderr.write(u' moving %s to %s (%s/%s)\n' % (user, orgUnitPath, current_user, user_count)) sys.stderr.write(u' moving %s to %s (%s/%s)\n' % (user, orgUnitPath, current_user, user_count))
try: try:
callGAPI(service=cd.users(), function=u'patch', throw_reasons=[u'conditionNotMet'], userKey=user, body={u'orgUnitPath': orgUnitPath}) callGAPI(service=cd.users(), function=u'patch', throw_reasons=[u'conditionNotMet'], userKey=user, body={u'orgUnitPath': orgUnitPath})
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
pass pass
current_user += 1 current_user += 1
else: else:
@ -4622,12 +4622,12 @@ def doWhatIs():
sys.stderr.write(u'%s is a user alias\n\n' % email) sys.stderr.write(u'%s is a user alias\n\n' % email)
doGetAliasInfo(alias_email=email) doGetAliasInfo(alias_email=email)
return return
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
sys.stderr.write(u'%s is not a user...\n' % email) sys.stderr.write(u'%s is not a user...\n' % email)
sys.stderr.write(u'%s is not a user alias...\n' % email) sys.stderr.write(u'%s is not a user alias...\n' % email)
try: try:
group = callGAPI(service=cd.groups(), function=u'get', throw_reasons=[u'notFound', u'badRequest'], groupKey=email, fields=u'email') group = callGAPI(service=cd.groups(), function=u'get', throw_reasons=[u'notFound', u'badRequest'], groupKey=email, fields=u'email')
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
sys.stderr.write(u'%s is not a group either!\n\nDoesn\'t seem to exist!\n\n' % email) sys.stderr.write(u'%s is not a group either!\n\nDoesn\'t seem to exist!\n\n' % email)
sys.exit(1) sys.exit(1)
if group[u'email'].lower() == email.lower(): if group[u'email'].lower() == email.lower():
@ -4826,7 +4826,7 @@ def doGetUserInfo(user_email=None):
productId, skuId = getProductAndSKU(sku) productId, skuId = getProductAndSKU(sku)
try: try:
result = callGAPI(service=lic.licenseAssignments(), function=u'get', throw_reasons=['notFound'], userId=user_email, productId=productId, skuId=skuId) result = callGAPI(service=lic.licenseAssignments(), function=u'get', throw_reasons=['notFound'], userId=user_email, productId=productId, skuId=skuId)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
continue continue
print u' %s' % result[u'skuId'] print u' %s' % result[u'skuId']
@ -4848,7 +4848,7 @@ def doGetGroupInfo(group_name=None):
basic_info = callGAPI(service=cd.groups(), function=u'get', groupKey=group_name) basic_info = callGAPI(service=cd.groups(), function=u'get', groupKey=group_name)
try: try:
settings = callGAPI(service=gs.groups(), function=u'get', retry_reasons=[u'serviceLimit'], groupUniqueId=basic_info[u'email'], throw_reasons=u'authError') # Use email address retrieved from cd since GS API doesn't support uid settings = callGAPI(service=gs.groups(), function=u'get', retry_reasons=[u'serviceLimit'], groupUniqueId=basic_info[u'email'], throw_reasons=u'authError') # Use email address retrieved from cd since GS API doesn't support uid
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
pass pass
print u'' print u''
print u'Group Settings:' print u'Group Settings:'
@ -4894,7 +4894,7 @@ def doGetAliasInfo(alias_email=None):
alias_email = u'%s@%s' % (alias_email, domain) alias_email = u'%s@%s' % (alias_email, domain)
try: try:
result = callGAPI(service=cd.users(), function=u'get', throw_reasons=[u'invalid', u'badRequest'], userKey=alias_email) result = callGAPI(service=cd.users(), function=u'get', throw_reasons=[u'invalid', u'badRequest'], userKey=alias_email)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
result = callGAPI(service=cd.groups(), function=u'get', groupKey=alias_email) result = callGAPI(service=cd.groups(), function=u'get', groupKey=alias_email)
print u' Alias Email: %s' % alias_email print u' Alias Email: %s' % alias_email
try: try:
@ -5066,7 +5066,7 @@ def doSiteVerifyAttempt():
body = {u'site':{u'type':verify_type, u'identifier':identifier}, u'verificationMethod':verificationMethod} body = {u'site':{u'type':verify_type, u'identifier':identifier}, u'verificationMethod':verificationMethod}
try: try:
verify_result = callGAPI(service=verif.webResource(), function=u'insert', throw_reasons=[u'badRequest'], verificationMethod=verificationMethod, body=body) verify_result = callGAPI(service=verif.webResource(), function=u'insert', throw_reasons=[u'badRequest'], verificationMethod=verificationMethod, body=body)
except apiclient.errors.HttpError, e: except googleapiclient.errors.HttpError, e:
error = json.loads(e.content) error = json.loads(e.content)
message = error[u'error'][u'errors'][0][u'message'] message = error[u'error'][u'errors'][0][u'message']
print u'ERROR: %s' % message print u'ERROR: %s' % message
@ -5223,7 +5223,7 @@ def doGetBackupCodes(users):
for user in users: for user in users:
try: try:
codes = callGAPI(service=cd.verificationCodes(), function=u'list', throw_reasons=[u'invalidArgument', u'invalid'], userKey=user) codes = callGAPI(service=cd.verificationCodes(), function=u'list', throw_reasons=[u'invalidArgument', u'invalid'], userKey=user)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
codes = dict() codes = dict()
codes[u'items'] = list() codes[u'items'] = list()
print u'Backup verification codes for %s' % user print u'Backup verification codes for %s' % user
@ -5262,7 +5262,7 @@ def doDelBackupCodes(users):
for user in users: for user in users:
try: try:
codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid',], userKey=user) codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid',], userKey=user)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
print u'No 2SV backup codes for %s' % user print u'No 2SV backup codes for %s' % user
continue continue
print u'2SV backup codes for %s invalidated' % user print u'2SV backup codes for %s invalidated' % user
@ -5288,7 +5288,7 @@ def doGetTokens(users):
for user in users: for user in users:
try: try:
token = callGAPI(service=cd.tokens(), function=u'get', throw_reasons=[u'notFound',], userKey=user, clientId=clientId, fields=u'clientId') token = callGAPI(service=cd.tokens(), function=u'get', throw_reasons=[u'notFound',], userKey=user, clientId=clientId, fields=u'clientId')
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
continue continue
print u'%s has allowed this token' % user print u'%s has allowed this token' % user
return return
@ -5339,7 +5339,7 @@ def doDeprovUser(users):
print u'Invalidating 2SV Backup Codes for %s' % user print u'Invalidating 2SV Backup Codes for %s' % user
try: try:
codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid'], userKey=user) codes = callGAPI(service=cd.verificationCodes(), function=u'invalidate', soft_errors=True, throw_reasons=[u'invalid'], userKey=user)
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
print u'No 2SV Backup Codes' print u'No 2SV Backup Codes'
print u'Getting tokens for %s...' % user print u'Getting tokens for %s...' % user
tokens = callGAPI(service=cd.tokens(), function=u'list', userKey=user, fields=u'items/clientId') tokens = callGAPI(service=cd.tokens(), function=u'list', userKey=user, fields=u'items/clientId')
@ -5673,7 +5673,7 @@ def doDeleteAlias(alias_email=None):
try: try:
callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid', u'badRequest', u'notFound'], userKey=alias_email, alias=alias_email) callGAPI(service=cd.users().aliases(), function=u'delete', throw_reasons=[u'invalid', u'badRequest', u'notFound'], userKey=alias_email, alias=alias_email)
return return
except apiclient.errors.HttpError, e: except googleapiclient.errors.HttpError, e:
error = json.loads(e.content) error = json.loads(e.content)
reason = error[u'error'][u'errors'][0][u'reason'] reason = error[u'error'][u'errors'][0][u'reason']
if reason == u'notFound': if reason == u'notFound':
@ -5715,7 +5715,7 @@ def output_csv(csv_list, titles, list_type, todrive):
convert = False convert = False
drive = buildGAPIObject(u'drive') drive = buildGAPIObject(u'drive')
string_data = string_file.getvalue() string_data = string_file.getvalue()
media = apiclient.http.MediaInMemoryUpload(string_data, mimetype=u'text/csv') media = googleapiclient.http.MediaInMemoryUpload(string_data, mimetype=u'text/csv')
result = callGAPI(service=drive.files(), function=u'insert', convert=convert, body={u'description': u' '.join(sys.argv), u'title': u'%s - %s' % (domain, list_type), u'mimeType': u'text/csv'}, media_body=media) result = callGAPI(service=drive.files(), function=u'insert', convert=convert, body={u'description': u' '.join(sys.argv), u'title': u'%s - %s' % (domain, list_type), u'mimeType': u'text/csv'}, media_body=media)
file_url = result[u'alternateLink'] file_url = result[u'alternateLink']
if os.path.isfile(getGamPath()+u'nobrowser.txt'): if os.path.isfile(getGamPath()+u'nobrowser.txt'):
@ -6378,14 +6378,14 @@ def doPrintLicenses(return_list=False):
page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % sku page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % sku
try: try:
licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProductAndSku', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=product, skuId=sku, fields=u'items(productId,skuId,userId),nextPageToken') licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProductAndSku', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=product, skuId=sku, fields=u'items(productId,skuId,userId),nextPageToken')
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
licenses += [] licenses += []
else: else:
for productId in products: for productId in products:
page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % productId page_message = u'Got %%%%total_items%%%% Licenses for %s...\n' % productId
try: try:
licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProduct', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=productId, fields=u'items(productId,skuId,userId),nextPageToken') licenses += callGAPIpages(service=lic.licenseAssignments(), function=u'listForProduct', throw_reasons=[u'invalid', u'forbidden'], page_message=page_message, customerId=domain, productId=productId, fields=u'items(productId,skuId,userId),nextPageToken')
except apiclient.errors.HttpError: except googleapiclient.errors.HttpError:
licenses = +[] licenses = +[]
for license in licenses: for license in licenses:
a_license = dict() a_license = dict()

View File

@ -0,0 +1,15 @@
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Version string of the bundled google-api-python-client package.
__version__ = "1.3.1"

285
googleapiclient/channel.py Normal file
View File

@ -0,0 +1,285 @@
"""Channel notifications support.
Classes and functions to support channel subscriptions and notifications
on those channels.
Notes:
- This code is based on experimental APIs and is subject to change.
- Notification does not do deduplication of notification ids, that's up to
the receiver.
- Storing the Channel between calls is up to the caller.
Example setting up a channel:
# Create a new channel that gets notifications via webhook.
channel = new_webhook_channel("https://example.com/my_web_hook")
# Store the channel, keyed by 'channel.id'. Store it before calling the
# watch method because notifications may start arriving before the watch
# method returns.
...
resp = service.objects().watchAll(
bucket="some_bucket_id", body=channel.body()).execute()
channel.update(resp)
# Store the channel, keyed by 'channel.id'. Store it after being updated
# since the resource_id value will now be correct, and that's needed to
# stop a subscription.
...
An example Webhook implementation using webapp2. Note that webapp2 puts
headers in a case insensitive dictionary, as headers aren't guaranteed to
always be upper case.
id = self.request.headers[X_GOOG_CHANNEL_ID]
# Retrieve the channel by id.
channel = ...
# Parse notification from the headers, including validating the id.
n = notification_from_headers(channel, self.request.headers)
# Do app specific stuff with the notification here.
if n.resource_state == 'sync':
# Code to handle sync state.
elif n.resource_state == 'exists':
# Code to handle the exists state.
elif n.resource_state == 'not_exists':
# Code to handle the not exists state.
Example of unsubscribing.
service.channels().stop(channel.body())
"""
import datetime
import uuid
from googleapiclient import errors
from oauth2client import util
# The unix time epoch starts at midnight 1970.
EPOCH = datetime.datetime.utcfromtimestamp(0)
# Map the names of the parameters in the JSON channel description to
# the parameter names we use in the Channel class.
CHANNEL_PARAMS = {
    'address': 'address',
    'id': 'id',
    'expiration': 'expiration',
    'params': 'params',
    'resourceId': 'resource_id',
    'resourceUri': 'resource_uri',
    'type': 'type',
    'token': 'token',
    }
# HTTP header names carried on webhook notification requests. They are
# looked up after the incoming headers have been upper-cased by
# _upper_header_keys(), so they are spelled in upper case here.
X_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'
X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
X_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'
X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'
def _upper_header_keys(headers):
new_headers = {}
for k, v in headers.iteritems():
new_headers[k.upper()] = v
return new_headers
class Notification(object):
  """A single event delivered over a notification Channel.

  Instances are normally produced by notification_from_headers() rather
  than constructed by hand.

  Attributes:
    message_number: int, unique sequence number of this notification.
    state: str, state of the resource being monitored.
    resource_uri: str, address of the resource being monitored.
    resource_id: str, unique identifier of the version of the resource at
      this event.
  """

  @util.positional(5)
  def __init__(self, message_number, state, resource_uri, resource_id):
    """Store the fields of a notification.

    Args:
      message_number: int, unique sequence number of this notification.
      state: str, one of "exists", "not_exists", or "sync".
      resource_uri: str, address of the resource being monitored.
      resource_id: str, identifier of the watched resource.
    """
    self.resource_id = resource_id
    self.resource_uri = resource_uri
    self.state = state
    self.message_number = message_number
class Channel(object):
  """A Channel for notifications.

  Usually not constructed directly; instead returned from helper functions
  like new_webhook_channel().

  Attributes:
    type: str, The type of delivery mechanism used by this channel. For
      example, 'web_hook'.
    id: str, A UUID for the channel.
    token: str, An arbitrary string associated with the channel that
      is delivered to the target address with each event delivered
      over this channel.
    address: str, The address of the receiving entity where events are
      delivered. Specific to the channel type.
    expiration: int, The time, in milliseconds from the epoch, when this
      channel will expire.
    params: dict, A dictionary of string to string, with additional
      parameters controlling delivery channel behavior.
    resource_id: str, An opaque id that identifies the resource that is
      being watched. Stable across different API versions.
    resource_uri: str, The canonicalized ID of the watched resource.
  """

  @util.positional(5)
  def __init__(self, type, id, token, address, expiration=None,
               params=None, resource_id="", resource_uri=""):
    """Create a new Channel.

    In user code, this constructor will not typically be called manually
    since there are functions for creating channels for each specific type
    with a more customized set of arguments to pass.

    Args:
      type: str, The type of delivery mechanism used by this channel. For
        example, 'web_hook'.
      id: str, A UUID for the channel.
      token: str, An arbitrary string associated with the channel that
        is delivered to the target address with each event delivered
        over this channel.
      address: str, The address of the receiving entity where events are
        delivered. Specific to the channel type.
      expiration: int, The time, in milliseconds from the epoch, when this
        channel will expire.
      params: dict, A dictionary of string to string, with additional
        parameters controlling delivery channel behavior.
      resource_id: str, An opaque id that identifies the resource that is
        being watched. Stable across different API versions.
      resource_uri: str, The canonicalized ID of the watched resource.
    """
    self.type = type
    self.id = id
    self.token = token
    self.address = address
    self.expiration = expiration
    self.params = params
    self.resource_id = resource_id
    self.resource_uri = resource_uri

  def body(self):
    """Build a body from the Channel.

    Constructs a dictionary that's appropriate for passing into watch()
    methods as the value of body argument.

    Returns:
      A dictionary representation of the channel. Optional fields are
      included only when they are truthy.
    """
    result = {
        'id': self.id,
        'token': self.token,
        'type': self.type,
        'address': self.address
        }
    if self.params:
      result['params'] = self.params
    if self.resource_id:
      result['resourceId'] = self.resource_id
    if self.resource_uri:
      result['resourceUri'] = self.resource_uri
    if self.expiration:
      result['expiration'] = self.expiration
    return result

  def update(self, resp):
    """Update a channel with information from the response of watch().

    When a request is sent to watch() a resource, the response returned
    from the watch() request is a dictionary with updated channel
    information, such as the resource_id, which is needed when stopping a
    subscription.

    Args:
      resp: dict, The response from a watch() method.
    """
    # Use items() rather than the Python 2-only iteritems() so this method
    # works under both Python 2 and Python 3.
    for json_name, param_name in CHANNEL_PARAMS.items():
      value = resp.get(json_name)
      if value is not None:
        setattr(self, param_name, value)
def notification_from_headers(channel, headers):
  """Parse, validate and wrap a webhook notification from request headers.

  Args:
    channel: Channel, The channel that the notification is associated with.
    headers: dict, A dictionary like object that contains the request headers
        from the webhook HTTP request.

  Returns:
    A Notification object.

  Raises:
    errors.InvalidNotificationError if the notification is invalid.
    ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
  """
  headers = _upper_header_keys(headers)
  channel_id = headers[X_GOOG_CHANNEL_ID]
  # Guard clause: a notification for a different channel id is rejected.
  if channel.id != channel_id:
    raise errors.InvalidNotificationError(
        'Channel id mismatch: %s != %s' % (channel.id, channel_id))
  return Notification(int(headers[X_GOOG_MESSAGE_NUMBER]),
                      headers[X_GOOG_RESOURCE_STATE],
                      headers[X_GOOG_RESOURCE_URI],
                      headers[X_GOOG_RESOURCE_ID])
@util.positional(2)
def new_webhook_channel(url, token=None, expiration=None, params=None):
  """Create a new webhook Channel.

  Args:
    url: str, URL to post notifications to.
    token: str, An arbitrary string associated with the channel that
        is delivered to the target address with each notification delivered
        over this channel.
    expiration: datetime.datetime, A time in the future when the channel
        should expire. Can also be None if the subscription should use the
        default expiration. Note that different services may have different
        limits on how long a subscription lasts. Check the response from the
        watch() method to see the value the service has set for an expiration
        time.
    params: dict, Extra parameters to pass on channel creation. Currently
        not used for webhook channels.
  """
  expiration_ms = 0
  if expiration:
    # Convert the expiration datetime to integer milliseconds since the
    # epoch; a time at or before the epoch is clamped to 0 (no expiration).
    delta = expiration - EPOCH
    expiration_ms = delta.microseconds/1000 + (
        delta.seconds + delta.days*24*3600)*1000
    expiration_ms = max(expiration_ms, 0)

  return Channel('web_hook', str(uuid.uuid4()),
                 token, url, expiration=expiration_ms,
                 params=params)

# ---------------------------------------------------------------------------
# NOTE: everything below this marker is a second source unit
# (googleapiclient/discovery.py).  Diff-viewer chrome ("View File",
# "@ -0,0 +1,995 @@") that separated the two files was removed here.
# ---------------------------------------------------------------------------
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs.
A client library for Google's discovery based APIs.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = [
'build',
'build_from_document',
'fix_method_name',
'key2param',
]
# Standard library imports
import StringIO
import copy
from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
import json
import keyword
import logging
import mimetypes
import os
import re
import urllib
import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Third-party imports
import httplib2
import mimeparse
import uritemplate
# Local imports
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidJsonError
from googleapiclient.errors import MediaUploadSizeError
from googleapiclient.errors import UnacceptableMimeTypeError
from googleapiclient.errors import UnknownApiNameOrVersion
from googleapiclient.errors import UnknownFileType
from googleapiclient.http import HttpRequest
from googleapiclient.http import MediaFileUpload
from googleapiclient.http import MediaUpload
from googleapiclient.model import JsonModel
from googleapiclient.model import MediaModel
from googleapiclient.model import RawModel
from googleapiclient.schema import Schemas
from oauth2client.client import GoogleCredentials
from oauth2client.util import _add_query_parameter
from oauth2client.util import positional
# The client library requires a version of httplib2 that supports RETRIES.
httplib2.RETRIES = 1

logger = logging.getLogger(__name__)

# Matches one '{name}' expression in a URI Template (RFC 6570 style).
URITEMPLATE = re.compile('{[^}]*}')
# Matches a variable name inside a URI Template expression.
VARNAME = re.compile('[a-zA-Z0-9_-]+')
# URI Template for fetching a service's discovery document.
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
                 '{api}/{apiVersion}/rest')
# Fallback docstring for generated methods with no 'description' field.
DEFAULT_METHOD_DOC = 'A description of how to use this function'
# HTTP methods that may carry a request payload (and thus a 'body' parameter).
HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
# Bit shifts used to convert 'maxSize' unit suffixes to byte counts.
_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
# Synthetic parameter descriptor for a method's request body.
BODY_PARAMETER_DEFAULT_VALUE = {
    'description': 'The request body.',
    'type': 'object',
    'required': True,
}
# Synthetic parameter descriptor added to methods that support media upload.
MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
    'description': ('The filename of the media request body, or an instance '
                    'of a MediaUpload object.'),
    'type': 'string',
    'required': False,
}

# Parameters accepted by the stack, but not visible via discovery.
# TODO(dhermes): Remove 'userip' in 'v2'.
STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
# Library-specific reserved words beyond Python keywords.
RESERVED_WORDS = frozenset(['body'])

def fix_method_name(name):
  """Return a method name that is safe to use as a Python attribute.

  Args:
    name: string, method name.

  Returns:
    The name with a '_' appended when the name is a Python keyword or a
    library-reserved word; otherwise the name unchanged.
  """
  if keyword.iskeyword(name) or name in RESERVED_WORDS:
    return name + '_'
  return name
def key2param(key):
  """Converts key names into parameter names.

  For example, converting "max-results" -> "max_results".

  Args:
    key: string, the method key name.

  Returns:
    A safe method name based on the key name; the empty string maps to
    itself.
  """
  # Guard: the original indexed key[0] and raised IndexError on ''.
  if not key:
    return key
  result = []
  if not key[0].isalpha():
    # Python identifiers cannot start with a non-letter; prefix one.
    result.append('x')
  for c in key:
    result.append(c if c.isalnum() else '_')
  return ''.join(result)
@positional(2)
def build(serviceName,
version,
http=None,
discoveryServiceUrl=DISCOVERY_URI,
developerKey=None,
model=None,
requestBuilder=HttpRequest,
credentials=None):
"""Construct a Resource for interacting with an API.
Construct a Resource object for interacting with an API. The serviceName and
version are the names from the Discovery service.
Args:
serviceName: string, name of the service.
version: string, the version of the service.
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
discoveryServiceUrl: string, a URI Template that points to the location of
the discovery service. It should have two parameters {api} and
{apiVersion} that when filled in produce an absolute URI to the discovery
document for that service.
developerKey: string, key obtained from
https://code.google.com/apis/console.
model: googleapiclient.Model, converts to and from the wire format.
requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
request.
credentials: oauth2client.Credentials, credentials to be used for
authentication.
Returns:
A Resource object with methods for interacting with the service.
"""
params = {
'api': serviceName,
'apiVersion': version
}
if http is None:
http = httplib2.Http()
requested_url = uritemplate.expand(discoveryServiceUrl, params)
# REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
# variable that contains the network address of the client sending the
# request. If it exists then add that to the request for the discovery
# document to avoid exceeding the quota on discovery requests.
if 'REMOTE_ADDR' in os.environ:
requested_url = _add_query_parameter(requested_url, 'userIp',
os.environ['REMOTE_ADDR'])
logger.info('URL being requested: GET %s' % requested_url)
resp, content = http.request(requested_url)
if resp.status == 404:
raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
version))
if resp.status >= 400:
raise HttpError(resp, content, uri=requested_url)
try:
service = json.loads(content)
except ValueError, e:
logger.error('Failed to parse as JSON: ' + content)
raise InvalidJsonError()
return build_from_document(content, base=discoveryServiceUrl, http=http,
developerKey=developerKey, model=model, requestBuilder=requestBuilder,
credentials=credentials)
@positional(1)
def build_from_document(
    service,
    base=None,
    future=None,
    http=None,
    developerKey=None,
    model=None,
    requestBuilder=HttpRequest,
    credentials=None):
  """Create a Resource for interacting with an API.

  Same as `build()`, but constructs the Resource object from a discovery
  document that is it given, as opposed to retrieving one over HTTP.

  Args:
    service: string or object, the JSON discovery document describing the API.
        The value passed in may either be the JSON string or the deserialized
        JSON.
    base: string, base URI for all HTTP requests, usually the discovery URI.
        This parameter is no longer used as rootUrl and servicePath are
        included within the discovery document. (deprecated)
    future: string, discovery document with future capabilities (deprecated).
    http: httplib2.Http, An instance of httplib2.Http or something that acts
        like it that HTTP requests will be made through. A default
        httplib2.Http instance is created when omitted.
    developerKey: string, Key for controlling API usage, generated
        from the API Console.
    model: Model class instance that serializes and de-serializes requests
        and responses.
    requestBuilder: Takes an http request and packages it up to be executed.
    credentials: object, credentials to be used for authentication.

  Returns:
    A Resource object with methods for interacting with the service.
  """

  # future is no longer used.
  future = {}

  if isinstance(service, basestring):
    service = json.loads(service)

  # The discovery document is authoritative for the endpoint location; the
  # deprecated `base` argument is ignored.
  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
  schema = Schemas(service)

  # Always have a transport to authorize and to hand to Resource.  Previously
  # a caller supplying neither `http` nor `credentials` received a Resource
  # whose _http was None, which failed on the first request.
  if http is None:
    http = httplib2.Http()

  if credentials:
    # If credentials were passed in, we could have two cases:
    # 1. the scopes were specified, in which case the given credentials
    #    are used for authorizing the http;
    # 2. the scopes were not provided (meaning the Application Default
    #    Credentials are to be used). In this case, the Application Default
    #    Credentials are built and used instead of the original credentials.
    #    If there are no scopes found (meaning the given service requires no
    #    authentication), there is no authorization of the http.
    if (isinstance(credentials, GoogleCredentials) and
        credentials.create_scoped_required()):
      scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
      if scopes:
        credentials = credentials.create_scoped(scopes.keys())
      else:
        # No need to authorize the http object
        # if the service does not require authentication.
        credentials = None

    if credentials:
      http = credentials.authorize(http)

  if model is None:
    features = service.get('features', [])
    model = JsonModel('dataWrapper' in features)
  return Resource(http=http, baseUrl=base, model=model,
                  developerKey=developerKey, requestBuilder=requestBuilder,
                  resourceDesc=service, rootDesc=service, schema=schema)
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
elif schema_type == 'integer':
return str(int(value))
elif schema_type == 'number':
return str(float(value))
elif schema_type == 'boolean':
return str(bool(value)).lower()
else:
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
def _media_size_to_long(maxSize):
"""Convert a string media size, such as 10GB or 3TB into an integer.
Args:
maxSize: string, size as a string, such as 2MB or 7GB.
Returns:
The size as an integer value.
"""
if len(maxSize) < 2:
return 0L
units = maxSize[-2:].upper()
bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
if bit_shift is not None:
return long(maxSize[:-2]) << bit_shift
else:
return long(maxSize)
def _media_path_url_from_info(root_desc, path_url):
"""Creates an absolute media path URL.
Constructed using the API root URI and service path from the discovery
document and the relative path for the API method.
Args:
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
Returns:
String; the absolute URI for media upload for the API method.
"""
return '%(root)supload/%(service_path)s%(path)s' % {
'root': root_desc['rootUrl'],
'service_path': root_desc['servicePath'],
'path': path_url,
}
def _fix_up_parameters(method_desc, root_desc, http_method):
  """Updates parameters of an API method with values specific to this library.

  Specifically, adds whatever global parameters are specified by the API to
  the parameters for the individual method. Also adds parameters which don't
  appear in the discovery document, but are available to all discovery based
  APIs (these are listed in STACK_QUERY_PARAMETERS).

  SIDE EFFECTS: This updates the parameters dictionary object in the method
  description.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value
        comes from the dictionary of methods stored in the 'methods' key in
        the deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery
        document.
    http_method: String; the HTTP method used to call the API method
        described in method_desc.

  Returns:
    The updated Dictionary stored in the 'parameters' key of the method
    description dictionary.
  """
  parameters = method_desc.setdefault('parameters', {})

  # Fold in the API-wide parameters shared by every method.
  for param_name, param_desc in root_desc.get('parameters', {}).iteritems():
    parameters[param_name] = param_desc

  # Parameters the stack accepts but discovery does not advertise.
  for param_name in STACK_QUERY_PARAMETERS:
    parameters[param_name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()

  # Methods that accept a request payload get a synthetic 'body' parameter.
  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
    body_param = BODY_PARAMETER_DEFAULT_VALUE.copy()
    body_param.update(method_desc['request'])
    parameters['body'] = body_param

  return parameters
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
  """Adds 'media_body' to parameters when the method supports media upload.

  SIDE EFFECTS: If the method supports media upload and has a required body,
  sets body to be optional (required=False) instead. Also adds a 'media_body'
  entry to parameters when the method description contains 'mediaUpload'.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value
        comes from the dictionary of methods stored in the 'methods' key in
        the deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery
        document.
    path_url: String; the relative URL for the API method. Relative to the
        API root, which is specified in the discovery document.
    parameters: A dictionary describing method parameters for method
        described in method_desc.

  Returns:
    Triple (accept, max_size, media_path_url) where:
      - accept is a list of strings representing what content types are
        accepted for media upload. Defaults to empty list if not in the
        discovery document.
      - max_size is a long representing the max size in bytes allowed for a
        media upload. Defaults to 0L if not in the discovery document.
      - media_path_url is a String; the absolute URI for media upload for
        the API method. If media upload is not supported, this is None.
  """
  media_upload = method_desc.get('mediaUpload', {})
  accept = media_upload.get('accept', [])
  max_size = _media_size_to_long(media_upload.get('maxSize', ''))

  if not media_upload:
    # Method does not support media upload: no extra parameter, no URL.
    return accept, max_size, None

  parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
  # Media can substitute for the body, so the body is no longer mandatory.
  if 'body' in parameters:
    parameters['body']['required'] = False
  return accept, max_size, _media_path_url_from_info(root_desc, path_url)
def _fix_up_method_description(method_desc, root_desc):
  """Updates a method description in a discovery document.

  SIDE EFFECTS: Changes the parameters dictionary in the method description
  with extra parameters which are used locally.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value
        comes from the dictionary of methods stored in the 'methods' key in
        the deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery
        document.

  Returns:
    Tuple (path_url, http_method, method_id, accept, max_size,
    media_path_url) where:
      - path_url is a String; the relative URL for the API method.
      - http_method is a String; the HTTP method used to call the API method.
      - method_id is a String; the RPC method name from the 'id' key.
      - accept is a list of strings representing what content types are
        accepted for media upload (empty if absent from discovery).
      - max_size is a long; the max media upload size in bytes (0L if
        absent from discovery).
      - media_path_url is a String; the absolute URI for media upload, or
        None if media upload is not supported.
  """
  path_url = method_desc['path']
  http_method = method_desc['httpMethod']
  method_id = method_desc['id']

  # Order is important: _fix_up_media_upload relies on the 'parameters' dict
  # created here (it looks for a 'body' entry before adding 'media_body').
  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
  accept, max_size, media_path_url = _fix_up_media_upload(
      method_desc, root_desc, path_url, parameters)

  return path_url, http_method, method_id, accept, max_size, media_path_url
# TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object):
  """Represents the parameters associated with a method.

  Parsed once from the discovery document so that each call of a generated
  method can validate and route its keyword arguments cheaply.

  Attributes:
    argmap: Map from method parameter name (string) to query parameter name
        (string).
    required_params: List of required parameters (represented by parameter
        name as string).
    repeated_params: List of repeated parameters (represented by parameter
        name as string).
    pattern_params: Map from method parameter name (string) to regular
        expression (as a string). If the pattern is set for a parameter, the
        value for that parameter must match the regular expression.
    query_params: List of parameters (represented by parameter name as
        string) that will be used in the query string.
    path_params: Set of parameters (represented by parameter name as string)
        that will be used in the base URL path.
    param_types: Map from method parameter name (string) to parameter type.
        Type can be any valid JSON schema type; valid values are 'any',
        'array', 'boolean', 'integer', 'number', 'object', or 'string'.
        Reference:
        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
    enum_params: Map from method parameter name (string) to list of strings,
        where each list of strings is the list of acceptable enum values.
  """

  def __init__(self, method_desc):
    """Constructor for ResourceMethodParameters.

    Sets default values and defers to set_parameters to populate.

    Args:
      method_desc: Dictionary with metadata describing an API method. Value
          comes from the dictionary of methods stored in the 'methods' key
          in the deserialized discovery document.
    """
    self.argmap = {}
    self.required_params = []
    self.repeated_params = []
    self.pattern_params = {}
    self.query_params = []
    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
    #                parsing is gotten rid of.
    self.path_params = set()
    self.param_types = {}
    self.enum_params = {}

    self.set_parameters(method_desc)

  def set_parameters(self, method_desc):
    """Populates maps and lists based on method description.

    Iterates through each parameter for the method and parses the values
    from the parameter dictionary.

    Args:
      method_desc: Dictionary with metadata describing an API method. Value
          comes from the dictionary of methods stored in the 'methods' key
          in the deserialized discovery document.
    """
    for arg, desc in method_desc.get('parameters', {}).iteritems():
      # `param` is the Pythonized name (e.g. max_results); `arg` is the wire
      # name (e.g. max-results).
      param = key2param(arg)
      self.argmap[param] = arg

      # NOTE: truthy .get() checks are deliberate — an empty pattern/enum
      # is treated the same as an absent one.
      if desc.get('pattern'):
        self.pattern_params[param] = desc['pattern']
      if desc.get('enum'):
        self.enum_params[param] = desc['enum']
      if desc.get('required'):
        self.required_params.append(param)
      if desc.get('repeated'):
        self.repeated_params.append(param)
      if desc.get('location') == 'query':
        self.query_params.append(param)
      if desc.get('location') == 'path':
        self.path_params.add(param)
      self.param_types[param] = desc.get('type', 'string')

    # TODO(dhermes): Determine if this is still necessary. Discovery based
    #                APIs should have all path parameters already marked with
    #                'location: path'.
    # Any name appearing in the URI template is forced to be a path
    # parameter, and removed from the query parameters if present there.
    for match in URITEMPLATE.finditer(method_desc['path']):
      for namematch in VARNAME.finditer(match.group(0)):
        name = key2param(namematch.group(0))
        self.path_params.add(name)
        if name in self.query_params:
          self.query_params.remove(name)
def createMethod(methodName, methodDesc, rootDesc, schema):
  """Creates a method for attaching to a Resource.

  Returns a (name, function) pair. The generated function validates keyword
  arguments against the discovery document, handles media uploads, and
  builds an HttpRequest via the owning Resource's request builder.

  Args:
    methodName: string, name of the method to use.
    methodDesc: object, fragment of deserialized discovery document that
      describes the method.
    rootDesc: object, the entire deserialized discovery document.
    schema: object, mapping of schema names to schema descriptions.
  """
  # Avoid collisions with Python keywords and library-reserved names.
  methodName = fix_method_name(methodName)
  (pathUrl, httpMethod, methodId, accept,
   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)

  parameters = ResourceMethodParameters(methodDesc)

  def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

    # Reject any keyword argument the discovery document does not define.
    for name in kwargs.iterkeys():
      if name not in parameters.argmap:
        raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
    # (kwargs.keys() is a list copy here, so deleting while iterating is
    # safe.)
    keys = kwargs.keys()
    for name in keys:
      if kwargs[name] is None:
        del kwargs[name]

    for name in parameters.required_params:
      if name not in kwargs:
        raise TypeError('Missing required parameter "%s"' % name)

    # Validate values against regex patterns from the discovery document.
    for name, regex in parameters.pattern_params.iteritems():
      if name in kwargs:
        if isinstance(kwargs[name], basestring):
          pvalues = [kwargs[name]]
        else:
          pvalues = kwargs[name]
        for pvalue in pvalues:
          if re.match(regex, pvalue) is None:
            raise TypeError(
                'Parameter "%s" value "%s" does not match the pattern "%s"' %
                (name, pvalue, regex))

    for name, enums in parameters.enum_params.iteritems():
      if name in kwargs:
        # We need to handle the case of a repeated enum
        # name differently, since we want to handle both
        # arg='value' and arg=['value1', 'value2']
        if (name in parameters.repeated_params and
            not isinstance(kwargs[name], basestring)):
          values = kwargs[name]
        else:
          values = [kwargs[name]]
        for value in values:
          if value not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, value, str(enums)))

    # Split the validated arguments into query-string and URL-path
    # parameters, casting values to strings per their declared schema type.
    actual_query_params = {}
    actual_path_params = {}
    for key, value in kwargs.iteritems():
      to_type = parameters.param_types.get(key, 'string')
      # For repeated parameters we cast each member of the list.
      if key in parameters.repeated_params and type(value) == type([]):
        cast_value = [_cast(x, to_type) for x in value]
      else:
        cast_value = _cast(value, to_type)
      if key in parameters.query_params:
        actual_query_params[parameters.argmap[key]] = cast_value
      if key in parameters.path_params:
        actual_path_params[parameters.argmap[key]] = cast_value
    body_value = kwargs.get('body', None)
    media_filename = kwargs.get('media_body', None)

    if self._developerKey:
      actual_query_params['key'] = self._developerKey

    # '_media' methods return raw bytes; methods without a declared
    # 'response' skip JSON deserialization of the reply entirely.
    model = self._model
    if methodName.endswith('_media'):
      model = MediaModel()
    elif 'response' not in methodDesc:
      model = RawModel()

    headers = {}
    headers, params, query, body = model.request(headers,
        actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
    url = urlparse.urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
      # Ensure we end up with a valid MediaUpload object.
      if isinstance(media_filename, basestring):
        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
        if media_mime_type is None:
          raise UnknownFileType(media_filename)
        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
          raise UnacceptableMimeTypeError(media_mime_type)
        media_upload = MediaFileUpload(media_filename,
                                       mimetype=media_mime_type)
      elif isinstance(media_filename, MediaUpload):
        media_upload = media_filename
      else:
        raise TypeError('media_filename must be str or MediaUpload.')

      # Check the maxSize
      if maxSize > 0 and media_upload.size() > maxSize:
        raise MediaUploadSizeError("Media larger than: %s" % maxSize)

      # Use the media path uri for media uploads
      expanded_url = uritemplate.expand(mediaPathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
      if media_upload.resumable():
        url = _add_query_parameter(url, 'uploadType', 'resumable')

      if media_upload.resumable():
        # This is all we need to do for resumable, if the body exists it gets
        # sent in the first request, otherwise an empty body is sent.
        resumable = media_upload
      else:
        # A non-resumable upload
        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          body = media_upload.getbytes(0, media_upload.size())
          url = _add_query_parameter(url, 'uploadType', 'media')
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'
          payload = media_upload.getbytes(0, media_upload.size())
          msg.set_payload(payload)
          msgRoot.attach(msg)
          # encode the body: note that we can't use `as_string`, because
          # it plays games with `From ` lines.
          fp = StringIO.StringIO()
          g = Generator(fp, mangle_from_=False)
          g.flatten(msgRoot, unixfrom=False)
          body = fp.getvalue()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary
          url = _add_query_parameter(url, 'uploadType', 'multipart')

    logger.info('URL being requested: %s %s' % (httpMethod,url))
    return self._requestBuilder(self._http,
                                model.response,
                                url,
                                method=httpMethod,
                                body=body,
                                headers=headers,
                                methodId=methodId,
                                resumable=resumable)

  # Build the generated method's docstring from the discovery document.
  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
  if len(parameters.argmap) > 0:
    docs.append('Args:\n')

  # Skip undocumented params and params common to all methods.
  skip_parameters = rootDesc.get('parameters', {}).keys()
  skip_parameters.extend(STACK_QUERY_PARAMETERS)

  all_args = parameters.argmap.keys()
  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]

  # Move body to the front of the line.
  if 'body' in all_args:
    args_ordered.append('body')

  for name in all_args:
    if name not in args_ordered:
      args_ordered.append(name)

  for arg in args_ordered:
    if arg in skip_parameters:
      continue

    repeated = ''
    if arg in parameters.repeated_params:
      repeated = ' (repeated)'
    required = ''
    if arg in parameters.required_params:
      required = ' (required)'
    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
    paramdoc = paramdesc.get('description', 'A parameter')
    if '$ref' in paramdesc:
      docs.append(
          ('  %s: object, %s%s%s\n    The object takes the'
          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
            schema.prettyPrintByName(paramdesc['$ref'])))
    else:
      paramtype = paramdesc.get('type', 'string')
      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
                                          repeated))
    enum = paramdesc.get('enum', [])
    enumDesc = paramdesc.get('enumDescriptions', [])
    if enum and enumDesc:
      docs.append('    Allowed values\n')
      for (name, desc) in zip(enum, enumDesc):
        docs.append('      %s - %s\n' % (name, desc))
  if 'response' in methodDesc:
    if methodName.endswith('_media'):
      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
    else:
      docs.append('\nReturns:\n  An object of the form:\n\n    ')
      docs.append(schema.prettyPrintSchema(methodDesc['response']))

  setattr(method, '__doc__', ''.join(docs))
  return (methodName, method)
def createNextMethod(methodName):
  """Creates any _next methods for attaching to a Resource.

  The _next methods allow for easy iteration through list() responses.

  Args:
    methodName: string, name of the method to use.
  """
  methodName = fix_method_name(methodName)

  def methodNext(self, previous_request, previous_response):
    """Retrieves the next page of results.

Args:
  previous_request: The request for the previous page. (required)
  previous_response: The response from the request for the previous page. (required)

Returns:
  A request object that you can call 'execute()' on to request the next
  page. Returns None if there are no more items in the collection.
    """
    # Absence of nextPageToken means the collection is exhausted.
    if 'nextPageToken' not in previous_response:
      return None

    request = copy.copy(previous_request)

    pageToken = previous_response['nextPageToken']
    parsed = list(urlparse.urlparse(request.uri))
    # Swap any stale pageToken in the query string for the fresh one.
    query_pairs = [pair for pair in parse_qsl(parsed[4])
                   if pair[0] != 'pageToken']
    query_pairs.append(('pageToken', pageToken))
    parsed[4] = urllib.urlencode(query_pairs)
    uri = urlparse.urlunparse(parsed)
    request.uri = uri

    logger.info('URL being requested: %s %s' % (methodName,uri))

    return request

  return (methodName, methodNext)
class Resource(object):
  """A class for interacting with a resource.

  Method and nested-resource attributes are attached dynamically from the
  API's discovery description, so each instance exposes a different surface
  depending on the API/resource it was built from.
  """

  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
               resourceDesc, rootDesc, schema):
    """Build a Resource from the API description.

    Args:
      http: httplib2.Http, Object to make http requests with.
      baseUrl: string, base URL for the API. All requests are relative to this
          URI.
      model: googleapiclient.Model, converts to and from the wire format.
      requestBuilder: class or callable that instantiates an
          googleapiclient.HttpRequest object.
      developerKey: string, key obtained from
          https://code.google.com/apis/console
      resourceDesc: object, section of deserialized discovery document that
          describes a resource. Note that the top level discovery document
          is considered a resource.
      rootDesc: object, the entire deserialized discovery document.
      schema: object, mapping of schema names to schema descriptions.
    """
    # Names of attributes added at runtime, tracked so they can be dropped
    # and rebuilt around pickling (see __getstate__/__setstate__).
    self._dynamic_attrs = []
    self._http = http
    self._baseUrl = baseUrl
    self._model = model
    self._developerKey = developerKey
    self._requestBuilder = requestBuilder
    self._resourceDesc = resourceDesc
    self._rootDesc = rootDesc
    self._schema = schema
    self._set_service_methods()

  def _set_dynamic_attr(self, attr_name, value):
    """Sets an instance attribute and tracks it in a list of dynamic attributes.

    Args:
      attr_name: string; The name of the attribute to be set
      value: The value being set on the object and tracked in the dynamic cache.
    """
    self._dynamic_attrs.append(attr_name)
    self.__dict__[attr_name] = value

  def __getstate__(self):
    """Trim the state down to something that can be pickled.

    Uses the fact that the instance variable _dynamic_attrs holds attrs that
    will be wiped and restored on pickle serialization.
    """
    state_dict = copy.copy(self.__dict__)
    # Dynamically bound methods are not picklable; strip them all and
    # rebuild them from the discovery description on unpickle.
    for dynamic_attr in self._dynamic_attrs:
      del state_dict[dynamic_attr]
    del state_dict['_dynamic_attrs']
    return state_dict

  def __setstate__(self, state):
    """Reconstitute the state of the object from being pickled.

    Uses the fact that the instance variable _dynamic_attrs holds attrs that
    will be wiped and restored on pickle serialization.
    """
    self.__dict__.update(state)
    self._dynamic_attrs = []
    # Re-attach the dynamic method/resource attributes that __getstate__
    # stripped, using the stored discovery description.
    self._set_service_methods()

  def _set_service_methods(self):
    # Attach, in order: plain API methods, nested sub-resources, and the
    # *_next pagination helpers.
    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
    self._add_next_methods(self._resourceDesc, self._schema)

  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
    # Add basic methods to Resource
    if 'methods' in resourceDesc:
      for methodName, methodDesc in resourceDesc['methods'].iteritems():
        fixedMethodName, method = createMethod(
            methodName, methodDesc, rootDesc, schema)
        # method.__get__ binds the plain function to this instance so it
        # behaves like an ordinary bound method.
        self._set_dynamic_attr(fixedMethodName,
                               method.__get__(self, self.__class__))
        # Add in _media methods. The functionality of the attached method will
        # change when it sees that the method name ends in _media.
        if methodDesc.get('supportsMediaDownload', False):
          fixedMethodName, method = createMethod(
              methodName + '_media', methodDesc, rootDesc, schema)
          self._set_dynamic_attr(fixedMethodName,
                                 method.__get__(self, self.__class__))

  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
    # Add in nested resources
    if 'resources' in resourceDesc:

      def createResourceMethod(methodName, methodDesc):
        """Create a method on the Resource to access a nested Resource.

        Args:
          methodName: string, name of the method to use.
          methodDesc: object, fragment of deserialized discovery document that
            describes the method.
        """
        methodName = fix_method_name(methodName)

        def methodResource(self):
          # Each call builds a fresh child Resource sharing this instance's
          # http/model/requestBuilder configuration.
          return Resource(http=self._http, baseUrl=self._baseUrl,
                          model=self._model, developerKey=self._developerKey,
                          requestBuilder=self._requestBuilder,
                          resourceDesc=methodDesc, rootDesc=rootDesc,
                          schema=schema)

        setattr(methodResource, '__doc__', 'A collection resource.')
        setattr(methodResource, '__is_resource__', True)

        return (methodName, methodResource)

      for methodName, methodDesc in resourceDesc['resources'].iteritems():
        fixedMethodName, method = createResourceMethod(methodName, methodDesc)
        self._set_dynamic_attr(fixedMethodName,
                               method.__get__(self, self.__class__))

  def _add_next_methods(self, resourceDesc, schema):
    # Add _next() methods
    # Look for response bodies in schema that contain nextPageToken, and methods
    # that take a pageToken parameter.
    if 'methods' in resourceDesc:
      for methodName, methodDesc in resourceDesc['methods'].iteritems():
        if 'response' in methodDesc:
          responseSchema = methodDesc['response']
          if '$ref' in responseSchema:
            responseSchema = schema.get(responseSchema['$ref'])
          hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
                                                                   {})
          hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
          # Only paged methods (token in both request and response) get a
          # foo_next() helper.
          if hasNextPageToken and hasPageToken:
            fixedMethodName, method = createNextMethod(methodName + '_next')
            self._set_dynamic_attr(fixedMethodName,
                                   method.__get__(self, self.__class__))

140
googleapiclient/errors.py Normal file
View File

@ -0,0 +1,140 @@
#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Errors for the library.
All exceptions defined by the library
should be defined in this file.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json
from oauth2client import util
class Error(Exception):
  """Base error for this module.

  All exceptions defined by this library derive from this class, so callers
  can catch them with a single except clause.
  """
class HttpError(Error):
  """HTTP data was invalid or unexpected."""

  @util.positional(3)
  def __init__(self, resp, content, uri=None):
    # Keep the raw response, body and (optionally) the request URI so a
    # readable message can be reconstructed on demand.
    self.resp = resp
    self.content = content
    self.uri = uri

  def _get_reason(self):
    """Calculate the reason for the error from the response content."""
    reason = self.resp.reason
    try:
      # Prefer the server-supplied error message when the body is JSON.
      reason = json.loads(self.content)['error']['message']
    except (ValueError, KeyError):
      # Body was not JSON, or lacked the expected keys; keep the HTTP
      # reason phrase instead.
      pass
    return reason if reason is not None else ''

  def __repr__(self):
    reason = self._get_reason()
    if self.uri:
      return '<HttpError %s when requesting %s returned "%s">' % (
          self.resp.status, self.uri, reason.strip())
    return '<HttpError %s "%s">' % (self.resp.status, reason)

  __str__ = __repr__
class InvalidJsonError(Error):
  """The JSON returned could not be parsed."""
class UnknownFileType(Error):
  """File type unknown or unexpected."""
class UnknownLinkType(Error):
  """Link type unknown or unexpected."""
class UnknownApiNameOrVersion(Error):
  """No API with that name and version exists."""
class UnacceptableMimeTypeError(Error):
  """That is an unacceptable mimetype for this operation."""
class MediaUploadSizeError(Error):
  """Media is larger than the method can accept."""
class ResumableUploadError(HttpError):
  """Error occurred during a resumable upload."""
class InvalidChunkSizeError(Error):
  """The given chunksize is not valid."""
class InvalidNotificationError(Error):
  """The channel Notification is invalid."""
class BatchError(HttpError):
  """Error occurred during batch operations.

  Unlike HttpError, an HTTP response is optional: a batch can fail on the
  client side before any response exists.
  """

  @util.positional(2)
  def __init__(self, reason, resp=None, content=None):
    """Constructor for a BatchError.

    Args:
      reason: string, human-readable explanation of the failure.
      resp: httplib2.Response, the HTTP response, if any. May be None for
          client-side batch failures.
      content: string, the HTTP response body, if any.
    """
    self.resp = resp
    self.content = content
    self.reason = reason

  def __repr__(self):
    # resp defaults to None; the previous implementation unconditionally
    # read self.resp.status and so raised AttributeError when a batch
    # error with no HTTP response was printed. Guard with getattr.
    if getattr(self.resp, 'status', None) is None:
      return '<BatchError "%s">' % (self.reason)
    else:
      return '<BatchError %s "%s">' % (self.resp.status, self.reason)

  __str__ = __repr__
class UnexpectedMethodError(Error):
  """Exception raised by RequestMockBuilder on unexpected calls."""

  @util.positional(1)
  def __init__(self, methodId=None):
    """Constructor for an UnexpectedMethodError."""
    message = 'Received unexpected call %s' % methodId
    super(UnexpectedMethodError, self).__init__(message)
class UnexpectedBodyError(Error):
  """Exception raised by RequestMockBuilder on unexpected bodies."""

  def __init__(self, expected, provided):
    """Constructor for an UnexpectedBodyError."""
    super(UnexpectedBodyError, self).__init__(
        'Expected: [%s] - Provided: [%s]' % (expected, provided))

1614
googleapiclient/http.py Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,172 @@
# Copyright 2014 Joe Gregorio
#
# Licensed under the MIT License
"""MIME-Type Parser
This module provides basic functions for handling mime-types. It can handle
matching mime-types against a list of media-ranges. See section 14.1 of the
HTTP specification [RFC 2616] for a complete explanation.
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
Contents:
- parse_mime_type(): Parses a mime-type into its component parts.
- parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
quality parameter.
- quality(): Determines the quality ('q') of a mime-type when
compared against a list of media-ranges.
- quality_parsed(): Just like quality() except the second parameter must be
pre-parsed.
- best_match(): Choose the mime-type with the highest quality ('q')
from a list of candidates.
"""
__version__ = '0.1.3'
__author__ = 'Joe Gregorio'
__email__ = 'joe@bitworking.org'
__license__ = 'MIT License'
__credits__ = ''
def parse_mime_type(mime_type):
  """Parses a mime-type into its component parts.

  Carves up a mime-type and returns a tuple of the (type, subtype, params)
  where 'params' is a dictionary of all the parameters for the media range.
  For example, the media range 'application/xhtml;q=0.5' would get parsed
  into:

     ('application', 'xhtml', {'q': '0.5'})
  """
  parts = mime_type.split(';')
  params = {}
  for param in parts[1:]:
    key, value = [piece.strip() for piece in param.split('=', 1)]
    params[key] = value
  full_type = parts[0].strip()
  # The Java URLConnection class sends an Accept header that includes a
  # single '*'; turn it into a legal wildcard.
  if full_type == '*':
    full_type = '*/*'
  main_type, subtype = full_type.split('/')
  return (main_type.strip(), subtype.strip(), params)
def parse_media_range(range):
  """Parse a media-range into its component parts.

  Carves up a media range and returns a tuple of the (type, subtype,
  params) where 'params' is a dictionary of all the parameters for the media
  range.  For example, the media range 'application/*;q=0.5' would get parsed
  into:

     ('application', '*', {'q': '0.5'})

  In addition this function also guarantees that there is a value for 'q'
  in the params dictionary, filling it in with a proper default if
  necessary.
  """
  (type, subtype, params) = parse_mime_type(range)
  # dict.has_key() is deprecated and was removed in Python 3; the 'in'
  # operator is the equivalent membership test on both Python 2 and 3.
  # A missing, empty, zero, or out-of-range quality is normalized to '1'.
  if 'q' not in params or not params['q'] or \
      not float(params['q']) or float(params['q']) > 1 \
      or float(params['q']) < 0:
    params['q'] = '1'
  return (type, subtype, params)
def fitness_and_quality_parsed(mime_type, parsed_ranges):
  """Find the best match for a mime-type amongst parsed media-ranges.

  Find the best match for a given mime-type against a list of media_ranges
  that have already been parsed by parse_media_range(). Returns a tuple of
  the fitness value and the value of the 'q' quality parameter of the best
  match, or (-1, 0) if no match was found. Just as for quality_parsed(),
  'parsed_ranges' must be a list of parsed media ranges.
  """
  best_fitness = -1
  best_fit_q = 0
  (target_type, target_subtype, target_params) = \
      parse_media_range(mime_type)
  for (type, subtype, params) in parsed_ranges:
    type_match = (type == target_type or
                  type == '*' or
                  target_type == '*')
    subtype_match = (subtype == target_subtype or
                     subtype == '*' or
                     target_subtype == '*')
    if type_match and subtype_match:
      # One point of fitness per target parameter (other than 'q') that
      # the range matches exactly. sum() over a generator replaces the
      # bare reduce() call, which is not a builtin on Python 3; dict.items
      # and 'in' replace the Python-2-only iteritems()/has_key().
      param_matches = sum(1 for (key, value) in target_params.items()
                          if key != 'q' and key in params
                          and value == params[key])
      # Exact type/subtype matches dominate parameter matches.
      fitness = 100 if type == target_type else 0
      fitness += 10 if subtype == target_subtype else 0
      fitness += param_matches
      if fitness > best_fitness:
        best_fitness = fitness
        best_fit_q = params['q']
  return best_fitness, float(best_fit_q)
def quality_parsed(mime_type, parsed_ranges):
  """Find the best match for a mime-type amongst parsed media-ranges.

  Behaves like quality(), except that 'parsed_ranges' must already be a
  list of media ranges parsed by parse_media_range(). Returns the 'q'
  quality parameter of the best match, or 0 if no match was found.
  """
  match = fitness_and_quality_parsed(mime_type, parsed_ranges)
  return match[1]
def quality(mime_type, ranges):
  """Return the quality ('q') of a mime-type against a list of media-ranges.

  Returns the quality 'q' of a mime-type when compared against the
  media-ranges in ranges. For example:

  >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
                text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
  0.7
  """
  parsed_ranges = [parse_media_range(part) for part in ranges.split(',')]
  return quality_parsed(mime_type, parsed_ranges)
def best_match(supported, header):
  """Return mime-type with the highest quality ('q') from list of candidates.

  Takes a list of supported mime-types and finds the best match for all the
  media-ranges listed in header. The value of header must be a string that
  conforms to the format of the HTTP Accept: header. The value of 'supported'
  is a list of mime-types. The list of supported mime-types should be sorted
  in order of increasing desirability, in case of a situation where there is
  a tie. 'supported' must be non-empty.

  >>> best_match(['application/xbel+xml', 'text/xml'],
                 'text/*;q=0.5,*/*; q=0.1')
  'text/xml'
  """
  split_header = _filter_blank(header.split(','))
  parsed_header = [parse_media_range(r) for r in split_header]
  weighted_matches = []
  # enumerate() replaces the manual 'pos' counter; keeping the position in
  # the tuple makes the sort prefer later (more desirable) entries when
  # fitness and quality tie.
  for pos, mime_type in enumerate(supported):
    weighted_matches.append((fitness_and_quality_parsed(mime_type,
                                                        parsed_header),
                             pos, mime_type))
  weighted_matches.sort()
  # Best candidate sorts last; return '' when its quality is 0 (no match).
  return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
def _filter_blank(i):
for s in i:
if s.strip():
yield s

383
googleapiclient/model.py Normal file
View File

@ -0,0 +1,383 @@
#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model objects for requests and responses.
Each API may support one or more serializations, such
as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python
object representation.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json
import logging
import urllib
from googleapiclient import __version__
from errors import HttpError
dump_request_response = False
def _abstract():
raise NotImplementedError('You need to override this function')
class Model(object):
  """Model base class.

  All Model classes should implement this interface.
  The Model serializes and de-serializes between a wire
  format such as JSON and a Python object representation.

  Subclasses must override request() and response(); the bodies here only
  delegate to the module-level _abstract() helper, which raises
  NotImplementedError.
  """

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable.

    Returns:
      A tuple of (headers, path_params, query, body)
        headers: dict, request headers
        path_params: dict, parameters that appear in the request path
        query: string, query part of the request URI
        body: string, the body serialized in the desired wire format.
    """
    _abstract()

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      googleapiclient.errors.HttpError if a non 2xx response is received.
    """
    _abstract()
class BaseModel(Model):
  """Base model class.

  Subclasses should provide implementations for the "serialize" and
  "deserialize" methods, as well as values for the following class attributes.

  Attributes:
    accept: The value to use for the HTTP Accept header.
    content_type: The value to use for the HTTP Content-type header.
    no_content_response: The value to return when deserializing a 204 "No
        Content" response.
    alt_param: The value to supply as the "alt" query parameter for requests.
  """

  accept = None
  content_type = None
  no_content_response = None
  alt_param = None

  def _log_request(self, headers, path_params, query, body):
    """Logs debugging information about the request if requested."""
    # Gated on the module-level dump_request_response flag so logging can be
    # toggled globally without touching individual model instances.
    if dump_request_response:
      logging.info('--request-start--')
      logging.info('-headers-start-')
      for h, v in headers.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-headers-end-')
      logging.info('-path-parameters-start-')
      for h, v in path_params.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-path-parameters-end-')
      logging.info('body: %s', body)
      logging.info('query: %s', query)
      logging.info('--request-end--')

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable by json.

    Returns:
      A tuple of (headers, path_params, query, body)
        headers: dict, request headers
        path_params: dict, parameters that appear in the request path
        query: string, query part of the request URI
        body: string, the body serialized as JSON
    """
    query = self._build_query(query_params)
    headers['accept'] = self.accept
    headers['accept-encoding'] = 'gzip, deflate'
    # Append this library's identifier to any user-agent the caller set.
    if 'user-agent' in headers:
      headers['user-agent'] += ' '
    else:
      headers['user-agent'] = ''
    headers['user-agent'] += 'google-api-python-client/%s (gzip)' % __version__

    if body_value is not None:
      headers['content-type'] = self.content_type
      body_value = self.serialize(body_value)
    self._log_request(headers, path_params, query, body_value)
    return (headers, path_params, query, body_value)

  def _build_query(self, params):
    """Builds a query string.

    Args:
      params: dict, the query parameters

    Returns:
      The query parameters properly encoded into an HTTP URI query string.
    """
    # NOTE: mutates the caller's params dict by injecting the 'alt' key.
    if self.alt_param is not None:
      params.update({'alt': self.alt_param})
    astuples = []
    for key, value in params.iteritems():
      if type(value) == type([]):
        # Repeated parameters: one (key, value) pair per list element.
        for x in value:
          x = x.encode('utf-8')
          astuples.append((key, x))
      else:
        # Duck-typed check for string-like values that need UTF-8 encoding.
        if getattr(value, 'encode', False) and callable(value.encode):
          value = value.encode('utf-8')
        astuples.append((key, value))
    return '?' + urllib.urlencode(astuples)

  def _log_response(self, resp, content):
    """Logs debugging information about the response if requested."""
    if dump_request_response:
      logging.info('--response-start--')
      for h, v in resp.iteritems():
        logging.info('%s: %s', h, v)
      if content:
        logging.info(content)
      logging.info('--response-end--')

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      googleapiclient.errors.HttpError if a non 2xx response is received.
    """
    self._log_response(resp, content)
    # Error handling is TBD, for example, do we retry
    # for some operation/error combinations?
    if resp.status < 300:
      if resp.status == 204:
        # A 204: No Content response should be treated differently
        # to all the other success states
        return self.no_content_response
      return self.deserialize(content)
    else:
      logging.debug('Content from bad request was: %s' % content)
      raise HttpError(resp, content)
class JsonModel(BaseModel):
  """Model class for JSON.

  Serializes and de-serializes between JSON and the Python
  object representation of HTTP request and response bodies.
  """
  accept = 'application/json'
  content_type = 'application/json'
  alt_param = 'json'

  def __init__(self, data_wrapper=False):
    """Construct a JsonModel.

    Args:
      data_wrapper: boolean, wrap requests and responses in a data wrapper
    """
    self._data_wrapper = data_wrapper

  def serialize(self, body_value):
    # Wrap the outgoing body in {'data': ...} when this API uses the data
    # wrapper convention and the caller has not already supplied one.
    needs_wrap = (self._data_wrapper and isinstance(body_value, dict)
                  and 'data' not in body_value)
    if needs_wrap:
      body_value = {'data': body_value}
    return json.dumps(body_value)

  def deserialize(self, content):
    decoded = content.decode('utf-8')
    body = json.loads(decoded)
    # Unwrap {'data': ...} responses for data-wrapper style APIs.
    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
      body = body['data']
    return body

  @property
  def no_content_response(self):
    return {}
class RawModel(JsonModel):
  """Model class for requests that don't return JSON.

  Requests are still serialized as JSON, but the response body is handed
  back to the caller as raw bytes, untouched.
  """
  accept = '*/*'
  content_type = 'application/json'
  alt_param = None

  def deserialize(self, content):
    # Return the response body verbatim; no JSON decoding.
    return content

  @property
  def no_content_response(self):
    return ''
class MediaModel(JsonModel):
  """Model class for requests that return Media.

  Requests are still serialized as JSON, but the response body is handed
  back to the caller as raw bytes; the 'alt=media' query parameter asks the
  server for the media content itself.
  """
  accept = '*/*'
  content_type = 'application/json'
  alt_param = 'media'

  def deserialize(self, content):
    # Return the media bytes verbatim; no JSON decoding.
    return content

  @property
  def no_content_response(self):
    return ''
class ProtocolBufferModel(BaseModel):
  """Model class for protocol buffers.

  Serializes and de-serializes the binary protocol buffer sent in the HTTP
  request and response bodies.
  """
  accept = 'application/x-protobuf'
  content_type = 'application/x-protobuf'
  alt_param = 'proto'

  def __init__(self, protocol_buffer):
    """Constructs a ProtocolBufferModel.

    The serialized protocol buffer returned in an HTTP response will be
    de-serialized using the given protocol buffer class.

    Args:
      protocol_buffer: The protocol buffer class used to de-serialize a
          response from the API.
    """
    self._protocol_buffer = protocol_buffer

  def serialize(self, body_value):
    # body_value is expected to be a protocol buffer message instance.
    return body_value.SerializeToString()

  def deserialize(self, content):
    return self._protocol_buffer.FromString(content)

  @property
  def no_content_response(self):
    # An empty message of the configured type stands in for "no content".
    return self._protocol_buffer()
def makepatch(original, modified):
  """Create a patch object.

  Some methods support PATCH, an efficient way to send updates to a resource.
  This method allows the easy construction of patch bodies by looking at the
  differences between a resource before and after it was modified.

  Args:
    original: object, the original deserialized resource
    modified: object, the modified deserialized resource

  Returns:
    An object that contains only the changes from original to modified, in a
    form suitable to pass to a PATCH method.

  Example usage:
    item = service.activities().get(postid=postid, userid=userid).execute()
    original = copy.deepcopy(item)
    item['object']['content'] = 'This is updated.'
    service.activities.patch(postid=postid, userid=userid,
      body=makepatch(original, item)).execute()
  """
  patch = {}
  # dict.items() works identically on Python 2 and 3, unlike iteritems().
  for key, original_value in original.items():
    modified_value = modified.get(key, None)
    if modified_value is None:
      # Use None to signal that the element is deleted
      patch[key] = None
    elif original_value != modified_value:
      # isinstance is the idiomatic type check (vs. type(x) == type({})).
      if isinstance(original_value, dict):
        # Recursively descend objects
        patch[key] = makepatch(original_value, modified_value)
      else:
        # In the case of simple types or arrays we just replace
        patch[key] = modified_value
    # Unchanged values contribute nothing to the patch.
  # Keys that exist only in the modified resource are additions.
  for key in modified:
    if key not in original:
      patch[key] = modified[key]
  return patch

View File

@ -0,0 +1,102 @@
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for making samples.
Consolidates a lot of code commonly repeated in sample applications.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['init']
import argparse
import httplib2
import os
from googleapiclient import discovery
from oauth2client import client
from oauth2client import file
from oauth2client import tools
def init(argv, name, version, doc, filename, scope=None, parents=None,
         discovery_filename=None):
  """A common initialization routine for samples.

  Many of the sample applications do the same initialization, which has now
  been consolidated into this function. This function uses common idioms found
  in almost all the samples, i.e. for an API with name 'apiname', the
  credentials are stored in a file named apiname.dat, and the
  client_secrets.json file is stored in the same directory as the application
  main file.

  Args:
    argv: list of string, the command-line parameters of the application.
    name: string, name of the API.
    version: string, version of the API.
    doc: string, description of the application. Usually set to __doc__.
    filename: string, filename of the application. Usually set to __file__.
    scope: string, The OAuth scope used.
    parents: list of argparse.ArgumentParser, additional command-line flags.
    discovery_filename: string, name of local discovery file (JSON). Use when
        discovery doc not available via URL.

  Returns:
    A tuple of (service, flags), where service is the service object and flags
    is the parsed command-line flags.
  """
  if scope is None:
    scope = 'https://www.googleapis.com/auth/' + name
  # Avoid the mutable-default-argument pitfall: None stands in for "no
  # extra parsers" and is normalized here.
  if parents is None:
    parents = []

  # Parse command-line arguments.
  parent_parsers = [tools.argparser]
  parent_parsers.extend(parents)
  parser = argparse.ArgumentParser(
      description=doc,
      formatter_class=argparse.RawDescriptionHelpFormatter,
      parents=parent_parsers)
  flags = parser.parse_args(argv[1:])

  # Name of a file containing the OAuth 2.0 information for this
  # application, including client_id and client_secret, which are found
  # on the API Access tab on the Google APIs
  # Console <http://code.google.com/apis/console>.
  client_secrets = os.path.join(os.path.dirname(filename),
                                'client_secrets.json')

  # Set up a Flow object to be used if we need to authenticate.
  flow = client.flow_from_clientsecrets(client_secrets,
      scope=scope,
      message=tools.message_if_missing(client_secrets))

  # Prepare credentials, and authorize HTTP object with them.
  # If the credentials don't exist or are invalid run through the native client
  # flow. The Storage object will ensure that if successful the good
  # credentials will get written back to a file.
  storage = file.Storage(name + '.dat')
  credentials = storage.get()
  if credentials is None or credentials.invalid:
    credentials = tools.run_flow(flow, storage, flags)
  http = credentials.authorize(http=httplib2.Http())

  if discovery_filename is None:
    # Construct a service object via the discovery service.
    service = discovery.build(name, version, http=http)
  else:
    # Construct a service object using a local discovery document file.
    with open(discovery_filename) as discovery_file:
      service = discovery.build_from_document(
          discovery_file.read(),
          base='https://www.googleapis.com/',
          http=http)
  return (service, flags)

311
googleapiclient/schema.py Normal file
View File

@ -0,0 +1,311 @@
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Schema processing for discovery based APIs
Schemas holds an APIs discovery schemas. It can return those schema as
deserialized JSON objects, or pretty print them as prototype objects that
conform to the schema.
For example, given the schema:
schema = \"\"\"{
"Foo": {
"type": "object",
"properties": {
"etag": {
"type": "string",
"description": "ETag of the collection."
},
"kind": {
"type": "string",
"description": "Type of the collection ('calendar#acl').",
"default": "calendar#acl"
},
"nextPageToken": {
"type": "string",
"description": "Token used to access the next
page of this result. Omitted if no further results are available."
}
}
}
}\"\"\"
s = Schemas(schema)
print s.prettyPrintByName('Foo')
Produces the following output:
{
"nextPageToken": "A String", # Token used to access the
# next page of this result. Omitted if no further results are available.
"kind": "A String", # Type of the collection ('calendar#acl').
"etag": "A String", # ETag of the collection.
},
The constructor takes a discovery document in which to look up named schema.
"""
# TODO(jcgregorio) support format, enum, minimum, maximum
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import copy
from oauth2client import util
class Schemas(object):
  """Schemas for an API."""

  def __init__(self, discovery):
    """Constructor.

    Args:
      discovery: object, Deserialized discovery document from which we pull
        out the named schema.
    """
    self.schemas = discovery.get('schemas', {})

    # Cache of pretty printed schemas, keyed by schema name.
    self.pretty = {}

  @util.positional(2)
  def _prettyPrintByName(self, name, seen=None, dent=0):
    """Get pretty printed object prototype from the schema name.

    Args:
      name: string, Name of schema in the discovery document.
      seen: list of string, Names of schema already seen. Used to handle
        recursive definitions.
      dent: int, Initial indentation depth.

    Returns:
      string, A string that contains a prototype object with
        comments that conforms to the given schema.
    """
    if seen is None:
      seen = []

    if name in seen:
      # Do not fall into an infinite loop over recursive definitions.
      return '# Object with schema name: %s' % name
    seen.append(name)

    if name not in self.pretty:
      # _SchemaToStruct calls back into this method (via to_str) to expand
      # $ref'd schemas; 'seen' carries the recursion guard through.
      self.pretty[name] = _SchemaToStruct(self.schemas[name],
          seen, dent=dent).to_str(self._prettyPrintByName)

    seen.pop()

    return self.pretty[name]

  def prettyPrintByName(self, name):
    """Get pretty printed object prototype from the schema name.

    Args:
      name: string, Name of schema in the discovery document.

    Returns:
      string, A string that contains a prototype object with
        comments that conforms to the given schema.
    """
    # Return with trailing comma and newline removed.
    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]

  @util.positional(2)
  def _prettyPrintSchema(self, schema, seen=None, dent=0):
    """Get pretty printed object prototype of schema.

    Args:
      schema: object, Parsed JSON schema.
      seen: list of string, Names of schema already seen. Used to handle
        recursive definitions.
      dent: int, Initial indentation depth.

    Returns:
      string, A string that contains a prototype object with
        comments that conforms to the given schema.
    """
    if seen is None:
      seen = []

    return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)

  def prettyPrintSchema(self, schema):
    """Get pretty printed object prototype of schema.

    Args:
      schema: object, Parsed JSON schema.

    Returns:
      string, A string that contains a prototype object with
        comments that conforms to the given schema.
    """
    # Return with trailing comma and newline removed.
    return self._prettyPrintSchema(schema, dent=1)[:-2]

  def get(self, name):
    """Get deserialized JSON schema from the schema name.

    Args:
      name: string, Schema name.

    Returns:
      object, The deserialized JSON schema.
    """
    return self.schemas[name]
class _SchemaToStruct(object):
  """Convert a parsed JSON schema into a commented Python prototype object.

  The conversion walks the schema recursively, accumulating fragments of
  Python-syntax text in self.value; to_str() drives the walk and returns the
  joined result. Schema '$ref' references are resolved through a caller
  supplied cache function so recursive definitions terminate.
  """

  @util.positional(3)
  def __init__(self, schema, seen, dent=0):
    """Constructor.

    Args:
      schema: object, Parsed JSON schema.
      seen: list, List of names of schema already seen while parsing. Used to
        handle recursive definitions.
      dent: int, Initial indentation depth.
    """
    # The result of this parsing kept as list of strings.
    self.value = []

    # The final value of the parsing.
    self.string = None

    # The parsed JSON schema.
    self.schema = schema

    # Indentation level.
    self.dent = dent

    # Method that when called returns a prototype object for the schema with
    # the given name.
    self.from_cache = None

    # List of names of schema already seen while parsing.
    self.seen = seen

  def emit(self, text):
    """Add text as a line to the output.

    Args:
      text: string, Text to output.
    """
    self.value.extend([" " * self.dent, text, '\n'])

  def emitBegin(self, text):
    """Add text to the output, but with no line terminator.

    Args:
      text: string, Text to output.
    """
    self.value.extend([" " * self.dent, text])

  def emitEnd(self, text, comment):
    """Add text and comment to the output with line terminator.

    Args:
      text: string, Text to output.
      comment: string, Python comment.  May span multiple lines; each line
        is re-indented under the current emit position.
    """
    if comment:
      divider = '\n' + ' ' * (self.dent + 2) + '# '
      lines = comment.splitlines()
      lines = [x.rstrip() for x in lines]
      comment = divider.join(lines)
      self.value.extend([text, ' # ', comment, '\n'])
    else:
      self.value.extend([text, '\n'])

  def indent(self):
    """Increase indentation level."""
    self.dent += 1

  def undent(self):
    """Decrease indentation level."""
    self.dent -= 1

  def _to_str_impl(self, schema):
    """Prototype object based on the schema, in Python code with comments.

    Args:
      schema: object, Parsed JSON schema file.

    Returns:
      Prototype object based on the schema, in Python code with comments.
    """
    stype = schema.get('type')
    if stype == 'object':
      self.emitEnd('{', schema.get('description', ''))
      self.indent()
      if 'properties' in schema:
        # items() rather than iteritems(): identical behavior on Python 2
        # and keeps this code importable on Python 3.
        for pname, pschema in schema.get('properties', {}).items():
          self.emitBegin('"%s": ' % pname)
          self._to_str_impl(pschema)
      elif 'additionalProperties' in schema:
        self.emitBegin('"a_key": ')
        self._to_str_impl(schema['additionalProperties'])
      self.undent()
      self.emit('},')
    elif '$ref' in schema:
      # Expand the referenced schema via the cache; re-indent its lines so
      # they line up under the current position.
      schemaName = schema['$ref']
      description = schema.get('description', '')
      s = self.from_cache(schemaName, seen=self.seen)
      parts = s.splitlines()
      self.emitEnd(parts[0], description)
      for line in parts[1:]:
        self.emit(line.rstrip())
    elif stype == 'boolean':
      value = schema.get('default', 'True or False')
      self.emitEnd('%s,' % str(value), schema.get('description', ''))
    elif stype == 'string':
      value = schema.get('default', 'A String')
      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
    elif stype == 'integer':
      value = schema.get('default', '42')
      self.emitEnd('%s,' % str(value), schema.get('description', ''))
    elif stype == 'number':
      value = schema.get('default', '3.14')
      self.emitEnd('%s,' % str(value), schema.get('description', ''))
    elif stype == 'null':
      self.emitEnd('None,', schema.get('description', ''))
    elif stype == 'any':
      self.emitEnd('"",', schema.get('description', ''))
    elif stype == 'array':
      # Consistency fix: default the description to '' like every other
      # branch; schema.get('description') with no default could hand None
      # to emitEnd where a string is expected.
      self.emitEnd('[', schema.get('description', ''))
      self.indent()
      self.emitBegin('')
      self._to_str_impl(schema['items'])
      self.undent()
      self.emit('],')
    else:
      self.emit('Unknown type! %s' % stype)
      self.emitEnd('', '')

    self.string = ''.join(self.value)
    return self.string

  def to_str(self, from_cache):
    """Prototype object based on the schema, in Python code with comments.

    Args:
      from_cache: callable(name, seen), Callable that retrieves an object
        prototype for a schema with the given name. Seen is a list of schema
        names already seen as we recursively descend the schema definition.

    Returns:
      Prototype object based on the schema, in Python code with comments.
      The lines of the code will all be properly indented.
    """
    self.from_cache = from_cache
    return self._to_str_impl(self.schema)

View File

@ -1,5 +1,8 @@
__version__ = "1.2" """Client library for using OAuth2, especially with Google APIs."""
__version__ = '1.3.1'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth' GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token' GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'

View File

@ -1,4 +1,4 @@
# Copyright (C) 2010 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -19,14 +19,14 @@ Utilities for making it easier to use OAuth 2.0 on Google App Engine.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import base64
import cgi import cgi
import httplib2 import json
import logging import logging
import os import os
import pickle import pickle
import threading import threading
import time
import httplib2
from google.appengine.api import app_identity from google.appengine.api import app_identity
from google.appengine.api import memcache from google.appengine.api import memcache
@ -41,7 +41,6 @@ from oauth2client import GOOGLE_TOKEN_URI
from oauth2client import clientsecrets from oauth2client import clientsecrets
from oauth2client import util from oauth2client import util
from oauth2client import xsrfutil from oauth2client import xsrfutil
from oauth2client.anyjson import simplejson
from oauth2client.client import AccessTokenRefreshError from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import AssertionCredentials from oauth2client.client import AssertionCredentials
from oauth2client.client import Credentials from oauth2client.client import Credentials
@ -159,15 +158,20 @@ class AppAssertionCredentials(AssertionCredentials):
Args: Args:
scope: string or iterable of strings, scope(s) of the credentials being scope: string or iterable of strings, scope(s) of the credentials being
requested. requested.
**kwargs: optional keyword args, including:
service_account_id: service account id of the application. If None or
unspecified, the default service account for the app is used.
""" """
self.scope = util.scopes_to_string(scope) self.scope = util.scopes_to_string(scope)
self._kwargs = kwargs
self.service_account_id = kwargs.get('service_account_id', None)
# Assertion type is no longer used, but still in the parent class signature. # Assertion type is no longer used, but still in the parent class signature.
super(AppAssertionCredentials, self).__init__(None) super(AppAssertionCredentials, self).__init__(None)
@classmethod @classmethod
def from_json(cls, json): def from_json(cls, json_data):
data = simplejson.loads(json) data = json.loads(json_data)
return AppAssertionCredentials(data['scope']) return AppAssertionCredentials(data['scope'])
def _refresh(self, http_request): def _refresh(self, http_request):
@ -186,11 +190,22 @@ class AppAssertionCredentials(AssertionCredentials):
""" """
try: try:
scopes = self.scope.split() scopes = self.scope.split()
(token, _) = app_identity.get_access_token(scopes) (token, _) = app_identity.get_access_token(
except app_identity.Error, e: scopes, service_account_id=self.service_account_id)
except app_identity.Error as e:
raise AccessTokenRefreshError(str(e)) raise AccessTokenRefreshError(str(e))
self.access_token = token self.access_token = token
@property
def serialization_data(self):
raise NotImplementedError('Cannot serialize credentials for AppEngine.')
def create_scoped_required(self):
return not self.scope
def create_scoped(self, scopes):
return AppAssertionCredentials(scopes, **self._kwargs)
class FlowProperty(db.Property): class FlowProperty(db.Property):
"""App Engine datastore Property for Flow. """App Engine datastore Property for Flow.
@ -434,6 +449,7 @@ class StorageByKeyName(Storage):
entity_key = db.Key.from_path(self._model.kind(), self._key_name) entity_key = db.Key.from_path(self._model.kind(), self._key_name)
db.delete(entity_key) db.delete(entity_key)
@db.non_transactional(allow_existing=True)
def locked_get(self): def locked_get(self):
"""Retrieve Credential from datastore. """Retrieve Credential from datastore.
@ -456,6 +472,7 @@ class StorageByKeyName(Storage):
credentials.set_store(self) credentials.set_store(self)
return credentials return credentials
@db.non_transactional(allow_existing=True)
def locked_put(self, credentials): def locked_put(self, credentials):
"""Write a Credentials to the datastore. """Write a Credentials to the datastore.
@ -468,6 +485,7 @@ class StorageByKeyName(Storage):
if self._cache: if self._cache:
self._cache.set(self._key_name, credentials.to_json()) self._cache.set(self._key_name, credentials.to_json())
@db.non_transactional(allow_existing=True)
def locked_delete(self): def locked_delete(self):
"""Delete Credential from datastore.""" """Delete Credential from datastore."""
@ -650,8 +668,9 @@ class OAuth2Decorator(object):
provided to this constructor. A string indicating the name of the field provided to this constructor. A string indicating the name of the field
on the _credentials_class where a Credentials object will be stored. on the _credentials_class where a Credentials object will be stored.
Defaults to 'credentials'. Defaults to 'credentials'.
**kwargs: dict, Keyword arguments are be passed along as kwargs to the **kwargs: dict, Keyword arguments are passed along as kwargs to
OAuth2WebServerFlow constructor. the OAuth2WebServerFlow constructor.
""" """
self._tls = threading.local() self._tls = threading.local()
self.flow = None self.flow = None
@ -798,14 +817,18 @@ class OAuth2Decorator(object):
url = self.flow.step1_get_authorize_url() url = self.flow.step1_get_authorize_url()
return str(url) return str(url)
def http(self): def http(self, *args, **kwargs):
"""Returns an authorized http instance. """Returns an authorized http instance.
Must only be called from within an @oauth_required decorated method, or Must only be called from within an @oauth_required decorated method, or
from within an @oauth_aware decorated method where has_credentials() from within an @oauth_aware decorated method where has_credentials()
returns True. returns True.
Args:
*args: Positional arguments passed to httplib2.Http constructor.
**kwargs: Positional arguments passed to httplib2.Http constructor.
""" """
return self.credentials.authorize(httplib2.Http()) return self.credentials.authorize(httplib2.Http(*args, **kwargs))
@property @property
def callback_path(self): def callback_path(self):
@ -858,7 +881,7 @@ class OAuth2Decorator(object):
user) user)
if decorator._token_response_param and credentials.token_response: if decorator._token_response_param and credentials.token_response:
resp_json = simplejson.dumps(credentials.token_response) resp_json = json.dumps(credentials.token_response)
redirect_uri = util._add_query_parameter( redirect_uri = util._add_query_parameter(
redirect_uri, decorator._token_response_param, resp_json) redirect_uri, decorator._token_response_param, resp_json)
@ -904,7 +927,7 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
""" """
@util.positional(3) @util.positional(3)
def __init__(self, filename, scope, message=None, cache=None): def __init__(self, filename, scope, message=None, cache=None, **kwargs):
"""Constructor """Constructor
Args: Args:
@ -917,17 +940,20 @@ class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
decorator. decorator.
cache: An optional cache service client that implements get() and set() cache: An optional cache service client that implements get() and set()
methods. See clientsecrets.loadfile() for details. methods. See clientsecrets.loadfile() for details.
**kwargs: dict, Keyword arguments are passed along as kwargs to
the OAuth2WebServerFlow constructor.
""" """
client_type, client_info = clientsecrets.loadfile(filename, cache=cache) client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
if client_type not in [ if client_type not in [
clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]: clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]:
raise InvalidClientSecretsError( raise InvalidClientSecretsError(
'OAuth2Decorator doesn\'t support this OAuth 2.0 flow.') "OAuth2Decorator doesn't support this OAuth 2.0 flow.")
constructor_kwargs = { constructor_kwargs = dict(kwargs)
'auth_uri': client_info['auth_uri'], constructor_kwargs.update({
'token_uri': client_info['token_uri'], 'auth_uri': client_info['auth_uri'],
'message': message, 'token_uri': client_info['token_uri'],
} 'message': message,
})
revoke_uri = client_info.get('revoke_uri') revoke_uri = client_info.get('revoke_uri')
if revoke_uri is not None: if revoke_uri is not None:
constructor_kwargs['revoke_uri'] = revoke_uri constructor_kwargs['revoke_uri'] = revoke_uri

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
# Copyright (C) 2011 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -20,8 +20,8 @@ an OAuth 2.0 protected service.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import json
from anyjson import simplejson
# Properties that make a client_secrets.json file valid. # Properties that make a client_secrets.json file valid.
TYPE_WEB = 'web' TYPE_WEB = 'web'
@ -87,12 +87,12 @@ def _validate_clientsecrets(obj):
def load(fp): def load(fp):
obj = simplejson.load(fp) obj = json.load(fp)
return _validate_clientsecrets(obj) return _validate_clientsecrets(obj)
def loads(s): def loads(s):
obj = simplejson.loads(s) obj = json.loads(s)
return _validate_clientsecrets(obj) return _validate_clientsecrets(obj)
@ -100,7 +100,7 @@ def _loadfile(filename):
try: try:
fp = file(filename, 'r') fp = file(filename, 'r')
try: try:
obj = simplejson.load(fp) obj = json.load(fp)
finally: finally:
fp.close() fp.close()
except IOError: except IOError:

View File

@ -1,7 +1,6 @@
#!/usr/bin/python2.4
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
# Copyright (C) 2011 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -14,14 +13,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Crypto-related routines for oauth2client."""
import base64 import base64
import hashlib import json
import logging import logging
import time import time
from anyjson import simplejson
CLOCK_SKEW_SECS = 300 # 5 minutes in seconds CLOCK_SKEW_SECS = 300 # 5 minutes in seconds
AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds
@ -38,7 +36,6 @@ class AppIdentityError(Exception):
try: try:
from OpenSSL import crypto from OpenSSL import crypto
class OpenSSLVerifier(object): class OpenSSLVerifier(object):
"""Verifies the signature on a message.""" """Verifies the signature on a message."""
@ -125,10 +122,11 @@ try:
Raises: Raises:
OpenSSL.crypto.Error if the key can't be parsed. OpenSSL.crypto.Error if the key can't be parsed.
""" """
if key.startswith('-----BEGIN '): parsed_pem_key = _parse_pem_key(key)
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key) if parsed_pem_key:
pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
else: else:
pkey = crypto.load_pkcs12(key, password).get_privatekey() pkey = crypto.load_pkcs12(key, password.encode('utf8')).get_privatekey()
return OpenSSLSigner(pkey) return OpenSSLSigner(pkey)
except ImportError: except ImportError:
@ -140,6 +138,7 @@ try:
from Crypto.PublicKey import RSA from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256 from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5 from Crypto.Signature import PKCS1_v1_5
from Crypto.Util.asn1 import DerSequence
class PyCryptoVerifier(object): class PyCryptoVerifier(object):
@ -181,14 +180,15 @@ try:
Returns: Returns:
Verifier instance. Verifier instance.
Raises:
NotImplementedError if is_x509_cert is true.
""" """
if is_x509_cert: if is_x509_cert:
raise NotImplementedError( pemLines = key_pem.replace(' ', '').split()
'X509 certs are not supported by the PyCrypto library. ' certDer = _urlsafe_b64decode(''.join(pemLines[1:-1]))
'Try using PyOpenSSL if native code is an option.') certSeq = DerSequence()
certSeq.decode(certDer)
tbsSeq = DerSequence()
tbsSeq.decode(certSeq[0])
pubkey = RSA.importKey(tbsSeq[6])
else: else:
pubkey = RSA.importKey(key_pem) pubkey = RSA.importKey(key_pem)
return PyCryptoVerifier(pubkey) return PyCryptoVerifier(pubkey)
@ -230,11 +230,12 @@ try:
Raises: Raises:
NotImplementedError if they key isn't in PEM format. NotImplementedError if they key isn't in PEM format.
""" """
if key.startswith('-----BEGIN '): parsed_pem_key = _parse_pem_key(key)
pkey = RSA.importKey(key) if parsed_pem_key:
pkey = RSA.importKey(parsed_pem_key)
else: else:
raise NotImplementedError( raise NotImplementedError(
'PKCS12 format is not supported by the PyCrpto library. ' 'PKCS12 format is not supported by the PyCrypto library. '
'Try converting to a "PEM" ' 'Try converting to a "PEM" '
'(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) ' '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) '
'or using PyOpenSSL if native code is an option.') 'or using PyOpenSSL if native code is an option.')
@ -256,6 +257,23 @@ else:
'PyOpenSSL, or PyCrypto 2.6 or later') 'PyOpenSSL, or PyCrypto 2.6 or later')
def _parse_pem_key(raw_key_input):
"""Identify and extract PEM keys.
Determines whether the given key is in the format of PEM key, and extracts
the relevant part of the key if it is.
Args:
raw_key_input: The contents of a private key file (either PEM or PKCS12).
Returns:
string, The actual key if the contents are from a PEM file, or else None.
"""
offset = raw_key_input.find('-----BEGIN ')
if offset != -1:
return raw_key_input[offset:]
def _urlsafe_b64encode(raw_bytes): def _urlsafe_b64encode(raw_bytes):
return base64.urlsafe_b64encode(raw_bytes).rstrip('=') return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
@ -268,7 +286,7 @@ def _urlsafe_b64decode(b64string):
def _json_encode(data): def _json_encode(data):
return simplejson.dumps(data, separators = (',', ':')) return json.dumps(data, separators=(',', ':'))
def make_signed_jwt(signer, payload): def make_signed_jwt(signer, payload):
@ -286,8 +304,8 @@ def make_signed_jwt(signer, payload):
header = {'typ': 'JWT', 'alg': 'RS256'} header = {'typ': 'JWT', 'alg': 'RS256'}
segments = [ segments = [
_urlsafe_b64encode(_json_encode(header)), _urlsafe_b64encode(_json_encode(header)),
_urlsafe_b64encode(_json_encode(payload)), _urlsafe_b64encode(_json_encode(payload)),
] ]
signing_input = '.'.join(segments) signing_input = '.'.join(segments)
@ -318,9 +336,8 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
""" """
segments = jwt.split('.') segments = jwt.split('.')
if (len(segments) != 3): if len(segments) != 3:
raise AppIdentityError( raise AppIdentityError('Wrong number of segments in token: %s' % jwt)
'Wrong number of segments in token: %s' % jwt)
signed = '%s.%s' % (segments[0], segments[1]) signed = '%s.%s' % (segments[0], segments[1])
signature = _urlsafe_b64decode(segments[2]) signature = _urlsafe_b64decode(segments[2])
@ -328,15 +345,15 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
# Parse token. # Parse token.
json_body = _urlsafe_b64decode(segments[1]) json_body = _urlsafe_b64decode(segments[1])
try: try:
parsed = simplejson.loads(json_body) parsed = json.loads(json_body)
except: except:
raise AppIdentityError('Can\'t parse token: %s' % json_body) raise AppIdentityError('Can\'t parse token: %s' % json_body)
# Check signature. # Check signature.
verified = False verified = False
for (keyname, pem) in certs.items(): for _, pem in certs.items():
verifier = Verifier.from_string(pem, True) verifier = Verifier.from_string(pem, True)
if (verifier.verify(signed, signature)): if verifier.verify(signed, signature):
verified = True verified = True
break break
if not verified: if not verified:
@ -354,16 +371,15 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
if exp is None: if exp is None:
raise AppIdentityError('No exp field in token: %s' % json_body) raise AppIdentityError('No exp field in token: %s' % json_body)
if exp >= now + MAX_TOKEN_LIFETIME_SECS: if exp >= now + MAX_TOKEN_LIFETIME_SECS:
raise AppIdentityError( raise AppIdentityError('exp field too far in future: %s' % json_body)
'exp field too far in future: %s' % json_body)
latest = exp + CLOCK_SKEW_SECS latest = exp + CLOCK_SKEW_SECS
if now < earliest: if now < earliest:
raise AppIdentityError('Token used too early, %d < %d: %s' % raise AppIdentityError('Token used too early, %d < %d: %s' %
(now, earliest, json_body)) (now, earliest, json_body))
if now > latest: if now > latest:
raise AppIdentityError('Token used too late, %d > %d: %s' % raise AppIdentityError('Token used too late, %d > %d: %s' %
(now, latest, json_body)) (now, latest, json_body))
# Check audience. # Check audience.
if audience is not None: if audience is not None:
@ -372,6 +388,6 @@ def verify_signed_jwt_with_certs(jwt, certs, audience):
raise AppIdentityError('No aud field in token: %s' % json_body) raise AppIdentityError('No aud field in token: %s' % json_body)
if aud != audience: if aud != audience:
raise AppIdentityError('Wrong recipient, %s != %s: %s' % raise AppIdentityError('Wrong recipient, %s != %s: %s' %
(aud, audience, json_body)) (aud, audience, json_body))
return parsed return parsed

View File

@ -1,4 +1,4 @@
# Copyright (C) 2010 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -116,14 +116,21 @@ class Storage(BaseStorage):
credential.set_store(self) credential.set_store(self)
return credential return credential
def locked_put(self, credentials): def locked_put(self, credentials, overwrite=False):
"""Write a Credentials to the datastore. """Write a Credentials to the datastore.
Args: Args:
credentials: Credentials, the credentials to store. credentials: Credentials, the credentials to store.
overwrite: Boolean, indicates whether you would like these credentials to
overwrite any existing stored credentials.
""" """
args = {self.key_name: self.key_value} args = {self.key_name: self.key_value}
entity = self.model_class(**args)
if overwrite:
entity, unused_is_new = self.model_class.objects.get_or_create(**args)
else:
entity = self.model_class(**args)
setattr(entity, self.property_name, credentials) setattr(entity, self.property_name, credentials)
entity.save() entity.save()

View File

@ -1,4 +1,4 @@
# Copyright (C) 2010 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -21,12 +21,10 @@ credentials.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import os import os
import stat
import threading import threading
from anyjson import simplejson from oauth2client.client import Credentials
from client import Storage as BaseStorage from oauth2client.client import Storage as BaseStorage
from client import Credentials
class CredentialsFileSymbolicLinkError(Exception): class CredentialsFileSymbolicLinkError(Exception):

View File

@ -1,4 +1,4 @@
# Copyright (C) 2012 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -19,12 +19,11 @@ Utilities for making it easier to use OAuth 2.0 on Google Compute Engine.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import httplib2 import json
import logging import logging
import uritemplate import urllib
from oauth2client import util from oauth2client import util
from oauth2client.anyjson import simplejson
from oauth2client.client import AccessTokenRefreshError from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import AssertionCredentials from oauth2client.client import AssertionCredentials
@ -57,13 +56,14 @@ class AppAssertionCredentials(AssertionCredentials):
requested. requested.
""" """
self.scope = util.scopes_to_string(scope) self.scope = util.scopes_to_string(scope)
self.kwargs = kwargs
# Assertion type is no longer used, but still in the parent class signature. # Assertion type is no longer used, but still in the parent class signature.
super(AppAssertionCredentials, self).__init__(None) super(AppAssertionCredentials, self).__init__(None)
@classmethod @classmethod
def from_json(cls, json): def from_json(cls, json_data):
data = simplejson.loads(json) data = json.loads(json_data)
return AppAssertionCredentials(data['scope']) return AppAssertionCredentials(data['scope'])
def _refresh(self, http_request): def _refresh(self, http_request):
@ -78,13 +78,28 @@ class AppAssertionCredentials(AssertionCredentials):
Raises: Raises:
AccessTokenRefreshError: When the refresh fails. AccessTokenRefreshError: When the refresh fails.
""" """
uri = uritemplate.expand(META, {'scope': self.scope}) query = '?scope=%s' % urllib.quote(self.scope, '')
uri = META.replace('{?scope}', query)
response, content = http_request(uri) response, content = http_request(uri)
if response.status == 200: if response.status == 200:
try: try:
d = simplejson.loads(content) d = json.loads(content)
except StandardError, e: except StandardError as e:
raise AccessTokenRefreshError(str(e)) raise AccessTokenRefreshError(str(e))
self.access_token = d['accessToken'] self.access_token = d['accessToken']
else: else:
if response.status == 404:
content += (' This can occur if a VM was created'
' with no service account or scopes.')
raise AccessTokenRefreshError(content) raise AccessTokenRefreshError(content)
@property
def serialization_data(self):
raise NotImplementedError(
'Cannot serialize credentials for GCE service accounts.')
def create_scoped_required(self):
return not self.scope
def create_scoped(self, scopes):
return AppAssertionCredentials(scopes, **self.kwargs)

View File

@ -1,4 +1,4 @@
# Copyright (C) 2012 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -19,11 +19,12 @@ A Storage for Credentials that uses the keyring module.
__author__ = 'jcgregorio@google.com (Joe Gregorio)' __author__ = 'jcgregorio@google.com (Joe Gregorio)'
import keyring
import threading import threading
from client import Storage as BaseStorage import keyring
from client import Credentials
from oauth2client.client import Credentials
from oauth2client.client import Storage as BaseStorage
class Storage(BaseStorage): class Storage(BaseStorage):

View File

@ -1,4 +1,4 @@
# Copyright 2011 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -70,6 +70,7 @@ class _Opener(object):
self._mode = mode self._mode = mode
self._fallback_mode = fallback_mode self._fallback_mode = fallback_mode
self._fh = None self._fh = None
self._lock_fd = None
def is_locked(self): def is_locked(self):
"""Was the file locked.""" """Was the file locked."""
@ -122,7 +123,7 @@ class _PosixOpener(_Opener):
validate_file(self._filename) validate_file(self._filename)
try: try:
self._fh = open(self._filename, self._mode) self._fh = open(self._filename, self._mode)
except IOError, e: except IOError as e:
# If we can't access with _mode, try _fallback_mode and don't lock. # If we can't access with _mode, try _fallback_mode and don't lock.
if e.errno == errno.EACCES: if e.errno == errno.EACCES:
self._fh = open(self._filename, self._fallback_mode) self._fh = open(self._filename, self._fallback_mode)
@ -137,12 +138,12 @@ class _PosixOpener(_Opener):
self._locked = True self._locked = True
break break
except OSError, e: except OSError as e:
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
raise raise
if (time.time() - start_time) >= timeout: if (time.time() - start_time) >= timeout:
logger.warn('Could not acquire lock %s in %s seconds' % ( logger.warn('Could not acquire lock %s in %s seconds',
lock_filename, timeout)) lock_filename, timeout)
# Close the file and open in fallback_mode. # Close the file and open in fallback_mode.
if self._fh: if self._fh:
self._fh.close() self._fh.close()
@ -192,9 +193,9 @@ try:
validate_file(self._filename) validate_file(self._filename)
try: try:
self._fh = open(self._filename, self._mode) self._fh = open(self._filename, self._mode)
except IOError, e: except IOError as e:
# If we can't access with _mode, try _fallback_mode and don't lock. # If we can't access with _mode, try _fallback_mode and don't lock.
if e.errno == errno.EACCES: if e.errno in (errno.EPERM, errno.EACCES):
self._fh = open(self._filename, self._fallback_mode) self._fh = open(self._filename, self._fallback_mode)
return return
@ -204,7 +205,7 @@ try:
fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX) fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
self._locked = True self._locked = True
return return
except IOError, e: except IOError as e:
# If not retrying, then just pass on the error. # If not retrying, then just pass on the error.
if timeout == 0: if timeout == 0:
raise e raise e
@ -212,8 +213,8 @@ try:
raise e raise e
# We could not acquire the lock. Try again. # We could not acquire the lock. Try again.
if (time.time() - start_time) >= timeout: if (time.time() - start_time) >= timeout:
logger.warn('Could not lock %s in %s seconds' % ( logger.warn('Could not lock %s in %s seconds',
self._filename, timeout)) self._filename, timeout)
if self._fh: if self._fh:
self._fh.close() self._fh.close()
self._fh = open(self._filename, self._fallback_mode) self._fh = open(self._filename, self._fallback_mode)
@ -267,7 +268,7 @@ try:
validate_file(self._filename) validate_file(self._filename)
try: try:
self._fh = open(self._filename, self._mode) self._fh = open(self._filename, self._mode)
except IOError, e: except IOError as e:
# If we can't access with _mode, try _fallback_mode and don't lock. # If we can't access with _mode, try _fallback_mode and don't lock.
if e.errno == errno.EACCES: if e.errno == errno.EACCES:
self._fh = open(self._filename, self._fallback_mode) self._fh = open(self._filename, self._fallback_mode)
@ -284,7 +285,7 @@ try:
pywintypes.OVERLAPPED()) pywintypes.OVERLAPPED())
self._locked = True self._locked = True
return return
except pywintypes.error, e: except pywintypes.error as e:
if timeout == 0: if timeout == 0:
raise e raise e
@ -308,7 +309,7 @@ try:
try: try:
hfile = win32file._get_osfhandle(self._fh.fileno()) hfile = win32file._get_osfhandle(self._fh.fileno())
win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED()) win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED())
except pywintypes.error, e: except pywintypes.error as e:
if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR: if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
raise raise
self._locked = False self._locked = False

View File

@ -1,4 +1,4 @@
# Copyright 2011 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -43,17 +43,15 @@ The format of the stored data is like so:
__author__ = 'jbeda@google.com (Joe Beda)' __author__ = 'jbeda@google.com (Joe Beda)'
import base64 import json
import errno
import logging import logging
import os import os
import threading import threading
from anyjson import simplejson
from oauth2client.client import Storage as BaseStorage
from oauth2client.client import Credentials from oauth2client.client import Credentials
from oauth2client.client import Storage as BaseStorage
from oauth2client import util from oauth2client import util
from locked_file import LockedFile from oauth2client.locked_file import LockedFile
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -286,7 +284,7 @@ class _MultiStore(object):
if self._warn_on_readonly: if self._warn_on_readonly:
logger.warn('The credentials file (%s) is not writable. Opening in ' logger.warn('The credentials file (%s) is not writable. Opening in '
'read-only mode. Any refreshed credentials will only be ' 'read-only mode. Any refreshed credentials will only be '
'valid for this run.' % self._file.filename()) 'valid for this run.', self._file.filename())
if os.path.getsize(self._file.filename()) == 0: if os.path.getsize(self._file.filename()) == 0:
logger.debug('Initializing empty multistore file') logger.debug('Initializing empty multistore file')
# The multistore is empty so write out an empty file. # The multistore is empty so write out an empty file.
@ -315,7 +313,7 @@ class _MultiStore(object):
""" """
assert self._thread_lock.locked() assert self._thread_lock.locked()
self._file.file_handle().seek(0) self._file.file_handle().seek(0)
return simplejson.load(self._file.file_handle()) return json.load(self._file.file_handle())
def _locked_json_write(self, data): def _locked_json_write(self, data):
"""Write a JSON serializable data structure to the multistore. """Write a JSON serializable data structure to the multistore.
@ -329,7 +327,7 @@ class _MultiStore(object):
if self._read_only: if self._read_only:
return return
self._file.file_handle().seek(0) self._file.file_handle().seek(0)
simplejson.dump(data, self._file.file_handle(), sort_keys=True, indent=2) json.dump(data, self._file.file_handle(), sort_keys=True, indent=2, separators=(',', ': '))
self._file.file_handle().truncate() self._file.file_handle().truncate()
def _refresh_data_cache(self): def _refresh_data_cache(self):
@ -387,7 +385,7 @@ class _MultiStore(object):
raw_key = cred_entry['key'] raw_key = cred_entry['key']
key = util.dict_to_tuple_key(raw_key) key = util.dict_to_tuple_key(raw_key)
credential = None credential = None
credential = Credentials.new_from_json(simplejson.dumps(cred_entry['credential'])) credential = Credentials.new_from_json(json.dumps(cred_entry['credential']))
return (key, credential) return (key, credential)
def _write(self): def _write(self):
@ -400,7 +398,7 @@ class _MultiStore(object):
raw_data['data'] = raw_creds raw_data['data'] = raw_creds
for (cred_key, cred) in self._data.items(): for (cred_key, cred) in self._data.items():
raw_key = dict(cred_key) raw_key = dict(cred_key)
raw_cred = simplejson.loads(cred.to_json()) raw_cred = json.loads(cred.to_json())
raw_creds.append({'key': raw_key, 'credential': raw_cred}) raw_creds.append({'key': raw_key, 'credential': raw_cred})
self._locked_json_write(raw_data) self._locked_json_write(raw_data)

View File

@ -1,4 +1,4 @@
# Copyright (C) 2013 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -96,7 +96,7 @@ def run(flow, storage, http=None):
try: try:
httpd = ClientRedirectServer((FLAGS.auth_host_name, port), httpd = ClientRedirectServer((FLAGS.auth_host_name, port),
ClientRedirectHandler) ClientRedirectHandler)
except socket.error, e: except socket.error as e:
pass pass
else: else:
success = True success = True
@ -150,7 +150,7 @@ def run(flow, storage, http=None):
try: try:
credential = flow.step2_exchange(code, http=http) credential = flow.step2_exchange(code, http=http)
except client.FlowExchangeError, e: except client.FlowExchangeError as e:
sys.exit('Authentication has failed: %s' % e) sys.exit('Authentication has failed: %s' % e)
storage.put(credential) storage.put(credential)

View File

@ -0,0 +1,132 @@
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A service account credentials class.
This credentials class is implemented on top of rsa library.
"""
import base64
import json
import time
from pyasn1.codec.ber import decoder
from pyasn1_modules.rfc5208 import PrivateKeyInfo
import rsa
from oauth2client import GOOGLE_REVOKE_URI
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client import util
from oauth2client.client import AssertionCredentials
class _ServiceAccountCredentials(AssertionCredentials):
  """Class representing a service account (signed JWT) credential."""

  # Google's OAuth2 token endpoint caps access tokens at one hour; every
  # assertion requests the maximum allowed lifetime.
  MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds

  def __init__(self, service_account_id, service_account_email, private_key_id,
               private_key_pkcs8_text, scopes, user_agent=None,
               token_uri=GOOGLE_TOKEN_URI, revoke_uri=GOOGLE_REVOKE_URI,
               **kwargs):
    """Constructor.

    Args:
      service_account_id: Client id of the service account.
      service_account_email: Email address of the service account; used as
          the JWT 'iss' (issuer) claim.
      private_key_id: Identifier of the signing key; sent in the JWT header
          as 'kid' so the server can select the matching public key.
      private_key_pkcs8_text: PEM-encoded PKCS#8 text of the private key.
      scopes: Scope string or iterable of scopes being requested.
      user_agent: Optional HTTP user agent string.
      token_uri: URI of the token endpoint the assertion is exchanged at.
      revoke_uri: URI used to revoke the credential.
      **kwargs: Additional claims merged into the JWT payload (e.g. 'sub'
          for domain-wide delegation).
    """
    super(_ServiceAccountCredentials, self).__init__(
        None, user_agent=user_agent, token_uri=token_uri, revoke_uri=revoke_uri)

    self._service_account_id = service_account_id
    self._service_account_email = service_account_email
    self._private_key_id = private_key_id
    # Parse the PEM text into an rsa.PrivateKey once up front; the raw text
    # is kept too so the credential can be re-serialized and re-scoped.
    self._private_key = _get_private_key(private_key_pkcs8_text)
    self._private_key_pkcs8_text = private_key_pkcs8_text
    self._scopes = util.scopes_to_string(scopes)
    self._user_agent = user_agent
    self._token_uri = token_uri
    self._revoke_uri = revoke_uri
    self._kwargs = kwargs

  def _generate_assertion(self):
    """Generate the assertion (signed JWT) that will be used in the request."""
    header = {
        'alg': 'RS256',
        'typ': 'JWT',
        'kid': self._private_key_id
    }

    now = long(time.time())
    payload = {
        'aud': self._token_uri,
        'scope': self._scopes,
        'iat': now,
        'exp': now + _ServiceAccountCredentials.MAX_TOKEN_LIFETIME_SECS,
        'iss': self._service_account_email
    }
    # Caller-supplied claims (e.g. 'sub') may extend or override the
    # defaults built above.
    payload.update(self._kwargs)

    # header.payload, each segment base64url-encoded without padding.
    assertion_input = '%s.%s' % (
        _urlsafe_b64encode(header),
        _urlsafe_b64encode(payload))

    # Sign the assertion.
    signature = base64.urlsafe_b64encode(rsa.pkcs1.sign(
        assertion_input, self._private_key, 'SHA-256')).rstrip('=')

    return '%s.%s' % (assertion_input, signature)

  def sign_blob(self, blob):
    """Sign an arbitrary blob with this account's key.

    Returns:
      A (private_key_id, signature) tuple, so callers can identify which
      public key verifies the RS256 signature.
    """
    return (self._private_key_id,
            rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))

  @property
  def service_account_email(self):
    """Email address of the service account (the JWT issuer)."""
    return self._service_account_email

  @property
  def serialization_data(self):
    """Dict of fields needed to reconstruct this credential from JSON."""
    return {
        'type': 'service_account',
        'client_id': self._service_account_id,
        'client_email': self._service_account_email,
        'private_key_id': self._private_key_id,
        'private_key': self._private_key_pkcs8_text
    }

  def create_scoped_required(self):
    """Return True when no scopes were supplied, so scoping is mandatory."""
    return not self._scopes

  def create_scoped(self, scopes):
    """Return a copy of this credential bound to the given scopes."""
    return _ServiceAccountCredentials(self._service_account_id,
                                      self._service_account_email,
                                      self._private_key_id,
                                      self._private_key_pkcs8_text,
                                      scopes,
                                      user_agent=self._user_agent,
                                      token_uri=self._token_uri,
                                      revoke_uri=self._revoke_uri,
                                      **self._kwargs)
def _urlsafe_b64encode(data):
return base64.urlsafe_b64encode(
json.dumps(data, separators=(',', ':')).encode('UTF-8')).rstrip('=')
def _get_private_key(private_key_pkcs8_text):
  """Parse a PEM-encoded PKCS#8 blob into an ``rsa.PrivateKey`` object."""
  # Unwrap the PEM armor to raw DER, then decode the PKCS#8 envelope to
  # reach the inner PKCS#1 key material.
  der_bytes = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
  pkcs8_info, _ = decoder.decode(der_bytes, asn1Spec=PrivateKeyInfo())
  key_octets = pkcs8_info.getComponentByName('privateKey').asOctets()
  return rsa.PrivateKey.load_pkcs1(key_octets, format='DER')

View File

@ -1,4 +1,4 @@
# Copyright (C) 2013 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -23,24 +23,17 @@ __author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['argparser', 'run_flow', 'run', 'message_if_missing'] __all__ = ['argparser', 'run_flow', 'run', 'message_if_missing']
import BaseHTTPServer
#import argparse #import argparse
import httplib2 import BaseHTTPServer
import logging import logging
import os
import socket import socket
import sys import sys
import urlparse
import webbrowser import webbrowser
from oauth2client import client from oauth2client import client
from oauth2client import file
from oauth2client import util from oauth2client import util
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0 _CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file To make this sample run you will need to populate the client_secrets.json file
@ -52,20 +45,20 @@ with information from the APIs Console <https://code.google.com/apis/console>.
""" """
# run_parser is an ArgumentParser that contains command-line options expected # argparser is an ArgumentParser that contains command-line options expected
# by tools.run(). Pass it in as part of the 'parents' argument to your own # by tools.run(). Pass it in as part of the 'parents' argument to your own
# ArgumentParser. # ArgumentParser.
#argparser = argparse.ArgumentParser(add_help=False) #argparser = argparse.ArgumentParser(add_help=False)
#argparser.add_argument('--auth_host_name', default='localhost', #argparser.add_argument('--auth_host_name', default='localhost',
# help='Hostname when running a local web server.') # help='Hostname when running a local web server.')
#argparser.add_argument('--noauth_local_webserver', action='store_true', #argparser.add_argument('--noauth_local_webserver', action='store_true',
# default=False, help='Do not run a local web server.') # default=False, help='Do not run a local web server.')
#argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int, #argparser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
# nargs='*', help='Port web server should listen on.') # nargs='*', help='Port web server should listen on.')
#argparser.add_argument('--logging_level', default='ERROR', #argparser.add_argument('--logging_level', default='ERROR',
# choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', # choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
# 'CRITICAL'], # 'CRITICAL'],
# help='Set the logging level of detail.') # help='Set the logging level of detail.')
class ClientRedirectServer(BaseHTTPServer.HTTPServer): class ClientRedirectServer(BaseHTTPServer.HTTPServer):
@ -84,26 +77,25 @@ class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
into the servers query_params and then stops serving. into the servers query_params and then stops serving.
""" """
def do_GET(s): def do_GET(self):
"""Handle a GET request. """Handle a GET request.
Parses the query parameters and prints a message Parses the query parameters and prints a message
if the flow has completed. Note that we can't detect if the flow has completed. Note that we can't detect
if an error occurred. if an error occurred.
""" """
s.send_response(200) self.send_response(200)
s.send_header("Content-type", "text/html") self.send_header("Content-type", "text/html")
s.end_headers() self.end_headers()
query = s.path.split('?', 1)[-1] query = self.path.split('?', 1)[-1]
query = dict(parse_qsl(query)) query = dict(urlparse.parse_qsl(query))
s.server.query_params = query self.server.query_params = query
s.wfile.write("<html><head><title>Authentication Status</title></head>") self.wfile.write("<html><head><title>Authentication Status</title></head>")
s.wfile.write("<body><p>The authentication flow has completed.</p>") self.wfile.write("<body><p>The authentication flow has completed.</p>")
s.wfile.write("</body></html>") self.wfile.write("</body></html>")
def log_message(self, format, *args): def log_message(self, format, *args):
"""Do not log messages to stdout while running as command line program.""" """Do not log messages to stdout while running as command line program."""
pass
@util.positional(3) @util.positional(3)
@ -141,7 +133,7 @@ def run_flow(flow, storage, flags, http=None):
parser = argparse.ArgumentParser(description=__doc__, parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter, formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[tools.run_parser]) parents=[tools.argparser])
flags = parser.parse_args(argv) flags = parser.parse_args(argv)
Args: Args:
@ -163,7 +155,7 @@ def run_flow(flow, storage, flags, http=None):
try: try:
httpd = ClientRedirectServer((flags.auth_host_name, port), httpd = ClientRedirectServer((flags.auth_host_name, port),
ClientRedirectHandler) ClientRedirectHandler)
except socket.error, e: except socket.error as e:
pass pass
else: else:
success = True success = True
@ -186,7 +178,7 @@ def run_flow(flow, storage, flags, http=None):
authorize_url = flow.step1_get_authorize_url() authorize_url = flow.step1_get_authorize_url()
if flags.short_url: if flags.short_url:
from apiclient.discovery import build from googleapiclient.discovery import build
service = build('urlshortener', 'v1', http=http) service = build('urlshortener', 'v1', http=http)
url_result = service.url().insert(body={'longUrl': authorize_url}).execute() url_result = service.url().insert(body={'longUrl': authorize_url}).execute()
authorize_url = url_result['id'] authorize_url = url_result['id']
@ -199,7 +191,6 @@ def run_flow(flow, storage, flags, http=None):
print print
print 'If your browser is on a different machine then exit and re-run this' print 'If your browser is on a different machine then exit and re-run this'
print 'after creating a file called nobrowser.txt in the same path as GAM.' print 'after creating a file called nobrowser.txt in the same path as GAM.'
# print 'If your browser is on a different machine then exit and re-run this'
# print 'application with the command-line parameter ' # print 'application with the command-line parameter '
# print # print
# print ' --noauth_local_webserver' # print ' --noauth_local_webserver'
@ -225,7 +216,7 @@ def run_flow(flow, storage, flags, http=None):
try: try:
credential = flow.step2_exchange(code, http=http) credential = flow.step2_exchange(code, http=http)
except client.FlowExchangeError, e: except client.FlowExchangeError as e:
sys.exit('Authentication has failed: %s' % e) sys.exit('Authentication has failed: %s' % e)
storage.put(credential) storage.put(credential)
@ -241,8 +232,8 @@ def message_if_missing(filename):
return _CLIENT_SECRETS_MESSAGE % filename return _CLIENT_SECRETS_MESSAGE % filename
try: try:
from old_run import run from oauth2client.old_run import run
from old_run import FLAGS from oauth2client.old_run import FLAGS
except ImportError: except ImportError:
def run(*args, **kwargs): def run(*args, **kwargs):
raise NotImplementedError( raise NotImplementedError(

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# #
# Copyright 2010 Google Inc. # Copyright 2014 Google Inc. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -17,14 +17,16 @@
"""Common utility library.""" """Common utility library."""
__author__ = ['rafek@google.com (Rafe Kaplan)', __author__ = [
'guido@google.com (Guido van Rossum)', 'rafek@google.com (Rafe Kaplan)',
'guido@google.com (Guido van Rossum)',
] ]
__all__ = [ __all__ = [
'positional', 'positional',
'POSITIONAL_WARNING', 'POSITIONAL_WARNING',
'POSITIONAL_EXCEPTION', 'POSITIONAL_EXCEPTION',
'POSITIONAL_IGNORE', 'POSITIONAL_IGNORE',
] ]
import inspect import inspect
@ -33,11 +35,6 @@ import types
import urllib import urllib
import urlparse import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
POSITIONAL_WARNING = 'WARNING' POSITIONAL_WARNING = 'WARNING'
@ -190,7 +187,7 @@ def _add_query_parameter(url, name, value):
return url return url
else: else:
parsed = list(urlparse.urlparse(url)) parsed = list(urlparse.urlparse(url))
q = dict(parse_qsl(parsed[4])) q = dict(urlparse.parse_qsl(parsed[4]))
q[name] = value q[name] = value
parsed[4] = urllib.urlencode(q) parsed[4] = urllib.urlencode(q)
return urlparse.urlunparse(parsed) return urlparse.urlunparse(parsed)

View File

@ -1,6 +1,6 @@
#!/usr/bin/python2.5 #!/usr/bin/python2.5
# #
# Copyright 2010 the Melange authors. # Copyright 2014 the Melange authors.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -17,14 +17,13 @@
"""Helper methods for creating & verifying XSRF tokens.""" """Helper methods for creating & verifying XSRF tokens."""
__authors__ = [ __authors__ = [
'"Doug Coker" <dcoker@google.com>', '"Doug Coker" <dcoker@google.com>',
'"Joe Gregorio" <jcgregorio@google.com>', '"Joe Gregorio" <jcgregorio@google.com>',
] ]
import base64 import base64
import hmac import hmac
import os # for urandom
import time import time
from oauth2client import util from oauth2client import util