Auto-format all files using yapf and pre-commit (#1173)

Automatic fixes produced by yapf formatting with `--style=google`, along
with common pre-commit checks such as trailing-whitespace removal,
double-quote string fixing, and ensuring a newline at the end of each file.
Authored by ejochman on 2020-04-28 13:59:47 -07:00, committed via GitHub
parent 216f2920b9
commit 0bd4eefeca
35 changed files with 18967 additions and 14714 deletions
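
The argument re-wrapping seen throughout the diffs below can typically be reproduced by running `pre-commit run --all-files` against the updated `.pre-commit-config.yaml`, or previewed through yapf's Python API. The snippet below is an illustrative sketch only, not part of the commit; it assumes yapf v0.29.0 (the mirrors-yapf revision pinned in the config diff below) and reuses a call taken from the credentials test diff.

```python
# Illustrative sketch (not part of this commit): preview the Google-style
# re-wrapping that yapf applies, using its public formatting API.
from yapf.yapflib.yapf_api import FormatCode

# A call in the pre-commit-era layout (hanging indent), taken from the test diff.
ORIGINAL = (
    'creds = oauth.Credentials(\n'
    '    token=self.fake_token,\n'
    '    client_id=self.fake_client_id,\n'
    '    client_secret=self.fake_client_secret,\n'
    '    filename=self.fake_filename)\n'
)

# In yapf v0.29.0, FormatCode returns a (formatted_source, changed) tuple.
formatted, changed = FormatCode(ORIGINAL, style_config='google')
print(formatted)
# Expected shape, matching the post-commit lines in the diff: arguments are
# aligned with the opening parenthesis, e.g.
#   creds = oauth.Credentials(token=self.fake_token,
#                             client_id=self.fake_client_id,
#                             ...
```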


@ -9,10 +9,12 @@ repos:
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: double-quote-string-fixer
- id: check-yaml
- id: check-docstring-first
- id: name-tests-test
- id: requirements-txt-fixer
- id: check-merge-conflict
- repo: https://github.com/pre-commit/mirrors-yapf
rev: v0.29.0


@ -7,5 +7,5 @@ import sys
from gam.__main__ import main
# Run from command line
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv)

File diff suppressed because it is too large.


@ -29,6 +29,7 @@ from multiprocessing import set_start_method
from gam import controlflow
import gam
def main(argv):
freeze_support()
if sys.platform == 'darwin':
@ -37,11 +38,13 @@ def main(argv):
# command line arguments
set_start_method('fork')
if sys.version_info[0] < 3 or sys.version_info[1] < 6:
controlflow.system_error_exit(5,
f'GAM requires Python 3.6 or newer. You are running %s.%s.%s. Please upgrade your Python version or use one of the binary GAM downloads.' % sys.version_info[
:3])
controlflow.system_error_exit(
5,
f'GAM requires Python 3.6 or newer. You are running %s.%s.%s. Please upgrade your Python version or use one of the binary GAM downloads.'
% sys.version_info[:3])
sys.exit(gam.ProcessGAMCommand(sys.argv))
# Run from command line
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv)


@ -115,8 +115,7 @@ class Credentials(google.oauth2.credentials.Credentials):
Raises:
TypeError: If id_token_data is not the required dict type.
"""
super(Credentials, self).__init__(
token=token,
super(Credentials, self).__init__(token=token,
refresh_token=refresh_token,
id_token=id_token,
token_uri=token_uri,
@ -176,14 +175,14 @@ class Credentials(google.oauth2.credentials.Credentials):
expiry = info.get('token_expiry')
if expiry:
# Convert the raw expiry to datetime
expiry = datetime.datetime.strptime(expiry, Credentials.DATETIME_FORMAT)
expiry = datetime.datetime.strptime(expiry,
Credentials.DATETIME_FORMAT)
id_token_data = info.get('decoded_id_token')
# Provide backwards compatibility with field names when loading from JSON.
# Some field names may be different, depending on when/how the credentials
# were pickled.
return cls(
token=info.get('token', info.get('auth_token', '')),
return cls(token=info.get('token', info.get('auth_token', '')),
refresh_token=info.get('refresh_token', ''),
id_token=info.get('id_token_jwt', info.get('id_token')),
token_uri=info.get('token_uri'),
@ -226,10 +225,12 @@ class Credentials(google.oauth2.credentials.Credentials):
InvalidCredentialsFileError: When the credentials file cannot be opened.
EmptyCredentialsFileError: When the provided file contains no credentials.
"""
file_content = fileutils.read_file(
filename, continue_on_error=True, display_errors=False)
file_content = fileutils.read_file(filename,
continue_on_error=True,
display_errors=False)
if file_content is None:
raise InvalidCredentialsFileError(f'File {filename} could not be opened')
raise InvalidCredentialsFileError(
f'File {filename} could not be opened')
info = json.loads(file_content)
if not info:
raise EmptyCredentialsFileError(
@ -279,14 +280,17 @@ class Credentials(google.oauth2.credentials.Credentials):
'installed': {
'client_id': client_id,
'client_secret': client_secret,
'redirect_uris': ['http://localhost', 'urn:ietf:wg:oauth:2.0:oob'],
'redirect_uris': [
'http://localhost', 'urn:ietf:wg:oauth:2.0:oob'
],
'auth_uri': 'https://accounts.google.com/o/oauth2/v2/auth',
'token_uri': 'https://oauth2.googleapis.com/token',
}
}
flow = _ShortURLFlow.from_client_config(
client_config, scopes, autogenerate_code_verifier=True)
flow = _ShortURLFlow.from_client_config(client_config,
scopes,
autogenerate_code_verifier=True)
flow_kwargs = {'access_type': access_type}
if login_hint:
flow_kwargs['login_hint'] = login_hint
@ -295,16 +299,17 @@ class Credentials(google.oauth2.credentials.Credentials):
# messaging about `nobrowser.txt` is co-located with the logic that uses it.
if use_console_flow:
flow.run_console(
authorization_prompt_message=MESSAGE_CONSOLE_AUTHORIZATION_PROMPT,
authorization_prompt_message=
MESSAGE_CONSOLE_AUTHORIZATION_PROMPT,
authorization_code_message=MESSAGE_CONSOLE_AUTHORIZATION_CODE,
**flow_kwargs)
else:
flow.run_local_server(
authorization_prompt_message=MESSAGE_LOCAL_SERVER_AUTHORIZATION_PROMPT,
flow.run_local_server(authorization_prompt_message=
MESSAGE_LOCAL_SERVER_AUTHORIZATION_PROMPT,
success_message=MESSAGE_LOCAL_SERVER_SUCCESS,
**flow_kwargs)
return cls.from_google_oauth2_credentials(
flow.credentials, filename=filename)
return cls.from_google_oauth2_credentials(flow.credentials,
filename=filename)
@classmethod
def from_client_secrets_file(cls,
@ -345,8 +350,9 @@ class Credentials(google.oauth2.credentials.Credentials):
Returns:
Credentials
"""
cs_data = fileutils.read_file(
client_secrets_file, continue_on_error=True, display_errors=False)
cs_data = fileutils.read_file(client_secrets_file,
continue_on_error=True,
display_errors=False)
if not cs_data:
raise InvalidClientSecretsFileError(
f'File {client_secrets_file} could not be opened')
@ -355,15 +361,15 @@ class Credentials(google.oauth2.credentials.Credentials):
client_id = cs_json['installed']['client_id']
# Chop off .apps.googleusercontent.com suffix as it's not needed
# and we need to keep things short for the Auth URL.
client_id = re.sub(r'\.apps\.googleusercontent\.com$', '', client_id)
client_id = re.sub(r'\.apps\.googleusercontent\.com$', '',
client_id)
client_secret = cs_json['installed']['client_secret']
except (ValueError, IndexError, KeyError):
raise InvalidClientSecretsFileFormatError(
f'Could not extract Client ID or Client Secret from file {client_secrets_file}'
)
return cls.from_client_secrets(
client_id,
return cls.from_client_secrets(client_id,
client_secret,
scopes,
access_type=access_type,
@ -380,7 +386,8 @@ class Credentials(google.oauth2.credentials.Credentials):
CredentialsError: If no id_token is present.
"""
if not self.id_token:
raise CredentialsError('Failed to fetch token data. No id_token present.')
raise CredentialsError(
'Failed to fetch token data. No id_token present.')
request = transport.create_request()
if self.expired:
@ -520,6 +527,7 @@ class _ShortURLFlow(google_auth_oauthlib.flow.InstalledAppFlow):
short_url = utils.shorten_url(long_url)
return short_url, state
class _FileLikeThreadLock(object):
"""A threading.lock which has the same interface as filelock.Filelock."""


@ -73,7 +73,8 @@ class CredentialsTest(unittest.TestCase):
'client_secret':
self.fake_client_secret,
'token_expiry':
self.fake_token_expiry.strftime(oauth.Credentials.DATETIME_FORMAT),
self.fake_token_expiry.strftime(
oauth.Credentials.DATETIME_FORMAT),
'id_token_data':
self.fake_token_data,
}
@ -89,7 +90,8 @@ class CredentialsTest(unittest.TestCase):
def test_from_authorized_user_info_missing_required_info(self):
info_with_missing_fields = {'token': self.fake_token}
with self.assertRaises(ValueError):
oauth.Credentials.from_authorized_user_info(info_with_missing_fields)
oauth.Credentials.from_authorized_user_info(
info_with_missing_fields)
def test_from_authorized_user_info_no_expiry_in_info(self):
info_with_no_token_expiry = self.info_with_only_required_fields.copy()
@ -99,8 +101,7 @@ class CredentialsTest(unittest.TestCase):
self.assertIsNone(creds.expiry)
def test_init_saves_filename(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
filename=self.fake_filename)
@ -108,21 +109,19 @@ class CredentialsTest(unittest.TestCase):
@patch.object(oauth.google.oauth2.id_token, 'verify_oauth2_token')
def test_init_loads_decoded_id_token_data(self, mock_verify_token):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
id_token=self.fake_id_token,
id_token_data=self.fake_token_data)
self.assertEqual(
self.fake_token_data.get('field'), creds.get_token_value('field'))
self.assertEqual(self.fake_token_data.get('field'),
creds.get_token_value('field'))
# Verify the fetching method was not called, since the token
# data was supposed to be loaded from the passed in info.
self.assertEqual(mock_verify_token.call_count, 0)
def test_credentials_uses_file_lock_when_filename_provided(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
filename=self.fake_filename)
@ -130,8 +129,7 @@ class CredentialsTest(unittest.TestCase):
self.assertEqual(creds._lock.lock_file, '%s.lock' % creds.filename)
def test_credentials_uses_thread_lock_when_filename_not_provided(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
filename=None)
@ -201,8 +199,7 @@ class CredentialsTest(unittest.TestCase):
id_token=self.fake_id_token)
mock_flow.return_value.credentials = flow_creds
creds = oauth.Credentials.from_client_secrets(
self.fake_client_id,
creds = oauth.Credentials.from_client_secrets(self.fake_client_id,
self.fake_client_secret,
self.fake_scopes,
use_console_flow=True)
@ -224,8 +221,7 @@ class CredentialsTest(unittest.TestCase):
id_token=self.fake_id_token)
mock_flow.return_value.credentials = flow_creds
creds = oauth.Credentials.from_client_secrets(
self.fake_client_id,
creds = oauth.Credentials.from_client_secrets(self.fake_client_id,
self.fake_client_secret,
self.fake_scopes,
use_console_flow=False)
@ -247,8 +243,7 @@ class CredentialsTest(unittest.TestCase):
id_token=self.fake_id_token)
mock_flow.return_value.credentials = flow_creds
oauth.Credentials.from_client_secrets(
self.fake_client_id,
oauth.Credentials.from_client_secrets(self.fake_client_id,
self.fake_client_secret,
self.fake_scopes,
login_hint='someone@domain.com')
@ -257,7 +252,8 @@ class CredentialsTest(unittest.TestCase):
self.assertEqual('someone@domain.com', run_flow_args.get('login_hint'))
def test_from_client_secrets_uses_shortened_url_flow(self):
with patch.object(oauth._ShortURLFlow, 'from_client_config') as mock_flow:
with patch.object(oauth._ShortURLFlow,
'from_client_config') as mock_flow:
flow_creds = google.oauth2.credentials.Credentials(
token=self.fake_token,
refresh_token=self.fake_refresh_token,
@ -287,7 +283,8 @@ class CredentialsTest(unittest.TestCase):
filename=self.fake_filename)
self.assertEqual(os.path.abspath(self.fake_filename), creds.filename)
def test_from_client_secrets_file_corrupt_or_missing_file_raises_error(self):
def test_from_client_secrets_file_corrupt_or_missing_file_raises_error(
self):
self.assertFalse(os.path.exists(self.fake_filename))
with self.assertRaises(oauth.InvalidClientSecretsFileError):
oauth.Credentials.from_client_secrets_file(self.fake_filename,
@ -309,8 +306,10 @@ class CredentialsTest(unittest.TestCase):
self, mock_read_file, mock_creds_from_client_secrets):
mock_read_file.return_value = json.dumps({
'installed': {
'client_id': self.fake_client_id + '.apps.googleusercontent.com',
'client_secret': self.fake_client_secret,
'client_id':
self.fake_client_id + '.apps.googleusercontent.com',
'client_secret':
self.fake_client_secret,
}
})
@ -321,28 +320,28 @@ class CredentialsTest(unittest.TestCase):
def test_get_token_value_known_token_field(self):
token_data = {'known-field': 'known-value'}
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
id_token_data=token_data)
self.assertEqual('known-value', creds.get_token_value('known-field'))
def test_get_token_value_unknown_field_returns_unknown(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
id_token_data=self.fake_token_data)
self.assertEqual('Unknown', creds.get_token_value('unknown-field'))
@patch.object(oauth.google.oauth2.id_token, 'verify_oauth2_token')
def test_get_token_value_credentials_expired(self, mock_verify_oauth2_token):
mock_verify_oauth2_token.return_value = {'fetched-field': 'fetched-value'}
def test_get_token_value_credentials_expired(self,
mock_verify_oauth2_token):
mock_verify_oauth2_token.return_value = {
'fetched-field': 'fetched-value'
}
time_earlier_than_now = datetime.datetime.now() - datetime.timedelta(
minutes=5)
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
expiry=time_earlier_than_now,
@ -357,8 +356,7 @@ class CredentialsTest(unittest.TestCase):
self.assertTrue(creds.refresh.called)
def test_to_json_contains_all_required_fields(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
refresh_token=self.fake_refresh_token,
id_token=self.fake_id_token,
id_token_data=self.fake_token_data,
@ -419,8 +417,7 @@ class CredentialsTest(unittest.TestCase):
@patch.object(oauth.google.oauth2.credentials.Credentials, 'refresh')
def test_refresh_calls_super_refresh(self, mock_super_refresh):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret)
@ -431,8 +428,7 @@ class CredentialsTest(unittest.TestCase):
self.assertEqual(request, mock_super_refresh.call_args[0][0])
def test_refresh_locks_resource_during_refresh(self):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret)
@ -458,8 +454,7 @@ class CredentialsTest(unittest.TestCase):
@patch.object(oauth.fileutils, 'write_file')
def test_refresh_writes_new_credentials_to_disk_after_refresh(
self, mock_write_file, mock_super_refresh):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
@ -475,14 +470,14 @@ class CredentialsTest(unittest.TestCase):
self.assertEqual('refreshed_access_token', creds.token,
'Access token was not refreshed')
text_written_to_file = mock_write_file.call_args[0][1]
self.assertIsNotNone(text_written_to_file, 'Nothing was written to file')
self.assertIsNotNone(text_written_to_file,
'Nothing was written to file')
saved_json = json.loads(text_written_to_file)
self.assertEqual('refreshed_access_token', saved_json['token'],
'Refreshed access token was not saved to disk')
def test_write_writes_credentials_to_disk(self):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
@ -493,8 +488,7 @@ class CredentialsTest(unittest.TestCase):
self.assertTrue(os.path.exists(self.fake_filename))
def test_write_raises_error_when_no_credentials_file_is_set(self):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret)
@ -507,8 +501,7 @@ class CredentialsTest(unittest.TestCase):
@patch.object(oauth.fileutils, 'write_file')
def test_write_locks_resource_during_write(self, mock_write_file,
unused_mock_super_refresh):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
@ -527,8 +520,7 @@ class CredentialsTest(unittest.TestCase):
def test_delete_removes_credentials_file(self):
self.assertFalse(os.path.exists(self.fake_filename))
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
@ -543,8 +535,7 @@ class CredentialsTest(unittest.TestCase):
reason=('On Windows, Filelock deletes the lock file each time the lock '
'is released. Delete does not remove it.'))
def test_delete_removes_lock_file(self):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret,
@ -556,8 +547,7 @@ class CredentialsTest(unittest.TestCase):
self.assertFalse(os.path.exists(lock_file))
def test_delete_is_noop_when_not_using_filelock(self):
creds = oauth.Credentials(
token=None,
creds = oauth.Credentials(token=None,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret)
@ -565,8 +555,7 @@ class CredentialsTest(unittest.TestCase):
creds.delete() # This should not raise an exception.
def test_revoke_requests_credential_revoke(self):
creds = oauth.Credentials(
token=self.fake_token,
creds = oauth.Credentials(token=self.fake_token,
refresh_token=self.fake_refresh_token,
client_id=self.fake_client_id,
client_secret=self.fake_client_secret)
@ -594,7 +583,9 @@ class ShortUrlFlowTest(unittest.TestCase):
'installed': {
'client_id': self.fake_client_id,
'client_secret': self.fake_client_secret,
'redirect_uris': ['http://localhost', 'urn:ietf:wg:oauth:2.0:oob'],
'redirect_uris': [
'http://localhost', 'urn:ietf:wg:oauth:2.0:oob'
],
'auth_uri': 'https://accounts.google.com/o/oauth2/v2/auth',
'token_uri': 'https://oauth2.googleapis.com/token',
}
@ -693,7 +684,8 @@ class ShortUrlFlowTest(unittest.TestCase):
mock_http = MagicMock()
mock_response = MagicMock()
mock_response.status = 200
content = json.dumps({}) # This json content contains no "short-url" key
content = json.dumps(
{}) # This json content contains no "short-url" key
mock_http.request.return_value = (mock_response, content)
url, state = url_flow.authorization_url(http=mock_http)


@ -18,7 +18,8 @@ class ControlFlowTest(unittest.TestCase):
self.assertEqual(context_manager.exception.code, 100)
@patch.object(controlflow.display, 'print_error')
def test_system_error_exit_prints_error_before_exiting(self, mock_print_err):
def test_system_error_exit_prints_error_before_exiting(
self, mock_print_err):
with self.assertRaises(SystemExit):
controlflow.system_error_exit(100, 'exit message')
self.assertIn('exit message', mock_print_err.call_args[0][0])
@ -93,14 +94,15 @@ class ControlFlowTest(unittest.TestCase):
self.assertIn(message, mock_stderr_write.call_args[0][0])
@patch.object(controlflow.time, 'sleep')
def test_wait_on_failure_only_prints_after_threshold(self, unused_mock_sleep):
def test_wait_on_failure_only_prints_after_threshold(
self, unused_mock_sleep):
total_attempts = 5
threshold = 3
with patch.object(controlflow.sys.stderr, 'write') as mock_stderr_write:
for attempt in range(1, total_attempts + 1):
controlflow.wait_on_failure(
attempt,
controlflow.wait_on_failure(attempt,
total_attempts,
'Attempt #%s' % attempt,
error_print_threshold=threshold)
self.assertEqual(total_attempts - threshold, mock_stderr_write.call_count)
self.assertEqual(total_attempts - threshold,
mock_stderr_write.call_count)


@ -19,8 +19,11 @@ from gam import gapi
def current_count(i, count):
return f' ({i}/{count})' if (count > GC_Values[GC_SHOW_COUNTS_MIN]) else ''
def current_count_nl(i, count):
return f' ({i}/{count})\n' if (count > GC_Values[GC_SHOW_COUNTS_MIN]) else '\n'
return f' ({i}/{count})\n' if (
count > GC_Values[GC_SHOW_COUNTS_MIN]) else '\n'
def add_field_to_fields_list(fieldName, fieldsChoiceMap, fieldsList):
fields = fieldsChoiceMap[fieldName.lower()]
@ -29,18 +32,21 @@ def add_field_to_fields_list(fieldName, fieldsChoiceMap, fieldsList):
else:
fieldsList.append(fields)
# Write a CSV file
def add_titles_to_csv_file(addTitles, titles):
for title in addTitles:
if title not in titles:
titles.append(title)
def add_row_titles_to_csv_file(row, csvRows, titles):
csvRows.append(row)
for title in row:
if title not in titles:
titles.append(title)
# fieldName is command line argument
# fieldNameMap maps fieldName to API field names; CSV file header will be API field name
#ARGUMENT_TO_PROPERTY_MAP = {
@ -49,13 +55,15 @@ def add_row_titles_to_csv_file(row, csvRows, titles):
# }
# fieldsList is the list of API fields
# fieldsTitles maps the API field name to the CSV file header
def add_field_to_csv_file(fieldName, fieldNameMap, fieldsList, fieldsTitles, titles):
def add_field_to_csv_file(fieldName, fieldNameMap, fieldsList, fieldsTitles,
titles):
for ftList in fieldNameMap[fieldName]:
if ftList not in fieldsTitles:
fieldsList.append(ftList)
fieldsTitles[ftList] = ftList
add_titles_to_csv_file([ftList], titles)
# fieldName is command line argument
# fieldNameTitleMap maps fieldName to API field name and CSV file header
#ARGUMENT_TO_PROPERTY_TITLE_MAP = {
@ -64,13 +72,15 @@ def add_field_to_csv_file(fieldName, fieldNameMap, fieldsList, fieldsTitles, tit
# }
# fieldsList is the list of API fields
# fieldsTitles maps the API field name to the CSV file header
def add_field_title_to_csv_file(fieldName, fieldNameTitleMap, fieldsList, fieldsTitles, titles):
def add_field_title_to_csv_file(fieldName, fieldNameTitleMap, fieldsList,
fieldsTitles, titles):
ftList = fieldNameTitleMap[fieldName]
for i in range(0, len(ftList), 2):
if ftList[i] not in fieldsTitles:
fieldsList.append(ftList[i])
fieldsTitles[ftList[i]] = ftList[i+1]
add_titles_to_csv_file([ftList[i+1]], titles)
fieldsTitles[ftList[i]] = ftList[i + 1]
add_titles_to_csv_file([ftList[i + 1]], titles)
def sort_csv_titles(firstTitle, titles):
restoreTitles = []
@ -82,17 +92,24 @@ def sort_csv_titles(firstTitle, titles):
for title in restoreTitles[::-1]:
titles.insert(0, title)
def QuotedArgumentList(items):
return ' '.join([item if item and (item.find(' ') == -1) and (item.find(',') == -1) else '"'+item+'"' for item in items])
return ' '.join([
item if item and (item.find(' ') == -1) and
(item.find(',') == -1) else '"' + item + '"' for item in items
])
def write_csv_file(csvRows, titles, list_type, todrive):
def rowDateTimeFilterMatch(dateMode, rowDate, op, filterDate):
if not rowDate or not isinstance(rowDate, str):
return False
try:
rowTime = dateutil.parser.parse(rowDate, ignoretz=True)
if dateMode:
rowDate = datetime.datetime(rowTime.year, rowTime.month, rowTime.day).isoformat()+'Z'
rowDate = datetime.datetime(rowTime.year, rowTime.month,
rowTime.day).isoformat() + 'Z'
except ValueError:
rowDate = NEVER_TIME
if op == '<':
@ -125,6 +142,7 @@ def write_csv_file(csvRows, titles, list_type, todrive):
if op == '!=':
return rowCount != filterCount
return rowCount == filterCount
def rowBooleanFilterMatch(rowBoolean, filterBoolean):
if not isinstance(rowBoolean, bool):
return False
@ -139,32 +157,63 @@ def write_csv_file(csvRows, titles, list_type, todrive):
if GC_Values[GC_CSV_ROW_FILTER]:
for column, filterVal in iter(GC_Values[GC_CSV_ROW_FILTER].items()):
if column not in titles:
sys.stderr.write(f'WARNING: Row filter column "{column}" is not in output columns\n')
sys.stderr.write(
f'WARNING: Row filter column "{column}" is not in output columns\n'
)
continue
if filterVal[0] == 'regex':
csvRows = [row for row in csvRows if filterVal[1].search(str(row.get(column, '')))]
csvRows = [
row for row in csvRows
if filterVal[1].search(str(row.get(column, '')))
]
elif filterVal[0] == 'notregex':
csvRows = [row for row in csvRows if not filterVal[1].search(str(row.get(column, '')))]
csvRows = [
row for row in csvRows
if not filterVal[1].search(str(row.get(column, '')))
]
elif filterVal[0] in ['date', 'time']:
csvRows = [row for row in csvRows if rowDateTimeFilterMatch(filterVal[0] == 'date', row.get(column, ''), filterVal[1], filterVal[2])]
csvRows = [
row for row in csvRows if rowDateTimeFilterMatch(
filterVal[0] == 'date', row.get(column, ''),
filterVal[1], filterVal[2])
]
elif filterVal[0] == 'count':
csvRows = [row for row in csvRows if rowCountFilterMatch(row.get(column, 0), filterVal[1], filterVal[2])]
csvRows = [
row for row in csvRows if rowCountFilterMatch(
row.get(column, 0), filterVal[1], filterVal[2])
]
else: #boolean
csvRows = [row for row in csvRows if rowBooleanFilterMatch(row.get(column, False), filterVal[1])]
csvRows = [
row for row in csvRows if rowBooleanFilterMatch(
row.get(column, False), filterVal[1])
]
if GC_Values[GC_CSV_HEADER_FILTER] or GC_Values[GC_CSV_HEADER_DROP_FILTER]:
if GC_Values[GC_CSV_HEADER_DROP_FILTER]:
titles = [t for t in titles if not headerFilterMatch(GC_Values[GC_CSV_HEADER_DROP_FILTER], t)]
titles = [
t for t in titles if
not headerFilterMatch(GC_Values[GC_CSV_HEADER_DROP_FILTER], t)
]
if GC_Values[GC_CSV_HEADER_FILTER]:
titles = [t for t in titles if headerFilterMatch(GC_Values[GC_CSV_HEADER_FILTER], t)]
titles = [
t for t in titles
if headerFilterMatch(GC_Values[GC_CSV_HEADER_FILTER], t)
]
if not titles:
controlflow.system_error_exit(3, 'No columns selected with GAM_CSV_HEADER_FILTER and GAM_CSV_HEADER_DROP_FILTER\n')
controlflow.system_error_exit(
3,
'No columns selected with GAM_CSV_HEADER_FILTER and GAM_CSV_HEADER_DROP_FILTER\n'
)
return
csv.register_dialect('nixstdout', lineterminator='\n')
if todrive:
write_to = io.StringIO()
else:
write_to = sys.stdout
writer = csv.DictWriter(write_to, fieldnames=titles, dialect='nixstdout', extrasaction='ignore', quoting=csv.QUOTE_MINIMAL)
writer = csv.DictWriter(write_to,
fieldnames=titles,
dialect='nixstdout',
extrasaction='ignore',
quoting=csv.QUOTE_MINIMAL)
try:
writer.writerow(dict((item, item) for item in writer.fieldnames))
writer.writerows(csvRows)
@ -185,16 +234,23 @@ and follow recommend steps to authorize GAM for Drive access.''')
data_size = len(write_to.getvalue())
max_sheet_bytes = int(result['maxImportSizes'][MIMETYPE_GA_SPREADSHEET])
if cell_count > MAX_GOOGLE_SHEET_CELLS or data_size > max_sheet_bytes:
print(f'{WARNING_PREFIX}{MESSAGE_RESULTS_TOO_LARGE_FOR_GOOGLE_SPREADSHEET}')
print(
f'{WARNING_PREFIX}{MESSAGE_RESULTS_TOO_LARGE_FOR_GOOGLE_SPREADSHEET}'
)
mimeType = 'text/csv'
else:
mimeType = MIMETYPE_GA_SPREADSHEET
body = {'description': QuotedArgumentList(sys.argv),
body = {
'description': QuotedArgumentList(sys.argv),
'name': f'{GC_Values[GC_DOMAIN]} - {list_type}',
'mimeType': mimeType}
result = gapi.call(drive.files(), 'create', fields='webViewLink',
'mimeType': mimeType
}
result = gapi.call(drive.files(),
'create',
fields='webViewLink',
body=body,
media_body=googleapiclient.http.MediaInMemoryUpload(write_to.getvalue().encode(),
media_body=googleapiclient.http.MediaInMemoryUpload(
write_to.getvalue().encode(),
mimetype='text/csv'))
file_url = result['webViewLink']
if GC_Values[GC_NO_BROWSER]:
@ -205,6 +261,7 @@ and follow recommend steps to authorize GAM for Drive access.''')
else:
webbrowser.open(file_url)
def print_error(message):
"""Prints a one-line error message to stderr in a standard format."""
sys.stderr.write('\n{0}{1}\n'.format(ERROR_PREFIX, message))
@ -214,10 +271,12 @@ def print_warning(message):
"""Prints a one-line warning message to stderr in a standard format."""
sys.stderr.write('\n{0}{1}\n'.format(WARNING_PREFIX, message))
def print_json(object_value, spacing=''):
"""Prints Dict or Array to screen in clean human-readable format.."""
if isinstance(object_value, list):
if len(object_value) == 1 and isinstance(object_value[0], (str, int, bool)):
if len(object_value) == 1 and isinstance(object_value[0],
(str, int, bool)):
sys.stdout.write(f'{object_value[0]}\n')
return
if spacing:


@ -21,8 +21,10 @@ def _open_file(filename, mode, encoding=None, newline=None):
elif 'r' in mode and encoding.lower().replace('-', '') == 'utf8':
encoding = UTF8_SIG
return open(
os.path.expanduser(filename), mode, newline=newline, encoding=encoding)
return open(os.path.expanduser(filename),
mode,
newline=newline,
encoding=encoding)
def open_file(filename,
@ -67,7 +69,8 @@ def open_file(filename,
utf8_bom_bytes = utf_bom.encode('UTF-8')
iso_8859_1_bom = utf8_bom_bytes.decode('iso-8859-1').encode(
'iso-8859-1')
has_bom = f.read(3).encode('iso-8859-1', 'replace') == iso_8859_1_bom
has_bom = f.read(3).encode('iso-8859-1',
'replace') == iso_8859_1_bom
else:
has_bom = f.read(1) == utf_bom
@ -133,7 +136,8 @@ def read_file(filename,
# Read from stdin, rather than a file.
return str(sys.stdin.read())
with _open_file(filename, mode, newline=newline, encoding=encoding) as f:
with _open_file(filename, mode, newline=newline,
encoding=encoding) as f:
return f.read()
except IOError as e:


@ -93,7 +93,9 @@ class FileutilsTest(unittest.TestCase):
fake_file = io.BytesIO(bom_prefixed_data)
mock_open = MagicMock(spec=open, return_value=fake_file)
with patch.object(fileutils, 'open', mock_open):
f = fileutils.open_file(self.fake_path, mode='rb', strip_utf_bom=True)
f = fileutils.open_file(self.fake_path,
mode='rb',
strip_utf_bom=True)
self.assertEqual(b'foobar', f.read())
def test_open_file_strip_utf_bom_when_no_bom_in_data(self):
@ -145,17 +147,20 @@ class FileutilsTest(unittest.TestCase):
def test_read_file_continues_on_errors_without_displaying(
self, mock_open_file, mock_print_warning):
mock_open_file.side_effect = IOError()
contents = fileutils.read_file(
self.fake_path, continue_on_error=True, display_errors=False)
contents = fileutils.read_file(self.fake_path,
continue_on_error=True,
display_errors=False)
self.assertIsNone(contents)
self.assertFalse(mock_print_warning.called)
@patch.object(fileutils.display, 'print_warning')
@patch.object(fileutils, '_open_file')
def test_read_file_displays_errors(self, mock_open_file, mock_print_warning):
def test_read_file_displays_errors(self, mock_open_file,
mock_print_warning):
mock_open_file.side_effect = IOError()
fileutils.read_file(
self.fake_path, continue_on_error=True, display_errors=True)
fileutils.read_file(self.fake_path,
continue_on_error=True,
display_errors=True)
self.assertTrue(mock_print_warning.called)
@patch.object(fileutils, '_open_file')
@ -175,16 +180,18 @@ class FileutilsTest(unittest.TestCase):
@patch.object(fileutils, '_open_file')
def test_read_file_exits_code_2_on_unicodeerror(self, mock_open_file):
mock_open_file.return_value.__enter__().read.side_effect = UnicodeError()
mock_open_file.return_value.__enter__().read.side_effect = UnicodeError(
)
with self.assertRaises(SystemExit) as context:
fileutils.read_file(self.fake_path)
self.assertEqual(context.exception.code, 2)
@patch.object(fileutils, '_open_file')
def test_read_file_exits_code_2_on_unicodedecodeerror(self, mock_open_file):
fake_decode_error = UnicodeDecodeError('fake-encoding', b'fakebytes', 0, 1,
'testing only')
mock_open_file.return_value.__enter__().read.side_effect = fake_decode_error
fake_decode_error = UnicodeDecodeError('fake-encoding', b'fakebytes', 0,
1, 'testing only')
mock_open_file.return_value.__enter__(
).read.side_effect = fake_decode_error
with self.assertRaises(SystemExit) as context:
fileutils.read_file(self.fake_path)
self.assertEqual(context.exception.code, 2)
@ -205,8 +212,7 @@ class FileutilsTest(unittest.TestCase):
def test_write_file_continues_on_errors_without_displaying(
self, mock_open_file, mock_print_error):
mock_open_file.side_effect = IOError()
status = fileutils.write_file(
self.fake_path,
status = fileutils.write_file(self.fake_path,
'foo data',
continue_on_error=True,
display_errors=False)
@ -217,8 +223,10 @@ class FileutilsTest(unittest.TestCase):
@patch.object(fileutils, '_open_file')
def test_write_file_displays_errors(self, mock_open_file, mock_print_error):
mock_open_file.side_effect = IOError()
fileutils.write_file(
self.fake_path, 'foo data', continue_on_error=True, display_errors=True)
fileutils.write_file(self.fake_path,
'foo data',
continue_on_error=True,
display_errors=True)
self.assertTrue(mock_print_error.called)
@patch.object(fileutils, '_open_file')
@ -226,7 +234,9 @@ class FileutilsTest(unittest.TestCase):
self, mock_open_file):
mock_open_file.side_effect = IOError()
with self.assertRaises(SystemExit) as context:
fileutils.write_file(self.fake_path, 'foo data', continue_on_error=False)
fileutils.write_file(self.fake_path,
'foo data',
continue_on_error=False)
self.assertEqual(context.exception.code, 6)


@ -68,18 +68,24 @@ def call(service,
if http_status == 0:
return None
is_known_error_reason = reason in [r.value for r in errors.ErrorReason]
if is_known_error_reason and errors.ErrorReason(reason) in throw_reasons:
if errors.ErrorReason(reason) in errors.ERROR_REASON_TO_EXCEPTION:
raise errors.ERROR_REASON_TO_EXCEPTION[errors.ErrorReason(reason)](
message)
is_known_error_reason = reason in [
r.value for r in errors.ErrorReason
]
if is_known_error_reason and errors.ErrorReason(
reason) in throw_reasons:
if errors.ErrorReason(
reason) in errors.ERROR_REASON_TO_EXCEPTION:
raise errors.ERROR_REASON_TO_EXCEPTION[errors.ErrorReason(
reason)](message)
raise e
if (n != retries) and (is_known_error_reason and errors.ErrorReason(
reason) in errors.DEFAULT_RETRY_REASONS + retry_reasons):
controlflow.wait_on_failure(n, retries, reason)
continue
if soft_errors:
display.print_error(f'{http_status}: {message} - {reason}{["", ": Giving up."][n > 1]}')
display.print_error(
f'{http_status}: {message} - {reason}{["", ": Giving up."][n > 1]}'
)
return None
controlflow.system_error_exit(
int(http_status), f'{http_status}: {message} - {reason}')
@ -89,10 +95,12 @@ def call(service,
errors.ErrorReason.SERVICE_NOT_AVAILABLE in throw_reasons)
if errors.ErrorReason.SERVICE_NOT_AVAILABLE in throw_reasons:
raise errors.GapiServiceNotAvailableError(str(e))
display.print_error(f'User {GM_Globals[GM_CURRENT_API_USER]}: {str(e)}')
display.print_error(
f'User {GM_Globals[GM_CURRENT_API_USER]}: {str(e)}')
return None
except ValueError as e:
if hasattr(service._http, 'cache') and service._http.cache is not None:
if hasattr(service._http,
'cache') and service._http.cache is not None:
service._http.cache = None
continue
controlflow.system_error_exit(4, str(e))
@ -130,8 +138,7 @@ def get_items(service,
Returns:
The list of items in the first page of a response.
"""
results = call(
service,
results = call(service,
function,
throw_reasons=throw_reasons,
retry_reasons=retry_reasons,
@ -159,7 +166,8 @@ def _get_max_page_size_for_api_call(service, function, **kwargs):
for a_method in resource.get('methods', {}).values():
if a_method.get('id') == api_id:
if not a_method.get('parameters') or a_method['parameters'].get(
'pageSize') or not a_method['parameters'].get('maxResults'):
'pageSize'
) or not a_method['parameters'].get('maxResults'):
# Make sure API call supports maxResults. For now we don't care to
# set pageSize since all known pageSize API calls have
# default pageSize == max pageSize.
@ -176,6 +184,7 @@ TOTAL_ITEMS_MARKER = '%%total_items%%'
FIRST_ITEM_MARKER = '%%first_item%%'
LAST_ITEM_MARKER = '%%last_item%%'
def got_total_items_msg(items, eol):
"""Format a page_message to be used by get_all_pages
@ -192,6 +201,7 @@ def got_total_items_msg(items, eol):
return f'Got {TOTAL_ITEMS_MARKER} {items}{eol}'
def got_total_items_first_last_msg(items):
"""Format a page_message to be used by get_all_pages
@ -205,7 +215,8 @@ def got_total_items_first_last_msg(items):
The formatted page_message
"""
return f'Got {TOTAL_ITEMS_MARKER} {items}: {FIRST_ITEM_MARKER} - {LAST_ITEM_MARKER}'+'\n'
return f'Got {TOTAL_ITEMS_MARKER} {items}: {FIRST_ITEM_MARKER} - {LAST_ITEM_MARKER}' + '\n'
def get_all_pages(service,
function,
@ -258,8 +269,7 @@ def get_all_pages(service,
page_token = None
total_items = 0
while True:
page = call(
service,
page = call(service,
function,
soft_errors=soft_errors,
throw_reasons=throw_reasons,
@ -278,12 +288,16 @@ def get_all_pages(service,
# Show a paging message to the user that indicates paging progress
if page_message:
show_message = page_message.replace(TOTAL_ITEMS_MARKER, str(total_items))
show_message = page_message.replace(TOTAL_ITEMS_MARKER,
str(total_items))
if message_attribute:
first_item = page_items[0] if num_page_items > 0 else {}
last_item = page_items[-1] if num_page_items > 1 else first_item
show_message = show_message.replace(FIRST_ITEM_MARKER, str(first_item.get(message_attribute, '')))
show_message = show_message.replace(LAST_ITEM_MARKER, str(last_item.get(message_attribute, '')))
show_message = show_message.replace(
FIRST_ITEM_MARKER,
str(first_item.get(message_attribute, '')))
show_message = show_message.replace(
LAST_ITEM_MARKER, str(last_item.get(message_attribute, '')))
sys.stderr.write('\r')
sys.stderr.flush()
sys.stderr.write(show_message)
@ -324,5 +338,6 @@ def handle_oauth_token_error(e, soft_errors):
GM_Globals[GM_CURRENT_API_USER]))
controlflow.system_error_exit(18, f'Authentication Token Error - {str(e)}')
def get_enum_values_minus_unspecified(values):
return [a_type for a_type in values if '_UNSPECIFIED' not in a_type]


@ -86,8 +86,10 @@ class GapiTest(unittest.TestCase):
self.assertEqual(response, self.mock_method().execute.return_value)
def test_call_passes_target_method_params(self):
gapi.call(
self.mock_service, self.mock_method_name, my_param_1=1, my_param_2=2)
gapi.call(self.mock_service,
self.mock_method_name,
my_param_1=1,
my_param_2=2)
self.assertEqual(self.mock_method.call_count, 1)
method_kwargs = self.mock_method.call_args[1]
self.assertEqual(method_kwargs.get('my_param_1'), 1)
@ -104,11 +106,12 @@ class GapiTest(unittest.TestCase):
fake_http_error, fake_200_response
]
response = gapi.call(
self.mock_service, self.mock_method_name, soft_errors=True)
response = gapi.call(self.mock_service,
self.mock_method_name,
soft_errors=True)
self.assertEqual(response, fake_200_response)
self.assertEqual(
self.mock_service._http.credentials.refresh.call_count, 1)
self.assertEqual(self.mock_service._http.credentials.refresh.call_count,
1)
self.assertEqual(self.mock_method.return_value.execute.call_count, 2)
def test_call_throws_for_provided_reason(self):
@ -118,8 +121,7 @@ class GapiTest(unittest.TestCase):
gam_exception = errors.ERROR_REASON_TO_EXCEPTION[throw_reason]
with self.assertRaises(gam_exception):
gapi.call(
self.mock_service,
gapi.call(self.mock_service,
self.mock_method_name,
throw_reasons=[throw_reason])
@ -133,15 +135,17 @@ class GapiTest(unittest.TestCase):
default_throw_reason = errors.ErrorReason.BACKEND_ERROR
self.assertIn(default_throw_reason, errors.DEFAULT_RETRY_REASONS)
fake_http_error = create_http_error(404, default_throw_reason, 'message')
fake_http_error = create_http_error(404, default_throw_reason,
'message')
fake_200_response = MagicMock()
# Fail once, then succeed on retry
self.mock_method.return_value.execute.side_effect = [
fake_http_error, fake_200_response
]
response = gapi.call(
self.mock_service, self.mock_method_name, retry_reasons=[])
response = gapi.call(self.mock_service,
self.mock_method_name,
retry_reasons=[])
self.assertEqual(response, fake_200_response)
self.assertEqual(self.mock_method.return_value.execute.call_count, 2)
# Make sure a backoff technique was used for retry.
@ -165,14 +169,14 @@ class GapiTest(unittest.TestCase):
'This error should not cause the request to be retried')
# Fail once, then succeed on retry
self.mock_method.return_value.execute.side_effect = [
fake_retrieable_error1, fake_retrieable_error2, fake_non_retriable_error
fake_retrieable_error1, fake_retrieable_error2,
fake_non_retriable_error
]
with self.assertRaises(SystemExit):
# The third call should raise the SystemExit when non_retriable_error is
# raised.
gapi.call(
self.mock_service,
gapi.call(self.mock_service,
self.mock_method_name,
retry_reasons=[retry_reason1, retry_reason2])
@ -231,7 +235,8 @@ class GapiTest(unittest.TestCase):
response = gapi.call(self.mock_service, self.mock_method_name)
self.assertEqual(response, fake_200_response)
# HTTP cached connections should be cleared on receiving this error
self.assertNotEqual(http_connections, self.mock_service._http.connections)
self.assertNotEqual(http_connections,
self.mock_service._http.connections)
self.assertEqual(self.mock_method.return_value.execute.call_count, 2)
# Make sure a backoff technique was used for retry.
self.assertEqual(mock_wait_on_failure.call_count, 1)
@ -250,13 +255,16 @@ class GapiTest(unittest.TestCase):
field_name = 'things'
fake_response = {field_name: [{}, {}, {}]}
self.mock_method.return_value.execute.return_value = fake_response
page = gapi.get_items(
self.mock_service, self.mock_method_name, items=field_name)
page = gapi.get_items(self.mock_service,
self.mock_method_name,
items=field_name)
self.assertEqual(page, fake_response[field_name])
def test_get_items_passes_additional_kwargs_to_service(self):
gapi.get_items(
self.mock_service, self.mock_method_name, my_param_1=1, my_param_2=2)
gapi.get_items(self.mock_service,
self.mock_method_name,
my_param_1=1,
my_param_2=2)
self.assertEqual(self.mock_method.call_count, 1)
method_kwargs = self.mock_method.call_args[1]
self.assertEqual(1, method_kwargs.get('my_param_1'))
@ -273,18 +281,22 @@ class GapiTest(unittest.TestCase):
page_1 = {'items': ['1-1', '1-2', '1-3'], 'nextPageToken': '2'}
page_2 = {'items': ['2-1', '2-2', '2-3'], 'nextPageToken': '3'}
page_3 = {'items': ['3-1', '3-2', '3-3']}
self.mock_method.return_value.execute.side_effect = [page_1, page_2, page_3]
self.mock_method.return_value.execute.side_effect = [
page_1, page_2, page_3
]
response_items = gapi.get_all_pages(self.mock_service,
self.mock_method_name)
self.assertListEqual(response_items,
page_1['items'] + page_2['items'] + page_3['items'])
self.assertListEqual(
response_items, page_1['items'] + page_2['items'] + page_3['items'])
def test_get_all_pages_includes_next_pagetoken_in_request(self):
page_1 = {'items': ['1-1', '1-2', '1-3'], 'nextPageToken': 'someToken'}
page_2 = {'items': ['2-1', '2-2', '2-3']}
self.mock_method.return_value.execute.side_effect = [page_1, page_2]
gapi.get_all_pages(self.mock_service, self.mock_method_name, pageSize=100)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
pageSize=100)
self.assertEqual(self.mock_method.call_count, 2)
call_2_kwargs = self.mock_method.call_args_list[1][1]
self.assertIn('pageToken', call_2_kwargs)
@ -321,8 +333,9 @@ class GapiTest(unittest.TestCase):
def test_get_all_pages_max_page_size_overrided(self):
self.mock_method.return_value.execute.return_value = self.empty_items_response
gapi.get_all_pages(
self.mock_service, self.mock_method_name, pageSize=123456)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
pageSize=123456)
request_method_kwargs = self.mock_method.call_args[1]
self.assertIn('pageSize', request_method_kwargs)
self.assertEqual(123456, request_method_kwargs['pageSize'])
@ -332,8 +345,9 @@ class GapiTest(unittest.TestCase):
paging_message = 'A simple string displayed during paging'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service, self.mock_method_name, page_message=paging_message)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message)
messages_written = [
call_args[0][0] for call_args in mock_write.call_args_list
]
@ -344,8 +358,9 @@ class GapiTest(unittest.TestCase):
paging_message = 'A simple string displayed during paging'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service, self.mock_method_name, page_message=paging_message)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message)
messages_written = [
call_args[0][0] for call_args in mock_write.call_args_list
]
@ -365,8 +380,9 @@ class GapiTest(unittest.TestCase):
paging_message = 'A simple string displayed during paging'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service, self.mock_method_name, page_message=paging_message)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message)
messages_written = [
call_args[0][0] for call_args in mock_write.call_args_list
]
@ -381,8 +397,9 @@ class GapiTest(unittest.TestCase):
paging_message = 'Total number of items discovered: %%total_items%%'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service, self.mock_method_name, page_message=paging_message)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message)
messages_written = [
call_args[0][0] for call_args in mock_write.call_args_list
@ -413,8 +430,7 @@ class GapiTest(unittest.TestCase):
paging_message = 'First item in page: %%first_item%%'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service,
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message,
message_attribute='position')
@ -441,8 +457,7 @@ class GapiTest(unittest.TestCase):
paging_message = 'Last item in page: %%last_item%%'
with patch.object(gapi.sys.stderr, 'write') as mock_write:
gapi.get_all_pages(
self.mock_service,
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
page_message=paging_message,
message_attribute='position')
@ -469,8 +484,10 @@ class GapiTest(unittest.TestCase):
def test_get_all_pages_passes_additional_kwargs_to_service_method(self):
self.mock_method.return_value.execute.return_value = self.empty_items_response
gapi.get_all_pages(
self.mock_service, self.mock_method_name, my_param_1=1, my_param_2=2)
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
my_param_1=1,
my_param_2=2)
method_kwargs = self.mock_method.call_args[1]
self.assertEqual(method_kwargs.get('my_param_1'), 1)
self.assertEqual(method_kwargs.get('my_param_2'), 2)
@ -480,8 +497,7 @@ class GapiTest(unittest.TestCase):
throw_for = MagicMock()
retry_for = MagicMock()
mock_call.return_value = self.empty_items_response
gapi.get_all_pages(
self.mock_service,
gapi.get_all_pages(self.mock_service,
self.mock_method_name,
throw_reasons=throw_for,
retry_reasons=retry_for)
@ -493,8 +509,9 @@ class GapiTest(unittest.TestCase):
field_name = 'things'
fake_response = {field_name: [{}, {}, {}]}
self.mock_method.return_value.execute.return_value = fake_response
page = gapi.get_all_pages(
self.mock_service, self.mock_method_name, items=field_name)
page = gapi.get_all_pages(self.mock_service,
self.mock_method_name,
items=field_name)
self.assertEqual(page, fake_response[field_name])


@ -24,8 +24,7 @@ def normalizeCalendarId(calname, checkPrimary=False):
def buildCalendarGAPIObject(calname):
calendarId = normalizeCalendarId(calname)
return (calendarId, gam.buildGAPIServiceObject('calendar',
calendarId))
return (calendarId, gam.buildGAPIServiceObject('calendar', calendarId))
def buildCalendarDataGAPIObject(calname):
@ -41,6 +40,7 @@ def buildCalendarDataGAPIObject(calname):
_, cal = buildCalendarGAPIObject(gam._getValueFromOAuth('email'))
return (calendarId, cal)
def printShowACLs(csvFormat):
calendarId, cal = buildCalendarDataGAPIObject(sys.argv[2])
if not cal:
@ -54,10 +54,9 @@ def printShowACLs(csvFormat):
i += 1
else:
action = ['showacl', 'printacl'][csvFormat]
message = f"gam calendar <email> {action}"
message = f'gam calendar <email> {action}'
controlflow.invalid_argument_exit(sys.argv[i], message)
acls = gapi.get_all_pages(
cal.acl(), 'list', 'items', calendarId=calendarId)
acls = gapi.get_all_pages(cal.acl(), 'list', 'items', calendarId=calendarId)
i = 0
if csvFormat:
titles = []
@ -75,10 +74,11 @@ def printShowACLs(csvFormat):
else:
formatted_acl = formatACLRule(rule)
current_count = display.current_count(i, count)
print(f'Calendar: {calendarId}, ACL: {formatted_acl}{current_count}')
print(
f'Calendar: {calendarId}, ACL: {formatted_acl}{current_count}')
if csvFormat:
display.write_csv_file(
rows, titles, f'{calendarId} Calendar ACLs', toDrive)
display.write_csv_file(rows, titles, f'{calendarId} Calendar ACLs',
toDrive)
def _getCalendarACLScope(i, body):
@ -87,8 +87,8 @@ def _getCalendarACLScope(i, body):
body['scope']['type'] = myarg
i += 1
if myarg in ['user', 'group']:
body['scope']['value'] = gam.normalizeEmailAddressOrUID(
sys.argv[i], noUid=True)
body['scope']['value'] = gam.normalizeEmailAddressOrUID(sys.argv[i],
noUid=True)
i += 1
elif myarg == 'domain':
if i < len(sys.argv) and \
@ -99,8 +99,8 @@ def _getCalendarACLScope(i, body):
body['scope']['value'] = GC_Values[GC_DOMAIN]
elif myarg != 'default':
body['scope']['type'] = 'user'
body['scope']['value'] = gam.normalizeEmailAddressOrUID(
myarg, noUid=True)
body['scope']['value'] = gam.normalizeEmailAddressOrUID(myarg,
noUid=True)
return i
@ -122,22 +122,26 @@ def addACL(function):
return
myarg = sys.argv[4].lower().replace('_', '')
if myarg not in CALENDAR_ACL_ROLES_MAP:
controlflow.expected_argument_exit(
"Role", ", ".join(CALENDAR_ACL_ROLES_MAP), myarg)
controlflow.expected_argument_exit('Role',
', '.join(CALENDAR_ACL_ROLES_MAP),
myarg)
body = {'role': CALENDAR_ACL_ROLES_MAP[myarg]}
i = _getCalendarACLScope(5, body)
sendNotifications = True
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'sendnotifications':
sendNotifications = gam.getBoolean(sys.argv[i+1], myarg)
sendNotifications = gam.getBoolean(sys.argv[i + 1], myarg)
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], f"gam calendar <email> {function.lower()}")
sys.argv[i], f'gam calendar <email> {function.lower()}')
print(f'Calendar: {calendarId}, {function} ACL: {formatACLRule(body)}')
gapi.call(cal.acl(), 'insert', calendarId=calendarId,
body=body, sendNotifications=sendNotifications)
gapi.call(cal.acl(),
'insert',
calendarId=calendarId,
body=body,
sendNotifications=sendNotifications)
def delACL():
@ -152,8 +156,11 @@ def delACL():
body = {'role': 'none'}
_getCalendarACLScope(5, body)
print(f'Calendar: {calendarId}, Delete ACL: {formatACLScope(body)}')
gapi.call(cal.acl(), 'insert', calendarId=calendarId,
body=body, sendNotifications=False)
gapi.call(cal.acl(),
'insert',
calendarId=calendarId,
body=body,
sendNotifications=False)
def wipeData():
@ -176,7 +183,7 @@ def printEvents():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'query':
q = sys.argv[i+1]
q = sys.argv[i + 1]
i += 2
elif myarg == 'includedeleted':
showDeleted = True
@ -185,30 +192,34 @@ def printEvents():
showHiddenInvitations = True
i += 1
elif myarg == 'after':
timeMin = utils.get_time_or_delta_from_now(sys.argv[i+1])
timeMin = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'before':
timeMax = utils.get_time_or_delta_from_now(sys.argv[i+1])
timeMax = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'timezone':
timeZone = sys.argv[i+1]
timeZone = sys.argv[i + 1]
i += 2
elif myarg == 'updated':
updatedMin = utils.get_time_or_delta_from_now(sys.argv[i+1])
updatedMin = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'todrive':
toDrive = True
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam calendar <email> printevents")
sys.argv[i], 'gam calendar <email> printevents')
page_message = gapi.got_total_items_msg(f'Events for {calendarId}', '')
results = gapi.get_all_pages(cal.events(), 'list', 'items',
results = gapi.get_all_pages(cal.events(),
'list',
'items',
page_message=page_message,
calendarId=calendarId, q=q,
calendarId=calendarId,
q=q,
showDeleted=showDeleted,
showHiddenInvitations=showHiddenInvitations,
timeMin=timeMin, timeMax=timeMax,
timeMin=timeMin,
timeMax=timeMax,
timeZone=timeZone,
updatedMin=updatedMin)
for result in results:
@ -237,17 +248,19 @@ def getSendUpdates(myarg, i, cal):
sendUpdates = 'all'
i += 1
elif myarg == 'sendnotifications':
sendUpdates = 'all' if gam.getBoolean(sys.argv[i+1], myarg) else 'none'
sendUpdates = 'all' if gam.getBoolean(sys.argv[i +
1], myarg) else 'none'
i += 2
else: # 'sendupdates':
sendUpdatesMap = {}
for val in cal._rootDesc['resources']['events']['methods']['delete'][
'parameters']['sendUpdates']['enum']:
sendUpdatesMap[val.lower()] = val
sendUpdates = sendUpdatesMap.get(sys.argv[i+1].lower(), False)
sendUpdates = sendUpdatesMap.get(sys.argv[i + 1].lower(), False)
if not sendUpdates:
controlflow.expected_argument_exit(
"sendupdates", ", ".join(sendUpdatesMap), sys.argv[i+1])
controlflow.expected_argument_exit('sendupdates',
', '.join(sendUpdatesMap),
sys.argv[i + 1])
i += 2
return (sendUpdates, i)
@ -265,7 +278,7 @@ def moveOrDeleteEvent(moveOrDelete):
if myarg in ['notifyattendees', 'sendnotifications', 'sendupdates']:
sendUpdates, i = getSendUpdates(myarg, i, cal)
elif myarg in ['id', 'eventid']:
eventId = sys.argv[i+1]
eventId = sys.argv[i + 1]
i += 2
elif myarg in ['query', 'eventquery']:
controlflow.system_error_exit(
@ -276,15 +289,19 @@ def moveOrDeleteEvent(moveOrDelete):
doit = True
i += 1
elif moveOrDelete == 'move' and myarg == 'destination':
kwargs['destination'] = sys.argv[i+1]
kwargs['destination'] = sys.argv[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], f"gam calendar <email> {moveOrDelete}event")
sys.argv[i], f'gam calendar <email> {moveOrDelete}event')
if doit:
print(f' going to {moveOrDelete} eventId {eventId}')
gapi.call(cal.events(), moveOrDelete, calendarId=calendarId,
eventId=eventId, sendUpdates=sendUpdates, **kwargs)
gapi.call(cal.events(),
moveOrDelete,
calendarId=calendarId,
eventId=eventId,
sendUpdates=sendUpdates,
**kwargs)
else:
print(
f' would {moveOrDelete} eventId {eventId}. Add doit to command ' \
@ -296,8 +313,10 @@ def infoEvent():
if not cal:
return
eventId = sys.argv[4]
result = gapi.call(cal.events(), 'get',
calendarId=calendarId, eventId=eventId)
result = gapi.call(cal.events(),
'get',
calendarId=calendarId,
eventId=eventId)
display.print_json(result)
@ -316,25 +335,36 @@ def addOrUpdateEvent(action):
kwargs = {'eventId': eventId}
i = 5
func = 'patch'
requires_full_update = ['attendee', 'optionalattendee',
'removeattendee', 'replacedescription']
requires_full_update = [
'attendee', 'optionalattendee', 'removeattendee',
'replacedescription'
]
for arg in sys.argv[i:]:
if arg.replace('_', '').lower() in requires_full_update:
func = 'update'
body = gapi.call(cal.events(), 'get',
calendarId=calendarId, eventId=eventId)
body = gapi.call(cal.events(),
'get',
calendarId=calendarId,
eventId=eventId)
break
sendUpdates, body = getEventAttributes(i, calendarId, cal, body, action)
result = gapi.call(cal.events(), func, conferenceDataVersion=1,
supportsAttachments=True, calendarId=calendarId,
sendUpdates=sendUpdates, body=body, fields='id',
result = gapi.call(cal.events(),
func,
conferenceDataVersion=1,
supportsAttachments=True,
calendarId=calendarId,
sendUpdates=sendUpdates,
body=body,
fields='id',
**kwargs)
print(f'Event {result["id"]} {action} finished')
def _remove_attendee(attendees, remove_email):
return [attendee for attendee in attendees
if not attendee['email'].lower() == remove_email]
return [
attendee for attendee in attendees
if not attendee['email'].lower() == remove_email
]
def getEventAttributes(i, calendarId, cal, body, action):
@ -348,45 +378,48 @@ def getEventAttributes(i, calendarId, cal, body, action):
sendUpdates, i = getSendUpdates(myarg, i, cal)
elif myarg == 'attendee':
body.setdefault('attendees', [])
body['attendees'].append({'email': sys.argv[i+1]})
body['attendees'].append({'email': sys.argv[i + 1]})
i += 2
elif myarg == 'removeattendee' and action == 'update':
remove_email = sys.argv[i+1].lower()
remove_email = sys.argv[i + 1].lower()
if 'attendees' in body:
body['attendees'] = _remove_attendee(body['attendees'],
remove_email)
i += 2
elif myarg == 'optionalattendee':
body.setdefault('attendees', [])
body['attendees'].append(
{'email': sys.argv[i+1], 'optional': True})
body['attendees'].append({
'email': sys.argv[i + 1],
'optional': True
})
i += 2
elif myarg == 'anyonecanaddself':
body['anyoneCanAddSelf'] = True
i += 1
elif myarg == 'description':
body['description'] = sys.argv[i+1].replace('\\n', '\n')
body['description'] = sys.argv[i + 1].replace('\\n', '\n')
i += 2
elif myarg == 'replacedescription' and action == 'update':
search = sys.argv[i+1]
replace = sys.argv[i+2]
search = sys.argv[i + 1]
replace = sys.argv[i + 2]
if 'description' in body:
body['description'] = re.sub(search, replace, body['description'])
body['description'] = re.sub(search, replace,
body['description'])
i += 3
elif myarg == 'start':
if sys.argv[i+1].lower() == 'allday':
body['start'] = {'date': utils.get_yyyymmdd(sys.argv[i+2])}
if sys.argv[i + 1].lower() == 'allday':
body['start'] = {'date': utils.get_yyyymmdd(sys.argv[i + 2])}
i += 3
else:
start_time = utils.get_time_or_delta_from_now(sys.argv[i+1])
start_time = utils.get_time_or_delta_from_now(sys.argv[i + 1])
body['start'] = {'dateTime': start_time}
i += 2
elif myarg == 'end':
if sys.argv[i+1].lower() == 'allday':
body['end'] = {'date': utils.get_yyyymmdd(sys.argv[i+2])}
if sys.argv[i + 1].lower() == 'allday':
body['end'] = {'date': utils.get_yyyymmdd(sys.argv[i + 2])}
i += 3
else:
end_time = utils.get_time_or_delta_from_now(sys.argv[i+1])
end_time = utils.get_time_or_delta_from_now(sys.argv[i + 1])
body['end'] = {'dateTime': end_time}
i += 2
elif myarg == 'guestscantinviteothers':
@ -394,64 +427,66 @@ def getEventAttributes(i, calendarId, cal, body, action):
i += 1
elif myarg == 'guestscaninviteothers':
body['guestsCanInviteOthers'] = gam.getBoolean(
sys.argv[i+1], 'guestscaninviteothers')
sys.argv[i + 1], 'guestscaninviteothers')
i += 2
elif myarg == 'guestscantseeothers':
body['guestsCanSeeOtherGuests'] = False
i += 1
elif myarg == 'guestscanseeothers':
body['guestsCanSeeOtherGuests'] = gam.getBoolean(
sys.argv[i+1], 'guestscanseeothers')
sys.argv[i + 1], 'guestscanseeothers')
i += 2
elif myarg == 'guestscanmodify':
body['guestsCanModify'] = gam.getBoolean(
sys.argv[i+1], 'guestscanmodify')
body['guestsCanModify'] = gam.getBoolean(sys.argv[i + 1],
'guestscanmodify')
i += 2
elif myarg == 'id':
if action == 'update':
controlflow.invalid_argument_exit(
'id', 'gam calendar <calendar> updateevent')
body['id'] = sys.argv[i+1]
body['id'] = sys.argv[i + 1]
i += 2
elif myarg == 'summary':
body['summary'] = sys.argv[i+1]
body['summary'] = sys.argv[i + 1]
i += 2
elif myarg == 'location':
body['location'] = sys.argv[i+1]
body['location'] = sys.argv[i + 1]
i += 2
elif myarg == 'available':
body['transparency'] = 'transparent'
i += 1
elif myarg == 'transparency':
validTransparency = ['opaque', 'transparent']
if sys.argv[i+1].lower() in validTransparency:
body['transparency'] = sys.argv[i+1].lower()
if sys.argv[i + 1].lower() in validTransparency:
body['transparency'] = sys.argv[i + 1].lower()
else:
controlflow.expected_argument_exit(
'transparency',
", ".join(validTransparency), sys.argv[i+1])
controlflow.expected_argument_exit('transparency',
', '.join(validTransparency),
sys.argv[i + 1])
i += 2
elif myarg == 'visibility':
validVisibility = ['default', 'public', 'private']
if sys.argv[i+1].lower() in validVisibility:
body['visibility'] = sys.argv[i+1].lower()
if sys.argv[i + 1].lower() in validVisibility:
body['visibility'] = sys.argv[i + 1].lower()
else:
controlflow.expected_argument_exit(
"visibility", ", ".join(validVisibility), sys.argv[i+1])
controlflow.expected_argument_exit('visibility',
', '.join(validVisibility),
sys.argv[i + 1])
i += 2
elif myarg == 'tentative':
body['status'] = 'tentative'
i += 1
elif myarg == 'status':
validStatus = ['confirmed', 'tentative', 'cancelled']
if sys.argv[i+1].lower() in validStatus:
body['status'] = sys.argv[i+1].lower()
if sys.argv[i + 1].lower() in validStatus:
body['status'] = sys.argv[i + 1].lower()
else:
controlflow.expected_argument_exit(
'status', ', '.join(validStatus), sys.argv[i+1])
controlflow.expected_argument_exit('status',
', '.join(validStatus),
sys.argv[i + 1])
i += 2
elif myarg == 'source':
body['source'] = {'title': sys.argv[i+1], 'url': sys.argv[i+2]}
body['source'] = {'title': sys.argv[i + 1], 'url': sys.argv[i + 2]}
i += 3
elif myarg == 'noreminders':
body['reminders'] = {'useDefault': False}
@ -460,43 +495,48 @@ def getEventAttributes(i, calendarId, cal, body, action):
minutes = \
gam.getInteger(sys.argv[i+1], myarg, minVal=0,
maxVal=CALENDAR_REMINDER_MAX_MINUTES)
reminder = {'minutes': minutes, 'method': sys.argv[i+2]}
body.setdefault(
'reminders', {'overrides': [], 'useDefault': False})
reminder = {'minutes': minutes, 'method': sys.argv[i + 2]}
body.setdefault('reminders', {'overrides': [], 'useDefault': False})
body['reminders']['overrides'].append(reminder)
i += 3
elif myarg == 'recurrence':
body.setdefault('recurrence', [])
body['recurrence'].append(sys.argv[i+1])
body['recurrence'].append(sys.argv[i + 1])
i += 2
elif myarg == 'timezone':
timeZone = sys.argv[i+1]
timeZone = sys.argv[i + 1]
i += 2
elif myarg == 'privateproperty':
if 'extendedProperties' not in body:
body['extendedProperties'] = {'private': {}, 'shared': {}}
body['extendedProperties']['private'][sys.argv[i+1]] = sys.argv[i+2]
body['extendedProperties']['private'][sys.argv[i +
1]] = sys.argv[i + 2]
i += 3
elif myarg == 'sharedproperty':
if 'extendedProperties' not in body:
body['extendedProperties'] = {'private': {}, 'shared': {}}
body['extendedProperties']['shared'][sys.argv[i+1]] = sys.argv[i+2]
body['extendedProperties']['shared'][sys.argv[i + 1]] = sys.argv[i +
2]
i += 3
elif myarg == 'colorindex':
body['colorId'] = gam.getInteger(
sys.argv[i+1], myarg, CALENDAR_EVENT_MIN_COLOR_INDEX,
body['colorId'] = gam.getInteger(sys.argv[i + 1], myarg,
CALENDAR_EVENT_MIN_COLOR_INDEX,
CALENDAR_EVENT_MAX_COLOR_INDEX)
i += 2
elif myarg == 'hangoutsmeet':
body['conferenceData'] = {'createRequest': {
'requestId': f'{str(uuid.uuid4())}'}}
body['conferenceData'] = {
'createRequest': {
'requestId': f'{str(uuid.uuid4())}'
}
}
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], f'gam calendar <email> {action}event')
if ('recurrence' in body) and (('start' in body) or ('end' in body)):
if not timeZone:
timeZone = gapi.call(cal.calendars(), 'get',
timeZone = gapi.call(cal.calendars(),
'get',
calendarId=calendarId,
fields='timeZone')['timeZone']
if 'start' in body:
@ -515,20 +555,20 @@ def modifySettings():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'description':
body['description'] = sys.argv[i+1]
body['description'] = sys.argv[i + 1]
i += 2
elif myarg == 'location':
body['location'] = sys.argv[i+1]
body['location'] = sys.argv[i + 1]
i += 2
elif myarg == 'summary':
body['summary'] = sys.argv[i+1]
body['summary'] = sys.argv[i + 1]
i += 2
elif myarg == 'timezone':
body['timeZone'] = sys.argv[i+1]
body['timeZone'] = sys.argv[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam calendar <email> modify")
controlflow.invalid_argument_exit(sys.argv[i],
'gam calendar <email> modify')
gapi.call(cal.calendars(), 'patch', calendarId=calendarId, body=body)
@ -540,23 +580,23 @@ def changeAttendees(users):
while len(sys.argv) > i:
myarg = sys.argv[i].lower()
if myarg == 'csv':
csv_file = sys.argv[i+1]
csv_file = sys.argv[i + 1]
i += 2
elif myarg == 'dryrun':
do_it = False
i += 1
elif myarg == 'start':
start_date = utils.get_time_or_delta_from_now(sys.argv[i+1])
start_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'end':
end_date = utils.get_time_or_delta_from_now(sys.argv[i+1])
end_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'allevents':
allevents = True
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam <users> update calattendees")
sys.argv[i], 'gam <users> update calattendees')
attendee_map = {}
f = fileutils.open_file(csv_file)
csvFile = csv.reader(f)
@ -570,9 +610,13 @@ def changeAttendees(users):
continue
page_token = None
while True:
events_page = gapi.call(cal.events(), 'list', calendarId=user,
pageToken=page_token, timeMin=start_date,
timeMax=end_date, showDeleted=False,
events_page = gapi.call(cal.events(),
'list',
calendarId=user,
pageToken=page_token,
timeMin=start_date,
timeMax=end_date,
showDeleted=False,
showHiddenInvitations=False)
print(f'Got {len(events_page.get("items", []))}')
for event in events_page.get('items', []):
@ -596,8 +640,8 @@ def changeAttendees(users):
try:
if attendee['email'].lower() in attendee_map:
old_email = attendee['email'].lower()
new_email = attendee_map[attendee['email'].lower(
)]
new_email = attendee_map[
attendee['email'].lower()]
print(f' SWITCHING attendee {old_email} to ' \
f'{new_email} for {event_summary}')
event['attendees'].remove(attendee)
@ -612,9 +656,12 @@ def changeAttendees(users):
body['attendees'] = event['attendees']
print(f'UPDATING {event_summary}')
if do_it:
gapi.call(cal.events(), 'patch', calendarId=user,
gapi.call(cal.events(),
'patch',
calendarId=user,
eventId=event['id'],
sendNotifications=False, body=body)
sendNotifications=False,
body=body)
else:
print(' not pulling the trigger.')
# else:
@ -631,8 +678,10 @@ def deleteCalendar(users):
user, cal = buildCalendarGAPIObject(user)
if not cal:
continue
gapi.call(cal.calendarList(), 'delete',
soft_errors=True, calendarId=calendarId)
gapi.call(cal.calendarList(),
'delete',
soft_errors=True,
calendarId=calendarId)
CALENDAR_REMINDER_MAX_MINUTES = 40320
@ -649,62 +698,71 @@ def getCalendarAttributes(i, body, function):
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'selected':
body['selected'] = gam.getBoolean(sys.argv[i+1], myarg)
body['selected'] = gam.getBoolean(sys.argv[i + 1], myarg)
i += 2
elif myarg == 'hidden':
body['hidden'] = gam.getBoolean(sys.argv[i+1], myarg)
body['hidden'] = gam.getBoolean(sys.argv[i + 1], myarg)
i += 2
elif myarg == 'summary':
body['summaryOverride'] = sys.argv[i+1]
body['summaryOverride'] = sys.argv[i + 1]
i += 2
elif myarg == 'colorindex':
body['colorId'] = gam.getInteger(
sys.argv[i+1], myarg, minVal=CALENDAR_MIN_COLOR_INDEX,
body['colorId'] = gam.getInteger(sys.argv[i + 1],
myarg,
minVal=CALENDAR_MIN_COLOR_INDEX,
maxVal=CALENDAR_MAX_COLOR_INDEX)
i += 2
elif myarg == 'backgroundcolor':
body['backgroundColor'] = gam.getColor(sys.argv[i+1])
body['backgroundColor'] = gam.getColor(sys.argv[i + 1])
colorRgbFormat = True
i += 2
elif myarg == 'foregroundcolor':
body['foregroundColor'] = gam.getColor(sys.argv[i+1])
body['foregroundColor'] = gam.getColor(sys.argv[i + 1])
colorRgbFormat = True
i += 2
elif myarg == 'reminder':
body.setdefault('defaultReminders', [])
method = sys.argv[i+1].lower()
method = sys.argv[i + 1].lower()
if method not in CLEAR_NONE_ARGUMENT:
if method not in CALENDAR_REMINDER_METHODS:
controlflow.expected_argument_exit("Method", ", ".join(
CALENDAR_REMINDER_METHODS+CLEAR_NONE_ARGUMENT), method)
minutes = gam.getInteger(
sys.argv[i+2], myarg, minVal=0,
controlflow.expected_argument_exit(
'Method', ', '.join(CALENDAR_REMINDER_METHODS +
CLEAR_NONE_ARGUMENT), method)
minutes = gam.getInteger(sys.argv[i + 2],
myarg,
minVal=0,
maxVal=CALENDAR_REMINDER_MAX_MINUTES)
body['defaultReminders'].append(
{'method': method, 'minutes': minutes})
body['defaultReminders'].append({
'method': method,
'minutes': minutes
})
i += 3
else:
i += 2
elif myarg == 'notification':
body.setdefault('notificationSettings', {'notifications': []})
method = sys.argv[i+1].lower()
method = sys.argv[i + 1].lower()
if method not in CLEAR_NONE_ARGUMENT:
if method not in CALENDAR_NOTIFICATION_METHODS:
controlflow.expected_argument_exit("Method", ", ".join(
CALENDAR_NOTIFICATION_METHODS+CLEAR_NONE_ARGUMENT), method)
eventType = sys.argv[i+2].lower()
controlflow.expected_argument_exit(
'Method', ', '.join(CALENDAR_NOTIFICATION_METHODS +
CLEAR_NONE_ARGUMENT), method)
eventType = sys.argv[i + 2].lower()
if eventType not in CALENDAR_NOTIFICATION_TYPES_MAP:
controlflow.expected_argument_exit("Event", ", ".join(
CALENDAR_NOTIFICATION_TYPES_MAP), eventType)
notice = {'method': method,
'type': CALENDAR_NOTIFICATION_TYPES_MAP[eventType]}
controlflow.expected_argument_exit(
'Event', ', '.join(CALENDAR_NOTIFICATION_TYPES_MAP),
eventType)
notice = {
'method': method,
'type': CALENDAR_NOTIFICATION_TYPES_MAP[eventType]
}
body['notificationSettings']['notifications'].append(notice)
i += 3
else:
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], f"gam {function} calendar")
controlflow.invalid_argument_exit(sys.argv[i],
f'gam {function} calendar')
return colorRgbFormat
@ -721,8 +779,11 @@ def addCalendar(users):
continue
current_count = display.current_count(i, count)
print(f'Subscribing {user} to calendar {calendarId}{current_count}')
gapi.call(cal.calendarList(), 'insert', soft_errors=True,
body=body, colorRgbFormat=colorRgbFormat)
gapi.call(cal.calendarList(),
'insert',
soft_errors=True,
body=body,
colorRgbFormat=colorRgbFormat)
def updateCalendar(users):
@ -740,13 +801,17 @@ def updateCalendar(users):
print(f"Updating {user}'s subscription to calendar ' \
f'{calendarId}{current_count}")
calId = calendarId if calendarId != 'primary' else user
gapi.call(cal.calendarList(), 'patch', soft_errors=True,
calendarId=calId, body=body, colorRgbFormat=colorRgbFormat)
gapi.call(cal.calendarList(),
'patch',
soft_errors=True,
calendarId=calId,
body=body,
colorRgbFormat=colorRgbFormat)
def _showCalendar(userCalendar, j, jcount):
current_count = display.current_count(j, jcount)
summary = userCalendar.get("summaryOverride", userCalendar["summary"])
summary = userCalendar.get('summaryOverride', userCalendar['summary'])
print(f' Calendar: {userCalendar["id"]}{current_count}')
print(f' Summary: {summary}')
print(f' Description: {userCalendar.get("description", "")}')
@ -780,7 +845,8 @@ def infoCalendar(users):
user, cal = buildCalendarGAPIObject(user)
if not cal:
continue
result = gapi.call(cal.calendarList(), 'get',
result = gapi.call(cal.calendarList(),
'get',
soft_errors=True,
calendarId=calendarId)
if result:
@ -809,8 +875,10 @@ def printShowCalendars(users, csvFormat):
user, cal = buildCalendarGAPIObject(user)
if not cal:
continue
result = gapi.get_all_pages(
cal.calendarList(), 'list', 'items', soft_errors=True)
result = gapi.get_all_pages(cal.calendarList(),
'list',
'items',
soft_errors=True)
jcount = len(result)
if not csvFormat:
print(f'User: {user}, Calendars:{display.current_count(i, count)}')
@ -825,8 +893,9 @@ def printShowCalendars(users, csvFormat):
continue
for userCalendar in result:
row = {'primaryEmail': user}
display.add_row_titles_to_csv_file(utils.flatten_json(
userCalendar, flattened=row), csvRows, titles)
display.add_row_titles_to_csv_file(
utils.flatten_json(userCalendar, flattened=row), csvRows,
titles)
if csvFormat:
display.sort_csv_titles(['primaryEmail', 'id'], titles)
display.write_csv_file(csvRows, titles, 'Calendars', todrive)
@ -840,8 +909,10 @@ def showCalSettings(users):
user, cal = buildCalendarGAPIObject(user)
if not cal:
continue
feed = gapi.get_all_pages(
cal.settings(), 'list', 'items', soft_errors=True)
feed = gapi.get_all_pages(cal.settings(),
'list',
'items',
soft_errors=True)
if feed:
current_count = display.current_count(i, count)
print(f'User: {user}, Calendar Settings:{current_count}')
@ -862,11 +933,11 @@ def transferSecCals(users):
remove_source_user = False
i += 1
elif myarg == 'sendnotifications':
sendNotifications = gam.getBoolean(sys.argv[i+1], myarg)
sendNotifications = gam.getBoolean(sys.argv[i + 1], myarg)
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam <users> transfer seccals")
controlflow.invalid_argument_exit(sys.argv[i],
'gam <users> transfer seccals')
if remove_source_user:
target_user, target_cal = buildCalendarGAPIObject(target_user)
if not target_cal:
@ -875,20 +946,38 @@ def transferSecCals(users):
user, source_cal = buildCalendarGAPIObject(user)
if not source_cal:
continue
calendars = gapi.get_all_pages(source_cal.calendarList(), 'list',
'items', soft_errors=True,
minAccessRole='owner', showHidden=True,
calendars = gapi.get_all_pages(source_cal.calendarList(),
'list',
'items',
soft_errors=True,
minAccessRole='owner',
showHidden=True,
fields='items(id),nextPageToken')
for calendar in calendars:
calendarId = calendar['id']
if calendarId.find('@group.calendar.google.com') != -1:
body = {'role': 'owner',
'scope': {'type': 'user', 'value': target_user}}
gapi.call(source_cal.acl(), 'insert', calendarId=calendarId,
body=body, sendNotifications=sendNotifications)
if remove_source_user:
body = {'role': 'none',
'scope': {'type': 'user', 'value': user}}
gapi.call(target_cal.acl(), 'insert',
calendarId=calendarId, body=body,
body = {
'role': 'owner',
'scope': {
'type': 'user',
'value': target_user
}
}
gapi.call(source_cal.acl(),
'insert',
calendarId=calendarId,
body=body,
sendNotifications=sendNotifications)
if remove_source_user:
body = {
'role': 'none',
'scope': {
'type': 'user',
'value': user
}
}
gapi.call(target_cal.acl(),
'insert',
calendarId=calendarId,
body=body,
sendNotifications=sendNotifications)
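Every command handler in this calendar module walks sys.argv with an explicit cursor, advancing it by one, two or three positions depending on how many values a keyword consumes, and rejecting anything unrecognized. A minimal, self-contained sketch of that pattern follows; the keyword names and the plain SystemExit are simplified stand-ins, not GAM's actual parser or its controlflow error handling.

# Sketch of the sys.argv cursor pattern used by the handlers above;
# the accepted keywords are a reduced, illustrative subset.
def parse_event_args(argv, i):
    body = {}
    while i < len(argv):
        myarg = argv[i].lower().replace('_', '')
        if myarg == 'summary':  # keyword plus one value
            body['summary'] = argv[i + 1]
            i += 2
        elif myarg == 'start':  # keyword plus one or two values
            if argv[i + 1].lower() == 'allday':
                body['start'] = {'date': argv[i + 2]}
                i += 3
            else:
                body['start'] = {'dateTime': argv[i + 1]}
                i += 2
        elif myarg == 'available':  # bare flag, consumes nothing
            body['transparency'] = 'transparent'
            i += 1
        else:
            raise SystemExit(f'unknown argument: {argv[i]}')
    return body

print(parse_event_args(
    ['gam', 'calendar', 'cal@example.com', 'addevent',
     'summary', 'Standup', 'start', 'allday', '2020-05-01'], 4))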

@ -20,29 +20,33 @@ def doUpdateCros():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'user':
update_body['annotatedUser'] = sys.argv[i+1]
update_body['annotatedUser'] = sys.argv[i + 1]
i += 2
elif myarg == 'location':
update_body['annotatedLocation'] = sys.argv[i+1]
update_body['annotatedLocation'] = sys.argv[i + 1]
i += 2
elif myarg == 'notes':
update_body['notes'] = sys.argv[i+1].replace('\\n', '\n')
update_body['notes'] = sys.argv[i + 1].replace('\\n', '\n')
i += 2
elif myarg in ['tag', 'asset', 'assetid']:
update_body['annotatedAssetId'] = sys.argv[i+1]
update_body['annotatedAssetId'] = sys.argv[i + 1]
i += 2
elif myarg in ['ou', 'org']:
orgUnitPath = gam.getOrgUnitItem(sys.argv[i+1])
orgUnitPath = gam.getOrgUnitItem(sys.argv[i + 1])
i += 2
elif myarg == 'action':
action = sys.argv[i+1].lower().replace('_', '').replace('-', '')
action = sys.argv[i + 1].lower().replace('_', '').replace('-', '')
deprovisionReason = None
if action in ['deprovisionsamemodelreplace',
'deprovisionsamemodelreplacement']:
if action in [
'deprovisionsamemodelreplace',
'deprovisionsamemodelreplacement'
]:
action = 'deprovision'
deprovisionReason = 'same_model_replacement'
elif action in ['deprovisiondifferentmodelreplace',
'deprovisiondifferentmodelreplacement']:
elif action in [
'deprovisiondifferentmodelreplace',
'deprovisiondifferentmodelreplacement'
]:
action = 'deprovision'
deprovisionReason = 'different_model_replacement'
elif action in ['deprovisionretiringdevice']:
@ -62,7 +66,7 @@ def doUpdateCros():
ack_wipe = True
i += 1
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam update cros")
controlflow.invalid_argument_exit(sys.argv[i], 'gam update cros')
i = 0
count = len(devices)
if action_body:
@ -86,27 +90,33 @@ def doUpdateCros():
i += 1
cur_count = gam.currentCount(i, count)
print(f' performing action {action} for {deviceId}{cur_count}')
gapi.call(cd.chromeosdevices(), function='action',
gapi.call(cd.chromeosdevices(),
function='action',
customerId=GC_Values[GC_CUSTOMER_ID],
resourceId=deviceId, body=action_body)
resourceId=deviceId,
body=action_body)
else:
if update_body:
for deviceId in devices:
i += 1
current_count = gam.currentCount(i, count)
print(f' updating {deviceId}{current_count}')
gapi.call(cd.chromeosdevices(), 'update',
gapi.call(cd.chromeosdevices(),
'update',
customerId=GC_Values[GC_CUSTOMER_ID],
deviceId=deviceId, body=update_body)
deviceId=deviceId,
body=update_body)
if orgUnitPath:
# split moves into max 50 devices per batch
for l in range(0, len(devices), 50):
move_body = {'deviceIds': devices[l:l+50]}
move_body = {'deviceIds': devices[l:l + 50]}
print(f' moving {len(move_body["deviceIds"])} devices to ' \
f'{orgUnitPath}')
gapi.call(cd.chromeosdevices(), 'moveDevicesToOu',
gapi.call(cd.chromeosdevices(),
'moveDevicesToOu',
customerId=GC_Values[GC_CUSTOMER_ID],
orgUnitPath=orgUnitPath, body=move_body)
orgUnitPath=orgUnitPath,
body=move_body)
def doGetCrosInfo():
@ -125,13 +135,13 @@ def doGetCrosInfo():
noLists = True
i += 1
elif myarg == 'listlimit':
listLimit = gam.getInteger(sys.argv[i+1], myarg, minVal=-1)
listLimit = gam.getInteger(sys.argv[i + 1], myarg, minVal=-1)
i += 2
elif myarg in CROS_START_ARGUMENTS:
startDate = _getFilterDate(sys.argv[i+1])
startDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg in CROS_END_ARGUMENTS:
endDate = _getFilterDate(sys.argv[i+1])
endDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg == 'allfields':
projection = 'FULL'
@ -148,7 +158,7 @@ def doGetCrosInfo():
fieldsList.extend(CROS_ARGUMENT_TO_PROPERTY_MAP[myarg])
i += 1
elif myarg == 'fields':
fieldNameList = sys.argv[i+1]
fieldNameList = sys.argv[i + 1]
for field in fieldNameList.lower().replace(',', ' ').split():
if field in CROS_ARGUMENT_TO_PROPERTY_MAP:
fieldsList.extend(CROS_ARGUMENT_TO_PROPERTY_MAP[field])
@ -158,21 +168,21 @@ def doGetCrosInfo():
projection = 'FULL'
noLists = False
else:
controlflow.invalid_argument_exit(
field, "gam info cros fields")
controlflow.invalid_argument_exit(field,
'gam info cros fields')
i += 2
elif myarg == 'downloadfile':
downloadfile = sys.argv[i+1]
downloadfile = sys.argv[i + 1]
if downloadfile.lower() == 'latest':
downloadfile = downloadfile.lower()
i += 2
elif myarg == 'targetfolder':
targetFolder = os.path.expanduser(sys.argv[i+1])
targetFolder = os.path.expanduser(sys.argv[i + 1])
if not os.path.isdir(targetFolder):
os.makedirs(targetFolder)
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam info cros")
controlflow.invalid_argument_exit(sys.argv[i], 'gam info cros')
if fieldsList:
fieldsList.append('deviceId')
fields = ','.join(set(fieldsList)).replace('.', '/')
@ -182,9 +192,11 @@ def doGetCrosInfo():
device_count = len(devices)
for deviceId in devices:
i += 1
cros = gapi.call(cd.chromeosdevices(), 'get',
cros = gapi.call(cd.chromeosdevices(),
'get',
customerId=GC_Values[GC_CUSTOMER_ID],
deviceId=deviceId, projection=projection,
deviceId=deviceId,
projection=projection,
fields=fields)
print(f'CrOS Device: {deviceId} ({i} of {device_count})')
if 'notes' in cros:
@ -208,8 +220,8 @@ def doGetCrosInfo():
print(' activeTimeRanges')
num_ranges = min(lenATR, listLimit or lenATR)
for activeTimeRange in activeTimeRanges[:num_ranges]:
active_date = activeTimeRange["date"]
active_time = activeTimeRange["activeTime"]
active_date = activeTimeRange['date']
active_time = activeTimeRange['activeTime']
duration = utils.formatMilliSeconds(active_time)
minutes = active_time // 60000
print(f' date: {active_date}')
@ -222,16 +234,17 @@ def doGetCrosInfo():
print(' recentUsers')
num_ranges = min(lenRU, listLimit or lenRU)
for recentUser in recentUsers[:num_ranges]:
useremail = recentUser.get("email")
useremail = recentUser.get('email')
if not useremail:
if recentUser["type"] == "USER_TYPE_UNMANAGED":
if recentUser['type'] == 'USER_TYPE_UNMANAGED':
useremail = 'UnmanagedUser'
else:
useremail = 'Unknown'
print(f' type: {recentUser["type"]}')
print(f' email: {useremail}')
deviceFiles = _filterCreateReportTime(
cros.get('deviceFiles', []), 'createTime', startDate, endDate)
deviceFiles = _filterCreateReportTime(cros.get('deviceFiles',
[]), 'createTime',
startDate, endDate)
lenDF = len(deviceFiles)
if lenDF:
num_ranges = min(lenDF, listLimit or lenDF)
@ -255,22 +268,21 @@ def doGetCrosInfo():
f'available to download.')
deviceFile = None
if deviceFile:
created = deviceFile["createTime"]
created = deviceFile['createTime']
downloadfile = f'cros-logs-{deviceId}-{created}.zip'
downloadfilename = os.path.join(targetFolder,
downloadfile)
dl_url = deviceFile['downloadUrl']
_, content = cd._http.request(dl_url)
fileutils.write_file(downloadfilename, content,
fileutils.write_file(downloadfilename,
content,
mode='wb',
continue_on_error=True)
print(f'Downloaded: {downloadfilename}')
elif downloadfile:
print('ERROR: no files to download.')
cpuStatusReports = _filterCreateReportTime(
cros.get('cpuStatusReports', []),
'reportTime',
startDate,
cros.get('cpuStatusReports', []), 'reportTime', startDate,
endDate)
lenCSR = len(cpuStatusReports)
if lenCSR:
@ -284,8 +296,8 @@ def doGetCrosInfo():
temp_label = tempInfo['label'].strip()
temperature = tempInfo['temperature']
print(f' {temp_label}: {temperature}')
pct_info = cpuStatusReport["cpuUtilizationPercentageInfo"]
util = ",".join([str(x) for x in pct_info])
pct_info = cpuStatusReport['cpuUtilizationPercentageInfo']
util = ','.join([str(x) for x in pct_info])
print(f' cpuUtilizationPercentageInfo: {util}')
diskVolumeReports = cros.get('diskVolumeReports', [])
lenDVR = len(diskVolumeReports)
@ -303,16 +315,16 @@ def doGetCrosInfo():
print(f' storageFree: {vstorage_free}')
print(f' storageTotal: {vstorage_total}')
systemRamFreeReports = _filterCreateReportTime(
cros.get('systemRamFreeReports', []),
'reportTime', startDate, endDate)
cros.get('systemRamFreeReports', []), 'reportTime', startDate,
endDate)
lenSRFR = len(systemRamFreeReports)
if lenSRFR:
print(' systemRamFreeReports')
num_ranges = min(lenSRFR, listLimit or lenSRFR)
for systemRamFreeReport in systemRamFreeReports[:num_ranges]:
report_time = systemRamFreeReport["reportTime"]
free_info = systemRamFreeReport["systemRamFreeInfo"]
free_ram = ",".join(free_info)
report_time = systemRamFreeReport['reportTime']
free_info = systemRamFreeReport['systemRamFreeInfo']
free_ram = ','.join(free_info)
print(f' reportTime: {report_time}')
print(f' systemRamFreeInfo: {free_ram}')
@ -320,11 +332,15 @@ def doGetCrosInfo():
def doPrintCrosActivity():
cd = gapi_directory.buildGAPIObject()
todrive = False
titles = ['deviceId', 'annotatedAssetId',
'annotatedLocation', 'serialNumber', 'orgUnitPath']
titles = [
'deviceId', 'annotatedAssetId', 'annotatedLocation', 'serialNumber',
'orgUnitPath'
]
csvRows = []
fieldsList = ['deviceId', 'annotatedAssetId',
'annotatedLocation', 'serialNumber', 'orgUnitPath']
fieldsList = [
'deviceId', 'annotatedAssetId', 'annotatedLocation', 'serialNumber',
'orgUnitPath'
]
startDate = endDate = None
selectActiveTimeRanges = selectDeviceFiles = selectRecentUsers = False
listLimit = 0
@ -335,10 +351,10 @@ def doPrintCrosActivity():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg in ['query', 'queries']:
queries = gam.getQueries(myarg, sys.argv[i+1])
queries = gam.getQueries(myarg, sys.argv[i + 1])
i += 2
elif myarg == 'limittoou':
orgUnitPath = gam.getOrgUnitItem(sys.argv[i+1])
orgUnitPath = gam.getOrgUnitItem(sys.argv[i + 1])
i += 2
elif myarg == 'todrive':
todrive = True
@ -360,32 +376,35 @@ def doPrintCrosActivity():
selectRecentUsers = True
i += 1
elif myarg in CROS_START_ARGUMENTS:
startDate = _getFilterDate(sys.argv[i+1])
startDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg in CROS_END_ARGUMENTS:
endDate = _getFilterDate(sys.argv[i+1])
endDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg == 'listlimit':
listLimit = gam.getInteger(sys.argv[i+1], myarg, minVal=0)
listLimit = gam.getInteger(sys.argv[i + 1], myarg, minVal=0)
i += 2
elif myarg == 'delimiter':
delimiter = sys.argv[i+1]
delimiter = sys.argv[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam print crosactivity")
controlflow.invalid_argument_exit(sys.argv[i],
'gam print crosactivity')
if not selectActiveTimeRanges and \
not selectDeviceFiles and \
not selectRecentUsers:
selectActiveTimeRanges = selectRecentUsers = True
if selectRecentUsers:
fieldsList.append('recentUsers')
display.add_titles_to_csv_file(['recentUsers.email', ], titles)
display.add_titles_to_csv_file([
'recentUsers.email',
], titles)
if selectActiveTimeRanges:
fieldsList.append('activeTimeRanges')
titles_to_add = ['activeTimeRanges.date',
'activeTimeRanges.duration',
'activeTimeRanges.minutes']
titles_to_add = [
'activeTimeRanges.date', 'activeTimeRanges.duration',
'activeTimeRanges.minutes'
]
display.add_titles_to_csv_file(titles_to_add, titles)
if selectDeviceFiles:
fieldsList.append('deviceFiles')
@ -395,13 +414,15 @@ def doPrintCrosActivity():
for query in queries:
gam.printGettingAllItems('CrOS Devices', query)
page_message = gapi.got_total_items_msg('CrOS Devices', '...\n')
all_cros = gapi.get_all_pages(cd.chromeosdevices(), 'list',
all_cros = gapi.get_all_pages(cd.chromeosdevices(),
'list',
'chromeosdevices',
page_message=page_message,
query=query,
customerId=GC_Values[GC_CUSTOMER_ID],
projection='FULL',
fields=fields, orgUnitPath=orgUnitPath)
fields=fields,
orgUnitPath=orgUnitPath)
for cros in all_cros:
row = {}
skip_attribs = ['recentUsers', 'activeTimeRanges', 'deviceFiles']
@ -428,9 +449,9 @@ def doPrintCrosActivity():
num_ranges = min(lenRU, listLimit or lenRU)
recent_users = []
for recentUser in recentUsers[:num_ranges]:
useremail = recentUser.get("email")
useremail = recentUser.get('email')
if not useremail:
if recentUser["type"] == "USER_TYPE_UNMANAGED":
if recentUser['type'] == 'USER_TYPE_UNMANAGED':
useremail = 'UnmanagedUser'
else:
useremail = 'Unknown'
@ -439,8 +460,8 @@ def doPrintCrosActivity():
csvRows.append(row)
if selectDeviceFiles:
deviceFiles = _filterCreateReportTime(
cros.get('deviceFiles', []),
'createTime', startDate, endDate)
cros.get('deviceFiles', []), 'createTime', startDate,
endDate)
lenDF = len(deviceFiles)
num_ranges = min(lenDF, listLimit or lenDF)
for deviceFile in deviceFiles[:num_ranges]:
@ -465,6 +486,7 @@ def _checkTPMVulnerability(cros):
def doPrintCrosDevices():
def _getSelectedLists(myarg):
if myarg in CROS_ACTIVE_TIME_RANGES_ARGUMENTS:
selectedLists['activeTimeRanges'] = True
@ -485,8 +507,8 @@ def doPrintCrosDevices():
fieldsTitles = {}
titles = []
csvRows = []
display.add_field_to_csv_file(
'deviceid', CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList, fieldsTitles, titles)
display.add_field_to_csv_file('deviceid', CROS_ARGUMENT_TO_PROPERTY_MAP,
fieldsList, fieldsTitles, titles)
projection = orderBy = sortOrder = orgUnitPath = None
queries = [None]
noLists = sortHeaders = False
@ -497,10 +519,10 @@ def doPrintCrosDevices():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg in ['query', 'queries']:
queries = gam.getQueries(myarg, sys.argv[i+1])
queries = gam.getQueries(myarg, sys.argv[i + 1])
i += 2
elif myarg == 'limittoou':
orgUnitPath = gam.getOrgUnitItem(sys.argv[i+1])
orgUnitPath = gam.getOrgUnitItem(sys.argv[i + 1])
i += 2
elif myarg == 'todrive':
todrive = True
@ -510,21 +532,24 @@ def doPrintCrosDevices():
selectedLists = {}
i += 1
elif myarg == 'listlimit':
listLimit = gam.getInteger(sys.argv[i+1], myarg, minVal=0)
listLimit = gam.getInteger(sys.argv[i + 1], myarg, minVal=0)
i += 2
elif myarg in CROS_START_ARGUMENTS:
startDate = _getFilterDate(sys.argv[i+1])
startDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg in CROS_END_ARGUMENTS:
endDate = _getFilterDate(sys.argv[i+1])
endDate = _getFilterDate(sys.argv[i + 1])
i += 2
elif myarg == 'orderby':
orderBy = sys.argv[i+1].lower().replace('_', '')
validOrderBy = ['location', 'user', 'lastsync',
'notes', 'serialnumber', 'status', 'supportenddate']
orderBy = sys.argv[i + 1].lower().replace('_', '')
validOrderBy = [
'location', 'user', 'lastsync', 'notes', 'serialnumber',
'status', 'supportenddate'
]
if orderBy not in validOrderBy:
controlflow.expected_argument_exit(
"orderby", ", ".join(validOrderBy), orderBy)
controlflow.expected_argument_exit('orderby',
', '.join(validOrderBy),
orderBy)
if orderBy == 'location':
orderBy = 'annotatedLocation'
elif orderBy == 'user':
@ -559,11 +584,12 @@ def doPrintCrosDevices():
_getSelectedLists(myarg)
i += 1
elif myarg in CROS_ARGUMENT_TO_PROPERTY_MAP:
display.add_field_to_fields_list(
myarg, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
display.add_field_to_fields_list(myarg,
CROS_ARGUMENT_TO_PROPERTY_MAP,
fieldsList)
i += 1
elif myarg == 'fields':
fieldNameList = sys.argv[i+1]
fieldNameList = sys.argv[i + 1]
for field in fieldNameList.lower().replace(',', ' ').split():
if field in CROS_LISTS_ARGUMENTS:
_getSelectedLists(field)
@ -571,17 +597,18 @@ def doPrintCrosDevices():
display.add_field_to_fields_list(
field, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
else:
controlflow.invalid_argument_exit(
field, "gam print cros fields")
controlflow.invalid_argument_exit(field,
'gam print cros fields')
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam print cros")
controlflow.invalid_argument_exit(sys.argv[i], 'gam print cros')
if selectedLists:
noLists = False
projection = 'FULL'
for selectList in selectedLists:
display.add_field_to_fields_list(
selectList, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
display.add_field_to_fields_list(selectList,
CROS_ARGUMENT_TO_PROPERTY_MAP,
fieldsList)
if fieldsList:
fieldsList.append('deviceId')
fields = f'nextPageToken,chromeosdevices({",".join(set(fieldsList))})'.replace(
@ -591,13 +618,16 @@ def doPrintCrosDevices():
for query in queries:
gam.printGettingAllItems('CrOS Devices', query)
page_message = gapi.got_total_items_msg('CrOS Devices', '...\n')
all_cros = gapi.get_all_pages(cd.chromeosdevices(), 'list',
all_cros = gapi.get_all_pages(cd.chromeosdevices(),
'list',
'chromeosdevices',
page_message=page_message, query=query,
page_message=page_message,
query=query,
customerId=GC_Values[GC_CUSTOMER_ID],
projection=projection,
orgUnitPath=orgUnitPath,
orderBy=orderBy, sortOrder=sortOrder,
orderBy=orderBy,
sortOrder=sortOrder,
fields=fields)
for cros in all_cros:
_checkTPMVulnerability(cros)
@ -612,8 +642,9 @@ def doPrintCrosDevices():
tempInfos = cpuStatusReport.get('cpuTemperatureInfo', [])
for tempInfo in tempInfos:
tempInfo['label'] = tempInfo['label'].strip()
display.add_row_titles_to_csv_file(utils.flatten_json(
cros, listLimit=listLimit), csvRows, titles)
display.add_row_titles_to_csv_file(
utils.flatten_json(cros, listLimit=listLimit), csvRows,
titles)
continue
for cros in all_cros:
if 'notes' in cros:
@ -623,11 +654,11 @@ def doPrintCrosDevices():
cros['autoUpdateExpiration'])
row = {}
for attrib in cros:
if attrib not in set(['kind', 'etag', 'tpmVersionInfo',
'recentUsers', 'activeTimeRanges',
'deviceFiles', 'cpuStatusReports',
'diskVolumeReports',
'systemRamFreeReports']):
if attrib not in set([
'kind', 'etag', 'tpmVersionInfo', 'recentUsers',
'activeTimeRanges', 'deviceFiles', 'cpuStatusReports',
'diskVolumeReports', 'systemRamFreeReports'
]):
row[attrib] = cros[attrib]
if selectedLists.get('activeTimeRanges'):
timergs = cros.get('activeTimeRanges', [])
@ -649,8 +680,8 @@ def doPrintCrosDevices():
else:
cpu_reports = []
cpuStatusReports = _filterCreateReportTime(cpu_reports,
'reportTime',
startDate, endDate)
'reportTime', startDate,
endDate)
if selectedLists.get('diskVolumeReports'):
diskVolumeReports = cros.get('diskVolumeReports', [])
else:
@ -659,10 +690,8 @@ def doPrintCrosDevices():
ram_reports = cros.get('systemRamFreeReports', [])
else:
ram_reports = []
systemRamFreeReports = _filterCreateReportTime(ram_reports,
'reportTime',
startDate,
endDate)
systemRamFreeReports = _filterCreateReportTime(
ram_reports, 'reportTime', startDate, endDate)
if noLists or (not activeTimeRanges and \
not recentUsers and \
not deviceFiles and \
@ -707,7 +736,7 @@ def doPrintCrosDevices():
tempInfos = cpuStatusReports[i].get('cpuTemperatureInfo',
[])
for tempInfo in tempInfos:
label = tempInfo["label"].strip()
label = tempInfo['label'].strip()
base = 'cpuStatusReports.cpuTemperatureInfo.'
nrow[f'{base}{label}'] = tempInfo['temperature']
cpu_field = 'cpuUtilizationPercentageInfo'
@ -735,16 +764,18 @@ def doPrintCrosDevices():
','.join(ram_info)
display.add_row_titles_to_csv_file(nrow, csvRows, titles)
if sortHeaders:
display.sort_csv_titles(['deviceId', ], titles)
display.sort_csv_titles([
'deviceId',
], titles)
display.write_csv_file(csvRows, titles, 'CrOS', todrive)
def getCrOSDeviceEntity(i, cd):
myarg = sys.argv[i].lower()
if myarg == 'cros_sn':
return i+2, gam.getUsersToModify('cros_sn', sys.argv[i+1])
return i + 2, gam.getUsersToModify('cros_sn', sys.argv[i + 1])
if myarg == 'query':
return i+2, gam.getUsersToModify('crosquery', sys.argv[i+1])
return i + 2, gam.getUsersToModify('crosquery', sys.argv[i + 1])
if myarg[:6] == 'query:':
query = sys.argv[i][6:]
if query[:12].lower() == 'orgunitpath:':
@ -752,12 +783,14 @@ def getCrOSDeviceEntity(i, cd):
else:
kwargs = {'query': query}
fields = 'nextPageToken,chromeosdevices(deviceId)'
devices = gapi.get_all_pages(cd.chromeosdevices(), 'list',
devices = gapi.get_all_pages(cd.chromeosdevices(),
'list',
'chromeosdevices',
customerId=GC_Values[GC_CUSTOMER_ID],
fields=fields, **kwargs)
return i+1, [device['deviceId'] for device in devices]
return i+1, sys.argv[i].replace(',', ' ').split()
fields=fields,
**kwargs)
return i + 1, [device['deviceId'] for device in devices]
return i + 1, sys.argv[i].replace(',', ' ').split()
def _getFilterDate(dateStr):
@ -769,8 +802,8 @@ def _filterTimeRanges(activeTimeRanges, startDate, endDate):
return activeTimeRanges
filteredTimeRanges = []
for timeRange in activeTimeRanges:
activityDate = datetime.datetime.strptime(
timeRange['date'], YYYYMMDD_FORMAT)
activityDate = datetime.datetime.strptime(timeRange['date'],
YYYYMMDD_FORMAT)
if ((startDate is None) or \
(activityDate >= startDate)) and \
((endDate is None) or \

@ -9,11 +9,14 @@ from gam.gapi import reports as gapi_reports
def doGetCustomerInfo():
cd = gapi_directory.buildGAPIObject()
customer_info = gapi.call(cd.customers(), 'get',
customer_info = gapi.call(cd.customers(),
'get',
customerKey=GC_Values[GC_CUSTOMER_ID])
print(f'Customer ID: {customer_info["id"]}')
print(f'Primary Domain: {customer_info["customerDomain"]}')
result = gapi.call(cd.domains(), 'get', customer=customer_info['id'],
result = gapi.call(cd.domains(),
'get',
customer=customer_info['id'],
domainName=customer_info['customerDomain'],
fields='verified')
print(f'Primary Domain Verified: {result["verified"]}')
@ -23,11 +26,13 @@ def doGetCustomerInfo():
customer_creation = customer_info['customerCreationTime']
date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
oldest = datetime.datetime.strptime(customer_creation, date_format)
domains = gapi.get_items(cd.domains(), 'list', 'domains',
domains = gapi.get_items(cd.domains(),
'list',
'domains',
customer=GC_Values[GC_CUSTOMER_ID],
fields='domains(creationTime)')
for domain in domains:
creation_timestamp = int(domain['creationTime'])/1000
creation_timestamp = int(domain['creationTime']) / 1000
domain_creation = datetime.datetime.fromtimestamp(creation_timestamp)
if domain_creation < oldest:
oldest = domain_creation
@ -64,10 +69,12 @@ def doGetCustomerInfo():
throw_reasons = [gapi.errors.ErrorReason.INVALID]
while True:
try:
usage = gapi.get_all_pages(rep.customerUsageReports(), 'get',
usage = gapi.get_all_pages(rep.customerUsageReports(),
'get',
'usageReports',
throw_reasons=throw_reasons,
customerId=customerId, date=tryDate,
customerId=customerId,
date=tryDate,
parameters=parameters)
break
except gapi.errors.GapiInvalidError as e:
@ -92,22 +99,25 @@ def doUpdateCustomer():
if myarg in ADDRESS_FIELDS_ARGUMENT_MAP:
body.setdefault('postalAddress', {})
arg = ADDRESS_FIELDS_ARGUMENT_MAP[myarg]
body['postalAddress'][arg] = sys.argv[i+1]
body['postalAddress'][arg] = sys.argv[i + 1]
i += 2
elif myarg in ['adminsecondaryemail', 'alternateemail']:
body['alternateEmail'] = sys.argv[i+1]
body['alternateEmail'] = sys.argv[i + 1]
i += 2
elif myarg in ['phone', 'phonenumber']:
body['phoneNumber'] = sys.argv[i+1]
body['phoneNumber'] = sys.argv[i + 1]
i += 2
elif myarg == 'language':
body['language'] = sys.argv[i+1]
body['language'] = sys.argv[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam update customer")
controlflow.invalid_argument_exit(myarg, 'gam update customer')
if not body:
controlflow.system_error_exit(2, 'no arguments specified for "gam '
controlflow.system_error_exit(
2, 'no arguments specified for "gam '
'update customer"')
gapi.call(cd.customers(), 'patch', customerKey=GC_Values[GC_CUSTOMER_ID],
gapi.call(cd.customers(),
'patch',
customerKey=GC_Values[GC_CUSTOMER_ID],
body=body)
print('Updated customer')
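Every API interaction in these modules shares one call shape: a resource collection, the method name as a string, and keyword arguments handed through to the underlying Google API client. The stand-in below only mimics that dispatch so the shape is visible in isolation; FakeCollection and simple_call are invented for illustration, and none of gapi.call's real retry or error handling is reproduced.

class FakeRequest:
    # Mirrors googleapiclient's habit of returning a request object
    # whose execute() yields the decoded API response.
    def __init__(self, payload):
        self._payload = payload

    def execute(self):
        return self._payload


class FakeCollection:
    def get(self, **kwargs):
        return FakeRequest({'requested': kwargs})


def simple_call(collection, function, **kwargs):
    # Resolve the named method on the collection, build the request and
    # execute it, the way gapi.call(cd.customers(), 'get', ...) is used above.
    method = getattr(collection, function)
    return method(**kwargs).execute()


print(simple_call(FakeCollection(), 'get', customerKey='my_customer'))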

@ -36,15 +36,16 @@ def printBuildings():
fieldsList.append(possible_fields[myarg])
i += 1
# Allows shorter arguments like "name" instead of "buildingname"
elif 'building'+myarg in possible_fields:
fieldsList.append(possible_fields['building'+myarg])
elif 'building' + myarg in possible_fields:
fieldsList.append(possible_fields['building' + myarg])
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam print buildings")
controlflow.invalid_argument_exit(sys.argv[i],
'gam print buildings')
if fields:
fields = fields % ','.join(fieldsList)
buildings = gapi.get_all_pages(cd.resources().buildings(), 'list',
buildings = gapi.get_all_pages(cd.resources().buildings(),
'list',
'buildings',
customer=GC_Values[GC_CUSTOMER_ID],
fields=fields)
@ -80,7 +81,7 @@ def printResourceCalendars():
todrive = True
i += 1
elif myarg == 'query':
query = sys.argv[i+1]
query = sys.argv[i + 1]
i += 2
elif myarg == 'allfields':
fieldsList = []
@ -89,8 +90,7 @@ def printResourceCalendars():
for field in RESCAL_ALLFIELDS:
display.add_field_to_csv_file(field,
RESCAL_ARGUMENT_TO_PROPERTY_MAP,
fieldsList, fieldsTitles,
titles)
fieldsList, fieldsTitles, titles)
i += 1
elif myarg in RESCAL_ARGUMENT_TO_PROPERTY_MAP:
display.add_field_to_csv_file(myarg,
@ -98,8 +98,8 @@ def printResourceCalendars():
fieldsList, fieldsTitles, titles)
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam print resources")
controlflow.invalid_argument_exit(sys.argv[i],
'gam print resources')
if not fieldsList:
for field in RESCAL_DFLTFIELDS:
display.add_field_to_csv_file(field,
@ -107,15 +107,19 @@ def printResourceCalendars():
fieldsList, fieldsTitles, titles)
fields = f'nextPageToken,items({",".join(set(fieldsList))})'
if 'buildingId' in fieldsList:
display.add_field_to_csv_file('buildingName', {'buildingName': [
'buildingName', ]}, fieldsList, fieldsTitles, titles)
display.add_field_to_csv_file('buildingName',
{'buildingName': ['buildingName',]},
fieldsList, fieldsTitles, titles)
gam.printGettingAllItems('Resource Calendars', None)
page_message = gapi.got_total_items_first_last_msg('Resource Calendars')
resources = gapi.get_all_pages(cd.resources().calendars(), 'list',
'items', page_message=page_message,
resources = gapi.get_all_pages(cd.resources().calendars(),
'list',
'items',
page_message=page_message,
message_attribute='resourceId',
customer=GC_Values[GC_CUSTOMER_ID],
query=query, fields=fields)
query=query,
fields=fields)
for resource in resources:
if 'featureInstances' in resource:
features = [a_feature['feature']['name'] for \
@ -129,35 +133,50 @@ def printResourceCalendars():
for field in fieldsList:
resUnit[fieldsTitles[field]] = resource.get(field, '')
csvRows.append(resUnit)
display.sort_csv_titles(
['resourceId', 'resourceName', 'resourceEmail'], titles)
display.sort_csv_titles(['resourceId', 'resourceName', 'resourceEmail'],
titles)
display.write_csv_file(csvRows, titles, 'Resources', todrive)
RESCAL_DFLTFIELDS = ['id', 'name', 'email',]
RESCAL_ALLFIELDS = ['id', 'name', 'email', 'description', 'type',
'buildingid', 'category', 'capacity', 'features', 'floor',
'floorsection', 'generatedresourcename',
'uservisibledescription',]
RESCAL_DFLTFIELDS = [
'id',
'name',
'email',
]
RESCAL_ALLFIELDS = [
'id',
'name',
'email',
'description',
'type',
'buildingid',
'category',
'capacity',
'features',
'floor',
'floorsection',
'generatedresourcename',
'uservisibledescription',
]
RESCAL_ARGUMENT_TO_PROPERTY_MAP = {
'description': ['resourceDescription'],
'building': ['buildingId', ],
'buildingid': ['buildingId', ],
'capacity': ['capacity', ],
'category': ['resourceCategory', ],
'building': ['buildingId',],
'buildingid': ['buildingId',],
'capacity': ['capacity',],
'category': ['resourceCategory',],
'email': ['resourceEmail'],
'feature': ['featureInstances', ],
'features': ['featureInstances', ],
'floor': ['floorName', ],
'floorname': ['floorName', ],
'floorsection': ['floorSection', ],
'generatedresourcename': ['generatedResourceName', ],
'feature': ['featureInstances',],
'features': ['featureInstances',],
'floor': ['floorName',],
'floorname': ['floorName',],
'floorsection': ['floorSection',],
'generatedresourcename': ['generatedResourceName',],
'id': ['resourceId'],
'name': ['resourceName'],
'type': ['resourceType'],
'userdescription': ['userVisibleDescription', ],
'uservisibledescription': ['userVisibleDescription', ],
'userdescription': ['userVisibleDescription',],
'uservisibledescription': ['userVisibleDescription',],
}
@ -183,15 +202,15 @@ def printFeatures():
elif myarg in possible_fields:
fieldsList.append(possible_fields[myarg])
i += 1
elif 'feature'+myarg in possible_fields:
fieldsList.append(possible_fields['feature'+myarg])
elif 'feature' + myarg in possible_fields:
fieldsList.append(possible_fields['feature' + myarg])
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam print features")
controlflow.invalid_argument_exit(sys.argv[i], 'gam print features')
if fields:
fields = fields % ','.join(fieldsList)
features = gapi.get_all_pages(cd.resources().features(), 'list',
features = gapi.get_all_pages(cd.resources().features(),
'list',
'features',
customer=GC_Values[GC_CUSTOMER_ID],
fields=fields)
@ -213,57 +232,62 @@ def _getBuildingAttributes(args, body={}):
while i < len(args):
myarg = args[i].lower().replace('_', '')
if myarg == 'id':
body['buildingId'] = args[i+1]
body['buildingId'] = args[i + 1]
i += 2
elif myarg == 'name':
body['buildingName'] = args[i+1]
body['buildingName'] = args[i + 1]
i += 2
elif myarg in ['lat', 'latitude']:
if 'coordinates' not in body:
body['coordinates'] = {}
body['coordinates']['latitude'] = args[i+1]
body['coordinates']['latitude'] = args[i + 1]
i += 2
elif myarg in ['long', 'lng', 'longitude']:
if 'coordinates' not in body:
body['coordinates'] = {}
body['coordinates']['longitude'] = args[i+1]
body['coordinates']['longitude'] = args[i + 1]
i += 2
elif myarg == 'description':
body['description'] = args[i+1]
body['description'] = args[i + 1]
i += 2
elif myarg == 'floors':
body['floorNames'] = args[i+1].split(',')
body['floorNames'] = args[i + 1].split(',')
i += 2
else:
controlflow.invalid_argument_exit(
myarg, "gam create|update building")
controlflow.invalid_argument_exit(myarg,
'gam create|update building')
return body
def createBuilding():
cd = gapi_directory.buildGAPIObject()
body = {'floorNames': ['1'],
body = {
'floorNames': ['1'],
'buildingId': str(uuid.uuid4()),
'buildingName': sys.argv[3]}
'buildingName': sys.argv[3]
}
body = _getBuildingAttributes(sys.argv[4:], body)
print(f'Creating building {body["buildingId"]}...')
gapi.call(cd.resources().buildings(), 'insert',
customer=GC_Values[GC_CUSTOMER_ID], body=body)
gapi.call(cd.resources().buildings(),
'insert',
customer=GC_Values[GC_CUSTOMER_ID],
body=body)
def _makeBuildingIdNameMap(cd):
fields = 'nextPageToken,buildings(buildingId,buildingName)'
buildings = gapi.get_all_pages(cd.resources().buildings(), 'list',
buildings = gapi.get_all_pages(cd.resources().buildings(),
'list',
'buildings',
customer=GC_Values[GC_CUSTOMER_ID],
fields=fields)
GM_Globals[GM_MAP_BUILDING_ID_TO_NAME] = {}
GM_Globals[GM_MAP_BUILDING_NAME_TO_ID] = {}
for building in buildings:
GM_Globals[GM_MAP_BUILDING_ID_TO_NAME][building['buildingId']
] = building['buildingName']
GM_Globals[GM_MAP_BUILDING_NAME_TO_ID][building['buildingName']
] = building['buildingId']
GM_Globals[GM_MAP_BUILDING_ID_TO_NAME][
building['buildingId']] = building['buildingName']
GM_Globals[GM_MAP_BUILDING_NAME_TO_ID][
building['buildingName']] = building['buildingId']
def getBuildingByNameOrId(cd, which_building, minLen=1):
@ -283,10 +307,13 @@ def getBuildingByNameOrId(cd, which_building, minLen=1):
# No exact name match, check for case insensitive name matches
which_building_lower = which_building.lower()
ci_matches = []
for buildingName, buildingId in GM_Globals[GM_MAP_BUILDING_NAME_TO_ID].items():
for buildingName, buildingId in GM_Globals[
GM_MAP_BUILDING_NAME_TO_ID].items():
if buildingName.lower() == which_building_lower:
ci_matches.append(
{'buildingName': buildingName, 'buildingId': buildingId})
ci_matches.append({
'buildingName': buildingName,
'buildingId': buildingId
})
# One match, return ID
if len(ci_matches) == 1:
return ci_matches[0]['buildingId']
@ -323,15 +350,18 @@ def updateBuilding():
buildingId = getBuildingByNameOrId(cd, sys.argv[3])
body = _getBuildingAttributes(sys.argv[4:])
print(f'Updating building {buildingId}...')
gapi.call(cd.resources().buildings(), 'patch',
customer=GC_Values[GC_CUSTOMER_ID], buildingId=buildingId,
gapi.call(cd.resources().buildings(),
'patch',
customer=GC_Values[GC_CUSTOMER_ID],
buildingId=buildingId,
body=body)
def getBuildingInfo():
cd = gapi_directory.buildGAPIObject()
buildingId = getBuildingByNameOrId(cd, sys.argv[3])
building = gapi.call(cd.resources().buildings(), 'get',
building = gapi.call(cd.resources().buildings(),
'get',
customer=GC_Values[GC_CUSTOMER_ID],
buildingId=buildingId)
if 'buildingId' in building:
@ -347,8 +377,10 @@ def deleteBuilding():
cd = gapi_directory.buildGAPIObject()
buildingId = getBuildingByNameOrId(cd, sys.argv[3])
print(f'Deleting building {buildingId}...')
gapi.call(cd.resources().buildings(), 'delete',
customer=GC_Values[GC_CUSTOMER_ID], buildingId=buildingId)
gapi.call(cd.resources().buildings(),
'delete',
customer=GC_Values[GC_CUSTOMER_ID],
buildingId=buildingId)
def _getFeatureAttributes(args, body={}):
@ -356,11 +388,11 @@ def _getFeatureAttributes(args, body={}):
while i < len(args):
myarg = args[i].lower().replace('_', '')
if myarg == 'name':
body['name'] = args[i+1]
body['name'] = args[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(
myarg, "gam create|update feature")
controlflow.invalid_argument_exit(myarg,
'gam create|update feature')
return body
@ -368,8 +400,10 @@ def createFeature():
cd = gapi_directory.buildGAPIObject()
body = _getFeatureAttributes(sys.argv[3:])
print(f'Creating feature {body["name"]}...')
gapi.call(cd.resources().features(), 'insert',
customer=GC_Values[GC_CUSTOMER_ID], body=body)
gapi.call(cd.resources().features(),
'insert',
customer=GC_Values[GC_CUSTOMER_ID],
body=body)
def updateFeature():
@ -380,8 +414,10 @@ def updateFeature():
oldName = sys.argv[3]
body = {'newName': sys.argv[5:]}
print(f'Updating feature {oldName}...')
gapi.call(cd.resources().features(), 'rename',
customer=GC_Values[GC_CUSTOMER_ID], oldName=oldName,
gapi.call(cd.resources().features(),
'rename',
customer=GC_Values[GC_CUSTOMER_ID],
oldName=oldName,
body=body)
@ -389,8 +425,10 @@ def deleteFeature():
cd = gapi_directory.buildGAPIObject()
featureKey = sys.argv[3]
print(f'Deleting feature {featureKey}...')
gapi.call(cd.resources().features(), 'delete',
customer=GC_Values[GC_CUSTOMER_ID], featureKey=featureKey)
gapi.call(cd.resources().features(),
'delete',
customer=GC_Values[GC_CUSTOMER_ID],
featureKey=featureKey)
def _getResourceCalendarAttributes(cd, args, body={}):
@ -398,56 +436,58 @@ def _getResourceCalendarAttributes(cd, args, body={}):
while i < len(args):
myarg = args[i].lower().replace('_', '')
if myarg == 'name':
body['resourceName'] = args[i+1]
body['resourceName'] = args[i + 1]
i += 2
elif myarg == 'description':
body['resourceDescription'] = args[i+1].replace('\\n', '\n')
body['resourceDescription'] = args[i + 1].replace('\\n', '\n')
i += 2
elif myarg == 'type':
body['resourceType'] = args[i+1]
body['resourceType'] = args[i + 1]
i += 2
elif myarg in ['building', 'buildingid']:
body['buildingId'] = getBuildingByNameOrId(
cd, args[i+1], minLen=0)
body['buildingId'] = getBuildingByNameOrId(cd,
args[i + 1],
minLen=0)
i += 2
elif myarg in ['capacity']:
body['capacity'] = gam.getInteger(args[i+1], myarg, minVal=0)
body['capacity'] = gam.getInteger(args[i + 1], myarg, minVal=0)
i += 2
elif myarg in ['feature', 'features']:
features = args[i+1].split(',')
features = args[i + 1].split(',')
body['featureInstances'] = []
for feature in features:
instance = {'feature': {'name': feature}}
body['featureInstances'].append(instance)
i += 2
elif myarg in ['floor', 'floorname']:
body['floorName'] = args[i+1]
body['floorName'] = args[i + 1]
i += 2
elif myarg in ['floorsection']:
body['floorSection'] = args[i+1]
body['floorSection'] = args[i + 1]
i += 2
elif myarg in ['category']:
body['resourceCategory'] = args[i+1].upper()
body['resourceCategory'] = args[i + 1].upper()
if body['resourceCategory'] == 'ROOM':
body['resourceCategory'] = 'CONFERENCE_ROOM'
i += 2
elif myarg in ['uservisibledescription', 'userdescription']:
body['userVisibleDescription'] = args[i+1]
body['userVisibleDescription'] = args[i + 1]
i += 2
else:
controlflow.invalid_argument_exit(
args[i], "gam create|update resource")
controlflow.invalid_argument_exit(args[i],
'gam create|update resource')
return body
def createResourceCalendar():
cd = gapi_directory.buildGAPIObject()
body = {'resourceId': sys.argv[3],
'resourceName': sys.argv[4]}
body = {'resourceId': sys.argv[3], 'resourceName': sys.argv[4]}
body = _getResourceCalendarAttributes(cd, sys.argv[5:], body)
print(f'Creating resource {body["resourceId"]}...')
gapi.call(cd.resources().calendars(), 'insert',
customer=GC_Values[GC_CUSTOMER_ID], body=body)
gapi.call(cd.resources().calendars(),
'insert',
customer=GC_Values[GC_CUSTOMER_ID],
body=body)
def updateResourceCalendar():
@ -456,16 +496,20 @@ def updateResourceCalendar():
body = _getResourceCalendarAttributes(cd, sys.argv[4:])
# Use patch since it seems to work better.
# update requires name to be set.
gapi.call(cd.resources().calendars(), 'patch',
customer=GC_Values[GC_CUSTOMER_ID], calendarResourceId=resId,
body=body, fields='')
gapi.call(cd.resources().calendars(),
'patch',
customer=GC_Values[GC_CUSTOMER_ID],
calendarResourceId=resId,
body=body,
fields='')
print(f'updated resource {resId}')
def getResourceCalendarInfo():
cd = gapi_directory.buildGAPIObject()
resId = sys.argv[3]
resource = gapi.call(cd.resources().calendars(), 'get',
resource = gapi.call(cd.resources().calendars(),
'get',
customer=GC_Values[GC_CUSTOMER_ID],
calendarResourceId=resId)
if 'featureInstances' in resource:
@ -474,8 +518,8 @@ def getResourceCalendarInfo():
features.append(a_feature['feature']['name'])
resource['features'] = ', '.join(features)
if 'buildingId' in resource:
resource['buildingName'] = getBuildingNameById(
cd, resource['buildingId'])
resource['buildingName'] = getBuildingNameById(cd,
resource['buildingId'])
resource['buildingId'] = f'id:{resource["buildingId"]}'
display.print_json(resource)
@ -484,5 +528,7 @@ def deleteResourceCalendar():
resId = sys.argv[3]
cd = gapi_directory.buildGAPIObject()
print(f'Deleting resource calendar {resId}')
gapi.call(cd.resources().calendars(), 'delete',
customer=GC_Values[GC_CUSTOMER_ID], calendarResourceId=resId)
gapi.call(cd.resources().calendars(),
'delete',
customer=GC_Values[GC_CUSTOMER_ID],
calendarResourceId=resId)
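The list calls throughout these files request fields of the form 'nextPageToken,<collection>(...)' and rely on gapi.get_all_pages to follow the page token until it runs out. A self-contained sketch of that accumulation loop; paged_list is a fabricated two-page stand-in, not the Directory API.

PAGES = [
    {'items': [{'resourceId': 'r1'}, {'resourceId': 'r2'}],
     'nextPageToken': 'token-1'},
    {'items': [{'resourceId': 'r3'}]},
]


def paged_list(pageToken=None):
    # Fabricated stand-in: first call returns page 0, any token returns page 1.
    return PAGES[0] if pageToken is None else PAGES[1]


def get_all_items(items_key='items'):
    results, page_token = [], None
    while True:
        page = paged_list(pageToken=page_token)
        results.extend(page.get(items_key, []))
        page_token = page.get('nextPageToken')
        if not page_token:
            return results


print(get_all_items())  # three resource dicts gathered across two pages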

@ -144,23 +144,27 @@ class ErrorReason(Enum):
# Common sets of GAPI error reasons
DEFAULT_RETRY_REASONS = [
ErrorReason.QUOTA_EXCEEDED, ErrorReason.RATE_LIMIT_EXCEEDED,
ErrorReason.USER_RATE_LIMIT_EXCEEDED, ErrorReason.BACKEND_ERROR,
ErrorReason.BAD_GATEWAY, ErrorReason.GATEWAY_TIMEOUT,
ErrorReason.INTERNAL_ERROR, ErrorReason.FOUR_TWO_NINE,
]
ErrorReason.QUOTA_EXCEEDED,
ErrorReason.RATE_LIMIT_EXCEEDED,
ErrorReason.USER_RATE_LIMIT_EXCEEDED,
ErrorReason.BACKEND_ERROR,
ErrorReason.BAD_GATEWAY,
ErrorReason.GATEWAY_TIMEOUT,
ErrorReason.INTERNAL_ERROR,
ErrorReason.FOUR_TWO_NINE,
]
GMAIL_THROW_REASONS = [ErrorReason.SERVICE_NOT_AVAILABLE]
GROUP_GET_THROW_REASONS = [
ErrorReason.GROUP_NOT_FOUND, ErrorReason.DOMAIN_NOT_FOUND,
ErrorReason.DOMAIN_CANNOT_USE_APIS, ErrorReason.FORBIDDEN,
ErrorReason.BAD_REQUEST
]
]
GROUP_GET_RETRY_REASONS = [ErrorReason.INVALID, ErrorReason.SYSTEM_ERROR]
MEMBERS_THROW_REASONS = [
ErrorReason.GROUP_NOT_FOUND, ErrorReason.DOMAIN_NOT_FOUND,
ErrorReason.DOMAIN_CANNOT_USE_APIS, ErrorReason.INVALID,
ErrorReason.FORBIDDEN
]
]
MEMBERS_RETRY_REASONS = [ErrorReason.SYSTEM_ERROR]
# A map of GAPI error reasons to the corresponding GAM Python Exception
@ -211,7 +215,7 @@ ERROR_REASON_TO_EXCEPTION = {
GapiServiceNotAvailableError,
ErrorReason.USER_NOT_FOUND:
GapiUserNotFoundError,
}
}
# OAuth Token Errors
OAUTH2_TOKEN_ERRORS = [
@ -233,7 +237,7 @@ OAUTH2_TOKEN_ERRORS = [
'using this method, or client not authorized for any of the scopes '
'requested',
'unauthorized_client: Unauthorized client or scope in request',
]
]
def _create_http_error_dict(status_code, reason, message):
@ -281,18 +285,22 @@ def get_gapi_error_detail(e,
try:
error = json.loads(e.content.decode(UTF8))
except ValueError:
error_content = e.content.decode(UTF8) if isinstance(e.content,
bytes) else e.content
error_content = e.content.decode(UTF8) if isinstance(
e.content, bytes) else e.content
if (e.resp['status'] == '503') and (
error_content == 'Quota exceeded for the current request'):
return (e.resp['status'], ErrorReason.QUOTA_EXCEEDED.value, error_content)
if (e.resp['status'] == '403') and (
error_content.startswith('Request rate higher than configured')):
return (e.resp['status'], ErrorReason.QUOTA_EXCEEDED.value, error_content)
return (e.resp['status'], ErrorReason.QUOTA_EXCEEDED.value,
error_content)
if (e.resp['status'] == '403') and (error_content.startswith(
'Request rate higher than configured')):
return (e.resp['status'], ErrorReason.QUOTA_EXCEEDED.value,
error_content)
if (e.resp['status'] == '502') and ('Bad Gateway' in error_content):
return (e.resp['status'], ErrorReason.BAD_GATEWAY.value, error_content)
return (e.resp['status'], ErrorReason.BAD_GATEWAY.value,
error_content)
if (e.resp['status'] == '504') and ('Gateway Timeout' in error_content):
return (e.resp['status'], ErrorReason.GATEWAY_TIMEOUT.value, error_content)
return (e.resp['status'], ErrorReason.GATEWAY_TIMEOUT.value,
error_content)
if (e.resp['status'] == '403') and ('Invalid domain.' in error_content):
error = _create_http_error_dict(403, ErrorReason.NOT_FOUND.value,
'Domain not found')
@ -324,7 +332,8 @@ def get_gapi_error_detail(e,
if error['error_description'] == 'Invalid Value':
message = error['error_description']
http_status = 400
error = _create_http_error_dict(400, ErrorReason.INVALID.value, message)
error = _create_http_error_dict(400, ErrorReason.INVALID.value,
message)
else:
controlflow.system_error_exit(4, str(error))
else:

View File

@ -117,7 +117,8 @@ class ErrorsTest(unittest.TestCase):
self.assertEqual(message, 'Invalid Input: userId')
def test_get_gapi_error_extracts_invalid_member(self):
err = create_simple_http_error(400, 'invalid', 'Invalid Input: memberKey')
err = create_simple_http_error(400, 'invalid',
'Invalid Input: memberKey')
http_status, reason, message = errors.get_gapi_error_detail(err)
self.assertEqual(http_status, 400)
self.assertEqual(reason, errors.ErrorReason.INVALID_MEMBER.value)
@ -151,8 +152,8 @@ class ErrorsTest(unittest.TestCase):
'Cyclic memberships not allowed')
http_status, reason, message = errors.get_gapi_error_detail(err)
self.assertEqual(http_status, 400)
self.assertEqual(reason,
errors.ErrorReason.CYCLIC_MEMBERSHIPS_NOT_ALLOWED.value)
self.assertEqual(
reason, errors.ErrorReason.CYCLIC_MEMBERSHIPS_NOT_ALLOWED.value)
self.assertEqual(message, 'Cyclic memberships not allowed')
def test_get_gapi_error_extracts_single_error_with_message(self):

View File

@ -42,12 +42,13 @@ REPORT_CHOICE_MAP = {
def showUsageParameters():
rep = buildGAPIObject()
throw_reasons = [gapi.errors.ErrorReason.INVALID,
gapi.errors.ErrorReason.BAD_REQUEST]
throw_reasons = [
gapi.errors.ErrorReason.INVALID, gapi.errors.ErrorReason.BAD_REQUEST
]
todrive = False
if len(sys.argv) == 3:
controlflow.missing_argument_exit(
'user or customer', 'report usageparameters')
controlflow.missing_argument_exit('user or customer',
'report usageparameters')
report = sys.argv[3].lower()
titles = ['parameter']
if report == 'customer':
@ -57,8 +58,8 @@ def showUsageParameters():
endpoint = rep.userUsageReport()
kwargs = {'userKey': gam._getValueFromOAuth('email')}
else:
controlflow.expected_argument_exit(
'usageparameters', ['user', 'customer'], report)
controlflow.expected_argument_exit('usageparameters',
['user', 'customer'], report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
@ -73,10 +74,12 @@ def showUsageParameters():
todrive = True
i += 1
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam report usageparameters")
controlflow.invalid_argument_exit(sys.argv[i],
'gam report usageparameters')
while True:
try:
response = gapi.call(endpoint, 'get',
response = gapi.call(endpoint,
'get',
throw_reasons=throw_reasons,
date=tryDate,
customerId=customerId,
@ -87,7 +90,9 @@ def showUsageParameters():
if data.get('key') == 'application':
partial_on_thisday.append(data['value'])
if partial_apps:
partial_apps = [app for app in partial_apps if app in partial_on_thisday]
partial_apps = [
app for app in partial_apps if app in partial_on_thisday
]
else:
partial_apps = partial_on_thisday
for parameter in response['usageReports'][0]['parameters']:
@ -104,19 +109,24 @@ def showUsageParameters():
csvRows = []
for parameter in all_parameters:
csvRows.append({'parameter': parameter})
display.write_csv_file(
csvRows, titles, f'{report.capitalize()} Report Usage Parameters', todrive)
display.write_csv_file(csvRows, titles,
f'{report.capitalize()} Report Usage Parameters',
todrive)
REPORTS_PARAMETERS_SIMPLE_TYPES = [
'intValue', 'boolValue', 'datetimeValue', 'stringValue'
]
REPORTS_PARAMETERS_SIMPLE_TYPES = ['intValue', 'boolValue', 'datetimeValue', 'stringValue']
def showUsage():
rep = buildGAPIObject()
throw_reasons = [gapi.errors.ErrorReason.INVALID,
gapi.errors.ErrorReason.BAD_REQUEST]
throw_reasons = [
gapi.errors.ErrorReason.INVALID, gapi.errors.ErrorReason.BAD_REQUEST
]
todrive = False
if len(sys.argv) == 3:
controlflow.missing_argument_exit(
'user or customer', 'report usage')
controlflow.missing_argument_exit('user or customer', 'report usage')
report = sys.argv[3].lower()
titles = ['date']
if report == 'customer':
@ -127,8 +137,8 @@ def showUsage():
kwargs = [{'userKey': 'all'}]
titles.append('user')
else:
controlflow.expected_argument_exit(
'usage', ['user', 'customer'], report)
controlflow.expected_argument_exit('usage', ['user', 'customer'],
report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
@ -141,43 +151,47 @@ def showUsage():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'startdate':
start_date = utils.get_yyyymmdd(sys.argv[i+1], returnDateTime=True)
start_date = utils.get_yyyymmdd(sys.argv[i + 1],
returnDateTime=True)
i += 2
elif myarg == 'enddate':
end_date = utils.get_yyyymmdd(sys.argv[i+1], returnDateTime=True)
end_date = utils.get_yyyymmdd(sys.argv[i + 1], returnDateTime=True)
i += 2
elif myarg == 'todrive':
todrive = True
i += 1
elif myarg in ['fields', 'parameters']:
parameters = sys.argv[i+1].split(',')
parameters = sys.argv[i + 1].split(',')
i += 2
elif myarg == 'skipdates':
for skip in sys.argv[i+1].split(','):
for skip in sys.argv[i + 1].split(','):
if skip.find(':') == -1:
skip_dates.add(utils.get_yyyymmdd(skip, returnDateTime=True))
skip_dates.add(utils.get_yyyymmdd(skip,
returnDateTime=True))
else:
skip_start, skip_end = skip.split(':', 1)
skip_start = utils.get_yyyymmdd(skip_start, returnDateTime=True)
skip_start = utils.get_yyyymmdd(skip_start,
returnDateTime=True)
skip_end = utils.get_yyyymmdd(skip_end, returnDateTime=True)
while skip_start <= skip_end:
skip_dates.add(skip_start)
skip_start += one_day
i += 2
elif myarg == 'skipdaysofweek':
skipdaynames = sys.argv[i+1].split(',')
skipdaynames = sys.argv[i + 1].split(',')
dow = [d.lower() for d in calendar.day_abbr]
skip_day_numbers = [dow.index(d) for d in skipdaynames if d in dow]
i += 2
elif report == 'user' and myarg in ['orgunit', 'org', 'ou']:
_, orgUnitId = gam.getOrgUnitId(sys.argv[i+1])
_, orgUnitId = gam.getOrgUnitId(sys.argv[i + 1])
i += 2
elif report == 'user' and myarg in usergroup_types:
users = gam.getUsersToModify(myarg, sys.argv[i+1])
users = gam.getUsersToModify(myarg, sys.argv[i + 1])
kwargs = [{'userKey': user} for user in users]
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], f'gam report usage {report}')
controlflow.invalid_argument_exit(sys.argv[i],
f'gam report usage {report}')
if parameters:
titles.extend(parameters)
parameters = ','.join(parameters)
@ -206,7 +220,8 @@ def showUsage():
try:
for kwarg in kwargs:
try:
usage = gapi.get_all_pages(endpoint, 'get',
usage = gapi.get_all_pages(endpoint,
'get',
'usageReports',
throw_reasons=throw_reasons,
customerId=customerId,
@ -250,8 +265,7 @@ def showUsage():
report_name = f'{report.capitalize()} Usage Report - {start_use_date}:{end_use_date}'
else:
report_name = f'{report.capitalize()} Usage Report - {start_date}:{end_date} - No Data'
display.write_csv_file(
csvRows, titles, report_name, todrive)
display.write_csv_file(csvRows, titles, report_name, todrive)
def showReport():
@ -266,11 +280,12 @@ def showReport():
showUsageParameters()
return
valid_apps = gapi.get_enum_values_minus_unspecified(
rep._rootDesc['resources']['activities']['methods']['list'][
'parameters']['applicationName']['enum'])+['customer', 'user']
rep._rootDesc['resources']['activities']['methods']['list']
['parameters']['applicationName']['enum']) + ['customer', 'user']
if report not in valid_apps:
controlflow.expected_argument_exit(
"report", ", ".join(sorted(valid_apps)), report)
controlflow.expected_argument_exit('report',
', '.join(sorted(valid_apps)),
report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
@ -283,51 +298,53 @@ def showReport():
while i < len(sys.argv):
myarg = sys.argv[i].lower()
if myarg == 'date':
tryDate = utils.get_yyyymmdd(sys.argv[i+1])
tryDate = utils.get_yyyymmdd(sys.argv[i + 1])
i += 2
elif myarg in ['orgunit', 'org', 'ou']:
_, orgUnitId = gam.getOrgUnitId(sys.argv[i+1])
_, orgUnitId = gam.getOrgUnitId(sys.argv[i + 1])
i += 2
elif myarg == 'fulldatarequired':
fullDataRequired = []
fdr = sys.argv[i+1].lower()
fdr = sys.argv[i + 1].lower()
if fdr and fdr != 'all':
fullDataRequired = fdr.replace(',', ' ').split()
i += 2
elif myarg == 'start':
startTime = utils.get_time_or_delta_from_now(sys.argv[i+1])
startTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'end':
endTime = utils.get_time_or_delta_from_now(sys.argv[i+1])
endTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'event':
eventName = sys.argv[i+1]
eventName = sys.argv[i + 1]
i += 2
elif myarg == 'user':
userKey = gam.normalizeEmailAddressOrUID(sys.argv[i+1])
userKey = gam.normalizeEmailAddressOrUID(sys.argv[i + 1])
i += 2
elif myarg in ['filter', 'filters']:
filters = sys.argv[i+1]
filters = sys.argv[i + 1]
i += 2
elif myarg in ['fields', 'parameters']:
parameters = sys.argv[i+1]
parameters = sys.argv[i + 1]
i += 2
elif myarg == 'ip':
actorIpAddress = sys.argv[i+1]
actorIpAddress = sys.argv[i + 1]
i += 2
elif myarg == 'todrive':
to_drive = True
i += 1
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam report")
controlflow.invalid_argument_exit(sys.argv[i], 'gam report')
if report == 'user':
while True:
try:
if fullDataRequired is not None:
warnings = gapi.get_items(rep.userUsageReport(), 'get',
warnings = gapi.get_items(rep.userUsageReport(),
'get',
'warnings',
throw_reasons=throw_reasons,
date=tryDate, userKey=userKey,
date=tryDate,
userKey=userKey,
customerId=customerId,
orgUnitID=orgUnitId,
fields='warnings')
@ -339,11 +356,13 @@ def showReport():
if fullData == 0:
continue
page_message = gapi.got_total_items_msg('Users', '...\n')
usage = gapi.get_all_pages(rep.userUsageReport(), 'get',
usage = gapi.get_all_pages(rep.userUsageReport(),
'get',
'usageReports',
page_message=page_message,
throw_reasons=throw_reasons,
date=tryDate, userKey=userKey,
date=tryDate,
userKey=userKey,
customerId=customerId,
orgUnitID=orgUnitId,
filters=filters,
@ -359,8 +378,7 @@ def showReport():
for user_report in usage:
if 'entity' not in user_report:
continue
row = {'email': user_report['entity']
['userEmail'], 'date': tryDate}
row = {'email': user_report['entity']['userEmail'], 'date': tryDate}
for item in user_report.get('parameters', []):
if 'name' not in item:
continue
@ -374,14 +392,15 @@ def showReport():
else:
row[name] = ''
csvRows.append(row)
display.write_csv_file(
csvRows, titles, f'User Reports - {tryDate}', to_drive)
display.write_csv_file(csvRows, titles, f'User Reports - {tryDate}',
to_drive)
elif report == 'customer':
while True:
try:
if fullDataRequired is not None:
warnings = gapi.get_items(rep.customerUsageReports(),
'get', 'warnings',
'get',
'warnings',
throw_reasons=throw_reasons,
customerId=customerId,
date=tryDate,
@ -393,7 +412,8 @@ def showReport():
sys.exit(1)
if fullData == 0:
continue
usage = gapi.get_all_pages(rep.customerUsageReports(), 'get',
usage = gapi.get_all_pages(rep.customerUsageReports(),
'get',
'usageReports',
throw_reasons=throw_reasons,
customerId=customerId,
@ -442,8 +462,7 @@ def showReport():
value = ' '.join(values)
elif 'version_number' in subitem \
and 'num_devices' in subitem:
values.append(
f'{subitem["version_number"]}:'
values.append(f'{subitem["version_number"]}:'
f'{subitem["num_devices"]}')
else:
continue
@ -451,18 +470,24 @@ def showReport():
csvRows.append({'name': name, 'value': value})
for app in auth_apps: # put apps at bottom
csvRows.append(app)
display.write_csv_file(
csvRows, titles, f'Customer Report - {tryDate}', todrive=to_drive)
display.write_csv_file(csvRows,
titles,
f'Customer Report - {tryDate}',
todrive=to_drive)
else:
page_message = gapi.got_total_items_msg('Activities', '...\n')
activities = gapi.get_all_pages(rep.activities(), 'list', 'items',
activities = gapi.get_all_pages(rep.activities(),
'list',
'items',
page_message=page_message,
applicationName=report,
userKey=userKey,
customerId=customerId,
actorIpAddress=actorIpAddress,
startTime=startTime, endTime=endTime,
eventName=eventName, filters=filters,
startTime=startTime,
endTime=endTime,
eventName=eventName,
filters=filters,
orgUnitID=orgUnitId)
if activities:
titles = ['name']
@ -495,10 +520,11 @@ def showReport():
parts = {}
for message in item['multiMessageValue']:
for mess in message['parameter']:
value = mess.get('value', ' '.join(
mess.get('multiValue', [])))
value = mess.get(
'value',
' '.join(mess.get('multiValue', [])))
parts[mess['name']] = parts.get(
mess['name'], [])+[value]
mess['name'], []) + [value]
for part, v in parts.items():
if part == 'scope_name':
part = 'scope'
@ -513,14 +539,17 @@ def showReport():
if item not in titles:
titles.append(item)
csvRows.append(row)
display.sort_csv_titles(['name', ], titles)
display.write_csv_file(
csvRows, titles, f'{report.capitalize()} Activity Report',
display.sort_csv_titles([
'name',
], titles)
display.write_csv_file(csvRows, titles,
f'{report.capitalize()} Activity Report',
to_drive)
def _adjust_date(errMsg):
match_date = re.match('Data for dates later than (.*) is not yet '
match_date = re.match(
'Data for dates later than (.*) is not yet '
'available. Please check back later', errMsg)
if not match_date:
match_date = re.match('Start date can not be later than (.*)', errMsg)

View File

@ -16,7 +16,10 @@ def build_gapi():
return gam.buildGAPIObject('storage')
def get_cloud_storage_object(s, bucket, object_, local_file=None,
def get_cloud_storage_object(s,
bucket,
object_,
local_file=None,
expectedMd5=None):
if not local_file:
local_file = object_
@ -60,13 +63,19 @@ def download_bucket():
s = build_gapi()
page_message = gapi.got_total_items_msg('Files', '...')
fields = 'nextPageToken,items(name,id,md5Hash)'
objects = gapi.get_all_pages(s.objects(), 'list', 'items',
page_message=page_message, bucket=bucket,
projection='noAcl', fields=fields)
objects = gapi.get_all_pages(s.objects(),
'list',
'items',
page_message=page_message,
bucket=bucket,
projection='noAcl',
fields=fields)
i = 1
for object_ in objects:
print(f'{i}/{len(objects)}')
expectedMd5 = base64.b64decode(object_['md5Hash']).hex()
get_cloud_storage_object(
s, bucket, object_['name'], expectedMd5=expectedMd5)
get_cloud_storage_object(s,
bucket,
object_['name'],
expectedMd5=expectedMd5)
i += 1

View File

@ -23,7 +23,8 @@ def validateCollaborators(collaboratorList, cd):
for collaborator in collaboratorList.split(','):
collaborator_id = gam.convertEmailAddressToUID(collaborator, cd)
if not collaborator_id:
controlflow.system_error_exit(4, f'failed to get a UID for '
controlflow.system_error_exit(
4, f'failed to get a UID for '
f'{collaborator}. Please make '
f'sure this is a real user.')
collaborators.append({'email': collaborator, 'id': collaborator_id})
@ -32,7 +33,7 @@ def validateCollaborators(collaboratorList, cd):
def createMatter():
v = buildGAPIObject()
matter_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
matter_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
body = {'name': f'New Matter - {matter_time}'}
collaborators = []
cd = None
@ -40,26 +41,29 @@ def createMatter():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'name':
body['name'] = sys.argv[i+1]
body['name'] = sys.argv[i + 1]
i += 2
elif myarg == 'description':
body['description'] = sys.argv[i+1]
body['description'] = sys.argv[i + 1]
i += 2
elif myarg in ['collaborator', 'collaborators']:
if not cd:
cd = gam.buildGAPIObject('directory')
collaborators.extend(validateCollaborators(sys.argv[i+1], cd))
collaborators.extend(validateCollaborators(sys.argv[i + 1], cd))
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam create matter")
controlflow.invalid_argument_exit(sys.argv[i], 'gam create matter')
matterId = gapi.call(v.matters(), 'create', body=body,
fields='matterId')['matterId']
print(f'Created matter {matterId}')
for collaborator in collaborators:
print(f' adding collaborator {collaborator["email"]}')
body = {'matterPermission': {
body = {
'matterPermission': {
'role': 'COLLABORATOR',
'accountId': collaborator['id']}}
'accountId': collaborator['id']
}
}
gapi.call(v.matters(), 'addPermissions', matterId=matterId, body=body)
@ -77,8 +81,9 @@ VAULT_SEARCH_METHODS_MAP = {
'teamdrive': 'SHARED_DRIVE',
'teamdrives': 'SHARED_DRIVE',
}
VAULT_SEARCH_METHODS_LIST = ['accounts',
'orgunit', 'shareddrives', 'rooms', 'everyone']
VAULT_SEARCH_METHODS_LIST = [
'accounts', 'orgunit', 'shareddrives', 'rooms', 'everyone'
]
def createExport():
@ -98,17 +103,18 @@ def createExport():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'matter':
matterId = getMatterItem(v, sys.argv[i+1])
matterId = getMatterItem(v, sys.argv[i + 1])
body['matterId'] = matterId
i += 2
elif myarg == 'name':
body['name'] = sys.argv[i+1]
body['name'] = sys.argv[i + 1]
i += 2
elif myarg == 'corpus':
body['query']['corpus'] = sys.argv[i+1].upper()
body['query']['corpus'] = sys.argv[i + 1].upper()
if body['query']['corpus'] not in allowed_corpuses:
controlflow.expected_argument_exit(
"corpus", ", ".join(allowed_corpuses), sys.argv[i+1])
controlflow.expected_argument_exit('corpus',
', '.join(allowed_corpuses),
sys.argv[i + 1])
i += 2
elif myarg in VAULT_SEARCH_METHODS_MAP:
if body['query'].get('searchMethod'):
@ -120,82 +126,93 @@ def createExport():
body['query']['searchMethod'] = searchMethod
if searchMethod == 'ACCOUNT':
body['query']['accountInfo'] = {
'emails': sys.argv[i+1].split(',')}
'emails': sys.argv[i + 1].split(',')
}
i += 2
elif searchMethod == 'ORG_UNIT':
body['query']['orgUnitInfo'] = {
'orgUnitId': gam.getOrgUnitId(sys.argv[i+1])[1]}
'orgUnitId': gam.getOrgUnitId(sys.argv[i + 1])[1]
}
i += 2
elif searchMethod == 'SHARED_DRIVE':
body['query']['sharedDriveInfo'] = {
'sharedDriveIds': sys.argv[i+1].split(',')}
'sharedDriveIds': sys.argv[i + 1].split(',')
}
i += 2
elif searchMethod == 'ROOM':
body['query']['hangoutsChatInfo'] = {
'roomId': sys.argv[i+1].split(',')}
'roomId': sys.argv[i + 1].split(',')
}
i += 2
else:
i += 1
elif myarg == 'scope':
body['query']['dataScope'] = sys.argv[i+1].upper()
body['query']['dataScope'] = sys.argv[i + 1].upper()
if body['query']['dataScope'] not in allowed_scopes:
controlflow.expected_argument_exit(
"scope", ", ".join(allowed_scopes), sys.argv[i+1])
controlflow.expected_argument_exit('scope',
', '.join(allowed_scopes),
sys.argv[i + 1])
i += 2
elif myarg in ['terms']:
body['query']['terms'] = sys.argv[i+1]
body['query']['terms'] = sys.argv[i + 1]
i += 2
elif myarg in ['start', 'starttime']:
body['query']['startTime'] = utils.get_date_zero_time_or_full_time(
sys.argv[i+1])
sys.argv[i + 1])
i += 2
elif myarg in ['end', 'endtime']:
body['query']['endTime'] = utils.get_date_zero_time_or_full_time(
sys.argv[i+1])
sys.argv[i + 1])
i += 2
elif myarg in ['timezone']:
body['query']['timeZone'] = sys.argv[i+1]
body['query']['timeZone'] = sys.argv[i + 1]
i += 2
elif myarg in ['excludedrafts']:
body['query']['mailOptions'] = {
'excludeDrafts': gam.getBoolean(sys.argv[i+1], myarg)}
'excludeDrafts': gam.getBoolean(sys.argv[i + 1], myarg)
}
i += 2
elif myarg in ['driveversiondate']:
body['query'].setdefault('driveOptions', {})['versionDate'] = \
utils.get_date_zero_time_or_full_time(sys.argv[i+1])
i += 2
elif myarg in ['includeshareddrives', 'includeteamdrives']:
body['query'].setdefault('driveOptions', {})[
'includeSharedDrives'] = gam.getBoolean(sys.argv[i+1], myarg)
body['query'].setdefault(
'driveOptions', {})['includeSharedDrives'] = gam.getBoolean(
sys.argv[i + 1], myarg)
i += 2
elif myarg in ['includerooms']:
body['query']['hangoutsChatOptions'] = {
'includeRooms': gam.getBoolean(sys.argv[i+1], myarg)}
'includeRooms': gam.getBoolean(sys.argv[i + 1], myarg)
}
i += 2
elif myarg in ['format']:
export_format = sys.argv[i+1].upper()
export_format = sys.argv[i + 1].upper()
if export_format not in allowed_formats:
controlflow.expected_argument_exit(
"export format", ", ".join(allowed_formats), export_format)
controlflow.expected_argument_exit('export format',
', '.join(allowed_formats),
export_format)
i += 2
elif myarg in ['showconfidentialmodecontent']:
showConfidentialModeContent = gam.getBoolean(sys.argv[i+1], myarg)
showConfidentialModeContent = gam.getBoolean(sys.argv[i + 1], myarg)
i += 2
elif myarg in ['region']:
allowed_regions = gapi.get_enum_values_minus_unspecified(
v._rootDesc['schemas']['ExportOptions']['properties'][
'region']['enum'])
body['exportOptions']['region'] = sys.argv[i+1].upper()
v._rootDesc['schemas']['ExportOptions']['properties']['region']
['enum'])
body['exportOptions']['region'] = sys.argv[i + 1].upper()
if body['exportOptions']['region'] not in allowed_regions:
controlflow.expected_argument_exit("region", ", ".join(
allowed_regions), body['exportOptions']['region'])
controlflow.expected_argument_exit(
'region', ', '.join(allowed_regions),
body['exportOptions']['region'])
i += 2
elif myarg in ['includeaccessinfo']:
body['exportOptions'].setdefault('driveOptions', {})[
'includeAccessInfo'] = gam.getBoolean(sys.argv[i+1], myarg)
body['exportOptions'].setdefault(
'driveOptions', {})['includeAccessInfo'] = gam.getBoolean(
sys.argv[i + 1], myarg)
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam create export")
controlflow.invalid_argument_exit(sys.argv[i], 'gam create export')
if not matterId:
controlflow.system_error_exit(
3, 'you must specify a matter for the new export.')
@ -207,7 +224,7 @@ def createExport():
'for the new export. Choose one of ' \
f'{", ".join(VAULT_SEARCH_METHODS_LIST)}')
if 'name' not in body:
corpus_name = body["query"]["corpus"]
corpus_name = body['query']['corpus']
corpus_date = datetime.datetime.now()
body['name'] = f'GAM {corpus_name} export - {corpus_date}'
options_field = None
@ -223,8 +240,10 @@ def createExport():
if showConfidentialModeContent is not None:
body['exportOptions'][options_field][
'showConfidentialModeContent'] = showConfidentialModeContent
results = gapi.call(v.matters().exports(), 'create',
matterId=matterId, body=body)
results = gapi.call(v.matters().exports(),
'create',
matterId=matterId,
body=body)
print(f'Created export {results["id"]}')
display.print_json(results)
@ -234,16 +253,20 @@ def deleteExport():
matterId = getMatterItem(v, sys.argv[3])
exportId = convertExportNameToID(v, sys.argv[4], matterId)
print(f'Deleting export {sys.argv[4]} / {exportId}')
gapi.call(v.matters().exports(), 'delete',
matterId=matterId, exportId=exportId)
gapi.call(v.matters().exports(),
'delete',
matterId=matterId,
exportId=exportId)
def getExportInfo():
v = buildGAPIObject()
matterId = getMatterItem(v, sys.argv[3])
exportId = convertExportNameToID(v, sys.argv[4], matterId)
export = gapi.call(v.matters().exports(), 'get',
matterId=matterId, exportId=exportId)
export = gapi.call(v.matters().exports(),
'get',
matterId=matterId,
exportId=exportId)
display.print_json(export)
@ -261,35 +284,37 @@ def createHold():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'name':
body['name'] = sys.argv[i+1]
body['name'] = sys.argv[i + 1]
i += 2
elif myarg == 'query':
query = sys.argv[i+1]
query = sys.argv[i + 1]
i += 2
elif myarg == 'corpus':
body['corpus'] = sys.argv[i+1].upper()
body['corpus'] = sys.argv[i + 1].upper()
if body['corpus'] not in allowed_corpuses:
controlflow.expected_argument_exit(
"corpus", ", ".join(allowed_corpuses), sys.argv[i+1])
controlflow.expected_argument_exit('corpus',
', '.join(allowed_corpuses),
sys.argv[i + 1])
i += 2
elif myarg in ['accounts', 'users', 'groups']:
accounts = sys.argv[i+1].split(',')
accounts = sys.argv[i + 1].split(',')
i += 2
elif myarg in ['orgunit', 'ou']:
body['orgUnit'] = {
'orgUnitId': gam.getOrgUnitId(sys.argv[i+1])[1]}
'orgUnitId': gam.getOrgUnitId(sys.argv[i + 1])[1]
}
i += 2
elif myarg in ['start', 'starttime']:
start_time = utils.get_date_zero_time_or_full_time(sys.argv[i+1])
start_time = utils.get_date_zero_time_or_full_time(sys.argv[i + 1])
i += 2
elif myarg in ['end', 'endtime']:
end_time = utils.get_date_zero_time_or_full_time(sys.argv[i+1])
end_time = utils.get_date_zero_time_or_full_time(sys.argv[i + 1])
i += 2
elif myarg == 'matter':
matterId = getMatterItem(v, sys.argv[i+1])
matterId = getMatterItem(v, sys.argv[i + 1])
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam create hold")
controlflow.invalid_argument_exit(sys.argv[i], 'gam create hold')
if not matterId:
controlflow.system_error_exit(
3, 'you must specify a matter for the new hold.')
@ -322,13 +347,15 @@ def createHold():
cd = gam.buildGAPIObject('directory')
account_type = 'group' if body['corpus'] == 'GROUPS' else 'user'
for account in accounts:
body['accounts'].append(
{'accountId': gam.convertEmailAddressToUID(account,
cd,
account_type)}
)
holdId = gapi.call(v.matters().holds(), 'create',
matterId=matterId, body=body, fields='holdId')
body['accounts'].append({
'accountId':
gam.convertEmailAddressToUID(account, cd, account_type)
})
holdId = gapi.call(v.matters().holds(),
'create',
matterId=matterId,
body=body,
fields='holdId')
print(f'Created hold {holdId["holdId"]}')
@ -340,11 +367,11 @@ def deleteHold():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'matter':
matterId = getMatterItem(v, sys.argv[i+1])
matterId = getMatterItem(v, sys.argv[i + 1])
holdId = convertHoldNameToID(v, hold, matterId)
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam delete hold")
controlflow.invalid_argument_exit(myarg, 'gam delete hold')
if not matterId:
controlflow.system_error_exit(
3, 'you must specify a matter for the hold.')
@ -360,23 +387,24 @@ def getHoldInfo():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'matter':
matterId = getMatterItem(v, sys.argv[i+1])
matterId = getMatterItem(v, sys.argv[i + 1])
holdId = convertHoldNameToID(v, hold, matterId)
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam info hold")
controlflow.invalid_argument_exit(myarg, 'gam info hold')
if not matterId:
controlflow.system_error_exit(
3, 'you must specify a matter for the hold.')
results = gapi.call(v.matters().holds(), 'get',
matterId=matterId, holdId=holdId)
results = gapi.call(v.matters().holds(),
'get',
matterId=matterId,
holdId=holdId)
cd = gam.buildGAPIObject('directory')
if 'accounts' in results:
account_type = 'group' if results['corpus'] == 'GROUPS' else 'user'
for i in range(0, len(results['accounts'])):
uid = f'uid:{results["accounts"][i]["accountId"]}'
acct_email = gam.convertUIDtoEmailAddress(
uid, cd, [account_type])
acct_email = gam.convertUIDtoEmailAddress(uid, cd, [account_type])
results['accounts'][i]['email'] = acct_email
if 'orgUnit' in results:
results['orgUnit']['orgUnitPath'] = gam.doGetOrgInfo(
@ -390,12 +418,16 @@ def convertExportNameToID(v, nameOrID, matterId):
if cg:
return cg.group(1)
fields = 'exports(id,name),nextPageToken'
exports = gapi.get_all_pages(v.matters().exports(
), 'list', 'exports', matterId=matterId, fields=fields)
exports = gapi.get_all_pages(v.matters().exports(),
'list',
'exports',
matterId=matterId,
fields=fields)
for export in exports:
if export['name'].lower() == nameOrID:
return export['id']
controlflow.system_error_exit(4, f'could not find export name {nameOrID} '
controlflow.system_error_exit(
4, f'could not find export name {nameOrID} '
f'in matter {matterId}')
@ -405,12 +437,16 @@ def convertHoldNameToID(v, nameOrID, matterId):
if cg:
return cg.group(1)
fields = 'holds(holdId,name),nextPageToken'
holds = gapi.get_all_pages(v.matters().holds(
), 'list', 'holds', matterId=matterId, fields=fields)
holds = gapi.get_all_pages(v.matters().holds(),
'list',
'holds',
matterId=matterId,
fields=fields)
for hold in holds:
if hold['name'].lower() == nameOrID:
return hold['holdId']
controlflow.system_error_exit(4, f'could not find hold name {nameOrID} '
controlflow.system_error_exit(
4, f'could not find hold name {nameOrID} '
f'in matter {matterId}')
@ -420,8 +456,11 @@ def convertMatterNameToID(v, nameOrID):
if cg:
return cg.group(1)
fields = 'matters(matterId,name),nextPageToken'
matters = gapi.get_all_pages(v.matters(
), 'list', 'matters', view='BASIC', fields=fields)
matters = gapi.get_all_pages(v.matters(),
'list',
'matters',
view='BASIC',
fields=fields)
for matter in matters:
if matter['name'].lower() == nameOrID:
return matter['matterId']
@ -449,36 +488,41 @@ def updateHold():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'matter':
matterId = getMatterItem(v, sys.argv[i+1])
matterId = getMatterItem(v, sys.argv[i + 1])
holdId = convertHoldNameToID(v, hold, matterId)
i += 2
elif myarg == 'query':
query = sys.argv[i+1]
query = sys.argv[i + 1]
i += 2
elif myarg in ['orgunit', 'ou']:
body['orgUnit'] = {'orgUnitId': gam.getOrgUnitId(sys.argv[i+1])[1]}
body['orgUnit'] = {
'orgUnitId': gam.getOrgUnitId(sys.argv[i + 1])[1]
}
i += 2
elif myarg in ['start', 'starttime']:
start_time = utils.get_date_zero_time_or_full_time(sys.argv[i+1])
start_time = utils.get_date_zero_time_or_full_time(sys.argv[i + 1])
i += 2
elif myarg in ['end', 'endtime']:
end_time = utils.get_date_zero_time_or_full_time(sys.argv[i+1])
end_time = utils.get_date_zero_time_or_full_time(sys.argv[i + 1])
i += 2
elif myarg in ['addusers', 'addaccounts', 'addgroups']:
add_accounts = sys.argv[i+1].split(',')
add_accounts = sys.argv[i + 1].split(',')
i += 2
elif myarg in ['removeusers', 'removeaccounts', 'removegroups']:
del_accounts = sys.argv[i+1].split(',')
del_accounts = sys.argv[i + 1].split(',')
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam update hold")
controlflow.invalid_argument_exit(myarg, 'gam update hold')
if not matterId:
controlflow.system_error_exit(
3, 'you must specify a matter for the hold.')
if query or start_time or end_time or body.get('orgUnit'):
fields = 'corpus,query,orgUnit'
old_body = gapi.call(v.matters().holds(
), 'get', matterId=matterId, holdId=holdId, fields=fields)
old_body = gapi.call(v.matters().holds(),
'get',
matterId=matterId,
holdId=holdId,
fields=fields)
body['query'] = old_body['query']
body['corpus'] = old_body['corpus']
if 'orgUnit' in old_body and 'orgUnit' not in body:
@ -502,20 +546,29 @@ def updateHold():
body['query'][query_type]['endTime'] = end_time
if body:
print(f'Updating hold {hold} / {holdId}')
gapi.call(v.matters().holds(), 'update',
matterId=matterId, holdId=holdId, body=body)
gapi.call(v.matters().holds(),
'update',
matterId=matterId,
holdId=holdId,
body=body)
if add_accounts or del_accounts:
cd = gam.buildGAPIObject('directory')
for account in add_accounts:
print(f'adding {account} to hold.')
add_body = {'accountId': gam.convertEmailAddressToUID(account, cd)}
gapi.call(v.matters().holds().accounts(), 'create',
matterId=matterId, holdId=holdId, body=add_body)
gapi.call(v.matters().holds().accounts(),
'create',
matterId=matterId,
holdId=holdId,
body=add_body)
for account in del_accounts:
print(f'removing {account} from hold.')
accountId = gam.convertEmailAddressToUID(account, cd)
gapi.call(v.matters().holds().accounts(), 'delete',
matterId=matterId, holdId=holdId, accountId=accountId)
gapi.call(v.matters().holds().accounts(),
'delete',
matterId=matterId,
holdId=holdId,
accountId=accountId)
def updateMatter(action=None):
@ -530,30 +583,30 @@ def updateMatter(action=None):
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'action':
action = sys.argv[i+1].lower()
action = sys.argv[i + 1].lower()
if action not in VAULT_MATTER_ACTIONS:
controlflow.system_error_exit(3, f'allowed actions are ' \
f'{", ".join(VAULT_MATTER_ACTIONS)}, got {action}')
i += 2
elif myarg == 'name':
body['name'] = sys.argv[i+1]
body['name'] = sys.argv[i + 1]
i += 2
elif myarg == 'description':
body['description'] = sys.argv[i+1]
body['description'] = sys.argv[i + 1]
i += 2
elif myarg in ['addcollaborator', 'addcollaborators']:
if not cd:
cd = gam.buildGAPIObject('directory')
add_collaborators.extend(validateCollaborators(sys.argv[i+1], cd))
add_collaborators.extend(validateCollaborators(sys.argv[i + 1], cd))
i += 2
elif myarg in ['removecollaborator', 'removecollaborators']:
if not cd:
cd = gam.buildGAPIObject('directory')
remove_collaborators.extend(
validateCollaborators(sys.argv[i+1], cd))
validateCollaborators(sys.argv[i + 1], cd))
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i], "gam update matter")
controlflow.invalid_argument_exit(sys.argv[i], 'gam update matter')
if action == 'delete':
action_kwargs = {}
if body:
@ -561,8 +614,10 @@ def updateMatter(action=None):
if 'name' not in body or 'description' not in body:
# bah, API requires name/description to be sent
# on update even when it's not changing
result = gapi.call(v.matters(), 'get',
matterId=matterId, view='BASIC')
result = gapi.call(v.matters(),
'get',
matterId=matterId,
view='BASIC')
body.setdefault('name', result['name'])
body.setdefault('description', result.get('description'))
gapi.call(v.matters(), 'update', body=body, matterId=matterId)
@ -571,12 +626,18 @@ def updateMatter(action=None):
gapi.call(v.matters(), action, matterId=matterId, **action_kwargs)
for collaborator in add_collaborators:
print(f' adding collaborator {collaborator["email"]}')
body = {'matterPermission': {'role': 'COLLABORATOR',
'accountId': collaborator['id']}}
body = {
'matterPermission': {
'role': 'COLLABORATOR',
'accountId': collaborator['id']
}
}
gapi.call(v.matters(), 'addPermissions', matterId=matterId, body=body)
for collaborator in remove_collaborators:
print(f' removing collaborator {collaborator["email"]}')
gapi.call(v.matters(), 'removePermissions', matterId=matterId,
gapi.call(v.matters(),
'removePermissions',
matterId=matterId,
body={'accountId': collaborator['id']})
@ -605,7 +666,7 @@ def downloadExport():
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'targetfolder':
targetFolder = os.path.expanduser(sys.argv[i+1])
targetFolder = os.path.expanduser(sys.argv[i + 1])
if not os.path.isdir(targetFolder):
os.makedirs(targetFolder)
i += 2
@ -616,10 +677,12 @@ def downloadExport():
extractFiles = False
i += 1
else:
controlflow.invalid_argument_exit(
sys.argv[i], "gam download export")
export = gapi.call(v.matters().exports(), 'get',
matterId=matterId, exportId=exportId)
controlflow.invalid_argument_exit(sys.argv[i],
'gam download export')
export = gapi.call(v.matters().exports(),
'get',
matterId=matterId,
exportId=exportId)
for s_file in export['cloudStorageSink']['files']:
bucket = s_file['bucketName']
s_object = s_file['objectName']
@ -631,8 +694,8 @@ def downloadExport():
done = False
while not done:
status, done = downloader.next_chunk()
sys.stdout.write(
' Downloaded: {0:>7.2%}\r'.format(status.progress()))
sys.stdout.write(' Downloaded: {0:>7.2%}\r'.format(
status.progress()))
sys.stdout.flush()
sys.stdout.write('\n Download complete. Flushing to disk...\n')
fileutils.close_file(f, True)
@ -665,23 +728,26 @@ def printMatters():
i += 1
elif myarg == 'matterstate':
valid_states = gapi.get_enum_values_minus_unspecified(
v._rootDesc['schemas']['Matter']['properties']['state'][
'enum'])
state = sys.argv[i+1].upper()
v._rootDesc['schemas']['Matter']['properties']['state']['enum'])
state = sys.argv[i + 1].upper()
if state not in valid_states:
controlflow.expected_argument_exit(
'state', ', '.join(valid_states), state)
controlflow.expected_argument_exit('state',
', '.join(valid_states),
state)
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam print matters")
controlflow.invalid_argument_exit(myarg, 'gam print matters')
gam.printGettingAllItems('Vault Matters', None)
page_message = gapi.got_total_items_msg('Vault Matters', '...\n')
matters = gapi.get_all_pages(
v.matters(), 'list', 'matters', page_message=page_message, view=view,
matters = gapi.get_all_pages(v.matters(),
'list',
'matters',
page_message=page_message,
view=view,
state=state)
for matter in matters:
display.add_row_titles_to_csv_file(
utils.flatten_json(matter), csvRows, titles)
display.add_row_titles_to_csv_file(utils.flatten_json(matter), csvRows,
titles)
display.sort_csv_titles(initialTitles, titles)
display.write_csv_file(csvRows, titles, 'Vault Matters', todrive)
@ -701,14 +767,18 @@ def printExports():
todrive = True
i += 1
elif myarg in ['matter', 'matters']:
matters = sys.argv[i+1].split(',')
matters = sys.argv[i + 1].split(',')
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam print exports")
controlflow.invalid_argument_exit(myarg, 'gam print exports')
if not matters:
fields = 'matters(matterId),nextPageToken'
matters_results = gapi.get_all_pages(v.matters(
), 'list', 'matters', view='BASIC', state='OPEN', fields=fields)
matters_results = gapi.get_all_pages(v.matters(),
'list',
'matters',
view='BASIC',
state='OPEN',
fields=fields)
for matter in matters_results:
matterIds.append(matter['matterId'])
else:
@ -716,11 +786,14 @@ def printExports():
matterIds.append(getMatterItem(v, matter))
for matterId in matterIds:
sys.stderr.write(f'Retrieving exports for matter {matterId}\n')
exports = gapi.get_all_pages(
v.matters().exports(), 'list', 'exports', matterId=matterId)
exports = gapi.get_all_pages(v.matters().exports(),
'list',
'exports',
matterId=matterId)
for export in exports:
display.add_row_titles_to_csv_file(utils.flatten_json(
export, flattened={'matterId': matterId}), csvRows, titles)
display.add_row_titles_to_csv_file(
utils.flatten_json(export, flattened={'matterId': matterId}),
csvRows, titles)
display.sort_csv_titles(initialTitles, titles)
display.write_csv_file(csvRows, titles, 'Vault Exports', todrive)
@ -740,14 +813,18 @@ def printHolds():
todrive = True
i += 1
elif myarg in ['matter', 'matters']:
matters = sys.argv[i+1].split(',')
matters = sys.argv[i + 1].split(',')
i += 2
else:
controlflow.invalid_argument_exit(myarg, "gam print holds")
controlflow.invalid_argument_exit(myarg, 'gam print holds')
if not matters:
fields = 'matters(matterId),nextPageToken'
matters_results = gapi.get_all_pages(v.matters(
), 'list', 'matters', view='BASIC', state='OPEN', fields=fields)
matters_results = gapi.get_all_pages(v.matters(),
'list',
'matters',
view='BASIC',
state='OPEN',
fields=fields)
for matter in matters_results:
matterIds.append(matter['matterId'])
else:
@ -755,10 +832,13 @@ def printHolds():
matterIds.append(getMatterItem(v, matter))
for matterId in matterIds:
sys.stderr.write(f'Retrieving holds for matter {matterId}\n')
holds = gapi.get_all_pages(
v.matters().holds(), 'list', 'holds', matterId=matterId)
holds = gapi.get_all_pages(v.matters().holds(),
'list',
'holds',
matterId=matterId)
for hold in holds:
display.add_row_titles_to_csv_file(utils.flatten_json(
hold, flattened={'matterId': matterId}), csvRows, titles)
display.add_row_titles_to_csv_file(
utils.flatten_json(hold, flattened={'matterId': matterId}),
csvRows, titles)
display.sort_csv_titles(initialTitles, titles)
display.write_csv_file(csvRows, titles, 'Vault Holds', todrive)

View File

@ -31,8 +31,7 @@ def create_http(cache=None,
GC_TLS_MIN_VERSION)
tls_maximum_version = override_max_tls if override_max_tls else GC_Values.get(
GC_TLS_MAX_VERSION)
httpObj = httplib2.Http(
ca_certs=GC_Values.get(GC_CA_FILE),
httpObj = httplib2.Http(ca_certs=GC_Values.get(GC_CA_FILE),
tls_maximum_version=tls_maximum_version,
tls_minimum_version=tls_minimum_version,
cache=cache,

View File

@ -25,7 +25,8 @@ class CreateHttpTest(unittest.TestCase):
transport.GC_Values[transport.GC_TLS_MIN_VERSION])
self.assertEqual(http.tls_maximum_version,
transport.GC_Values[transport.GC_TLS_MAX_VERSION])
self.assertEqual(http.ca_certs, transport.GC_Values[transport.GC_CA_FILE])
self.assertEqual(http.ca_certs,
transport.GC_Values[transport.GC_CA_FILE])
def test_create_http_sets_tls_min_version(self):
http = transport.create_http(override_min_tls='TLSv1_1')
@ -90,7 +91,9 @@ class TransportTest(unittest.TestCase):
def test_request_call_forces_user_agent_no_agent_in_headers(self):
request = transport.Request(self.mock_http)
fake_request_headers = {'some-header-thats-not-a-user-agent': 'someData'}
fake_request_headers = {
'some-header-thats-not-a-user-agent': 'someData'
}
request(self.test_uri, headers=fake_request_headers)
final_headers = self.mock_http.request.call_args[1]['headers']
@ -119,31 +122,34 @@ class TransportTest(unittest.TestCase):
self.assertIn('user-agent', final_headers)
self.assertIn(transport.GAM_USER_AGENT, final_headers['user-agent'])
# Make sure the header wasn't duplicated
self.assertEqual(
len(transport.GAM_USER_AGENT), len(final_headers['user-agent']))
self.assertEqual(len(transport.GAM_USER_AGENT),
len(final_headers['user-agent']))
def test_authorizedhttp_is_google_auth_httplib2_compatible(self):
http = transport.AuthorizedHttp(self.mock_credentials)
self.assertIsInstance(http, google_auth_httplib2.AuthorizedHttp)
def test_authorizedhttp_request_returns_response_content(self):
http = transport.AuthorizedHttp(self.mock_credentials, http=self.mock_http)
http = transport.AuthorizedHttp(self.mock_credentials,
http=self.mock_http)
response, content = http.request(self.test_uri)
self.assertEqual(self.mock_response, response)
self.assertEqual(self.mock_content, content)
def test_authorizedhttp_request_forces_user_agent_no_provided_headers(self):
authorized_http = transport.AuthorizedHttp(
self.mock_credentials, http=self.mock_http)
authorized_http = transport.AuthorizedHttp(self.mock_credentials,
http=self.mock_http)
authorized_http.request(self.test_uri)
headers = self.mock_http.request.call_args[1]['headers']
self.assertIn('user-agent', headers)
self.assertIn(transport.GAM_USER_AGENT, headers['user-agent'])
def test_authorizedhttp_request_forces_user_agent_no_agent_in_headers(self):
authorized_http = transport.AuthorizedHttp(
self.mock_credentials, http=self.mock_http)
fake_request_headers = {'some-header-thats-not-a-user-agent': 'someData'}
authorized_http = transport.AuthorizedHttp(self.mock_credentials,
http=self.mock_http)
fake_request_headers = {
'some-header-thats-not-a-user-agent': 'someData'
}
authorized_http.request(self.test_uri, headers=fake_request_headers)
final_headers = self.mock_http.request.call_args[1]['headers']
@ -155,8 +161,8 @@ class TransportTest(unittest.TestCase):
def test_authorizedhttp_request_forces_user_agent_with_another_agent_in_headers(
self):
authorized_http = transport.AuthorizedHttp(
self.mock_credentials, http=self.mock_http)
authorized_http = transport.AuthorizedHttp(self.mock_credentials,
http=self.mock_http)
headers_with_user_agent = {'user-agent': 'existing-user-agent'}
authorized_http.request(self.test_uri, headers=headers_with_user_agent)
@ -166,8 +172,8 @@ class TransportTest(unittest.TestCase):
self.assertIn(transport.GAM_USER_AGENT, final_headers['user-agent'])
def test_authorizedhttp_request_same_user_agent_already_in_headers(self):
authorized_http = transport.AuthorizedHttp(
self.mock_credentials, http=self.mock_http)
authorized_http = transport.AuthorizedHttp(self.mock_credentials,
http=self.mock_http)
same_user_agent_header = {'user-agent': transport.GAM_USER_AGENT}
authorized_http.request(self.test_uri, headers=same_user_agent_header)
@ -175,5 +181,5 @@ class TransportTest(unittest.TestCase):
self.assertIn('user-agent', final_headers)
self.assertIn(transport.GAM_USER_AGENT, final_headers['user-agent'])
# Make sure the header wasn't duplicated
self.assertEqual(
len(transport.GAM_USER_AGENT), len(final_headers['user-agent']))
self.assertEqual(len(transport.GAM_USER_AGENT),
len(final_headers['user-agent']))

View File

@ -13,6 +13,7 @@ from gam import fileutils
from gam import transport
from gam.var import *
class _DeHTMLParser(HTMLParser):
def __init__(self):
@ -23,14 +24,15 @@ class _DeHTMLParser(HTMLParser):
self.__text.append(data)
def handle_charref(self, name):
self.__text.append(chr(int(name[1:], 16)) if name.startswith('x') else chr(int(name)))
self.__text.append(
chr(int(name[1:], 16)) if name.startswith('x') else chr(int(name)))
def handle_entityref(self, name):
cp = name2codepoint.get(name)
if cp:
self.__text.append(chr(cp))
else:
self.__text.append('&'+name)
self.__text.append('&' + name)
def handle_starttag(self, tag, attrs):
if tag == 'p':
@ -53,7 +55,9 @@ class _DeHTMLParser(HTMLParser):
self.__text.append('\n\n')
def text(self):
return re.sub(r'\n{2}\n+', '\n\n', re.sub(r'\n +', '\n', ''.join(self.__text))).strip()
return re.sub(r'\n{2}\n+', '\n\n',
re.sub(r'\n +', '\n', ''.join(self.__text))).strip()
def dehtml(text):
try:
@ -66,9 +70,11 @@ def dehtml(text):
print_exc(file=sys.stderr)
return text
def indentMultiLineText(message, n=0):
return message.replace('\n', '\n{0}'.format(' ' * n)).rstrip()
def flatten_json(structure, key='', path='', flattened=None, listLimit=None):
if flattened is None:
flattened = {}
@ -78,24 +84,38 @@ def flatten_json(structure, key='', path='', flattened=None, listLimit=None):
for i, item in enumerate(structure):
if listLimit and (i >= listLimit):
break
flatten_json(item, f'{i}', '.'.join([item for item in [path, key] if item]), flattened=flattened, listLimit=listLimit)
flatten_json(item,
f'{i}',
'.'.join([item for item in [path, key] if item]),
flattened=flattened,
listLimit=listLimit)
else:
for new_key, value in list(structure.items()):
if new_key in ['kind', 'etag', '@type']:
continue
if value == NEVER_TIME:
value = 'Never'
flatten_json(value, new_key, '.'.join([item for item in [path, key] if item]), flattened=flattened, listLimit=listLimit)
flatten_json(value,
new_key,
'.'.join([item for item in [path, key] if item]),
flattened=flattened,
listLimit=listLimit)
return flattened
def formatTimestampYMD(timestamp):
return datetime.datetime.fromtimestamp(int(timestamp)/1000).strftime('%Y-%m-%d')
return datetime.datetime.fromtimestamp(int(timestamp) /
1000).strftime('%Y-%m-%d')
def formatTimestampYMDHMS(timestamp):
return datetime.datetime.fromtimestamp(int(timestamp)/1000).strftime('%Y-%m-%d %H:%M:%S')
return datetime.datetime.fromtimestamp(int(timestamp) /
1000).strftime('%Y-%m-%d %H:%M:%S')
def formatTimestampYMDHMSF(timestamp):
return str(datetime.datetime.fromtimestamp(int(timestamp)/1000))
return str(datetime.datetime.fromtimestamp(int(timestamp) / 1000))
def formatFileSize(fileSize):
if fileSize == 0:
@ -108,12 +128,14 @@ def formatFileSize(fileSize):
return f'{fileSize // ONE_MEGA_BYTES}mb'
return f'{fileSize // ONE_GIGA_BYTES}gb'
def formatMilliSeconds(millis):
seconds, millis = divmod(millis, 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
return f'{hours:02d}:{minutes:02d}:{seconds:02d}'
def integerLimits(minVal, maxVal, item='integer'):
if (minVal is not None) and (maxVal is not None):
return f'{item} {minVal}<=x<={maxVal}'
@ -123,13 +145,18 @@ def integerLimits(minVal, maxVal, item='integer'):
return f'{item} x<={maxVal}'
return f'{item} x'
def get_string(i, item, optional=False, minLen=1, maxLen=None):
if i < len(sys.argv):
argstr = sys.argv[i]
if argstr:
if (len(argstr) >= minLen) and ((maxLen is None) or (len(argstr) <= maxLen)):
if (len(argstr) >= minLen) and ((maxLen is None) or
(len(argstr) <= maxLen)):
return argstr
controlflow.system_error_exit(2, f'expected <{integerLimits(minLen, maxLen, "string length")} for {item}>')
controlflow.system_error_exit(
2,
f'expected <{integerLimits(minLen, maxLen, "string length")} for {item}>'
)
if optional or (minLen == 0):
return ''
controlflow.system_error_exit(2, f'expected a Non-empty <{item}>')
@ -137,6 +164,7 @@ def get_string(i, item, optional=False, minLen=1, maxLen=None):
return ''
controlflow.system_error_exit(2, f'expected a <{item}>')
def get_delta(argstr, pattern):
tg = pattern.match(argstr.lower())
if tg is None:
@ -145,7 +173,7 @@ def get_delta(argstr, pattern):
delta = int(tg.group(2))
unit = tg.group(3)
if unit == 'y':
deltaTime = datetime.timedelta(days=delta*365)
deltaTime = datetime.timedelta(days=delta * 365)
elif unit == 'w':
deltaTime = datetime.timedelta(weeks=delta)
elif unit == 'd':
@ -158,37 +186,45 @@ def get_delta(argstr, pattern):
return -deltaTime
return deltaTime
def get_delta_date(argstr):
deltaDate = get_delta(argstr, DELTA_DATE_PATTERN)
if deltaDate is None:
controlflow.system_error_exit(2, f'expected a <{DELTA_DATE_FORMAT_REQUIRED}>; got {argstr}')
controlflow.system_error_exit(
2, f'expected a <{DELTA_DATE_FORMAT_REQUIRED}>; got {argstr}')
return deltaDate
def get_delta_time(argstr):
deltaTime = get_delta(argstr, DELTA_TIME_PATTERN)
if deltaTime is None:
controlflow.system_error_exit(2, f'expected a <{DELTA_TIME_FORMAT_REQUIRED}>; got {argstr}')
controlflow.system_error_exit(
2, f'expected a <{DELTA_TIME_FORMAT_REQUIRED}>; got {argstr}')
return deltaTime
def get_yyyymmdd(argstr, minLen=1, returnTimeStamp=False, returnDateTime=False):
argstr = argstr.strip()
if argstr:
if argstr[0] in ['+', '-']:
today = datetime.date.today()
argstr = (datetime.datetime(today.year, today.month, today.day)+get_delta_date(argstr)).strftime(YYYYMMDD_FORMAT)
argstr = (datetime.datetime(today.year, today.month, today.day) +
get_delta_date(argstr)).strftime(YYYYMMDD_FORMAT)
try:
dateTime = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT)
if returnTimeStamp:
return time.mktime(dateTime.timetuple())*1000
return time.mktime(dateTime.timetuple()) * 1000
if returnDateTime:
return dateTime
return argstr
except ValueError:
controlflow.system_error_exit(2, f'expected a <{YYYYMMDD_FORMAT_REQUIRED}>; got {argstr}')
controlflow.system_error_exit(
2, f'expected a <{YYYYMMDD_FORMAT_REQUIRED}>; got {argstr}')
elif minLen == 0:
return ''
controlflow.system_error_exit(2, f'expected a <{YYYYMMDD_FORMAT_REQUIRED}>')
def get_time_or_delta_from_now(time_string):
"""Get an ISO 8601 time or a positive/negative delta applied to now.
Args:
@ -200,8 +236,11 @@ def get_time_or_delta_from_now(time_string):
if time_string:
if time_string[0] not in ['+', '-']:
return time_string
return (datetime.datetime.utcnow() + get_delta_time(time_string)).isoformat() + 'Z'
controlflow.system_error_exit(2, f'expected a <{YYYYMMDDTHHMMSS_FORMAT_REQUIRED}>')
return (datetime.datetime.utcnow() +
get_delta_time(time_string)).isoformat() + 'Z'
controlflow.system_error_exit(
2, f'expected a <{YYYYMMDDTHHMMSS_FORMAT_REQUIRED}>')
def get_row_filter_date_or_delta_from_now(date_string):
"""Get an ISO 8601 date or a positive/negative delta applied to now.
@ -217,14 +256,19 @@ def get_row_filter_date_or_delta_from_now(date_string):
if deltaDate is None:
return (False, DELTA_DATE_FORMAT_REQUIRED)
today = datetime.date.today()
return (True, (datetime.datetime(today.year, today.month, today.day)+deltaDate).isoformat()+'Z')
return (True,
(datetime.datetime(today.year, today.month, today.day) +
deltaDate).isoformat() + 'Z')
try:
deltaDate = dateutil.parser.parse(date_string, ignoretz=True)
return (True, datetime.datetime(deltaDate.year, deltaDate.month, deltaDate.day).isoformat()+'Z')
return (True,
datetime.datetime(deltaDate.year, deltaDate.month,
deltaDate.day).isoformat() + 'Z')
except ValueError:
pass
return (False, YYYYMMDD_FORMAT_REQUIRED)
def get_row_filter_time_or_delta_from_now(time_string):
"""Get an ISO 8601 time or a positive/negative delta applied to now.
Args:
@ -240,42 +284,48 @@ def get_row_filter_time_or_delta_from_now(time_string):
deltaTime = get_delta(time_string, DELTA_TIME_PATTERN)
if deltaTime is None:
return (False, DELTA_TIME_FORMAT_REQUIRED)
return (True, (datetime.datetime.utcnow()+deltaTime).isoformat()+'Z')
return (True,
(datetime.datetime.utcnow() + deltaTime).isoformat() + 'Z')
try:
deltaTime = dateutil.parser.parse(time_string, ignoretz=True)
return (True, deltaTime.isoformat()+'Z')
return (True, deltaTime.isoformat() + 'Z')
except ValueError:
pass
return (False, YYYYMMDDTHHMMSS_FORMAT_REQUIRED)
def get_date_zero_time_or_full_time(time_string):
time_string = time_string.strip()
if time_string:
if YYYYMMDD_PATTERN.match(time_string):
return get_yyyymmdd(time_string)+'T00:00:00.000Z'
return get_yyyymmdd(time_string) + 'T00:00:00.000Z'
return get_time_or_delta_from_now(time_string)
controlflow.system_error_exit(2, f'expected a <{YYYYMMDDTHHMMSS_FORMAT_REQUIRED}>')
controlflow.system_error_exit(
2, f'expected a <{YYYYMMDDTHHMMSS_FORMAT_REQUIRED}>')
def md5_matches_file(local_file, expected_md5, exitOnError):
f = fileutils.open_file(local_file, 'rb')
hash_md5 = md5()
for chunk in iter(lambda: f.read(4096), b""):
for chunk in iter(lambda: f.read(4096), b''):
hash_md5.update(chunk)
actual_hash = hash_md5.hexdigest()
if exitOnError and actual_hash != expected_md5:
controlflow.system_error_exit(6, f'actual hash was {actual_hash}. Exiting on corrupt file.')
controlflow.system_error_exit(
6, f'actual hash was {actual_hash}. Exiting on corrupt file.')
return actual_hash == expected_md5
URL_SHORTENER_ENDPOINT = 'https://gam-shortn.appspot.com/create'
def shorten_url(long_url, httpc=None):
if not httpc:
httpc = transport.create_http(timeout=10)
headers = {'Content-Type': 'application/json', 'User-Agent': GAM_INFO}
try:
payload = json.dumps({'long_url': long_url})
resp, content = httpc.request(
URL_SHORTENER_ENDPOINT,
resp, content = httpc.request(URL_SHORTENER_ENDPOINT,
'POST',
payload,
headers=headers)

File diff suppressed because it is too large Load Diff