#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# GAM7
#
# Copyright 2024, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
GAM is a command line tool which allows Administrators to control their
Google Workspace domain and accounts.

For more information, see:
  https://github.com/GAM-team/GAM
  https://github.com/GAM-team/GAM/wiki
"""

__author__ = 'GAM Team '
__version__ = '7.06.10'
__license__ = 'Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)'

#pylint: disable=wrong-import-position

import base64
import calendar as calendarlib
import codecs
import collections
import configparser
import csv
import datetime
from email.charset import add_charset, QP
from email.generator import Generator
from email.header import decode_header, Header
from email import message_from_string
from email.mime.application import MIMEApplication
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from email.policy import SMTP as policySMTP
import hashlib
from html.entities import name2codepoint
from html.parser import HTMLParser
import http.client as http_client
import importlib
from importlib.metadata import version as lib_version
import io
import ipaddress
import json
import logging
from logging.handlers import RotatingFileHandler
import mimetypes
import multiprocessing
import os
import platform
import queue
import random
import re
from secrets import SystemRandom
import shlex
import signal
import smtplib
import socket
import sqlite3
import ssl
import string
import struct
import subprocess
import sys
from tempfile import TemporaryFile
try:
  import termios
except ImportError:
  # termios does not exist for Windows
  pass
import threading
import time
from traceback import print_exc
import types
from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse, parse_qs
import uuid
import warnings
import webbrowser
import wsgiref.simple_server
import wsgiref.util
import zipfile

# disable legacy stuff we don't use and isn't secure
os.environ['CRYPTOGRAPHY_OPENSSL_NO_LEGACY'] = "1"
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

# 10/2024 - I don't recall why we did this but PyInstaller
# 6.10.0+ does not like it. Only run this when we're not
# Frozen.
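# (PyInstaller sets sys.frozen on bundled executables, so the sys.path tweak
# below only happens when GAM is run from source.)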
if not getattr(sys, 'frozen', False):
  sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))

from dateutil.relativedelta import relativedelta
from pathvalidate import sanitize_filename, sanitize_filepath

import google.oauth2.credentials
import google.oauth2.id_token
import google.auth
from google.auth.jwt import Credentials as JWTCredentials
import google.oauth2.service_account
import google_auth_oauthlib.flow
import google_auth_httplib2
import httplib2

httplib2.RETRIES = 5

from passlib.hash import sha512_crypt
from filelock import FileLock

if platform.system() == 'Linux':
  import distro

from gamlib import glaction
from gamlib import glapi as API
from gamlib import glcfg as GC
from gamlib import glclargs
from gamlib import glentity
from gamlib import glgapi as GAPI
from gamlib import glgdata as GDATA
from gamlib import glglobals as GM
from gamlib import glindent
from gamlib import glmsgs as Msg
from gamlib import glskus as SKU
from gamlib import gluprop as UProp
from gamlib import glverlibs

import gdata.apps.service
import gdata.apps.audit
import gdata.apps.audit.service
import gdata.apps.contacts
import gdata.apps.contacts.service

# Import local library, does not include discovery documents
import googleapiclient
import googleapiclient.discovery
import googleapiclient.errors
import googleapiclient.http

from iso8601 import iso8601

ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S%:z'
RFC2822_TIME_FORMAT = '%a, %d %b %Y %H:%M:%S %z'

def ISOformatTimeStamp(timestamp):
  return timestamp.isoformat('T', 'seconds')

def currentISOformatTimeStamp(timespec='milliseconds'):
  return datetime.datetime.now(GC.Values[GC.TIMEZONE]).isoformat('T', timespec)

Act = glaction.GamAction()
Cmd = glclargs.GamCLArgs()
Ent = glentity.GamEntity()
Ind = glindent.GamIndent()

# Finding path method varies between Python source, PyInstaller and StaticX
if os.environ.get('STATICX_PROG_PATH', False):
  # StaticX static executable
  GM.Globals[GM.GAM_PATH] = os.path.dirname(os.environ['STATICX_PROG_PATH'])
  GM.Globals[GM.GAM_TYPE] = 'staticx'
elif getattr(sys, 'frozen', False):
  # Pyinstaller executable
  GM.Globals[GM.GAM_PATH] = os.path.dirname(sys.executable)
  GM.Globals[GM.GAM_TYPE] = 'pyinstaller'
else:
  # Source code
  GM.Globals[GM.GAM_PATH] = os.path.dirname(os.path.realpath(__file__))
  GM.Globals[GM.GAM_TYPE] = 'pythonsource'

GIT_USER = 'GAM-team'
GAM = 'GAM'
GAM_URL = f'https://github.com/{GIT_USER}/{GAM}'
GAM_USER_AGENT = (f'{GAM} {__version__} - {GAM_URL} / '
                  f'{__author__} / '
                  f'Python {sys.version_info[0]}.{sys.version_info[1]}.{sys.version_info[2]} {sys.version_info[3]} / '
                  f'{platform.platform()} {platform.machine()} /'
                  )
GAM_RELEASES = f'https://github.com/{GIT_USER}/{GAM}/releases'
GAM_WIKI = f'https://github.com/{GIT_USER}/{GAM}/wiki'
GAM_LATEST_RELEASE = f'https://api.github.com/repos/{GIT_USER}/{GAM}/releases/latest'

GAM_PROJECT_CREATION = 'GAM Project Creation'
GAM_PROJECT_CREATION_CLIENT_ID = '297408095146-fug707qsjv4ikron0hugpevbrjhkmsk7.apps.googleusercontent.com'

TRUE = 'true'
FALSE = 'false'
TRUE_VALUES = [TRUE, 'on', 'yes', 'enabled', '1']
FALSE_VALUES = [FALSE, 'off', 'no', 'disabled', '0']
TRUE_FALSE = [TRUE, FALSE]
ERROR = 'ERROR'
ERROR_PREFIX = ERROR+': '
WARNING = 'WARNING'
WARNING_PREFIX = WARNING+': '
ONE_KILO_10_BYTES = 1000
ONE_MEGA_10_BYTES = 1000000
ONE_GIGA_10_BYTES = 1000000000
ONE_KILO_BYTES = 1024
ONE_MEGA_BYTES = 1048576
ONE_GIGA_BYTES = 1073741824
SECONDS_PER_MINUTE = 60
SECONDS_PER_HOUR = 3600
SECONDS_PER_DAY = 86400
SECONDS_PER_WEEK = 604800
MAX_GOOGLE_SHEET_CELLS = 10000000 # See https://support.google.com/drive/answer/37603
MAX_LOCAL_GOOGLE_TIME_OFFSET = 30
SHARED_DRIVE_MAX_FILES_FOLDERS = 500000
UTF8 = 'utf-8'
UTF8_SIG = 'utf-8-sig'
EV_GAMCFGDIR = 'GAMCFGDIR'
EV_GAMCFGSECTION = 'GAMCFGSECTION'
EV_OLDGAMPATH = 'OLDGAMPATH'
FN_GAM_CFG = 'gam.cfg'
FN_LAST_UPDATE_CHECK_TXT = 'lastupdatecheck.txt'
FN_GAMCOMMANDS_TXT = 'GamCommands.txt'
MY_DRIVE = 'My Drive'
TEAM_DRIVE = 'Drive'
ROOT = 'root'
ROOTID = 'rootid'
ORPHANS = 'Orphans'
SHARED_WITHME = 'SharedWithMe'
SHARED_DRIVES = 'SharedDrives'
LOWERNUMERIC_CHARS = string.ascii_lowercase+string.digits
ALPHANUMERIC_CHARS = LOWERNUMERIC_CHARS+string.ascii_uppercase
URL_SAFE_CHARS = ALPHANUMERIC_CHARS+'-._~'
PASSWORD_SAFE_CHARS = ALPHANUMERIC_CHARS+'!#$%&()*-./:;<=>?@[\\]^_{|}~'
FILENAME_SAFE_CHARS = ALPHANUMERIC_CHARS+'-_.() '
CHAT_MESSAGEID_CHARS = string.ascii_lowercase+string.digits+'-'
ADMIN_ACCESS_OPTIONS = {'adminaccess', 'asadmin'}
OWNER_ACCESS_OPTIONS = {'owneraccess', 'asowner'}

# Python 3 values
DEFAULT_CSV_READ_MODE = 'r'
DEFAULT_FILE_APPEND_MODE = 'a'
DEFAULT_FILE_READ_MODE = 'r'
DEFAULT_FILE_WRITE_MODE = 'w'

# Google API constants
APPLICATION_VND_GOOGLE_APPS = 'application/vnd.google-apps.'
MIMETYPE_GA_DOCUMENT = f'{APPLICATION_VND_GOOGLE_APPS}document'
MIMETYPE_GA_DRAWING = f'{APPLICATION_VND_GOOGLE_APPS}drawing'
MIMETYPE_GA_FILE = f'{APPLICATION_VND_GOOGLE_APPS}file'
MIMETYPE_GA_FOLDER = f'{APPLICATION_VND_GOOGLE_APPS}folder'
MIMETYPE_GA_FORM = f'{APPLICATION_VND_GOOGLE_APPS}form'
MIMETYPE_GA_FUSIONTABLE = f'{APPLICATION_VND_GOOGLE_APPS}fusiontable'
MIMETYPE_GA_JAM = f'{APPLICATION_VND_GOOGLE_APPS}jam'
MIMETYPE_GA_MAP = f'{APPLICATION_VND_GOOGLE_APPS}map'
MIMETYPE_GA_PRESENTATION = f'{APPLICATION_VND_GOOGLE_APPS}presentation'
MIMETYPE_GA_SCRIPT = f'{APPLICATION_VND_GOOGLE_APPS}script'
MIMETYPE_GA_SCRIPT_JSON = f'{APPLICATION_VND_GOOGLE_APPS}script+json'
MIMETYPE_GA_SHORTCUT = f'{APPLICATION_VND_GOOGLE_APPS}shortcut'
MIMETYPE_GA_3P_SHORTCUT = f'{APPLICATION_VND_GOOGLE_APPS}drive-sdk'
MIMETYPE_GA_SITE = f'{APPLICATION_VND_GOOGLE_APPS}site'
MIMETYPE_GA_SPREADSHEET = f'{APPLICATION_VND_GOOGLE_APPS}spreadsheet'
MIMETYPE_TEXT_CSV = 'text/csv'
MIMETYPE_TEXT_HTML = 'text/html'
MIMETYPE_TEXT_PLAIN = 'text/plain'

GOOGLE_NAMESERVERS = ['8.8.8.8', '8.8.4.4']
GOOGLE_TIMECHECK_LOCATION = 'admin.googleapis.com'
NEVER_DATE = '1970-01-01'
NEVER_DATETIME = '1970-01-01 00:00'
NEVER_TIME = '1970-01-01T00:00:00.000Z'
NEVER_TIME_NOMS = '1970-01-01T00:00:00Z'
NEVER_END_DATE = '1969-12-31'
NEVER_START_DATE = NEVER_DATE
PROJECTION_CHOICE_MAP = {'basic': 'BASIC', 'full': 'FULL'}
REFRESH_EXPIRY = '1970-01-01T00:00:01Z'
REPLACE_GROUP_PATTERN = re.compile(r'\\(\d+)')
UNKNOWN = 'Unknown'

# Queries
ME_IN_OWNERS = "'me' in owners"
ME_IN_OWNERS_AND = ME_IN_OWNERS+" and "
AND_ME_IN_OWNERS = " and "+ME_IN_OWNERS
NOT_ME_IN_OWNERS = "not "+ME_IN_OWNERS
NOT_ME_IN_OWNERS_AND = NOT_ME_IN_OWNERS+" and "
AND_NOT_ME_IN_OWNERS = " and "+NOT_ME_IN_OWNERS
ANY_FOLDERS = "mimeType = '"+MIMETYPE_GA_FOLDER+"'"
MY_FOLDERS = ME_IN_OWNERS_AND+ANY_FOLDERS
NON_TRASHED = "trashed = false"
WITH_PARENTS = "'{0}' in parents"
ANY_NON_TRASHED_WITH_PARENTS = "trashed = false and '{0}' in parents"
ANY_NON_TRASHED_FOLDER_NAME = "mimeType = '"+MIMETYPE_GA_FOLDER+"' and name = '{0}' and trashed = false"
MY_NON_TRASHED_FOLDER_NAME = ME_IN_OWNERS_AND+ANY_NON_TRASHED_FOLDER_NAME
MY_NON_TRASHED_FOLDER_NAME_WITH_PARENTS = ME_IN_OWNERS_AND+"mimeType = '"+MIMETYPE_GA_FOLDER+"' and name = '{0}' and trashed = false and '{1}' in parents"
ANY_NON_TRASHED_FOLDER_NAME_WITH_PARENTS = "mimeType = '"+MIMETYPE_GA_FOLDER+"' and name = '{0}' and trashed = false and '{1}' in parents"
WITH_ANY_FILE_NAME = "name = '{0}'"
WITH_MY_FILE_NAME = ME_IN_OWNERS_AND+WITH_ANY_FILE_NAME
WITH_OTHER_FILE_NAME = NOT_ME_IN_OWNERS_AND+WITH_ANY_FILE_NAME
AND_NOT_SHORTCUT = " and mimeType != '"+MIMETYPE_GA_SHORTCUT+"'"

# Program return codes
UNKNOWN_ERROR_RC = 1
USAGE_ERROR_RC = 2
SOCKET_ERROR_RC = 3
GOOGLE_API_ERROR_RC = 4
NETWORK_ERROR_RC = 5
FILE_ERROR_RC = 6
MEMORY_ERROR_RC = 7
KEYBOARD_INTERRUPT_RC = 8
HTTP_ERROR_RC = 9
SCOPES_NOT_AUTHORIZED_RC = 10
DATA_ERROR_RC = 11
API_ACCESS_DENIED_RC = 12
CONFIG_ERROR_RC = 13
SYSTEM_ERROR_RC = 14
NO_SCOPES_FOR_API_RC = 15
CLIENT_SECRETS_JSON_REQUIRED_RC = 16
OAUTH2SERVICE_JSON_REQUIRED_RC = 16
OAUTH2_TXT_REQUIRED_RC = 16
INVALID_JSON_RC = 17
JSON_ALREADY_EXISTS_RC = 17
AUTHENTICATION_TOKEN_REFRESH_ERROR_RC = 18
HARD_ERROR_RC = 19
# Information
ENTITY_IS_A_USER_RC = 20
ENTITY_IS_A_USER_ALIAS_RC = 21
ENTITY_IS_A_GROUP_RC = 22
ENTITY_IS_A_GROUP_ALIAS_RC = 23
ENTITY_IS_AN_UNMANAGED_ACCOUNT_RC = 24
ORGUNIT_NOT_EMPTY_RC = 25
CHECK_USER_GROUPS_ERROR_RC = 29
ORPHANS_COLLECTED_RC = 30
# Warnings/Errors
ACTION_FAILED_RC = 50
ACTION_NOT_PERFORMED_RC = 51
INVALID_ENTITY_RC = 52
BAD_REQUEST_RC = 53
ENTITY_IS_NOT_UNIQUE_RC = 54
DATA_NOT_AVALIABLE_RC = 55
ENTITY_DOES_NOT_EXIST_RC = 56
ENTITY_DUPLICATE_RC = 57
ENTITY_IS_NOT_AN_ALIAS_RC = 58
ENTITY_IS_UKNOWN_RC = 59
NO_ENTITIES_FOUND_RC = 60
INVALID_DOMAIN_RC = 61
INVALID_DOMAIN_VALUE_RC = 62
INVALID_TOKEN_RC = 63
JSON_LOADS_ERROR_RC = 64
MULTIPLE_DELETED_USERS_FOUND_RC = 65
MULTIPLE_PROJECT_FOLDERS_FOUND_RC = 65
STDOUT_STDERR_ERROR_RC = 66
INSUFFICIENT_PERMISSIONS_RC = 67
REQUEST_COMPLETED_NO_RESULTS_RC = 71
REQUEST_NOT_COMPLETED_RC = 72
SERVICE_NOT_APPLICABLE_RC = 73
TARGET_DRIVE_SPACE_ERROR_RC = 74
USER_REQUIRED_TO_CHANGE_PASSWORD_ERROR_RC = 75
USER_SUSPENDED_ERROR_RC = 76
NO_CSV_DATA_TO_UPLOAD_RC = 77
NO_SA_ACCESS_CONTEXT_MANAGER_EDITOR_ROLE_RC = 78
ACCESS_POLICY_ERROR_RC = 79
YUBIKEY_CONNECTION_ERROR_RC = 80
YUBIKEY_INVALID_KEY_TYPE_RC = 81
YUBIKEY_INVALID_SLOT_RC = 82
YUBIKEY_INVALID_PIN_RC = 83
YUBIKEY_APDU_ERROR_RC = 84
YUBIKEY_VALUE_ERROR_RC = 85
YUBIKEY_MULTIPLE_CONNECTED_RC = 86
YUBIKEY_NOT_FOUND_RC = 87

# Multiprocessing lock
mplock = None

# stdin/stdout/stderr
def readStdin(prompt):
  return input(prompt)

def stdErrorExit(e):
  try:
    sys.stderr.write(f'\n{ERROR_PREFIX}{str(e)}\n')
  except IOError:
    pass
  sys.exit(STDOUT_STDERR_ERROR_RC)

def writeStdout(data):
  try:
    GM.Globals[GM.STDOUT].get(GM.REDIRECT_MULTI_FD, sys.stdout).write(data)
  except IOError as e:
    stdErrorExit(e)

def flushStdout():
  try:
    GM.Globals[GM.STDOUT].get(GM.REDIRECT_MULTI_FD, sys.stdout).flush()
  except IOError as e:
    stdErrorExit(e)

def writeStderr(data):
  flushStdout()
  try:
    GM.Globals[GM.STDERR].get(GM.REDIRECT_MULTI_FD, sys.stderr).write(data)
  except IOError as e:
    stdErrorExit(e)

def flushStderr():
  try:
    GM.Globals[GM.STDERR].get(GM.REDIRECT_MULTI_FD, sys.stderr).flush()
  except IOError as e:
    stdErrorExit(e)

# Error messages
def setSysExitRC(sysRC):
  GM.Globals[GM.SYSEXITRC] = sysRC

def stderrErrorMsg(message):
  writeStderr(f'\n{ERROR_PREFIX}{message}\n')

def stderrWarningMsg(message):
  writeStderr(f'\n{WARNING_PREFIX}{message}\n')

def systemErrorExit(sysRC, message):
  if message:
    stderrErrorMsg(message)
  sys.exit(sysRC)

def printErrorMessage(sysRC, message):
  setSysExitRC(sysRC)
  writeStderr(f'\n{Ind.Spaces()}{ERROR_PREFIX}{message}\n')

def printWarningMessage(sysRC, message):
  setSysExitRC(sysRC)
  writeStderr(f'\n{Ind.Spaces()}{WARNING_PREFIX}{message}\n')
def supportsColoredText():
  """Determines if the current terminal environment supports colored text.

  Returns:
    Bool, True if the current terminal environment supports colored text via
    ANSI escape characters.
  """
  # Make a rudimentary check for Windows. Though Windows does seem to support
  # colorization with VT100 emulation, it is disabled by default. Therefore,
  # we'll simply disable it in GAM on Windows for now.
  return not sys.platform.startswith('win')

def createColoredText(text, color):
  """Uses ANSI escape characters to create colored text in supported terminals.

  See more at https://en.wikipedia.org/wiki/ANSI_escape_code#Colors

  Args:
    text: String, The text to colorize using ANSI escape characters.
    color: String, An ANSI escape sequence denoting the color of the text to be
      created. See more at https://en.wikipedia.org/wiki/ANSI_escape_code#Colors

  Returns:
    The input text with appropriate ANSI escape characters to create
    colorization in a supported terminal environment.
  """
  END_COLOR_SEQUENCE = '\033[0m'  # Ends the applied color formatting
  if supportsColoredText():
    return color + text + END_COLOR_SEQUENCE
  return text  # Hand back the plain text, uncolorized.

def createRedText(text):
  """Uses ANSI encoding to create red colored text if supported."""
  return createColoredText(text, '\033[91m')

def createGreenText(text):
  """Uses ANSI encoding to create green colored text if supported."""
  return createColoredText(text, '\u001b[32m')

def createYellowText(text):
  """Uses ANSI encoding to create yellow text if supported."""
  return createColoredText(text, '\u001b[33m')

def executeBatch(dbatch):
  dbatch.execute()
  if GC.Values[GC.INTER_BATCH_WAIT] > 0:
    time.sleep(GC.Values[GC.INTER_BATCH_WAIT])

def _stripControlCharsFromName(name):
  for cc in ['\x00', '\r', '\n']:
    name = name.replace(cc, '')
  return name

class LazyLoader(types.ModuleType):
  """Lazily import a module, mainly to avoid pulling in large dependencies.

  `contrib`, and `ffmpeg` are examples of modules that are large and not always
  needed, and this allows them to only be loaded when they are used.
  """

  # The lint error here is incorrect.
  def __init__(self, local_name, parent_module_globals, name):
    self._local_name = local_name
    self._parent_module_globals = parent_module_globals
    super().__init__(name)

  def _load(self):
    # Import the target module and insert it into the parent's namespace
    module = importlib.import_module(self.__name__)
    self._parent_module_globals[self._local_name] = module
    # Update this object's dict so that if someone keeps a reference to the
    # LazyLoader, lookups are efficient (__getattr__ is only called on lookups
    # that fail).
    self.__dict__.update(module.__dict__)
    return module

  def __getattr__(self, item):
    module = self._load()
    return getattr(module, item)

  def __dir__(self):
    module = self._load()
    return dir(module)

yubikey = LazyLoader('yubikey', globals(), 'gam.gamlib.yubikey')

# gam yubikey resetpiv [yubikey_serialnumber ]
def doResetYubiKeyPIV():
  new_data = {}
  while Cmd.ArgumentsRemaining():
    myarg = getArgument()
    if myarg == 'yubikeyserialnumber':
      new_data['yubikey_serial_number'] = getInteger()
    else:
      unknownArgumentExit()
  yk = yubikey.YubiKey(new_data)
  yk.serial_number = yk.get_serial_number()
  yk.reset_piv()

class _DeHTMLParser(HTMLParser): #pylint: disable=abstract-method
  def __init__(self):
    HTMLParser.__init__(self)
    self.__text = []

  def handle_data(self, data):
    self.__text.append(data)

  def handle_charref(self, name):
    self.__text.append(chr(int(name[1:], 16)) if name.startswith('x') else chr(int(name)))

  def handle_entityref(self, name):
    cp = name2codepoint.get(name)
    if cp:
      self.__text.append(chr(cp))
    else:
      self.__text.append('&'+name)

  def handle_starttag(self, tag, attrs):
    if tag == 'p':
      self.__text.append('\n\n')
    elif tag == 'br':
      self.__text.append('\n')
    elif tag == 'a':
      for attr in attrs:
        if attr[0] == 'href':
          self.__text.append(f'({attr[1]}) ')
          break
    elif tag == 'div':
      if not attrs:
        self.__text.append('\n')
    elif tag in {'http:', 'https'}:
      self.__text.append(f' ({tag}//{attrs[0][0]}) ')

  def handle_startendtag(self, tag, attrs):
    if tag == 'br':
      self.__text.append('\n\n')

  def text(self):
    return re.sub(r'\n{2}\n+', '\n\n', re.sub(r'\n +', '\n', ''.join(self.__text))).strip()

def dehtml(text):
  parser = _DeHTMLParser()
  parser.feed(str(text))
  parser.close()
  return parser.text()

def currentCount(i, count):
  return f' ({i}/{count})' if (count > GC.Values[GC.SHOW_COUNTS_MIN]) else ''

def currentCountNL(i, count):
  return f' ({i}/{count})\n' if (count > GC.Values[GC.SHOW_COUNTS_MIN]) else '\n'

# Format a key value list
#   key, value -> "key: value" + ", " if not last item
#   key, ''    -> "key:" + ", " if not last item
#   key, None  -> "key" + " " if not last item
def formatKeyValueList(prefixStr, kvList, suffixStr):
  msg = prefixStr
  i = 0
  l = len(kvList)
  while i < l:
    if isinstance(kvList[i], (bool, float, int)):
      msg += str(kvList[i])
    else:
      msg += kvList[i]
    i += 1
    if i < l:
      val = kvList[i]
      if (val is not None) or (i == l-1):
        msg += ':'
        if (val is not None) and (not isinstance(val, str) or val):
          msg += ' '
          if isinstance(val, (bool, float, int)):
            msg += str(val)
          else:
            msg += val
        i += 1
        if i < l:
          msg += ', '
      else:
        i += 1
        if i < l:
          msg += ' '
  msg += suffixStr
  return msg

# Something's wrong with CustomerID??
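# accessErrorMessage() below probes customers().get() with the configured
# customer_id and maps the failure to a message: GAPI.badRequest/invalidInput
# -> Msg.INVALID, GAPI.resourceNotFound -> Msg.DOES_NOT_EXIST, GAPI.forbidden
# -> Msg.ACCESS_FORBIDDEN (with customer/domain/admin context); it returns
# None when the customer ID is accessible.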
def accessErrorMessage(cd, errMsg=None): if cd is None: cd = buildGAPIObject(API.DIRECTORY) try: callGAPI(cd.customers(), 'get', throwReasons=[GAPI.BAD_REQUEST, GAPI.INVALID_INPUT, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], customerKey=GC.Values[GC.CUSTOMER_ID], fields='id') except (GAPI.badRequest, GAPI.invalidInput): return formatKeyValueList('', [Ent.Singular(Ent.CUSTOMER_ID), GC.Values[GC.CUSTOMER_ID], Msg.INVALID], '') except GAPI.resourceNotFound: return formatKeyValueList('', [Ent.Singular(Ent.CUSTOMER_ID), GC.Values[GC.CUSTOMER_ID], Msg.DOES_NOT_EXIST], '') except GAPI.forbidden: return formatKeyValueList('', Ent.FormatEntityValueList([Ent.CUSTOMER_ID, GC.Values[GC.CUSTOMER_ID], Ent.DOMAIN, GC.Values[GC.DOMAIN], Ent.USER, GM.Globals[GM.ADMIN]])+[Msg.ACCESS_FORBIDDEN], '') if errMsg: return formatKeyValueList('', [Ent.Singular(Ent.CUSTOMER_ID), GC.Values[GC.CUSTOMER_ID], errMsg], '') return None def accessErrorExit(cd, errMsg=None): systemErrorExit(INVALID_DOMAIN_RC, accessErrorMessage(cd or buildGAPIObject(API.DIRECTORY), errMsg)) def accessErrorExitNonDirectory(api, errMsg): systemErrorExit(API_ACCESS_DENIED_RC, formatKeyValueList('', Ent.FormatEntityValueList([Ent.CUSTOMER_ID, GC.Values[GC.CUSTOMER_ID], Ent.DOMAIN, GC.Values[GC.DOMAIN], Ent.API, api])+[errMsg], '')) def ClientAPIAccessDeniedExit(errMsg=None): stderrErrorMsg(Msg.API_ACCESS_DENIED) if errMsg: stderrErrorMsg(errMsg) missingScopes = API.getClientScopesSet(GM.Globals[GM.CURRENT_CLIENT_API])-GM.Globals[GM.CURRENT_CLIENT_API_SCOPES] if missingScopes: writeStderr(Msg.API_CHECK_CLIENT_AUTHORIZATION.format(GM.Globals[GM.OAUTH2_CLIENT_ID], ','.join(sorted(missingScopes)))) systemErrorExit(API_ACCESS_DENIED_RC, None) def SvcAcctAPIAccessDenied(): _getSvcAcctData() if (GM.Globals[GM.CURRENT_SVCACCT_API] == API.GMAIL and GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES] and GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES][0] == API.GMAIL_SEND_SCOPE): systemErrorExit(OAUTH2SERVICE_JSON_REQUIRED_RC, Msg.NO_SVCACCT_ACCESS_ALLOWED) stderrErrorMsg(Msg.API_ACCESS_DENIED) apiOrScopes = API.getAPIName(GM.Globals[GM.CURRENT_SVCACCT_API]) if GM.Globals[GM.CURRENT_SVCACCT_API] else ','.join(sorted(GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES])) writeStderr(Msg.API_CHECK_SVCACCT_AUTHORIZATION.format(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_id'], apiOrScopes, GM.Globals[GM.CURRENT_SVCACCT_USER] or _getAdminEmail())) def SvcAcctAPIAccessDeniedExit(): SvcAcctAPIAccessDenied() systemErrorExit(API_ACCESS_DENIED_RC, None) def SvcAcctAPIDisabledExit(): if not GM.Globals[GM.CURRENT_SVCACCT_USER] and GM.Globals[GM.CURRENT_CLIENT_API]: ClientAPIAccessDeniedExit() if GM.Globals[GM.CURRENT_SVCACCT_API]: stderrErrorMsg(Msg.SERVICE_ACCOUNT_API_DISABLED.format(API.getAPIName(GM.Globals[GM.CURRENT_SVCACCT_API]))) systemErrorExit(API_ACCESS_DENIED_RC, None) systemErrorExit(API_ACCESS_DENIED_RC, Msg.API_ACCESS_DENIED) def APIAccessDeniedExit(): if not GM.Globals[GM.CURRENT_SVCACCT_USER] and GM.Globals[GM.CURRENT_CLIENT_API]: ClientAPIAccessDeniedExit() if GM.Globals[GM.CURRENT_SVCACCT_API]: SvcAcctAPIAccessDeniedExit() systemErrorExit(API_ACCESS_DENIED_RC, Msg.API_ACCESS_DENIED) def checkEntityDNEorAccessErrorExit(cd, entityType, entityName, i=0, count=0): message = accessErrorMessage(cd) if message: systemErrorExit(INVALID_DOMAIN_RC, message) entityDoesNotExistWarning(entityType, entityName, i, count) def checkEntityAFDNEorAccessErrorExit(cd, entityType, entityName, i=0, count=0): message = accessErrorMessage(cd) if message: systemErrorExit(INVALID_DOMAIN_RC, message) 
entityActionFailedWarning([entityType, entityName], Msg.DOES_NOT_EXIST, i, count) def checkEntityItemValueAFDNEorAccessErrorExit(cd, entityType, entityName, itemType, itemValue, i=0, count=0): message = accessErrorMessage(cd) if message: systemErrorExit(INVALID_DOMAIN_RC, message) entityActionFailedWarning([entityType, entityName, itemType, itemValue], Msg.DOES_NOT_EXIST, i, count) def invalidClientSecretsJsonExit(errMsg): stderrErrorMsg(Msg.DOES_NOT_EXIST_OR_HAS_INVALID_FORMAT.format(Ent.Singular(Ent.CLIENT_SECRETS_JSON_FILE), GC.Values[GC.CLIENT_SECRETS_JSON], errMsg)) writeStderr(Msg.INSTRUCTIONS_CLIENT_SECRETS_JSON) systemErrorExit(CLIENT_SECRETS_JSON_REQUIRED_RC, None) def invalidOauth2serviceJsonExit(errMsg): stderrErrorMsg(Msg.DOES_NOT_EXIST_OR_HAS_INVALID_FORMAT.format(Ent.Singular(Ent.OAUTH2SERVICE_JSON_FILE), GC.Values[GC.OAUTH2SERVICE_JSON], errMsg)) writeStderr(Msg.INSTRUCTIONS_OAUTH2SERVICE_JSON) systemErrorExit(OAUTH2SERVICE_JSON_REQUIRED_RC, None) def invalidOauth2TxtExit(errMsg): stderrErrorMsg(Msg.DOES_NOT_EXIST_OR_HAS_INVALID_FORMAT.format(Ent.Singular(Ent.OAUTH2_TXT_FILE), GC.Values[GC.OAUTH2_TXT], errMsg)) writeStderr(Msg.EXECUTE_GAM_OAUTH_CREATE) systemErrorExit(OAUTH2_TXT_REQUIRED_RC, None) def expiredRevokedOauth2TxtExit(): stderrErrorMsg(Msg.IS_EXPIRED_OR_REVOKED.format(Ent.Singular(Ent.OAUTH2_TXT_FILE), GC.Values[GC.OAUTH2_TXT])) writeStderr(Msg.EXECUTE_GAM_OAUTH_CREATE) systemErrorExit(OAUTH2_TXT_REQUIRED_RC, None) def invalidDiscoveryJsonExit(fileName, errMsg): stderrErrorMsg(Msg.DOES_NOT_EXIST_OR_HAS_INVALID_FORMAT.format(Ent.Singular(Ent.DISCOVERY_JSON_FILE), fileName, errMsg)) systemErrorExit(INVALID_JSON_RC, None) def entityActionFailedExit(entityValueList, errMsg, i=0, count=0): systemErrorExit(ACTION_FAILED_RC, formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Failed(), errMsg], currentCountNL(i, count))) def entityDoesNotExistExit(entityType, entityName, i=0, count=0, errMsg=None): Cmd.Backup() writeStderr(Cmd.CommandLineWithBadArgumentMarked(False)) systemErrorExit(ENTITY_DOES_NOT_EXIST_RC, formatKeyValueList(Ind.Spaces(), [Ent.Singular(entityType), entityName, errMsg or Msg.DOES_NOT_EXIST], currentCountNL(i, count))) def entityDoesNotHaveItemExit(entityValueList, i=0, count=0): Cmd.Backup() writeStderr(Cmd.CommandLineWithBadArgumentMarked(False)) systemErrorExit(ENTITY_DOES_NOT_EXIST_RC, formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Msg.DOES_NOT_EXIST], currentCountNL(i, count))) def entityIsNotUniqueExit(entityType, entityName, valueType, valueList, i=0, count=0): Cmd.Backup() writeStderr(Cmd.CommandLineWithBadArgumentMarked(False)) systemErrorExit(ENTITY_IS_NOT_UNIQUE_RC, formatKeyValueList(Ind.Spaces(), [Ent.Singular(entityType), entityName, Msg.IS_NOT_UNIQUE.format(Ent.Plural(valueType), ','.join(valueList))], currentCountNL(i, count))) def usageErrorExit(message, extraneous=False): writeStderr(Cmd.CommandLineWithBadArgumentMarked(extraneous)) stderrErrorMsg(message) writeStderr(Msg.HELP_SYNTAX.format(os.path.join(GM.Globals[GM.GAM_PATH], FN_GAMCOMMANDS_TXT))) writeStderr(Msg.HELP_WIKI.format(GAM_WIKI)) sys.exit(USAGE_ERROR_RC) def csvFieldErrorExit(fieldName, fieldNames, backupArg=False, checkForCharset=False): if backupArg: Cmd.Backup() if checkForCharset and Cmd.Previous() == 'charset': Cmd.Backup() Cmd.Backup() usageErrorExit(Msg.HEADER_NOT_FOUND_IN_CSV_HEADERS.format(fieldName, ','.join(fieldNames))) def csvDataAlreadySavedErrorExit(): Cmd.Backup() 
usageErrorExit(Msg.CSV_DATA_ALREADY_SAVED) # The last thing shown is unknown def unknownArgumentExit(): Cmd.Backup() usageErrorExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]) # Argument describes what's expected def expectedArgumentExit(problem, argument): usageErrorExit(f'{problem}: {Msg.EXPECTED} <{argument}>') def blankArgumentExit(argument): expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_BLANK][1], f'{Msg.NON_BLANK} {argument}') def emptyArgumentExit(argument): expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_EMPTY][1], f'{Msg.NON_EMPTY} {argument}') def invalidArgumentExit(argument): expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1], argument) def missingArgumentExit(argument): expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_MISSING][1], argument) def deprecatedArgument(argument): Cmd.Backup() writeStderr(Cmd.CommandLineWithBadArgumentMarked(False)) Cmd.Advance() stderrWarningMsg(f'{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_DEPRECATED][1]}: {Msg.IGNORED} <{argument}>') def deprecatedArgumentExit(argument): usageErrorExit(f'{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_DEPRECATED][1]}: <{argument}>') def deprecatedCommandExit(): systemErrorExit(USAGE_ERROR_RC, Msg.SITES_COMMAND_DEPRECATED.format(Cmd.CommandDeprecated())) # Choices is the valid set of choices that was expected def formatChoiceList(choices): choiceList = [c if c else "''" for c in choices] if len(choiceList) <= 5: return '|'.join(choiceList) return '|'.join(sorted(choiceList)) def invalidChoiceExit(choice, choices, backupArg): if backupArg: Cmd.Backup() expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID_CHOICE][1].format(choice), formatChoiceList(choices)) def missingChoiceExit(choices): expectedArgumentExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_MISSING][1], formatChoiceList(choices)) # Check if argument present def checkArgumentPresent(choices, required=False): choiceList = choices if isinstance(choices, (list, set)) else [choices] if Cmd.ArgumentsRemaining(): choice = Cmd.Current().strip().lower().replace('_', '') if choice: if choice in choiceList: Cmd.Advance() return True if not required: return False invalidChoiceExit(choice, choiceList, False) elif not required: return False missingChoiceExit(choiceList) # Check that there are no extraneous arguments at the end of the command line def checkForExtraneousArguments(): if Cmd.ArgumentsRemaining(): usageErrorExit(Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_EXTRANEOUS][[1, 0][Cmd.MultipleArgumentsRemaining()]], extraneous=True) # Check that an argument remains, get an argument, downshift, delete underscores def checkGetArgument(): if Cmd.ArgumentsRemaining(): argument = Cmd.Current().lower() if argument: Cmd.Advance() return argument.replace('_', '') missingArgumentExit(Cmd.OB_ARGUMENT) # Get an argument, downshift, delete underscores def getArgument(): argument = Cmd.Current().lower() if argument: Cmd.Advance() return argument.replace('_', '') missingArgumentExit(Cmd.OB_ARGUMENT) # Get an argument, downshift, delete underscores # An empty argument is allowed def getArgumentEmptyAllowed(): argument = Cmd.Current().lower() Cmd.Advance() return argument.replace('_', '') def getACLRoles(aclRolesMap): roles = [] for role in getString(Cmd.OB_ROLE_LIST, minLen=0).strip().lower().replace(',', ' ').split(): if role == 'all': for arole in aclRolesMap: roles.append(aclRolesMap[arole]) elif role in aclRolesMap: roles.append(aclRolesMap[role]) else: invalidChoiceExit(role, aclRolesMap, True) return set(roles) def 
getBoolean(defaultValue=True): if Cmd.ArgumentsRemaining(): boolean = Cmd.Current().strip().lower() if boolean in TRUE_VALUES: Cmd.Advance() return True if boolean in FALSE_VALUES: Cmd.Advance() return False if defaultValue is not None: if not Cmd.Current().strip(): # If current argument is empty, skip over it Cmd.Advance() return defaultValue invalidChoiceExit(boolean, TRUE_FALSE, False) if defaultValue is not None: return defaultValue missingChoiceExit(TRUE_FALSE) def getCharSet(): if checkArgumentPresent('charset'): return getString(Cmd.OB_CHAR_SET) return GC.Values[GC.CHARSET] DEFAULT_CHOICE = 'defaultChoice' CHOICE_ALIASES = 'choiceAliases' MAP_CHOICE = 'mapChoice' NO_DEFAULT = 'NoDefault' def getChoice(choices, **opts): if Cmd.ArgumentsRemaining(): choice = Cmd.Current().strip().lower() if choice or '' in choices: if choice in opts.get(CHOICE_ALIASES, []): choice = opts[CHOICE_ALIASES][choice] if choice not in choices: choice = choice.replace('_', '').replace('-', '') if choice in opts.get(CHOICE_ALIASES, []): choice = opts[CHOICE_ALIASES][choice] if choice in choices: Cmd.Advance() return choice if not opts.get(MAP_CHOICE, False) else choices[choice] if opts.get(DEFAULT_CHOICE, NO_DEFAULT) != NO_DEFAULT: return opts[DEFAULT_CHOICE] invalidChoiceExit(choice, choices, False) elif opts.get(DEFAULT_CHOICE, NO_DEFAULT) != NO_DEFAULT: return opts[DEFAULT_CHOICE] missingChoiceExit(choices) def getChoiceAndValue(item, choices, delimiter): if not Cmd.ArgumentsRemaining() or Cmd.Current().find(delimiter) == -1: return (None, None) choice, value = Cmd.Current().strip().split(delimiter, 1) choice = choice.strip().lower() value = value.strip() if choice in choices: if value: Cmd.Advance() return (choice, value) missingArgumentExit(item) invalidChoiceExit(choice, choices, False) SUSPENDED_ARGUMENTS = {'notsuspended', 'suspended', 'issuspended'} SUSPENDED_CHOICE_MAP = {'notsuspended': False, 'suspended': True} def _getIsSuspended(myarg): if myarg in SUSPENDED_CHOICE_MAP: return SUSPENDED_CHOICE_MAP[myarg] return getBoolean() ARCHIVED_ARGUMENTS = {'notarchived', 'archived', 'isarchived'} ARCHIVED_CHOICE_MAP = {'notarchived': False, 'archived': True} def _getIsArchived(myarg): if myarg in ARCHIVED_CHOICE_MAP: return ARCHIVED_CHOICE_MAP[myarg] return getBoolean() def _getOptionalIsSuspendedIsArchived(): isSuspended = isArchived = None while True: if Cmd.PeekArgumentPresent(SUSPENDED_ARGUMENTS): isSuspended = getChoice(SUSPENDED_CHOICE_MAP, defaultChoice=None, mapChoice=True) if isSuspended is None: isSuspended = getBoolean() elif Cmd.PeekArgumentPresent(ARCHIVED_ARGUMENTS): isArchived = getChoice(ARCHIVED_CHOICE_MAP, defaultChoice=None, mapChoice=True) if isArchived is None: isArchived = getBoolean() else: break return isSuspended, isArchived CALENDAR_COLOR_MAP = { 'amethyst': 24, 'avocado': 10, 'banana': 12, 'basil': 8, 'birch': 20, 'blueberry': 16, 'cherryblossom': 22, 'citron': 11, 'cobalt': 15, 'cocoa': 1, 'eucalyptus': 7, 'flamingo': 2, 'grape': 23, 'graphite': 19, 'lavender': 17, 'mango': 6, 'peacock': 14, 'pistachio': 9, 'pumpkin': 5, 'radicchio': 21, 'sage': 13, 'tangerine': 4, 'tomato': 3, 'wisteria': 18, } CALENDAR_EVENT_COLOR_MAP = { 'banana': 5, 'basil': 10, 'blueberry': 9, 'flamingo': 4, 'graphite': 8, 'grape': 3, 'lavender': 1, 'peacock': 7, 'sage': 2, 'tangerine': 6, 'tomato': 11, } GOOGLE_COLOR_MAP = { 'asparagus': '#7bd148', 'bluevelvet': '#9a9cff', 'bubblegum': '#f691b2', 'cardinal': '#f83a22', 'chocolateicecream': '#ac725e', 'denim': '#9fc6e7', 'desertsand': '#fbe983', 'earthworm': 
'#cca6ac', 'macaroni': '#fad165', 'marsorange': '#ff7537', 'mountaingray': '#cabdbf', 'mountaingrey': '#cabdbf', 'mouse': '#8f8f8f', 'oldbrickred': '#d06b64', 'pool': '#9fe1e7', 'purpledino': '#b99aff', 'purplerain': '#cd74e6', 'rainysky': '#4986e7', 'seafoam': '#92e1c0', 'slimegreen': '#b3dc6c', 'spearmint': '#42d692', 'toyeggplant': '#a47ae2', 'vernfern': '#16a765', 'wildstrawberries': '#fa573c', 'yellowcab': '#ffad46', } WEB_COLOR_MAP = { 'aliceblue': '#f0f8ff', 'antiquewhite': '#faebd7', 'aqua': '#00ffff', 'aquamarine': '#7fffd4', 'azure': '#f0ffff', 'beige': '#f5f5dc', 'bisque': '#ffe4c4', 'black': '#000000', 'blanchedalmond': '#ffebcd', 'blue': '#0000ff', 'blueviolet': '#8a2be2', 'brown': '#a52a2a', 'burlywood': '#deb887', 'cadetblue': '#5f9ea0', 'chartreuse': '#7fff00', 'chocolate': '#d2691e', 'coral': '#ff7f50', 'cornflowerblue': '#6495ed', 'cornsilk': '#fff8dc', 'crimson': '#dc143c', 'cyan': '#00ffff', 'darkblue': '#00008b', 'darkcyan': '#008b8b', 'darkgoldenrod': '#b8860b', 'darkgray': '#a9a9a9', 'darkgrey': '#a9a9a9', 'darkgreen': '#006400', 'darkkhaki': '#bdb76b', 'darkmagenta': '#8b008b', 'darkolivegreen': '#556b2f', 'darkorange': '#ff8c00', 'darkorchid': '#9932cc', 'darkred': '#8b0000', 'darksalmon': '#e9967a', 'darkseagreen': '#8fbc8f', 'darkslateblue': '#483d8b', 'darkslategray': '#2f4f4f', 'darkslategrey': '#2f4f4f', 'darkturquoise': '#00ced1', 'darkviolet': '#9400d3', 'deeppink': '#ff1493', 'deepskyblue': '#00bfff', 'dimgray': '#696969', 'dimgrey': '#696969', 'dodgerblue': '#1e90ff', 'firebrick': '#b22222', 'floralwhite': '#fffaf0', 'forestgreen': '#228b22', 'fuchsia': '#ff00ff', 'gainsboro': '#dcdcdc', 'ghostwhite': '#f8f8ff', 'gold': '#ffd700', 'goldenrod': '#daa520', 'gray': '#808080', 'grey': '#808080', 'green': '#008000', 'greenyellow': '#adff2f', 'honeydew': '#f0fff0', 'hotpink': '#ff69b4', 'indianred': '#cd5c5c', 'indigo': '#4b0082', 'ivory': '#fffff0', 'khaki': '#f0e68c', 'lavender': '#e6e6fa', 'lavenderblush': '#fff0f5', 'lawngreen': '#7cfc00', 'lemonchiffon': '#fffacd', 'lightblue': '#add8e6', 'lightcoral': '#f08080', 'lightcyan': '#e0ffff', 'lightgoldenrodyellow': '#fafad2', 'lightgray': '#d3d3d3', 'lightgrey': '#d3d3d3', 'lightgreen': '#90ee90', 'lightpink': '#ffb6c1', 'lightsalmon': '#ffa07a', 'lightseagreen': '#20b2aa', 'lightskyblue': '#87cefa', 'lightslategray': '#778899', 'lightslategrey': '#778899', 'lightsteelblue': '#b0c4de', 'lightyellow': '#ffffe0', 'lime': '#00ff00', 'limegreen': '#32cd32', 'linen': '#faf0e6', 'magenta': '#ff00ff', 'maroon': '#800000', 'mediumaquamarine': '#66cdaa', 'mediumblue': '#0000cd', 'mediumorchid': '#ba55d3', 'mediumpurple': '#9370db', 'mediumseagreen': '#3cb371', 'mediumslateblue': '#7b68ee', 'mediumspringgreen': '#00fa9a', 'mediumturquoise': '#48d1cc', 'mediumvioletred': '#c71585', 'midnightblue': '#191970', 'mintcream': '#f5fffa', 'mistyrose': '#ffe4e1', 'moccasin': '#ffe4b5', 'navajowhite': '#ffdead', 'navy': '#000080', 'oldlace': '#fdf5e6', 'olive': '#808000', 'olivedrab': '#6b8e23', 'orange': '#ffa500', 'orangered': '#ff4500', 'orchid': '#da70d6', 'palegoldenrod': '#eee8aa', 'palegreen': '#98fb98', 'paleturquoise': '#afeeee', 'palevioletred': '#db7093', 'papayawhip': '#ffefd5', 'peachpuff': '#ffdab9', 'peru': '#cd853f', 'pink': '#ffc0cb', 'plum': '#dda0dd', 'powderblue': '#b0e0e6', 'purple': '#800080', 'red': '#ff0000', 'rosybrown': '#bc8f8f', 'royalblue': '#4169e1', 'saddlebrown': '#8b4513', 'salmon': '#fa8072', 'sandybrown': '#f4a460', 'seagreen': '#2e8b57', 'seashell': '#fff5ee', 'sienna': '#a0522d', 'silver': 
'#c0c0c0', 'skyblue': '#87ceeb', 'slateblue': '#6a5acd', 'slategray': '#708090', 'slategrey': '#708090', 'snow': '#fffafa', 'springgreen': '#00ff7f', 'steelblue': '#4682b4', 'tan': '#d2b48c', 'teal': '#008080', 'thistle': '#d8bfd8', 'tomato': '#ff6347', 'turquoise': '#40e0d0', 'violet': '#ee82ee', 'wheat': '#f5deb3', 'white': '#ffffff', 'whitesmoke': '#f5f5f5', 'yellow': '#ffff00', 'yellowgreen': '#9acd32', } COLORHEX_PATTERN = re.compile(r'^#[0-9a-fA-F]{6}$') COLORHEX_FORMAT_REQUIRED = 'ColorName|ColorHex' def getColor(): if Cmd.ArgumentsRemaining(): color = Cmd.Current().strip().lower() if color in GOOGLE_COLOR_MAP: Cmd.Advance() return GOOGLE_COLOR_MAP[color] if color in WEB_COLOR_MAP: Cmd.Advance() return WEB_COLOR_MAP[color] tg = COLORHEX_PATTERN.match(color) if tg: Cmd.Advance() return tg.group(0) invalidArgumentExit(COLORHEX_FORMAT_REQUIRED) missingArgumentExit(COLORHEX_FORMAT_REQUIRED) LABEL_COLORS = [ '#000000', '#076239', '#0b804b', '#149e60', '#16a766', '#1a764d', '#1c4587', '#285bac', '#2a9c68', '#3c78d8', '#3dc789', '#41236d', '#434343', '#43d692', '#44b984', '#4a86e8', '#653e9b', '#666666', '#68dfa9', '#6d9eeb', '#822111', '#83334c', '#89d3b2', '#8e63ce', '#999999', '#a0eac9', '#a46a21', '#a479e2', '#a4c2f4', '#aa8831', '#ac2b16', '#b65775', '#b694e8', '#b9e4d0', '#c6f3de', '#c9daf8', '#cc3a21', '#cccccc', '#cf8933', '#d0bcf1', '#d5ae49', '#e07798', '#e4d7f5', '#e66550', '#eaa041', '#efa093', '#efefef', '#f2c960', '#f3f3f3', '#f691b3', '#f6c5be', '#f7a7c0', '#fad165', '#fb4c2f', '#fbc8d9', '#fcda83', '#fcdee8', '#fce8b3', '#fef1d1', '#ffad47', '#ffbc6b', '#ffd6a2', '#ffe6c7', '#ffffff', ] LABEL_BACKGROUND_COLORS = [ '#16a765', '#2da2bb', '#42d692', '#4986e7', '#98d7e4', '#a2dcc1', '#b3efd3', '#b6cff5', '#b99aff', '#c2c2c2', '#cca6ac', '#e3d7ff', '#e7e7e7', '#ebdbde', '#f2b2a8', '#f691b2', '#fb4c2f', '#fbd3e0', '#fbe983', '#fdedc1', '#ff7537', '#ffad46', '#ffc8af', '#ffdeb5', ] LABEL_TEXT_COLORS = [ '#04502e', '#094228', '#0b4f30', '#0d3472', '#0d3b44', '#3d188e', '#464646', '#594c05', '#662e37', '#684e07', '#711a36', '#7a2e0b', '#7a4706', '#8a1c0a', '#994a64', '#ffffff', ] def getLabelColor(colorType): if Cmd.ArgumentsRemaining(): color = Cmd.Current().strip().lower() tg = COLORHEX_PATTERN.match(color) if tg: color = tg.group(0) if color in colorType or color in LABEL_COLORS: Cmd.Advance() return color elif color.startswith('custom:'): tg = COLORHEX_PATTERN.match(color[7:]) if tg: Cmd.Advance() return tg.group(0) invalidArgumentExit('|'.join(colorType)) missingArgumentExit(Cmd.OB_LABEL_COLOR_HEX) # Language codes used in Drive Labels/Youtube BCP47_LANGUAGE_CODES_MAP = { 'ar-sa': 'ar-SA', 'cs-cz': 'cs-CZ', 'da-dk': 'da-DK', 'de-de': 'de-DE', #Arabic Saudi Arabia, Czech Czech Republic, Danish Denmark, German Germany 'el-gr': 'el-GR', 'en-au': 'en-AU', 'en-gb': 'en-GB', 'en-ie': 'en-IE', #Modern Greek Greece, English Australia, English United Kingdom, English Ireland 'en-us': 'en-US', 'en-za': 'en-ZA', 'es-es': 'es-ES', 'es-mx': 'es-MX', #English United States, English South Africa, Spanish Spain, Spanish Mexico 'fi-fi': 'fi-FI', 'fr-ca': 'fr-CA', 'fr-fr': 'fr-FR', 'he-il': 'he-IL', #Finnish Finland, French Canada, French France, Hebrew Israel 'hi-in': 'hi-IN', 'hu-hu': 'hu-HU', 'id-id': 'id-ID', 'it-it': 'it-IT', #Hindi India, Hungarian Hungary, Indonesian Indonesia, Italian Italy 'ja-jp': 'ja-JP', 'ko-kr': 'ko-KR', 'nl-be': 'nl-BE', 'nl-nl': 'nl-NL', #Japanese Japan, Korean Republic of Korea, Dutch Belgium, Dutch Netherlands 'no-no': 'no-NO', 'pl-pl': 'pl-PL', 'pt-br': 
'pt-BR', 'pt-pt': 'pt-PT', #Norwegian Norway, Polish Poland, Portuguese Brazil, Portuguese Portugal 'ro-ro': 'ro-RO', 'ru-ru': 'ru-RU', 'sk-sk': 'sk-SK', 'sv-se': 'sv-SE', #Romanian Romania, Russian Russian Federation, Slovak Slovakia, Swedish Sweden 'th-th': 'th-TH', 'tr-tr': 'tr-TR', 'zh-cn': 'zh-CN', 'zh-hk': 'zh-HK', #Thai Thailand, Turkish Turkey, Chinese China, Chinese Hong Kong 'zh-tw': 'zh-TW' #Chinese Taiwan } # Valid language codes LANGUAGE_CODES_MAP = { 'ach': 'ach', 'af': 'af', 'ag': 'ga', 'ak': 'ak', 'am': 'am', 'ar': 'ar', 'az': 'az', #Luo, Afrikaans, Irish, Akan, Amharic, Arabica, Azerbaijani 'be': 'be', 'bem': 'bem', 'bg': 'bg', 'bn': 'bn', 'br': 'br', 'bs': 'bs', 'ca': 'ca', #Belarusian, Bemba, Bulgarian, Bengali, Breton, Bosnian, Catalan 'chr': 'chr', 'ckb': 'ckb', 'co': 'co', 'crs': 'crs', 'cs': 'cs', 'cy': 'cy', 'da': 'da', #Cherokee, Kurdish (Sorani), Corsican, Seychellois Creole, Czech, Welsh, Danish 'de': 'de', 'ee': 'ee', 'el': 'el', 'en': 'en', 'en-ca': 'en-CA', 'en-gb': 'en-GB', 'en-us': 'en-US', 'eo': 'eo', #German, Ewe, Greek, English, English (CA), English (UK), English (US), Esperanto 'es': 'es', 'es-419': 'es-419', 'et': 'et', 'eu': 'eu', 'fa': 'fa', 'fi': 'fi', 'fil': 'fil', 'fo': 'fo', #Spanish, Spanish (Latin American), Estonian, Basque, Persian, Finnish, Filipino, Faroese 'fr': 'fr', 'fr-ca': 'fr-CA', 'fy': 'fy', 'ga': 'ga', 'gaa': 'gaa', 'gd': 'gd', 'gl': 'gl', #French, French (Canada), Frisian, Irish, Ga, Scots Gaelic, Galician 'gn': 'gn', 'gu': 'gu', 'ha': 'ha', 'haw': 'haw', 'he': 'he', 'hi': 'hi', 'hr': 'hr', #Guarani, Gujarati, Hausa, Hawaiian, Hebrew, Hindi, Croatian 'ht': 'ht', 'hu': 'hu', 'hy': 'hy', 'ia': 'ia', 'id': 'id', 'ig': 'ig', 'in': 'in', #Haitian Creole, Hungarian, Armenian, Interlingua, Indonesian, Igbo, in 'is': 'is', 'it': 'it', 'iw': 'iw', 'ja': 'ja', 'jw': 'jw', 'ka': 'ka', 'kg': 'kg', #Icelandic, Italian, Hebrew, Japanese, Javanese, Georgian, Kongo 'kk': 'kk', 'km': 'km', 'kn': 'kn', 'ko': 'ko', 'kri': 'kri', 'k': 'k', 'ky': 'ky', #Kazakh, Khmer, Kannada, Korean, Krio (Sierra Leone), Kurdish, Kyrgyz 'la': 'la', 'lg': 'lg', 'ln': 'ln', 'lo': 'lo', 'loz': 'loz', 'lt': 'lt', 'lua': 'lua', #Latin, Luganda, Lingala, Laothian, Lozi, Lithuanian, Tshiluba 'lv': 'lv', 'mfe': 'mfe', 'mg': 'mg', 'mi': 'mi', 'mk': 'mk', 'ml': 'ml', 'mn': 'mn', #Latvian, Mauritian Creole, Malagasy, Maori, Macedonian, Malayalam, Mongolian 'mo': 'mo', 'mr': 'mr', 'ms': 'ms', 'mt': 'mt', 'my': 'my', 'ne': 'ne', 'nl': 'nl', #Moldavian, Marathi, Malay, Maltese, Burmese, Nepali, Dutch 'nn': 'nn', 'no': 'no', 'nso': 'nso', 'ny': 'ny', 'nyn': 'nyn', 'oc': 'oc', 'om': 'om', #Norwegian (Nynorsk), Norwegian, Northern Sotho, Chichewa, Runyakitara, Occitan, Oromo 'or': 'or', 'pa': 'pa', 'pcm': 'pcm', 'pl': 'pl', 'ps': 'ps', 'pt-br': 'pt-BR', 'pt-pt': 'pt-PT', #Oriya, Punjabi, Nigerian Pidgin, Polish, Pashto, Portuguese (Brazil), Portuguese (Portugal) 'q': 'q', 'rm': 'rm', 'rn': 'rn', 'ro': 'ro', 'ru': 'ru', 'rw': 'rw', 'sd': 'sd', #Quechua, Romansh, Kirundi, Romanian, Russian, Kinyarwanda, Sindhi 'sh': 'sh', 'si': 'si', 'sk': 'sk', 'sl': 'sl', 'sn': 'sn', 'so': 'so', 'sq': 'sq', #Serbo-Croatian, Sinhalese, Slovak, Slovenian, Shona, Somali, Albanian 'sr': 'sr', 'sr-me': 'sr-ME', 'st': 'st', 'su': 'su', 'sv': 'sv', 'sw': 'sw', 'ta': 'ta', #Serbian, Montenegrin, Sesotho, Sundanese, Swedish, Swahili, Tamil 'te': 'te', 'tg': 'tg', 'th': 'th', 'ti': 'ti', 'tk': 'tk', 'tl': 'tl', 'tn': 'tn', #Telugu, Tajik, Thai, Tigrinya, Turkmen, Tagalog, Setswana 'to': 'to', 'tr': 'tr', 'tt': 
'tt', 'tum': 'tum', 'tw': 'tw', 'ug': 'ug', 'uk': 'uk', #Tonga, Turkish, Tatar, Tumbuka, Twi, Uighur, Ukrainian 'ur': 'ur', 'uz': 'uz', 'vi': 'vi', 'wo': 'wo', 'xh': 'xh', 'yi': 'yi', 'yo': 'yo', #Urdu, Uzbek, Vietnamese, Wolof, Xhosa, Yiddish, Yoruba 'zh-cn': 'zh-CN', 'zh-hk': 'zh-HK', 'zh-tw': 'zh-TW', 'zu': 'zu', #Chinese (Simplified), Chinese (Hong Kong/Traditional), Chinese (Taiwan/Traditional), Zulu } LOCALE_CODES_MAP = { '': '', 'ar-eg': 'ar_EG', #Arabic, Egypt 'az-az': 'az_AZ', #Azerbaijani, Azerbaijan 'be-by': 'be_BY', #Belarusian, Belarus 'bg-bg': 'bg_BG', #Bulgarian, Bulgaria 'bn-in': 'bn_IN', #Bengali, India 'ca-es': 'ca_ES', #Catalan, Spain 'cs-cz': 'cs_CZ', #Czech, Czech Republic 'cy-gb': 'cy_GB', #Welsh, United Kingdom 'da-dk': 'da_DK', #Danish, Denmark 'de-ch': 'de_CH', #German, Switzerland 'de-de': 'de_DE', #German, Germany 'el-gr': 'el_GR', #Greek, Greece 'en-au': 'en_AU', #English, Australia 'en-ca': 'en_CA', #English, Canada 'en-gb': 'en_GB', #English, United Kingdom 'en-ie': 'en_IE', #English, Ireland 'en-us': 'en_US', #English, U.S.A. 'es-ar': 'es_AR', #Spanish, Argentina 'es-bo': 'es_BO', #Spanish, Bolivia 'es-cl': 'es_CL', #Spanish, Chile 'es-co': 'es_CO', #Spanish, Colombia 'es-ec': 'es_EC', #Spanish, Ecuador 'es-es': 'es_ES', #Spanish, Spain 'es-mx': 'es_MX', #Spanish, Mexico 'es-py': 'es_PY', #Spanish, Paraguay 'es-uy': 'es_UY', #Spanish, Uruguay 'es-ve': 'es_VE', #Spanish, Venezuela 'fi-fi': 'fi_FI', #Finnish, Finland 'fil-ph': 'fil_PH', #Filipino, Philippines 'fr-ca': 'fr_CA', #French, Canada 'fr-fr': 'fr_FR', #French, France 'gu-in': 'gu_IN', #Gujarati, India 'hi-in': 'hi_IN', #Hindi, India 'hr-hr': 'hr_HR', #Croatian, Croatia 'hu-hu': 'hu_HU', #Hungarian, Hungary 'hy-am': 'hy_AM', #Armenian, Armenia 'in-id': 'in_ID', #Indonesian, Indonesia 'it-it': 'it_IT', #Italian, Italy 'iw-il': 'iw_IL', #Hebrew, Israel 'ja-jp': 'ja_JP', #Japanese, Japan 'ka-ge': 'ka_GE', #Georgian, Georgia 'kk-kz': 'kk_KZ', #Kazakh, Kazakhstan 'kn-in': 'kn_IN', #Kannada, India 'ko-kr': 'ko_KR', #Korean, Korea 'lt-lt': 'lt_LT', #Lithuanian, Lithuania 'lv-lv': 'lv_LV', #Latvian, Latvia 'ml-in': 'ml_IN', #Malayalam, India 'mn-mn': 'mn_MN', #Mongolian, Mongolia 'mr-in': 'mr_IN', #Marathi, India 'my-mn': 'my_MN', #Burmese, Myanmar 'nl-nl': 'nl_NL', #Dutch, Netherlands 'nn-no': 'nn_NO', #Nynorsk, Norway 'no-no': 'no_NO', #Bokmal, Norway 'pa-in': 'pa_IN', #Punjabi, India 'pl-pl': 'pl_PL', #Polish, Poland 'pt-br': 'pt_BR', #Portuguese, Brazil 'pt-pt': 'pt_PT', #Portuguese, Portugal 'ro-ro': 'ro_RO', #Romanian, Romania 'ru-ru': 'ru_RU', #Russian, Russia 'sk-sk': 'sk_SK', #Slovak, Slovakia 'sl-si': 'sl_SI', #Slovenian, Slovenia 'sr-rs': 'sr_RS', #Serbian, Serbia 'sv-se': 'sv_SE', #Swedish, Sweden 'ta-in': 'ta_IN', #Tamil, India 'te-in': 'te_IN', #Telugu, India 'th-th': 'th_TH', #Thai, Thailand 'tr-tr': 'tr_TR', #Turkish, Turkey 'uk-ua': 'uk_UA', #Ukrainian, Ukraine 'vi-vn': 'vi_VN', #Vietnamese, Vietnam 'zh-cn': 'zh_CN', #Simplified Chinese, China 'zh-hk': 'zh_HK', #Traditional Chinese, Hong Kong SAR China 'zh-tw': 'zh_TW', #Traditional Chinese, Taiwan } def getLanguageCode(languageCodeMap): if Cmd.ArgumentsRemaining(): choice = Cmd.Current().strip().lower().replace('_', '-') if choice in languageCodeMap: Cmd.Advance() return languageCodeMap[choice] invalidChoiceExit(choice, languageCodeMap, False) missingChoiceExit(languageCodeMap) def addCourseIdScope(courseId): if not courseId.isdigit() and courseId[:2] not in {'d:', 'p:'}: return f'd:{courseId}' return courseId def 
removeCourseIdScope(courseId): if courseId.startswith('d:'): return courseId[2:] return courseId def addCourseAliasScope(alias): if alias[:2] not in {'d:', 'p:'}: return f'd:{alias}' return alias def removeCourseAliasScope(alias): if alias.startswith('d:'): return alias[2:] return alias def getCourseAlias(): if Cmd.ArgumentsRemaining(): courseAlias = Cmd.Current() if courseAlias: Cmd.Advance() return addCourseAliasScope(courseAlias) missingArgumentExit(Cmd.OB_COURSE_ALIAS) DELIVERY_SETTINGS_UNDEFINED = 'DSU' GROUP_DELIVERY_SETTINGS_MAP = { 'allmail': 'ALL_MAIL', 'abridged': 'DAILY', 'daily': 'DAILY', 'digest': 'DIGEST', 'disabled': 'DISABLED', 'none': 'NONE', 'nomail': 'NONE', } def getDeliverySettings(): if checkArgumentPresent(['delivery', 'deliverysettings']): return getChoice(GROUP_DELIVERY_SETTINGS_MAP, mapChoice=True) return getChoice(GROUP_DELIVERY_SETTINGS_MAP, defaultChoice=DELIVERY_SETTINGS_UNDEFINED, mapChoice=True) UID_PATTERN = re.compile(r'u?id: ?(.+)', re.IGNORECASE) PEOPLE_PATTERN = re.compile(r'people/([0-9]+)$', re.IGNORECASE) def validateEmailAddressOrUID(emailAddressOrUID, checkPeople=True, ciGroupsAPI=False): cg = UID_PATTERN.match(emailAddressOrUID) if cg: return cg.group(1) if checkPeople: cg = PEOPLE_PATTERN.match(emailAddressOrUID) if cg: return cg.group(1) if ciGroupsAPI and emailAddressOrUID.startswith('groups/'): return emailAddressOrUID return emailAddressOrUID.find('@') != 0 and emailAddressOrUID.count('@') <= 1 # Normalize user/group email address/uid # uid:12345abc -> 12345abc # foo -> foo@domain # foo@ -> foo@domain # foo@bar.com -> foo@bar.com # @domain -> domain def normalizeEmailAddressOrUID(emailAddressOrUID, noUid=False, checkForCustomerId=False, noLower=False, ciGroupsAPI=False): if checkForCustomerId and (emailAddressOrUID == GC.Values[GC.CUSTOMER_ID]): return emailAddressOrUID if not noUid: cg = UID_PATTERN.match(emailAddressOrUID) if cg: return cg.group(1) cg = PEOPLE_PATTERN.match(emailAddressOrUID) if cg: return cg.group(1) if ciGroupsAPI and emailAddressOrUID.startswith('groups/'): return emailAddressOrUID atLoc = emailAddressOrUID.find('@') if atLoc == 0: return emailAddressOrUID[1:].lower() if not noLower else emailAddressOrUID[1:] if (atLoc == -1) or (atLoc == len(emailAddressOrUID)-1) and GC.Values[GC.DOMAIN]: if atLoc == -1: emailAddressOrUID = f'{emailAddressOrUID}@{GC.Values[GC.DOMAIN]}' else: emailAddressOrUID = f'{emailAddressOrUID}{GC.Values[GC.DOMAIN]}' return emailAddressOrUID.lower() if not noLower else emailAddressOrUID # Normalize student/guardian email address/uid # 12345678 -> 12345678 # - -> - # Otherwise, same results as normalizeEmailAddressOrUID def normalizeStudentGuardianEmailAddressOrUID(emailAddressOrUID, allowDash=False): if emailAddressOrUID.isdigit() or (allowDash and emailAddressOrUID == '-'): return emailAddressOrUID return normalizeEmailAddressOrUID(emailAddressOrUID) def getEmailAddress(noUid=False, minLen=1, optional=False, returnUIDprefix=''): if Cmd.ArgumentsRemaining(): emailAddress = Cmd.Current().strip().lower() if emailAddress: cg = UID_PATTERN.match(emailAddress) if cg: if not noUid: if cg.group(1): Cmd.Advance() return f'{returnUIDprefix}{cg.group(1)}' else: invalidArgumentExit('name@domain') else: atLoc = emailAddress.find('@') if atLoc == -1: if GC.Values[GC.DOMAIN]: emailAddress = f'{emailAddress}@{GC.Values[GC.DOMAIN]}' Cmd.Advance() return emailAddress if atLoc != 0: if (atLoc == len(emailAddress)-1) and GC.Values[GC.DOMAIN]: emailAddress = f'{emailAddress}{GC.Values[GC.DOMAIN]}' Cmd.Advance() return 
emailAddress invalidArgumentExit('name@domain') if optional: Cmd.Advance() return None if minLen == 0: Cmd.Advance() return '' elif optional: return None missingArgumentExit([Cmd.OB_EMAIL_ADDRESS_OR_UID, Cmd.OB_EMAIL_ADDRESS][noUid]) def getFilename(): filename = os.path.expanduser(getString(Cmd.OB_FILE_NAME)) if os.path.isfile(filename): return filename entityDoesNotExistExit(Ent.FILE, filename) def getPermissionId(): if Cmd.ArgumentsRemaining(): emailAddress = Cmd.Current().strip() if emailAddress: cg = UID_PATTERN.match(emailAddress) if cg: Cmd.Advance() return (False, cg.group(1)) emailAddress = emailAddress.lower() atLoc = emailAddress.find('@') if atLoc == -1: if emailAddress == 'anyone': Cmd.Advance() return (False, emailAddress) if emailAddress == 'anyonewithlink': Cmd.Advance() return (False, 'anyoneWithLink') if GC.Values[GC.DOMAIN]: emailAddress = f'{emailAddress}@{GC.Values[GC.DOMAIN]}' Cmd.Advance() return (True, emailAddress) if atLoc != 0: if (atLoc == len(emailAddress)-1) and GC.Values[GC.DOMAIN]: emailAddress = f'{emailAddress}{GC.Values[GC.DOMAIN]}' Cmd.Advance() return (True, emailAddress) invalidArgumentExit('name@domain') missingArgumentExit(Cmd.OB_DRIVE_FILE_PERMISSION_ID) def getGoogleProduct(): if Cmd.ArgumentsRemaining(): product = Cmd.Current().strip() if product: status, productId = SKU.normalizeProductId(product) if not status: invalidChoiceExit(productId, SKU.getSortedProductList(), False) Cmd.Advance() return productId missingArgumentExit(Cmd.OB_PRODUCT_ID) def getGoogleProductList(): if Cmd.ArgumentsRemaining(): productsList = [] for product in Cmd.Current().split(','): status, productId = SKU.normalizeProductId(product) if not status: invalidChoiceExit(productId, SKU.getSortedProductList(), False) if productId not in productsList: productsList.append(productId) Cmd.Advance() return productsList missingArgumentExit(Cmd.OB_PRODUCT_ID_LIST) def getGoogleSKU(): if Cmd.ArgumentsRemaining(): sku = Cmd.Current().strip() if sku: Cmd.Advance() return SKU.getProductAndSKU(sku) missingArgumentExit(Cmd.OB_SKU_ID) def getGoogleSKUList(allowUnknownProduct=False): if Cmd.ArgumentsRemaining(): skusList = [] for sku in Cmd.Current().split(','): productId, sku = SKU.getProductAndSKU(sku) if not productId and not allowUnknownProduct: invalidChoiceExit(sku, SKU.getSortedSKUList(), False) if (productId, sku) not in skusList: skusList.append((productId, sku)) Cmd.Advance() return skusList missingArgumentExit(Cmd.OB_SKU_ID_LIST) def floatLimits(minVal, maxVal, item='float'): if (minVal is not None) and (maxVal is not None): return f'{item} {minVal:.3f}<=x<={maxVal:.3f}' if minVal is not None: return f'{item} x>={minVal:.3f}' if maxVal is not None: return f'{item} x<={maxVal:.3f}' return f'{item} x' def getFloat(minVal=None, maxVal=None): if Cmd.ArgumentsRemaining(): try: number = float(Cmd.Current().strip()) if ((minVal is None) or (number >= minVal)) and ((maxVal is None) or (number <= maxVal)): Cmd.Advance() return number except ValueError: pass invalidArgumentExit(floatLimits(minVal, maxVal)) missingArgumentExit(floatLimits(minVal, maxVal)) def integerLimits(minVal, maxVal, item='integer'): if (minVal is not None) and (maxVal is not None): return f'{item} {minVal}<=x<={maxVal}' if minVal is not None: return f'{item} x>={minVal}' if maxVal is not None: return f'{item} x<={maxVal}' return f'{item} x' def getInteger(minVal=None, maxVal=None, default=None): if Cmd.ArgumentsRemaining(): try: number = int(Cmd.Current().strip()) if ((minVal is None) or (number >= minVal)) and ((maxVal 
is None) or (number <= maxVal)): Cmd.Advance() return number except ValueError: if default is not None: if not Cmd.Current().strip(): # If current argument is empty, skip over it Cmd.Advance() return default invalidArgumentExit(integerLimits(minVal, maxVal)) elif default is not None: return default missingArgumentExit(integerLimits(minVal, maxVal)) def getIntegerEmptyAllowed(minVal=None, maxVal=None, default=0): if Cmd.ArgumentsRemaining(): number = Cmd.Current().strip() if not number: Cmd.Advance() return default try: number = int(number) if ((minVal is None) or (number >= minVal)) and ((maxVal is None) or (number <= maxVal)): Cmd.Advance() return number except ValueError: pass invalidArgumentExit(integerLimits(minVal, maxVal)) return default SORTORDER_CHOICE_MAP = {'ascending': 'ASCENDING', 'descending': 'DESCENDING'} class OrderBy(): def __init__(self, choiceMap, ascendingKeyword='', descendingKeyword='desc'): self.choiceMap = choiceMap self.ascendingKeyword = ascendingKeyword self.descendingKeyword = descendingKeyword self.items = [] def GetChoice(self): fieldName = getChoice(self.choiceMap, mapChoice=True) fieldNameAscending = fieldName if self.ascendingKeyword: fieldNameAscending += f' {self.ascendingKeyword}' if fieldNameAscending in self.items: self.items.remove(fieldNameAscending) fieldNameDescending = fieldName if self.descendingKeyword: fieldNameDescending += f' {self.descendingKeyword}' if fieldNameDescending in self.items: self.items.remove(fieldNameDescending) if getChoice(SORTORDER_CHOICE_MAP, defaultChoice=None, mapChoice=True) != 'DESCENDING': self.items.append(fieldNameAscending) else: self.items.append(fieldNameDescending) def SetItems(self, itemList): self.items = itemList.split(',') @property def orderBy(self): return ','.join(self.items) def getOrderBySortOrder(choiceMap, defaultSortOrderChoice='ASCENDING', mapSortOrderChoice=True): return (getChoice(choiceMap, mapChoice=True), getChoice(SORTORDER_CHOICE_MAP, defaultChoice=defaultSortOrderChoice, mapChoice=mapSortOrderChoice)) def orgUnitPathQuery(path, isSuspended): query = "orgUnitPath='{0}'".format(path.replace("'", "\\'")) if path != '/' else '' if isSuspended is not None: query += f' isSuspended={isSuspended}' return query def makeOrgUnitPathAbsolute(path): if path == '/': return path if path.startswith('/'): if not path.endswith('/'): return path return path[:-1] if path.startswith('id:'): return path if path.startswith('uid:'): return path[1:] if not path.endswith('/'): return '/'+path return '/'+path[:-1] def makeOrgUnitPathRelative(path): if path == '/': return path if path.startswith('/'): if not path.endswith('/'): return path[1:] return path[1:-1] if path.startswith('id:'): return path if path.startswith('uid:'): return path[1:] if not path.endswith('/'): return path return path[:-1] def encodeOrgUnitPath(path): # 6.22.19 - Encoding doesn't work # % no longer needs encoding and + is handled incorrectly in API with or without encoding return path # if path.find('+') == -1 and path.find('%') == -1: # return path # encpath = '' # for c in path: # if c == '+': # encpath += '%2B' # elif c == '%': # encpath += '%25' # else: # encpath += c # return encpath def getOrgUnitItem(pathOnly=False, absolutePath=True, cd=None): if Cmd.ArgumentsRemaining(): path = Cmd.Current().strip() if path: if pathOnly and (path.startswith('id:') or path.startswith('uid:')) and cd is not None: try: result = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], 
orgUnitPath=path, fields='orgUnitPath') Cmd.Advance() if absolutePath: return makeOrgUnitPathAbsolute(result['orgUnitPath']) return makeOrgUnitPathRelative(result['orgUnitPath']) except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError, GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): checkEntityAFDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, path) invalidArgumentExit(Cmd.OB_ORGUNIT_PATH) Cmd.Advance() if absolutePath: return makeOrgUnitPathAbsolute(path) return makeOrgUnitPathRelative(path) missingArgumentExit([Cmd.OB_ORGUNIT_ITEM, Cmd.OB_ORGUNIT_PATH][pathOnly]) def getTopLevelOrgId(cd, parentOrgUnitPath): if parentOrgUnitPath != '/': try: result = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=encodeOrgUnitPath(makeOrgUnitPathRelative(parentOrgUnitPath)), fields='orgUnitId') return result['orgUnitId'] except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError): return None except (GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): checkEntityAFDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, parentOrgUnitPath) return None try: result = callGAPI(cd.orgunits(), 'list', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath='/', type='allIncludingParent', fields='organizationUnits(orgUnitId,orgUnitPath)') for orgUnit in result.get('organizationUnits', []): if orgUnit['orgUnitPath'] == '/': return orgUnit['orgUnitId'] return None except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError): return None except (GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): checkEntityAFDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, parentOrgUnitPath) return None def getOrgUnitId(cd=None, orgUnit=None): if cd is None: cd = buildGAPIObject(API.DIRECTORY) if orgUnit is None: orgUnit = getOrgUnitItem() try: if orgUnit == '/': result = callGAPI(cd.orgunits(), 'list', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath='/', type='children', fields='organizationUnits(parentOrgUnitId,parentOrgUnitPath)') if result.get('organizationUnits', []): return (result['organizationUnits'][0]['parentOrgUnitPath'], result['organizationUnits'][0]['parentOrgUnitId']) topLevelOrgId = getTopLevelOrgId(cd, '/') if topLevelOrgId: return (orgUnit, topLevelOrgId) return (orgUnit, '/') #Bogus but should never happen result = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=encodeOrgUnitPath(makeOrgUnitPathRelative(orgUnit)), fields='orgUnitId,orgUnitPath') return (result['orgUnitPath'], result['orgUnitId']) except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError): entityDoesNotExistExit(Ent.ORGANIZATIONAL_UNIT, orgUnit) except (GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): accessErrorExit(cd) def getAllParentOrgUnitsForUser(cd, user): try: result = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=user, fields='orgUnitPath', projection='basic') except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden): entityDoesNotExistExit(Ent.USER, user) except (GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): accessErrorExit(cd) parentPath = result['orgUnitPath'] if parentPath == '/': orgUnitPath, orgUnitId = getOrgUnitId(cd, '/') return {orgUnitId: orgUnitPath} parentPath = encodeOrgUnitPath(makeOrgUnitPathRelative(parentPath)) orgUnits 
= {} while True: try: result = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=parentPath, fields='orgUnitId,orgUnitPath,parentOrgUnitId') orgUnits[result['orgUnitId']] = result['orgUnitPath'] if 'parentOrgUnitId' not in result: break parentPath = result['parentOrgUnitId'] except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError): entityDoesNotExistExit(Ent.ORGANIZATIONAL_UNIT, parentPath) except (GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): accessErrorExit(cd) return orgUnits def validateREPattern(patstr, flags=0): try: return re.compile(patstr, flags) except re.error as e: Cmd.Backup() usageErrorExit(f'{Cmd.OB_RE_PATTERN} {Msg.ERROR}: {e}') def getREPattern(flags=0): if Cmd.ArgumentsRemaining(): patstr = Cmd.Current() if patstr: Cmd.Advance() return validateREPattern(patstr, flags) missingArgumentExit(Cmd.OB_RE_PATTERN) def validateREPatternSubstitution(pattern, replacement): try: re.sub(pattern, replacement, '') return (pattern, replacement) except re.error as e: Cmd.Backup() usageErrorExit(f'{Cmd.OB_RE_SUBSTITUTION} {Msg.ERROR}: {e}') def getREPatternSubstitution(flags=0): pattern = getREPattern(flags) replacement = getString(Cmd.OB_RE_SUBSTITUTION, minLen=0) return validateREPatternSubstitution(pattern, replacement) def getSheetEntity(allowBlankSheet): if Cmd.ArgumentsRemaining(): sheet = Cmd.Current() if sheet or allowBlankSheet: cg = UID_PATTERN.match(sheet) if cg: if cg.group(1).isdigit(): Cmd.Advance() return {'sheetType': Ent.SHEET_ID, 'sheetValue': int(cg.group(1)), 'sheetId': int(cg.group(1)), 'sheetTitle': ''} else: Cmd.Advance() return {'sheetType': Ent.SHEET, 'sheetValue': sheet, 'sheetId': None, 'sheetTitle': sheet} missingArgumentExit(Cmd.OB_SHEET_ENTITY) def getSheetIdFromSheetEntity(spreadsheet, sheetEntity): if sheetEntity['sheetType'] == Ent.SHEET_ID: for sheet in spreadsheet['sheets']: if sheetEntity['sheetId'] == sheet['properties']['sheetId']: return sheet['properties']['sheetId'] else: sheetTitleLower = sheetEntity['sheetTitle'].lower() for sheet in spreadsheet['sheets']: if sheetTitleLower == sheet['properties']['title'].lower(): return sheet['properties']['sheetId'] return None def protectedSheetId(spreadsheet, sheetId): for sheet in spreadsheet['sheets']: for protectedRange in sheet.get('protectedRanges', []): if protectedRange.get('range', {}).get('sheetId', -1) == sheetId and not protectedRange.get('requestingUserCanEdit', False): return True return False def getString(item, checkBlank=False, optional=False, minLen=1, maxLen=None): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current() if argstr: if checkBlank: if argstr.isspace(): blankArgumentExit(item) if (len(argstr) >= minLen) and ((maxLen is None) or (len(argstr) <= maxLen)): Cmd.Advance() return argstr invalidArgumentExit(f'{integerLimits(minLen, maxLen, Msg.STRING_LENGTH)} for {item}') if optional or (minLen == 0): Cmd.Advance() return '' emptyArgumentExit(item) elif optional: return '' missingArgumentExit(item) def escapeCRsNLs(value): return value.replace('\r', '\\r').replace('\n', '\\n') def unescapeCRsNLs(value): return value.replace('\\r', '\r').replace('\\n', '\n') def getStringWithCRsNLs(): return unescapeCRsNLs(getString(Cmd.OB_STRING, minLen=0)) def getStringReturnInList(item): argstr = getString(item, minLen=0).strip() if argstr: return [argstr] return [] SORF_SIG_ARGUMENTS = {'signature', 'sig', 'textsig', 'htmlsig'} SORF_MSG_ARGUMENTS = {'message', 'textmessage', 'htmlmessage'} 
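# These SORF_* keyword sets feed getStringOrFile() below: the signature/message
# keywords introduce inline text, the *FILE* keywords name a source to read instead
# (local file, Google Doc or Cloud Storage object), and the *HTML* keywords mark the
# data as HTML rather than plain text.
# Illustrative sketch (hypothetical argument values, not GAM syntax documentation):
# for a command-line fragment such as
#   ... signature htmlfile sig.html charset utf-8 ...
# getStringOrFile('signature') reads sig.html using the utf-8 charset and returns
# (data, 'utf-8', True), the trailing True flagging the content as HTML.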
SORF_FILE_ARGUMENTS = {'file', 'textfile', 'htmlfile', 'gdoc', 'ghtml', 'gcsdoc', 'gcshtml'} SORF_HTML_ARGUMENTS = {'htmlsig', 'htmlmessage', 'htmlfile', 'ghtml', 'gcshtml'} SORF_TEXT_ARGUMENTS = {'text', 'textfile', 'gdoc', 'gcsdoc'} SORF_SIG_FILE_ARGUMENTS = SORF_SIG_ARGUMENTS.union(SORF_FILE_ARGUMENTS) SORF_MSG_FILE_ARGUMENTS = SORF_MSG_ARGUMENTS.union(SORF_FILE_ARGUMENTS) def getStringOrFile(myarg, minLen=0, unescapeCRLF=False): if myarg in SORF_SIG_ARGUMENTS: if checkArgumentPresent(SORF_FILE_ARGUMENTS): myarg = Cmd.Previous().strip().lower().replace('_', '') html = myarg in SORF_HTML_ARGUMENTS if myarg in SORF_FILE_ARGUMENTS: if myarg in {'file', 'textfile', 'htmlfile'}: filename = getString(Cmd.OB_FILE_NAME) encoding = getCharSet() return (readFile(filename, encoding=encoding), encoding, html) if myarg in {'gdoc', 'ghtml'}: f = getGDocData(myarg) data = f.read() f.close() return (data, UTF8, html) return (getStorageFileData(myarg), UTF8, html) if not unescapeCRLF: return (getString(Cmd.OB_STRING, minLen=minLen), UTF8, html) return (unescapeCRsNLs(getString(Cmd.OB_STRING, minLen=minLen)), UTF8, html) def getStringWithCRsNLsOrFile(): if checkArgumentPresent(SORF_FILE_ARGUMENTS): return getStringOrFile(Cmd.Previous().strip().lower().replace('_', ''), minLen=0) return (unescapeCRsNLs(getString(Cmd.OB_STRING, minLen=0)), UTF8, False) def todaysDate(): return datetime.datetime(GM.Globals[GM.DATETIME_NOW].year, GM.Globals[GM.DATETIME_NOW].month, GM.Globals[GM.DATETIME_NOW].day, tzinfo=GC.Values[GC.TIMEZONE]) def todaysTime(): return datetime.datetime(GM.Globals[GM.DATETIME_NOW].year, GM.Globals[GM.DATETIME_NOW].month, GM.Globals[GM.DATETIME_NOW].day, GM.Globals[GM.DATETIME_NOW].hour, GM.Globals[GM.DATETIME_NOW].minute, tzinfo=GC.Values[GC.TIMEZONE]) def getDelta(argstr, pattern): if argstr == 'NOW': return todaysTime() if argstr == 'TODAY': return todaysDate() tg = pattern.match(argstr.lower()) if tg is None: return None sign = tg.group(1) delta = int(tg.group(2)) unit = tg.group(3) if unit == 'y': deltaTime = datetime.timedelta(days=delta*365) elif unit == 'w': deltaTime = datetime.timedelta(weeks=delta) elif unit == 'd': deltaTime = datetime.timedelta(days=delta) elif unit == 'h': deltaTime = datetime.timedelta(hours=delta) elif unit == 'm': deltaTime = datetime.timedelta(minutes=delta) baseTime = todaysDate() if unit in {'h', 'm'}: baseTime = baseTime+datetime.timedelta(hours=GM.Globals[GM.DATETIME_NOW].hour, minutes=GM.Globals[GM.DATETIME_NOW].minute) if sign == '-': return baseTime-deltaTime return baseTime+deltaTime DELTA_DATE_PATTERN = re.compile(r'^([+-])(\d+)([dwy])$') DELTA_DATE_FORMAT_REQUIRED = '(+|-)(d|w|y)' def getDeltaDate(argstr): deltaDate = getDelta(argstr, DELTA_DATE_PATTERN) if deltaDate is None: invalidArgumentExit(DELTA_DATE_FORMAT_REQUIRED) return deltaDate DELTA_TIME_PATTERN = re.compile(r'^([+-])(\d+)([mhdwy])$') DELTA_TIME_FORMAT_REQUIRED = '(+|-)(m|h|d|w|y)' def getDeltaTime(argstr): deltaTime = getDelta(argstr, DELTA_TIME_PATTERN) if deltaTime is None: invalidArgumentExit(DELTA_TIME_FORMAT_REQUIRED) return deltaTime YYYYMMDD_FORMAT = '%Y-%m-%d' YYYYMMDD_FORMAT_REQUIRED = 'yyyy-mm-dd' TODAY_NOW = {'TODAY', 'NOW'} PLUS_MINUS = {'+', '-'} def getYYYYMMDD(minLen=1, returnTimeStamp=False, returnDateTime=False, alternateValue=None): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current().strip().upper() if argstr: if alternateValue is not None and argstr == alternateValue.upper(): Cmd.Advance() return None if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: if argstr == 
'NOW': argstr = 'TODAY' argstr = getDeltaDate(argstr).strftime(YYYYMMDD_FORMAT) elif argstr == 'NEVER': argstr = NEVER_DATE try: dateTime = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) Cmd.Advance() if returnTimeStamp: return time.mktime(dateTime.timetuple())*1000 if returnDateTime: return dateTime return argstr except ValueError: invalidArgumentExit(YYYYMMDD_FORMAT_REQUIRED) elif minLen == 0: Cmd.Advance() return '' missingArgumentExit(YYYYMMDD_FORMAT_REQUIRED) HHMM_FORMAT = '%H:%M' HHMM_FORMAT_REQUIRED = 'hh:mm' def getHHMM(): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current().strip().upper() if argstr: try: datetime.datetime.strptime(argstr, HHMM_FORMAT) Cmd.Advance() return argstr except ValueError: invalidArgumentExit(HHMM_FORMAT_REQUIRED) missingArgumentExit(HHMM_FORMAT_REQUIRED) YYYYMMDD_HHMM_FORMAT = '%Y-%m-%d %H:%M' YYYYMMDD_HHMM_FORMAT_REQUIRED = 'yyyy-mm-dd hh:mm' def getYYYYMMDD_HHMM(): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current().strip().upper() if argstr: if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: argstr = getDeltaTime(argstr).strftime(YYYYMMDD_HHMM_FORMAT) elif argstr == 'NEVER': argstr = NEVER_DATETIME argstr = argstr.replace('T', ' ') try: datetime.datetime.strptime(argstr, YYYYMMDD_HHMM_FORMAT) Cmd.Advance() return argstr except ValueError: invalidArgumentExit(YYYYMMDD_HHMM_FORMAT_REQUIRED) missingArgumentExit(YYYYMMDD_HHMM_FORMAT_REQUIRED) YYYYMMDDTHHMMSSZ_FORMAT = '%Y-%m-%dT%H:%M:%SZ' YYYYMMDD_PATTERN = re.compile(r'^[0-9]{4}-[0-9]{2}-[0-9]{2}$') def getDateOrDeltaFromNow(returnDateTime=False): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current().strip().upper() if argstr: if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: if argstr == 'NOW': argstr = 'TODAY' argDate = getDeltaDate(argstr) elif argstr == 'NEVER': argDate = datetime.datetime.strptime(NEVER_DATE, YYYYMMDD_FORMAT) elif YYYYMMDD_PATTERN.match(argstr): try: argDate = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) except ValueError: invalidArgumentExit(YYYYMMDD_FORMAT_REQUIRED) else: invalidArgumentExit(YYYYMMDD_FORMAT_REQUIRED) Cmd.Advance() if not returnDateTime: return argDate.strftime(YYYYMMDD_FORMAT) return (datetime.datetime(argDate.year, argDate.month, argDate.day, tzinfo=GC.Values[GC.TIMEZONE]), GC.Values[GC.TIMEZONE], argDate.strftime(YYYYMMDD_FORMAT)) missingArgumentExit(YYYYMMDD_FORMAT_REQUIRED) YYYYMMDDTHHMMSS_FORMAT_REQUIRED = 'yyyy-mm-ddThh:mm:ss[.fff](Z|(+|-(hh:mm)))' TIMEZONE_FORMAT_REQUIRED = 'Z|(+|-(hh:mm))' def getTimeOrDeltaFromNow(returnDateTime=False): if Cmd.ArgumentsRemaining(): argstr = Cmd.Current().strip().upper() if argstr: if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: argstr = ISOformatTimeStamp(getDeltaTime(argstr)) elif argstr == 'NEVER': argstr = NEVER_TIME elif YYYYMMDD_PATTERN.match(argstr): try: dateTime = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) except ValueError: invalidArgumentExit(YYYYMMDD_FORMAT_REQUIRED) try: argstr = ISOformatTimeStamp(dateTime.replace(tzinfo=GC.Values[GC.TIMEZONE])) except OverflowError: pass try: fullDateTime, tz = iso8601.parse_date(argstr) Cmd.Advance() if not returnDateTime: return argstr.replace(' ', 'T') return (fullDateTime, tz, argstr.replace(' ', 'T')) except (iso8601.ParseError, OverflowError): pass invalidArgumentExit(YYYYMMDDTHHMMSS_FORMAT_REQUIRED) missingArgumentExit(YYYYMMDDTHHMMSS_FORMAT_REQUIRED) def getRowFilterDateOrDeltaFromNow(argstr): argstr = argstr.strip().upper() if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: if argstr == 'NOW': argstr = 'TODAY' deltaDate = getDelta(argstr, 
DELTA_DATE_PATTERN) if deltaDate is None: return (False, DELTA_DATE_FORMAT_REQUIRED) argstr = ISOformatTimeStamp(deltaDate.replace(tzinfo=iso8601.UTC)) elif argstr == 'NEVER' or YYYYMMDD_PATTERN.match(argstr): if argstr == 'NEVER': argstr = NEVER_DATE try: dateTime = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) except ValueError: return (False, YYYYMMDD_FORMAT_REQUIRED) argstr = ISOformatTimeStamp(dateTime.replace(tzinfo=iso8601.UTC)) try: iso8601.parse_date(argstr) return (True, argstr.replace(' ', 'T')) except (iso8601.ParseError, OverflowError): return (False, YYYYMMDD_FORMAT_REQUIRED) def getRowFilterTimeOrDeltaFromNow(argstr): argstr = argstr.strip().upper() if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: deltaTime = getDelta(argstr, DELTA_TIME_PATTERN) if deltaTime is None: return (False, DELTA_TIME_FORMAT_REQUIRED) argstr = ISOformatTimeStamp(deltaTime) elif argstr == 'NEVER': argstr = NEVER_TIME elif YYYYMMDD_PATTERN.match(argstr): try: dateTime = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) except ValueError: return (False, YYYYMMDD_FORMAT_REQUIRED) argstr = ISOformatTimeStamp(dateTime.replace(tzinfo=GC.Values[GC.TIMEZONE])) try: iso8601.parse_date(argstr) return (True, argstr.replace(' ', 'T')) except (iso8601.ParseError, OverflowError): return (False, YYYYMMDDTHHMMSS_FORMAT_REQUIRED) def mapQueryRelativeTimes(query, keywords): QUOTES = '\'"' for kw in keywords: pattern = re.compile(rf'({kw})\s*([<>]=?|=|!=)\s*[{QUOTES}]?(now|today|[+-]\d+[mhdwy])', re.IGNORECASE) pos = 0 while True: mg = pattern.search(query, pos) if not mg: break if mg.groups()[2] is not None: deltaTime = getDelta(mg.group(3).upper(), DELTA_TIME_PATTERN) if deltaTime: query = query[:mg.start(3)]+ISOformatTimeStamp(deltaTime)+query[mg.end(3):] pos = mg.end() return query class StartEndTime(): def __init__(self, startkw='starttime', endkw='endtime', mode='time'): self.startTime = self.endTime = self.startDateTime = self.endDateTime = None self._startkw = startkw self._endkw = endkw self._getValueOrDeltaFromNow = getTimeOrDeltaFromNow if mode == 'time' else getDateOrDeltaFromNow def Get(self, myarg): if myarg in {'start', self._startkw}: self.startDateTime, _, self.startTime = self._getValueOrDeltaFromNow(True) elif myarg in {'end', self._endkw}: self.endDateTime, _, self.endTime = self._getValueOrDeltaFromNow(True) elif myarg == 'yesterday': currDate = todaysDate() self.startDateTime = currDate+datetime.timedelta(days=-1) self.startTime = ISOformatTimeStamp(self.startDateTime) self.endDateTime = currDate+datetime.timedelta(seconds=-1) self.endTime = ISOformatTimeStamp(self.endDateTime) elif myarg == 'today': currDate = todaysDate() self.startDateTime = currDate self.startTime = ISOformatTimeStamp(self.startDateTime) elif myarg == 'range': self.startDateTime, _, self.startTime = self._getValueOrDeltaFromNow(True) self.endDateTime, _, self.endTime = self._getValueOrDeltaFromNow(True) else: #elif myarg in {'thismonth', 'previousmonths'} if myarg == 'thismonth': firstMonth = 0 else: firstMonth = getInteger(minVal=1, maxVal=6) currDate = todaysDate() self.startDateTime = currDate+relativedelta(months=-firstMonth, day=1, hour=0, minute=0, second=0, microsecond=0) self.startTime = ISOformatTimeStamp(self.startDateTime) if myarg == 'thismonth': self.endDateTime = todaysTime() else: self.endDateTime = currDate+relativedelta(day=1, hour=23, minute=59, second=59, microsecond=0)+relativedelta(days=-1) self.endTime = ISOformatTimeStamp(self.endDateTime) if self.startDateTime and self.endDateTime and 
self.endDateTime < self.startDateTime: Cmd.Backup() usageErrorExit(Msg.INVALID_DATE_TIME_RANGE.format(self._endkw, self.endTime, self._startkw, self.startTime)) EVENTID_PATTERN = re.compile(r'^[a-v0-9]{5,1024}$') EVENTID_FORMAT_REQUIRED = '[a-v0-9]{5,1024}' def getEventID(): if Cmd.ArgumentsRemaining(): tg = EVENTID_PATTERN.match(Cmd.Current().strip()) if tg: Cmd.Advance() return tg.group(0) invalidArgumentExit(EVENTID_FORMAT_REQUIRED) missingArgumentExit(EVENTID_FORMAT_REQUIRED) EVENT_TIME_FORMAT_REQUIRED = 'allday yyyy-mm-dd | '+YYYYMMDDTHHMMSS_FORMAT_REQUIRED def getEventTime(): if Cmd.ArgumentsRemaining(): if Cmd.Current().strip().lower() == 'allday': Cmd.Advance() return {'date': getYYYYMMDD()} return {'dateTime': getTimeOrDeltaFromNow()} missingArgumentExit(EVENT_TIME_FORMAT_REQUIRED) AGE_TIME_PATTERN = re.compile(r'^(\d+)([mhdw])$') AGE_TIME_FORMAT_REQUIRED = '(m|h|d|w)' def getAgeTime(): if Cmd.ArgumentsRemaining(): tg = AGE_TIME_PATTERN.match(Cmd.Current().strip().lower()) if tg: age = int(tg.group(1)) age_unit = tg.group(2) now = int(time.time()) if age_unit == 'm': age = now-(age*SECONDS_PER_MINUTE) elif age_unit == 'h': age = now-(age*SECONDS_PER_HOUR) elif age_unit == 'd': age = now-(age*SECONDS_PER_DAY) else: # age_unit == 'w': age = now-(age*SECONDS_PER_WEEK) Cmd.Advance() return age*1000 invalidArgumentExit(AGE_TIME_FORMAT_REQUIRED) missingArgumentExit(AGE_TIME_FORMAT_REQUIRED) CALENDAR_REMINDER_METHODS = ['email', 'popup'] CALENDAR_REMINDER_MAX_MINUTES = 40320 def getCalendarReminder(allowClearNone=False): methods = CALENDAR_REMINDER_METHODS[:] if allowClearNone: methods += Cmd.CLEAR_NONE_ARGUMENT if Cmd.ArgumentsRemaining(): method = Cmd.Current().strip() if not method.isdigit(): method = getChoice(methods) minutes = getInteger(minVal=0, maxVal=CALENDAR_REMINDER_MAX_MINUTES) else: minutes = getInteger(minVal=0, maxVal=CALENDAR_REMINDER_MAX_MINUTES) method = getChoice(methods) return {'method': method, 'minutes': minutes} missingChoiceExit(methods) def getCharacter(): if Cmd.ArgumentsRemaining(): argstr = codecs.escape_decode(bytes(Cmd.Current(), UTF8))[0].decode(UTF8) if argstr: if len(argstr) == 1: Cmd.Advance() return argstr invalidArgumentExit(f'{integerLimits(1, 1, Msg.STRING_LENGTH)} for {Cmd.OB_CHARACTER}') emptyArgumentExit(Cmd.OB_CHARACTER) missingArgumentExit(Cmd.OB_CHARACTER) def getDelimiter(): if not checkArgumentPresent('delimiter'): return None return getCharacter() def getJSON(deleteFields): if not checkArgumentPresent('file'): encoding = getCharSet() if not Cmd.ArgumentsRemaining(): missingArgumentExit(Cmd.OB_JSON_DATA) argstr = Cmd.Current() # argstr = Cmd.Current().replace(r'\\"', r'\"') Cmd.Advance() try: if encoding == UTF8: jsonData = json.loads(argstr) else: jsonData = json.loads(argstr.encode(encoding).decode(UTF8)) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: Cmd.Backup() usageErrorExit(f'{str(e)}: {argstr if encoding == UTF8 else argstr.encode(encoding).decode(UTF8)}') else: filename = getString(Cmd.OB_FILE_NAME) encoding = getCharSet() try: jsonData = json.loads(readFile(filename, encoding=encoding)) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: Cmd.Backup() usageErrorExit(Msg.JSON_ERROR.format(str(e), filename)) for field in deleteFields: jsonData.pop(field, None) return jsonData def getMatchSkipFields(fieldNames): matchFields = {} skipFields = {} while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in {'matchfield', 'skipfield'}: matchField = getString(Cmd.OB_FIELD_NAME).strip('~') 
if (not matchField) or (matchField not in fieldNames): csvFieldErrorExit(matchField, fieldNames, backupArg=True) if myarg == 'matchfield': matchFields[matchField] = getREPattern() else: skipFields[matchField] = getREPattern() else: Cmd.Backup() break return (matchFields, skipFields) def checkMatchSkipFields(row, fieldnames, matchFields, skipFields): for matchField, matchPattern in iter(matchFields.items()): if (matchField not in row) or not matchPattern.search(row[matchField]): return False for skipField, matchPattern in iter(skipFields.items()): if (skipField in row) and matchPattern.search(row[skipField]): return False if fieldnames and (GC.Values[GC.CSV_INPUT_ROW_FILTER] or GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]): return RowFilterMatch(row, fieldnames, GC.Values[GC.CSV_INPUT_ROW_FILTER], GC.Values[GC.CSV_INPUT_ROW_FILTER_MODE], GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER], GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER_MODE]) return True def checkSubkeyField(): if not GM.Globals[GM.CSV_SUBKEY_FIELD]: Cmd.Backup() usageErrorExit(Msg.NO_CSV_FILE_SUBKEYS_SAVED) chkSubkeyField = getString(Cmd.OB_FIELD_NAME, checkBlank=True) if chkSubkeyField != GM.Globals[GM.CSV_SUBKEY_FIELD]: Cmd.Backup() usageErrorExit(Msg.SUBKEY_FIELD_MISMATCH.format(chkSubkeyField, GM.Globals[GM.CSV_SUBKEY_FIELD])) def checkDataField(): if not GM.Globals[GM.CSV_DATA_FIELD]: Cmd.Backup() usageErrorExit(Msg.NO_CSV_FILE_DATA_SAVED) chkDataField = getString(Cmd.OB_FIELD_NAME, checkBlank=True) if chkDataField != GM.Globals[GM.CSV_DATA_FIELD]: Cmd.Backup() usageErrorExit(Msg.DATA_FIELD_MISMATCH.format(chkDataField, GM.Globals[GM.CSV_DATA_FIELD])) MAX_MESSAGE_BYTES_PATTERN = re.compile(r'^(\d+)([mkb]?)$') MAX_MESSAGE_BYTES_FORMAT_REQUIRED = '[m|k|b]' def getMaxMessageBytes(oneKiloBytes, oneMegaBytes): if Cmd.ArgumentsRemaining(): tg = MAX_MESSAGE_BYTES_PATTERN.match(Cmd.Current().strip().lower()) if tg: mmb = int(tg.group(1)) mmb_unit = tg.group(2) if mmb_unit == 'm': mmb *= oneMegaBytes elif mmb_unit == 'k': mmb *= oneKiloBytes Cmd.Advance() return mmb invalidArgumentExit(MAX_MESSAGE_BYTES_FORMAT_REQUIRED) missingArgumentExit(MAX_MESSAGE_BYTES_FORMAT_REQUIRED) # Get domain from email address def getEmailAddressDomain(emailAddress): atLoc = emailAddress.find('@') if atLoc == -1: return GC.Values[GC.DOMAIN] return emailAddress[atLoc+1:].lower() # Get user name from email address def getEmailAddressUsername(emailAddress): atLoc = emailAddress.find('@') if atLoc == -1: return emailAddress.lower() return emailAddress[:atLoc].lower() # Split email address into user and domain def splitEmailAddress(emailAddress): atLoc = emailAddress.find('@') if atLoc == -1: return (emailAddress.lower(), GC.Values[GC.DOMAIN]) return (emailAddress[:atLoc].lower(), emailAddress[atLoc+1:].lower()) def formatFileSize(fileSize): if fileSize == 0: return '0kb' if fileSize < ONE_KILO_10_BYTES: return '1kb' if fileSize < ONE_MEGA_10_BYTES: return f'{fileSize//ONE_KILO_10_BYTES}kb' if fileSize < ONE_GIGA_10_BYTES: return f'{fileSize//ONE_MEGA_10_BYTES}mb' return f'{fileSize//ONE_GIGA_10_BYTES}gb' def formatLocalTime(dateTimeStr): if dateTimeStr in {NEVER_TIME, NEVER_TIME_NOMS}: return GC.Values[GC.NEVER_TIME] try: timestamp, _ = iso8601.parse_date(dateTimeStr) if not GC.Values[GC.OUTPUT_TIMEFORMAT]: if GM.Globals[GM.CONVERT_TO_LOCAL_TIME]: return ISOformatTimeStamp(timestamp.astimezone(GC.Values[GC.TIMEZONE])) return timestamp.strftime(YYYYMMDDTHHMMSSZ_FORMAT) if GM.Globals[GM.CONVERT_TO_LOCAL_TIME]: return 
timestamp.astimezone(GC.Values[GC.TIMEZONE]).strftime(GC.Values[GC.OUTPUT_TIMEFORMAT]) return timestamp.strftime(GC.Values[GC.OUTPUT_TIMEFORMAT]) except (iso8601.ParseError, OverflowError): return dateTimeStr def formatLocalSecondsTimestamp(timestamp): if not GC.Values[GC.OUTPUT_TIMEFORMAT]: return ISOformatTimeStamp(datetime.datetime.fromtimestamp(int(timestamp), GC.Values[GC.TIMEZONE])) return datetime.datetime.fromtimestamp(int(timestamp), GC.Values[GC.TIMEZONE]).strftime(GC.Values[GC.OUTPUT_TIMEFORMAT]) def formatLocalTimestamp(timestamp): if not GC.Values[GC.OUTPUT_TIMEFORMAT]: return ISOformatTimeStamp(datetime.datetime.fromtimestamp(int(timestamp)//1000, GC.Values[GC.TIMEZONE])) return datetime.datetime.fromtimestamp(int(timestamp)//1000, GC.Values[GC.TIMEZONE]).strftime(GC.Values[GC.OUTPUT_TIMEFORMAT]) def formatLocalTimestampUTC(timestamp): return ISOformatTimeStamp(datetime.datetime.fromtimestamp(int(timestamp)//1000, iso8601.UTC)) def formatLocalDatestamp(timestamp): try: if not GC.Values[GC.OUTPUT_DATEFORMAT]: return datetime.datetime.fromtimestamp(int(timestamp)//1000, GC.Values[GC.TIMEZONE]).strftime(YYYYMMDD_FORMAT) return datetime.datetime.fromtimestamp(int(timestamp)//1000, GC.Values[GC.TIMEZONE]).strftime(GC.Values[GC.OUTPUT_DATEFORMAT]) except OverflowError: return NEVER_DATE def formatMaxMessageBytes(maxMessageBytes, oneKiloBytes, oneMegaBytes): if maxMessageBytes < oneKiloBytes: return maxMessageBytes if maxMessageBytes < oneMegaBytes: return f'{maxMessageBytes//oneKiloBytes}K' return f'{maxMessageBytes//oneMegaBytes}M' def formatMilliSeconds(millis): seconds, millis = divmod(millis, 1000) minutes, seconds = divmod(seconds, 60) hours, minutes = divmod(minutes, 60) return f'{hours:02d}:{minutes:02d}:{seconds:02d}' def getPhraseDNEorSNA(email): return Msg.DOES_NOT_EXIST if getEmailAddressDomain(email) == GC.Values[GC.DOMAIN] else Msg.SERVICE_NOT_APPLICABLE def formatHTTPError(http_status, reason, message): return f'{http_status}: {reason} - {message}' def getHTTPError(responses, http_status, reason, message): if reason in responses: return responses[reason] return formatHTTPError(http_status, reason, message) # Warnings def badRequestWarning(entityType, itemType, itemValue): printWarningMessage(BAD_REQUEST_RC, f'{Msg.GOT} 0 {Ent.Plural(entityType)}: {Msg.INVALID} {Ent.Singular(itemType)} - {itemValue}') def emptyQuery(query, entityType): return f'{Ent.Singular(Ent.QUERY)} ({query}) {Msg.NO_ENTITIES_FOUND.format(Ent.Plural(entityType))}' def invalidQuery(query): return f'{Ent.Singular(Ent.QUERY)} ({query}) {Msg.INVALID}' def invalidMember(query): if query: badRequestWarning(Ent.GROUP, Ent.QUERY, invalidQuery(query)) return True return False def invalidUserSchema(schema): if isinstance(schema, list): return f'{Ent.Singular(Ent.USER_SCHEMA)} ({",".join(schema)}) {Msg.INVALID}' return f'{Ent.Singular(Ent.USER_SCHEMA)} ({schema}) {Msg.INVALID}' def userServiceNotEnabledWarning(entityName, service, i=0, count=0): setSysExitRC(SERVICE_NOT_APPLICABLE_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [Ent.Singular(Ent.USER), entityName, Msg.SERVICE_NOT_ENABLED.format(service)], currentCountNL(i, count))) def userAlertsServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Alerts', i, count) def userAnalyticsServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Analytics', i, count) def userCalServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Calendar', i, count)
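# The userXxxServiceNotEnabledWarning() helpers above and below are thin wrappers
# around userServiceNotEnabledWarning(), which sets SERVICE_NOT_APPLICABLE_RC and
# writes a "service not enabled" warning for the user to stderr.
# Illustrative call (hypothetical user, for orientation only):
#   userCalServiceNotEnabledWarning('user@example.com', 3, 10)
# reports that Calendar is not enabled for user@example.com as item 3 of 10.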
def userChatServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Chat', i, count) def userContactDelegateServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Contact Delegate', i, count) def userDriveServiceNotEnabledWarning(user, errMessage, i=0, count=0): # if errMessage.find('Drive apps') == -1 and errMessage.find('Active session is invalid') == -1: # entityServiceNotApplicableWarning(Ent.USER, user, i, count) if errMessage.find('Drive apps') >= 0 or errMessage.find('Active session is invalid') >= 0: userServiceNotEnabledWarning(user, 'Drive', i, count) else: entityActionNotPerformedWarning([Ent.USER, user], errMessage, i, count) def userKeepServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Keep', i, count) def userGmailServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Gmail', i, count) def userLookerStudioServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Looker Studio', i, count) def userPeopleServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'People', i, count) def userTasksServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'Tasks', i, count) def userYouTubeServiceNotEnabledWarning(entityName, i=0, count=0): userServiceNotEnabledWarning(entityName, 'YouTube', i, count) def entityServiceNotApplicableWarning(entityType, entityName, i=0, count=0): setSysExitRC(SERVICE_NOT_APPLICABLE_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [Ent.Singular(entityType), entityName, Msg.SERVICE_NOT_APPLICABLE], currentCountNL(i, count))) def entityDoesNotExistWarning(entityType, entityName, i=0, count=0): setSysExitRC(ENTITY_DOES_NOT_EXIST_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [Ent.Singular(entityType), entityName, Msg.DOES_NOT_EXIST], currentCountNL(i, count))) def entityListDoesNotExistWarning(entityValueList, i=0, count=0): setSysExitRC(ENTITY_DOES_NOT_EXIST_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Msg.DOES_NOT_EXIST], currentCountNL(i, count))) def entityUnknownWarning(entityType, entityName, i=0, count=0): domain = getEmailAddressDomain(entityName) if (domain.endswith(GC.Values[GC.DOMAIN])) or (domain.endswith('google.com')): entityDoesNotExistWarning(entityType, entityName, i, count) else: entityServiceNotApplicableWarning(entityType, entityName, i, count) def entityOrEntityUnknownWarning(entity1Type, entity1Name, entity2Type, entity2Name, i=0, count=0): setSysExitRC(ENTITY_DOES_NOT_EXIST_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [f'{Msg.EITHER} {Ent.Singular(entity1Type)}', entity1Name, getPhraseDNEorSNA(entity1Name), None, f'{Msg.OR} {Ent.Singular(entity2Type)}', entity2Name, getPhraseDNEorSNA(entity2Name)], currentCountNL(i, count))) def entityDoesNotHaveItemWarning(entityValueList, i=0, count=0): setSysExitRC(ENTITY_DOES_NOT_EXIST_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Msg.DOES_NOT_EXIST], currentCountNL(i, count))) def duplicateAliasGroupUserWarning(cd, entityValueList, i=0, count=0): email = entityValueList[1] try: result = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=email, fields='id,primaryEmail') if (result['primaryEmail'].lower() == email) or (result['id'] == email): kvList = [Ent.USER, email] else: kvList = [Ent.USER_ALIAS, email, 
Ent.USER, result['primaryEmail']] except (GAPI.userNotFound, GAPI.badRequest, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.backendError, GAPI.systemError): try: result = callGAPI(cd.groups(), 'get', throwReasons=GAPI.GROUP_GET_THROW_REASONS, groupKey=email, fields='id,email') if (result['email'].lower() == email) or (result['id'] == email): kvList = [Ent.GROUP, email] else: kvList = [Ent.GROUP_ALIAS, email, Ent.GROUP, result['email']] except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest): kvList = [Ent.EMAIL, email] writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+ [Act.Failed(), Msg.DUPLICATE]+ Ent.FormatEntityValueList(kvList), currentCountNL(i, count))) setSysExitRC(ENTITY_DUPLICATE_RC) return kvList[0] def entityDuplicateWarning(entityValueList, i=0, count=0): setSysExitRC(ENTITY_DUPLICATE_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Failed(), Msg.DUPLICATE], currentCountNL(i, count))) def entityActionFailedWarning(entityValueList, errMessage, i=0, count=0): setSysExitRC(ACTION_FAILED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Failed(), errMessage], currentCountNL(i, count))) def entityModifierItemValueListActionFailedWarning(entityValueList, modifier, infoTypeValueList, errMessage, i=0, count=0): setSysExitRC(ACTION_FAILED_RC) writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', None]+Ent.FormatEntityValueList(infoTypeValueList)+[Act.Failed(), errMessage], currentCountNL(i, count))) def entityModifierActionFailedWarning(entityValueList, modifier, errMessage, i=0, count=0): setSysExitRC(ACTION_FAILED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', Act.Failed(), errMessage], currentCountNL(i, count))) def entityModifierNewValueActionFailedWarning(entityValueList, modifier, newValue, errMessage, i=0, count=0): setSysExitRC(ACTION_FAILED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', newValue, Act.Failed(), errMessage], currentCountNL(i, count))) def entityNumEntitiesActionFailedWarning(entityType, entityName, itemType, itemCount, errMessage, i=0, count=0): setSysExitRC(ACTION_FAILED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [Ent.Singular(entityType), entityName, Ent.Choose(itemType, itemCount), itemCount, Act.Failed(), errMessage], currentCountNL(i, count))) def entityActionNotPerformedWarning(entityValueList, errMessage, i=0, count=0): setSysExitRC(ACTION_NOT_PERFORMED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.NotPerformed(), errMessage], currentCountNL(i, count))) def entityItemValueListActionNotPerformedWarning(entityValueList, infoTypeValueList, errMessage, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.NotPerformed(), '']+Ent.FormatEntityValueList(infoTypeValueList)+[errMessage], currentCountNL(i, count))) def entityModifierItemValueListActionNotPerformedWarning(entityValueList, modifier, infoTypeValueList, errMessage, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.NotPerformed()} {modifier}', None]+Ent.FormatEntityValueList(infoTypeValueList)+[errMessage], 
currentCountNL(i, count))) def entityNumEntitiesActionNotPerformedWarning(entityValueList, itemType, itemCount, errMessage, i=0, count=0): setSysExitRC(ACTION_NOT_PERFORMED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Ent.Choose(itemType, itemCount), itemCount, Act.NotPerformed(), errMessage], currentCountNL(i, count))) def entityBadRequestWarning(entityValueList, errMessage, i=0, count=0): setSysExitRC(BAD_REQUEST_RC) writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[ERROR, errMessage], currentCountNL(i, count))) # Getting ... utilities def printGettingAllAccountEntities(entityType, query='', qualifier='', accountType=Ent.ACCOUNT): if GC.Values[GC.SHOW_GETTINGS]: if query: Ent.SetGettingQuery(entityType, query) elif qualifier: Ent.SetGettingQualifier(entityType, qualifier) else: Ent.SetGetting(entityType) writeStderr(f'{Msg.GETTING_ALL} {Ent.PluralGetting()}{Ent.GettingPreQualifier()}{Ent.MayTakeTime(accountType)}\n') def printGotAccountEntities(count): if GC.Values[GC.SHOW_GETTINGS]: writeStderr(f'{Msg.GOT} {count} {Ent.ChooseGetting(count)}{Ent.GettingPostQualifier()}\n') def setGettingAllEntityItemsForWhom(entityItem, forWhom, query='', qualifier=''): if GC.Values[GC.SHOW_GETTINGS]: if query: Ent.SetGettingQuery(entityItem, query) elif qualifier: Ent.SetGettingQualifier(entityItem, qualifier) else: Ent.SetGetting(entityItem) Ent.SetGettingForWhom(forWhom) def printGettingAllEntityItemsForWhom(entityItem, forWhom, i=0, count=0, query='', qualifier='', entityType=None): if GC.Values[GC.SHOW_GETTINGS]: setGettingAllEntityItemsForWhom(entityItem, forWhom, query=query, qualifier=qualifier) writeStderr(f'{Msg.GETTING_ALL} {Ent.PluralGetting()}{Ent.GettingPreQualifier()} {Msg.FOR} {forWhom}{Ent.MayTakeTime(entityType)}{currentCountNL(i, count)}') def printGotEntityItemsForWhom(count): if GC.Values[GC.SHOW_GETTINGS]: writeStderr(f'{Msg.GOT} {count} {Ent.ChooseGetting(count)}{Ent.GettingPostQualifier()} {Msg.FOR} {Ent.GettingForWhom()}\n') def printGettingEntityItem(entityType, entityItem, i=0, count=0): if GC.Values[GC.SHOW_GETTINGS]: writeStderr(f'{Msg.GETTING} {Ent.Singular(entityType)} {entityItem}{currentCountNL(i, count)}') def printGettingEntityItemForWhom(entityItem, forWhom, i=0, count=0): if GC.Values[GC.SHOW_GETTINGS]: Ent.SetGetting(entityItem) Ent.SetGettingForWhom(forWhom) writeStderr(f'{Msg.GETTING} {Ent.PluralGetting()} {Msg.FOR} {forWhom}{currentCountNL(i, count)}') def stderrEntityMessage(entityValueList, message, i=0, count=0): writeStderr(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[message], currentCountNL(i, count))) FIRST_ITEM_MARKER = '%%first_item%%' LAST_ITEM_MARKER = '%%last_item%%' TOTAL_ITEMS_MARKER = '%%total_items%%' def getPageMessage(showFirstLastItems=False, showDate=None): if not GC.Values[GC.SHOW_GETTINGS]: return None pageMessage = f'{Msg.GOT} {TOTAL_ITEMS_MARKER} {{0}}' if showDate: pageMessage += f' on {showDate}' if showFirstLastItems: pageMessage += f': {FIRST_ITEM_MARKER} - {LAST_ITEM_MARKER}' else: pageMessage += '...' 
if GC.Values[GC.SHOW_GETTINGS_GOT_NL]: pageMessage += '\n' else: GM.Globals[GM.LAST_GOT_MSG_LEN] = 0 return pageMessage def getPageMessageForWhom(forWhom=None, showFirstLastItems=False, showDate=None, clearLastGotMsgLen=True): if not GC.Values[GC.SHOW_GETTINGS]: return None if forWhom: Ent.SetGettingForWhom(forWhom) pageMessage = f'{Msg.GOT} {TOTAL_ITEMS_MARKER} {{0}}{Ent.GettingPostQualifier()} {Msg.FOR} {Ent.GettingForWhom()}' if showDate: pageMessage += f' on {showDate}' if showFirstLastItems: pageMessage += f': {FIRST_ITEM_MARKER} - {LAST_ITEM_MARKER}' else: pageMessage += '...' if GC.Values[GC.SHOW_GETTINGS_GOT_NL]: pageMessage += '\n' elif clearLastGotMsgLen: GM.Globals[GM.LAST_GOT_MSG_LEN] = 0 return pageMessage def printLine(message): writeStdout(message+'\n') def printBlankLine(): writeStdout('\n') def printKeyValueList(kvList): writeStdout(formatKeyValueList(Ind.Spaces(), kvList, '\n')) def printKeyValueListWithCount(kvList, i, count): writeStdout(formatKeyValueList(Ind.Spaces(), kvList, currentCountNL(i, count))) def printKeyValueDict(kvDict): for key, value in iter(kvDict.items()): writeStdout(formatKeyValueList(Ind.Spaces(), [key, value], '\n')) def printKeyValueWithCRsNLs(key, value): if value.find('\n') >= 0 or value.find('\r') >= 0: if GC.Values[GC.SHOW_CONVERT_CR_NL]: printKeyValueList([key, escapeCRsNLs(value)]) else: printKeyValueList([key, '']) Ind.Increment() printKeyValueList([Ind.MultiLineText(value)]) Ind.Decrement() else: printKeyValueList([key, value]) def printJSONKey(key): writeStdout(formatKeyValueList(Ind.Spaces(), [key, None], '')) def printJSONValue(value): writeStdout(formatKeyValueList(' ', [value], '\n')) def printEntity(entityValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList), currentCountNL(i, count))) def printEntityMessage(entityValueList, message, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[message], currentCountNL(i, count))) def printEntitiesCount(entityType, entityList): writeStdout(formatKeyValueList(Ind.Spaces(), [Ent.Plural(entityType), None if entityList is None else f'({len(entityList)})'], '\n')) def printEntityKVList(entityValueList, infoKVList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+infoKVList, currentCountNL(i, count))) def performAction(entityType, entityValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), [f'{Act.ToPerform()} {Ent.Singular(entityType)} {entityValue}'], currentCountNL(i, count))) def performActionNumItems(itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), [f'{Act.ToPerform()} {itemCount} {Ent.Choose(itemType, itemCount)}'], currentCountNL(i, count))) def performActionModifierNumItems(modifier, itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), [f'{Act.ToPerform()} {modifier} {itemCount} {Ent.Choose(itemType, itemCount)}'], currentCountNL(i, count))) def actionPerformedNumItems(itemCount, itemType, i=0, count=0): writeStderr(formatKeyValueList(Ind.Spaces(), [f'{itemCount} {Ent.Choose(itemType, itemCount)} {Act.Performed()} '], currentCountNL(i, count))) def actionFailedNumItems(itemCount, itemType, errMessage, i=0, count=0): writeStderr(formatKeyValueList(Ind.Spaces(), [f'{itemCount} {Ent.Choose(itemType, itemCount)} {Act.Failed()}: {errMessage} '], currentCountNL(i, count))) def actionNotPerformedNumItemsWarning(itemCount, itemType, errMessage, i=0, count=0): 
setSysExitRC(ACTION_NOT_PERFORMED_RC) writeStderr(formatKeyValueList(Ind.Spaces(), [Ent.Choose(itemType, itemCount), itemCount, Act.NotPerformed(), errMessage], currentCountNL(i, count))) def entityPerformAction(entityValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()}'], currentCountNL(i, count))) def entityPerformActionNumItems(entityValueList, itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {itemCount} {Ent.Choose(itemType, itemCount)}'], currentCountNL(i, count))) def entityPerformActionModifierNumItems(entityValueList, modifier, itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier} {itemCount} {Ent.Choose(itemType, itemCount)}'], currentCountNL(i, count))) def entityPerformActionNumItemsModifier(entityValueList, itemCount, itemType, modifier, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {itemCount} {Ent.Choose(itemType, itemCount)} {modifier}'], currentCountNL(i, count))) def entityPerformActionSubItemModifierNumItems(entityValueList, subitemType, modifier, itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {Ent.Plural(subitemType)} {modifier} {itemCount} {Ent.Choose(itemType, itemCount)}'], currentCountNL(i, count))) def entityPerformActionSubItemModifierNumItemsModifierNewValue(entityValueList, subitemType, modifier1, itemCount, itemType, modifier2, newValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+ [f'{Act.ToPerform()} {Ent.Plural(subitemType)} {modifier1} {itemCount} {Ent.Choose(itemType, itemCount)} {modifier2}', newValue], currentCountNL(i, count))) def entityPerformActionModifierNumItemsModifier(entityValueList, modifier1, itemCount, itemType, modifier2, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier1} {itemCount} {Ent.Choose(itemType, itemCount)} {modifier2}'], currentCountNL(i, count))) def entityPerformActionModifierItemValueList(entityValueList, modifier, infoTypeValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', None]+Ent.FormatEntityValueList(infoTypeValueList), currentCountNL(i, count))) def entityPerformActionModifierNewValue(entityValueList, modifier, newValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', newValue], currentCountNL(i, count))) def entityPerformActionModifierNewValueItemValueList(entityValueList, modifier, newValue, infoTypeValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.ToPerform()} {modifier}', newValue]+Ent.FormatEntityValueList(infoTypeValueList), currentCountNL(i, count))) def entityPerformActionItemValue(entityValueList, itemType, itemValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.ToPerform(), None, Ent.Singular(itemType), itemValue], currentCountNL(i, count))) def entityPerformActionInfo(entityValueList, infoValue, i=0, count=0): 
writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.ToPerform(), infoValue], currentCountNL(i, count))) def entityActionPerformed(entityValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Performed()], currentCountNL(i, count))) def entityActionPerformedMessage(entityValueList, message, i=0, count=0): if message: writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Performed(), message], currentCountNL(i, count))) else: writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.Performed()], currentCountNL(i, count))) def entityNumItemsActionPerformed(entityValueList, itemCount, itemType, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{itemCount} {Ent.Choose(itemType, itemCount)} {Act.Performed()}'], currentCountNL(i, count))) def entityModifierActionPerformed(entityValueList, modifier, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.Performed()} {modifier}', None], currentCountNL(i, count))) def entityModifierItemValueListActionPerformed(entityValueList, modifier, infoTypeValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.Performed()} {modifier}', None]+Ent.FormatEntityValueList(infoTypeValueList), currentCountNL(i, count))) def entityModifierNewValueActionPerformed(entityValueList, modifier, newValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.Performed()} {modifier}', newValue], currentCountNL(i, count))) def entityModifierNewValueItemValueListActionPerformed(entityValueList, modifier, newValue, infoTypeValueList, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.Performed()} {modifier}', newValue]+Ent.FormatEntityValueList(infoTypeValueList), currentCountNL(i, count))) def entityModifierNewValueKeyValueActionPerformed(entityValueList, modifier, newValue, infoKey, infoValue, i=0, count=0): writeStdout(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[f'{Act.Performed()} {modifier}', newValue, infoKey, infoValue], currentCountNL(i, count))) def cleanFilename(filename): return sanitize_filename(filename, '_') def setFilePath(fileName): if fileName.startswith('./') or fileName.startswith('.\\'): fileName = os.path.join(os.getcwd(), fileName[2:]) else: fileName = os.path.expanduser(fileName) if not os.path.isabs(fileName): fileName = os.path.join(GC.Values[GC.DRIVE_DIR], fileName) return fileName def uniqueFilename(targetFolder, filetitle, overwrite, extension=None): filename = filetitle y = 0 while True: if extension is not None and filename.lower()[-len(extension):] != extension.lower(): filename += extension filepath = os.path.join(targetFolder, filename) if overwrite or not os.path.isfile(filepath): return (filepath, filename) y += 1 filename = f'({y})-{filetitle}' def cleanFilepath(filepath): return sanitize_filepath(filepath, platform='auto') def fileErrorMessage(filename, e, entityType=Ent.FILE): return f'{Ent.Singular(entityType)}: {filename}, {str(e)}' def fdErrorMessage(f, defaultFilename, e): return fileErrorMessage(getattr(f, 'name') if hasattr(f, 'name') else defaultFilename, e) # Set file encoding to handle UTF8 BOM def setEncoding(mode, encoding): if 'b' 
in mode: return {} if not encoding: encoding = GM.Globals[GM.SYS_ENCODING] if 'r' in mode and encoding.lower().replace('-', '') == 'utf8': encoding = UTF8_SIG return {'encoding': encoding} def StringIOobject(initbuff=None): if initbuff is None: return io.StringIO() return io.StringIO(initbuff) # Open a file def openFile(filename, mode=DEFAULT_FILE_READ_MODE, encoding=None, errors=None, newline=None, continueOnError=False, displayError=True, stripUTFBOM=False): try: if filename != '-': kwargs = setEncoding(mode, encoding) f = open(os.path.expanduser(filename), mode, errors=errors, newline=newline, **kwargs) if stripUTFBOM: if 'b' in mode: if f.read(3) != b'\xef\xbb\xbf': f.seek(0) elif not kwargs['encoding'].lower().startswith('utf'): if f.read(3).encode('iso-8859-1', 'replace') != codecs.BOM_UTF8: f.seek(0) else: if f.read(1) != '\ufeff': f.seek(0) return f if 'r' in mode: return StringIOobject(str(sys.stdin.read())) if 'b' not in mode: return sys.stdout return os.fdopen(os.dup(sys.stdout.fileno()), 'wb') except (IOError, LookupError, UnicodeDecodeError, UnicodeError) as e: if continueOnError: if displayError: stderrWarningMsg(fileErrorMessage(filename, e)) setSysExitRC(FILE_ERROR_RC) return None systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) # Close a file def closeFile(f, forceFlush=False): try: if forceFlush: # Necessary to make sure file is flushed by both Python and OS # https://stackoverflow.com/a/13762137/1503886 f.flush() os.fsync(f.fileno()) f.close() return True except IOError as e: stderrErrorMsg(fdErrorMessage(f, UNKNOWN, e)) setSysExitRC(FILE_ERROR_RC) return False # Read a file def readFile(filename, mode=DEFAULT_FILE_READ_MODE, encoding=None, newline=None, continueOnError=False, displayError=True): try: if filename != '-': kwargs = setEncoding(mode, encoding) with open(os.path.expanduser(filename), mode, newline=newline, **kwargs) as f: return f.read() return str(sys.stdin.read()) except (IOError, LookupError, UnicodeDecodeError, UnicodeError) as e: if continueOnError: if displayError: stderrWarningMsg(fileErrorMessage(filename, e)) setSysExitRC(FILE_ERROR_RC) return None systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) # Write a file def writeFile(filename, data, mode=DEFAULT_FILE_WRITE_MODE, continueOnError=False, displayError=True): try: if filename != '-': kwargs = setEncoding(mode, None) with open(os.path.expanduser(filename), mode, **kwargs) as f: f.write(data) return True GM.Globals[GM.STDOUT].get(GM.REDIRECT_MULTI_FD, sys.stdout).write(data) return True except (IOError, LookupError, UnicodeDecodeError, UnicodeError) as e: if continueOnError: if displayError: stderrErrorMsg(fileErrorMessage(filename, e)) setSysExitRC(FILE_ERROR_RC) return False systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) # Write a file, return error def writeFileReturnError(filename, data, mode=DEFAULT_FILE_WRITE_MODE): try: kwargs = {'encoding': GM.Globals[GM.SYS_ENCODING]} if 'b' not in mode else {} with open(os.path.expanduser(filename), mode, **kwargs) as f: f.write(data) return (True, None) except (IOError, LookupError, UnicodeDecodeError, UnicodeError) as e: return (False, e) # Delete a file def deleteFile(filename, continueOnError=False, displayError=True): if os.path.isfile(filename): try: os.remove(filename) except OSError as e: if continueOnError: if displayError: stderrWarningMsg(fileErrorMessage(filename, e)) return systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) def getGDocSheetDataRetryWarning(entityValueList, errMsg, i=0, 
count=0): action = Act.Get() Act.Set(Act.RETRIEVE_DATA) stderrWarningMsg(formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.NotPerformed(), errMsg, 'Retry', ''], currentCountNL(i, count))) Act.Set(action) def getGDocSheetDataFailedExit(entityValueList, errMsg, i=0, count=0): Act.Set(Act.RETRIEVE_DATA) systemErrorExit(ACTION_FAILED_RC, formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.NotPerformed(), errMsg], currentCountNL(i, count))) GDOC_FORMAT_MIME_TYPES = { 'gcsv': MIMETYPE_TEXT_CSV, 'gdoc': MIMETYPE_TEXT_PLAIN, 'ghtml': MIMETYPE_TEXT_HTML, } # gdoc | def getGDocData(gformat): mimeType = GDOC_FORMAT_MIME_TYPES[gformat] user = getEmailAddress() fileIdEntity = getDriveFileEntity(queryShortcutsOK=False) user, drive, jcount = _validateUserGetFileIDs(user, 0, 0, fileIdEntity) if not drive: sys.exit(GM.Globals[GM.SYSEXITRC]) if jcount == 0: getGDocSheetDataFailedExit([Ent.USER, user], Msg.NO_ENTITIES_FOUND.format(Ent.Singular(Ent.DRIVE_FILE))) if jcount > 1: getGDocSheetDataFailedExit([Ent.USER, user], Msg.MULTIPLE_ENTITIES_FOUND.format(Ent.Plural(Ent.DRIVE_FILE), jcount, ','.join(fileIdEntity['list']))) fileId = fileIdEntity['list'][0] try: result = callGAPI(drive.files(), 'get', throwReasons=GAPI.DRIVE_GET_THROW_REASONS, fileId=fileId, fields='name,mimeType,exportLinks', supportsAllDrives=True) # Google Doc if 'exportLinks' in result: if mimeType not in result['exportLinks']: getGDocSheetDataFailedExit([Ent.USER, user, Ent.DRIVE_FILE, result['name']], Msg.INVALID_MIMETYPE.format(result['mimeType'], mimeType)) f = TemporaryFile(mode='w+', encoding=UTF8) _, content = drive._http.request(uri=result['exportLinks'][mimeType], method='GET') f.write(content.decode(UTF8_SIG)) f.seek(0) return f # Drive File if result['mimeType'] != mimeType: getGDocSheetDataFailedExit([Ent.USER, user, Ent.DRIVE_FILE, result['name']], Msg.INVALID_MIMETYPE.format(result['mimeType'], mimeType)) fb = TemporaryFile(mode='wb+') request = drive.files().get_media(fileId=fileId) downloader = googleapiclient.http.MediaIoBaseDownload(fb, request) done = False while not done: _, done = downloader.next_chunk() f = TemporaryFile(mode='w+', encoding=UTF8) fb.seek(0) f.write(fb.read().decode(UTF8_SIG)) fb.close() f.seek(0) return f except GAPI.fileNotFound: getGDocSheetDataFailedExit([Ent.USER, user, Ent.DOCUMENT, fileId], Msg.DOES_NOT_EXIST) except (IOError, httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if f: f.close() getGDocSheetDataFailedExit([Ent.USER, user, Ent.DOCUMENT, fileId], str(e)) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: userDriveServiceNotEnabledWarning(user, str(e)) sys.exit(GM.Globals[GM.SYSEXITRC]) HTML_TITLE_PATTERN = re.compile(r'.*<title>(.+)</title>') # gsheet | def getGSheetData(): user = getEmailAddress() fileIdEntity = getDriveFileEntity(queryShortcutsOK=False) sheetEntity = getSheetEntity(False) user, drive, jcount = _validateUserGetFileIDs(user, 0, 0, fileIdEntity) if not drive: sys.exit(GM.Globals[GM.SYSEXITRC]) if jcount == 0: getGDocSheetDataFailedExit([Ent.USER, user], Msg.NO_ENTITIES_FOUND.format(Ent.Singular(Ent.DRIVE_FILE))) if jcount > 1: getGDocSheetDataFailedExit([Ent.USER, user], Msg.MULTIPLE_ENTITIES_FOUND.format(Ent.Plural(Ent.DRIVE_FILE), jcount, ','.join(fileIdEntity['list']))) _, sheet = buildGAPIServiceObject(API.SHEETS, user) if not sheet: sys.exit(GM.Globals[GM.SYSEXITRC]) fileId = fileIdEntity['list'][0] try: result = callGAPI(drive.files(), 'get',
throwReasons=GAPI.DRIVE_GET_THROW_REASONS, fileId=fileId, fields='name,mimeType', supportsAllDrives=True) if result['mimeType'] != MIMETYPE_GA_SPREADSHEET: getGDocSheetDataFailedExit([Ent.USER, user, Ent.DRIVE_FILE, result['name']], Msg.INVALID_MIMETYPE.format(result['mimeType'], MIMETYPE_GA_SPREADSHEET)) spreadsheet = callGAPI(sheet.spreadsheets(), 'get', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=fileId, fields='spreadsheetUrl,sheets(properties(sheetId,title))') sheetId = getSheetIdFromSheetEntity(spreadsheet, sheetEntity) if sheetId is None: getGDocSheetDataFailedExit([Ent.USER, user, Ent.SPREADSHEET, result['name'], sheetEntity['sheetType'], sheetEntity['sheetValue']], Msg.NOT_FOUND) spreadsheetUrl = f'{re.sub("/edit.*$", "/export", spreadsheet["spreadsheetUrl"])}?format=csv&id={fileId}&gid={sheetId}' f = TemporaryFile(mode='w+', encoding=UTF8) if GC.Values[GC.DEBUG_LEVEL] > 0: sys.stderr.write(f'Debug: spreadsheetUrl: {spreadsheetUrl}\n') triesLimit = 3 for n in range(1, triesLimit+1): _, content = drive._http.request(uri=spreadsheetUrl, method='GET') # Check for HTML error message instead of data if content[0:15] != b'<!DOCTYPE html>': break tg = HTML_TITLE_PATTERN.match(content[0:600].decode('utf-8')) errMsg = tg.group(1) if tg else 'Unknown error' getGDocSheetDataRetryWarning([Ent.USER, user, Ent.SPREADSHEET, result['name'], sheetEntity['sheetType'], sheetEntity['sheetValue']], errMsg, n, triesLimit) time.sleep(20) else: getGDocSheetDataFailedExit([Ent.USER, user, Ent.SPREADSHEET, result['name'], sheetEntity['sheetType'], sheetEntity['sheetValue']], errMsg) f.write(content.decode(UTF8_SIG)) f.seek(0) return f except GAPI.fileNotFound: getGDocSheetDataFailedExit([Ent.USER, user, Ent.SPREADSHEET, fileId], Msg.DOES_NOT_EXIST) except (GAPI.notFound, GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.failedPrecondition) as e: getGDocSheetDataFailedExit([Ent.USER, user, Ent.SPREADSHEET, fileId, sheetEntity['sheetType'], sheetEntity['sheetValue']], str(e)) except (IOError, httplib2.HttpLib2Error) as e: if f: f.close() getGDocSheetDataFailedExit([Ent.USER, user, Ent.SPREADSHEET, fileId, sheetEntity['sheetType'], sheetEntity['sheetValue']], str(e)) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: userDriveServiceNotEnabledWarning(user, str(e)) sys.exit(GM.Globals[GM.SYSEXITRC]) BUCKET_OBJECT_PATTERNS = [ {'pattern': re.compile(r'https://storage.(?:googleapis|cloud.google).com/(.+?)/(.+)'), 'unquote': True}, {'pattern': re.compile(r'gs://(.+?)/(.+)'), 'unquote': False}, {'pattern': re.compile(r'(.+?)/(.+)'), 'unquote': False}, ] def getBucketObjectName(): uri = getString(Cmd.OB_STRING) for pattern in BUCKET_OBJECT_PATTERNS: mg = re.search(pattern['pattern'], uri) if mg: bucket = mg.group(1) s_object = mg.group(2) if not pattern['unquote'] else unquote(mg.group(2)) return (bucket, s_object, f'{bucket}/{s_object}') systemErrorExit(ACTION_NOT_PERFORMED_RC, f'Invalid : {uri}') GCS_FORMAT_MIME_TYPES = { 'gcscsv': MIMETYPE_TEXT_CSV, 'gcsdoc': MIMETYPE_TEXT_PLAIN, 'gcshtml': MIMETYPE_TEXT_HTML, } # gcscsv|gcshtml|gcsdoc def getStorageFileData(gcsformat, returnData=True): mimeType = GCS_FORMAT_MIME_TYPES[gcsformat] bucket, s_object, bucketObject = getBucketObjectName() s = buildGAPIObject(API.STORAGEREAD) try: result = callGAPI(s.objects(), 'get', throwReasons=[GAPI.NOT_FOUND, GAPI.FORBIDDEN], bucket=bucket, object=s_object, projection='noAcl', fields='contentType') except
GAPI.notFound: entityDoesNotExistExit(Ent.CLOUD_STORAGE_FILE, bucketObject) except GAPI.forbidden as e: entityActionFailedExit([Ent.CLOUD_STORAGE_FILE, bucketObject], str(e)) if result['contentType'] != mimeType: getGDocSheetDataFailedExit([Ent.CLOUD_STORAGE_FILE, bucketObject], Msg.INVALID_MIMETYPE.format(result['contentType'], mimeType)) fb = TemporaryFile(mode='wb+') try: request = s.objects().get_media(bucket=bucket, object=s_object) downloader = googleapiclient.http.MediaIoBaseDownload(fb, request) done = False while not done: _, done = downloader.next_chunk() fb.seek(0) if returnData: data = fb.read().decode(UTF8) fb.close() return data f = TemporaryFile(mode='w+', encoding=UTF8) f.write(fb.read().decode(UTF8_SIG)) fb.close() f.seek(0) return f except googleapiclient.http.HttpError as e: mg = HTTP_ERROR_PATTERN.match(str(e)) getGDocSheetDataFailedExit([Ent.CLOUD_STORAGE_FILE, bucketObject], mg.group(1) if mg else str(e)) # def openCSVFileReader(filename, fieldnames=None): filenameLower = filename.lower() if filenameLower == 'gsheet': f = getGSheetData() getCharSet() elif filenameLower in {'gcsv', 'gdoc'}: f = getGDocData(filenameLower) getCharSet() elif filenameLower in {'gcscsv', 'gcsdoc'}: f = getStorageFileData(filenameLower, False) getCharSet() else: encoding = getCharSet() f = openFile(filename, mode=DEFAULT_CSV_READ_MODE, encoding=encoding) if checkArgumentPresent('warnifnodata'): loc = f.tell() try: if not f.readline() or not f.readline(): stderrWarningMsg(fileErrorMessage(filename, Msg.NO_CSV_FILE_DATA_FOUND)) sys.exit(NO_ENTITIES_FOUND_RC) f.seek(loc) except (IOError, UnicodeDecodeError, UnicodeError) as e: systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) if checkArgumentPresent('columndelimiter'): columnDelimiter = getCharacter() else: columnDelimiter = GC.Values[GC.CSV_INPUT_COLUMN_DELIMITER] if checkArgumentPresent('noescapechar'): noEscapeChar = getBoolean() else: noEscapeChar = GC.Values[GC.CSV_INPUT_NO_ESCAPE_CHAR] if checkArgumentPresent('quotechar'): quotechar = getCharacter() else: quotechar = GC.Values[GC.CSV_INPUT_QUOTE_CHAR] if not checkArgumentPresent('endcsv') and checkArgumentPresent('fields'): fieldnames = shlexSplitList(getString(Cmd.OB_FIELD_NAME_LIST)) try: csvFile = csv.DictReader(f, fieldnames=fieldnames, delimiter=columnDelimiter, escapechar='\\' if not noEscapeChar else None, quotechar=quotechar) return (f, csvFile, csvFile.fieldnames if csvFile.fieldnames is not None else []) except (csv.Error, UnicodeDecodeError, UnicodeError) as e: systemErrorExit(FILE_ERROR_RC, e) def incrAPICallsRetryData(errMsg, delta): GM.Globals[GM.API_CALLS_RETRY_DATA].setdefault(errMsg, [0, 0.0]) GM.Globals[GM.API_CALLS_RETRY_DATA][errMsg][0] += 1 GM.Globals[GM.API_CALLS_RETRY_DATA][errMsg][1] += delta def initAPICallsRateCheck(): GM.Globals[GM.RATE_CHECK_COUNT] = 0 GM.Globals[GM.RATE_CHECK_START] = time.time() def checkAPICallsRate(): GM.Globals[GM.RATE_CHECK_COUNT] += 1 if GM.Globals[GM.RATE_CHECK_COUNT] >= GC.Values[GC.API_CALLS_RATE_LIMIT]: current = time.time() delta = int(current-GM.Globals[GM.RATE_CHECK_START]) if 0 <= delta < 60: delta = (60-delta)+3 error_message = f'API calls per 60 seconds limit {GC.Values[GC.API_CALLS_RATE_LIMIT]} exceeded' writeStderr(f'{WARNING_PREFIX}{error_message}: Backing off: {delta} seconds\n') flushStderr() time.sleep(delta) if GC.Values[GC.SHOW_API_CALLS_RETRY_DATA]: incrAPICallsRetryData(error_message, delta) GM.Globals[GM.RATE_CHECK_START] = time.time() else: GM.Globals[GM.RATE_CHECK_START] = current 
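# Worked example of the back-off above (comment only, not executed): with the
# configured rate limit at 100 and the 100th call arriving 45 seconds into the
# window, delta = (60-45)+3 = 18, so the process sleeps 18 seconds before the
# call counter is reset below and a new 60-second window begins.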
GM.Globals[GM.RATE_CHECK_COUNT] = 0 def openGAMCommandLog(Globals, name): try: Globals[GM.CMDLOG_LOGGER] = logging.getLogger(name) Globals[GM.CMDLOG_LOGGER].setLevel(logging.INFO) Globals[GM.CMDLOG_HANDLER] = RotatingFileHandler(GC.Values[GC.CMDLOG], maxBytes=1024*GC.Values[GC.CMDLOG_MAX_KILO_BYTES], backupCount=GC.Values[GC.CMDLOG_MAX_BACKUPS], encoding=GC.Values[GC.CHARSET]) Globals[GM.CMDLOG_LOGGER].addHandler(Globals[GM.CMDLOG_HANDLER]) except Exception as e: Globals[GM.CMDLOG_LOGGER] = None systemErrorExit(CONFIG_ERROR_RC, Msg.LOGGING_INITIALIZATION_ERROR.format(str(e))) def writeGAMCommandLog(Globals, logCmd, sysRC): Globals[GM.CMDLOG_LOGGER].info(f'{currentISOformatTimeStamp()},{sysRC},{logCmd}') def closeGAMCommandLog(Globals): try: Globals[GM.CMDLOG_HANDLER].flush() Globals[GM.CMDLOG_HANDLER].close() Globals[GM.CMDLOG_LOGGER].removeHandler(Globals[GM.CMDLOG_HANDLER]) except Exception: pass Globals[GM.CMDLOG_LOGGER] = None # Set global variables from config file # Return True if there are additional commands on the command line def SetGlobalVariables(): def _stringInQuotes(value): return (len(value) > 1) and (((value.startswith('"') and value.endswith('"'))) or ((value.startswith("'") and value.endswith("'")))) def _stripStringQuotes(value): if _stringInQuotes(value): return value[1:-1] return value def _quoteStringIfLeadingTrailingBlanks(value): if not value: return "''" if _stringInQuotes(value): return value if (value[0] != ' ') and (value[-1] != ' '): return value return f"'{value}'" def _getDefault(itemName, itemEntry, oldGamPath): if GC.VAR_SIGFILE in itemEntry: GC.Defaults[itemName] = itemEntry[GC.VAR_SFFT][os.path.isfile(os.path.join(oldGamPath, itemEntry[GC.VAR_SIGFILE]))] elif GC.VAR_ENVVAR in itemEntry: value = os.environ.get(itemEntry[GC.VAR_ENVVAR], GC.Defaults[itemName]) if itemEntry[GC.VAR_TYPE] in [GC.TYPE_INTEGER, GC.TYPE_FLOAT]: try: number = int(value) if itemEntry[GC.VAR_TYPE] == GC.TYPE_INTEGER else float(value) minVal, maxVal = itemEntry[GC.VAR_LIMITS] if (minVal is not None) and (number < minVal): number = minVal elif (maxVal is not None) and (number > maxVal): number = maxVal except ValueError: number = GC.Defaults[itemName] value = str(number) elif itemEntry[GC.VAR_TYPE] == GC.TYPE_STRING: value = _quoteStringIfLeadingTrailingBlanks(value) GC.Defaults[itemName] = value def _selectSection(): value = getString(Cmd.OB_SECTION_NAME, minLen=0) if (not value) or (value.upper() == configparser.DEFAULTSECT): return configparser.DEFAULTSECT if GM.Globals[GM.PARSER].has_section(value): return value Cmd.Backup() usageErrorExit(formatKeyValueList('', [Ent.Singular(Ent.SECTION), value, Msg.NOT_FOUND], '')) def _showSections(): printKeyValueList([Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE]]) Ind.Increment() for section in [configparser.DEFAULTSECT]+sorted(GM.Globals[GM.PARSER].sections()): printKeyValueList([f'{section}{" *" if section == sectionName else ""}']) Ind.Decrement() def _checkMakeDir(itemName): if not os.path.isdir(GC.Defaults[itemName]): try: os.makedirs(GC.Defaults[itemName]) printKeyValueList([Act.PerformedName(Act.CREATE), GC.Defaults[itemName]]) except OSError as e: if not os.path.isdir(GC.Defaults[itemName]): systemErrorExit(FILE_ERROR_RC, e) def _copyCfgFile(srcFile, targetDir, oldGamPath): if (not srcFile) or os.path.isabs(srcFile): return dstFile = os.path.join(GC.Defaults[targetDir], srcFile) if os.path.isfile(dstFile): return srcFile = os.path.join(oldGamPath, srcFile) if not os.path.isfile(srcFile): return data = readFile(srcFile, 
continueOnError=True, displayError=False) if (data is not None) and writeFile(dstFile, data, continueOnError=True): printKeyValueList([Act.PerformedName(Act.COPY), srcFile, Msg.TO, dstFile]) def _printValueError(sectionName, itemName, value, errMessage, sysRC=CONFIG_ERROR_RC): kvlMsg = formatKeyValueList('', [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.SECTION), sectionName, Ent.Singular(Ent.ITEM), itemName, Ent.Singular(Ent.VALUE), value, errMessage], '') if sysRC != 0: status['errors'] = True printErrorMessage(sysRC, kvlMsg) else: writeStderr(formatKeyValueList(Ind.Spaces(), [WARNING, kvlMsg], '\n')) def _getCfgBoolean(sectionName, itemName): value = GM.Globals[GM.PARSER].get(sectionName, itemName).lower() if value in TRUE_VALUES: return True if value in FALSE_VALUES: return False _printValueError(sectionName, itemName, value, f'{Msg.EXPECTED}: {formatChoiceList(TRUE_FALSE)}') return False def _getCfgCharacter(sectionName, itemName): value = codecs.escape_decode(bytes(_stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName)), UTF8))[0].decode(UTF8) if not value and (itemName == 'csv_output_field_delimiter'): return ' ' if not value and (itemName in {'csv_input_escape_char', 'csv_output_escape_char'}): return None if len(value) == 1: return value _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.EXPECTED}: {integerLimits(1, 1, Msg.STRING_LENGTH)}') return '' def _getCfgChoice(sectionName, itemName): value = _stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName)).lower() choices = GC.VAR_INFO[itemName][GC.VAR_CHOICES] if value in choices: return choices[value] _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.EXPECTED}: {",".join(choices)}') return '' def _getCfgLocale(sectionName, itemName): value = _stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName)).lower().replace('_', '-') if value in LOCALE_CODES_MAP: return LOCALE_CODES_MAP[value] _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.EXPECTED}: {",".join(LOCALE_CODES_MAP)}') return '' def _getCfgNumber(sectionName, itemName): value = GM.Globals[GM.PARSER].get(sectionName, itemName) minVal, maxVal = GC.VAR_INFO[itemName][GC.VAR_LIMITS] try: number = int(value) if GC.VAR_INFO[itemName][GC.VAR_TYPE] == GC.TYPE_INTEGER else float(value) if ((minVal is None) or (number >= minVal)) and ((maxVal is None) or (number <= maxVal)): return number if (minVal is not None) and (number < minVal): number = minVal else: number = maxVal _printValueError(sectionName, itemName, value, f'{Msg.EXPECTED}: {integerLimits(minVal, maxVal)}, {Msg.USED}: {number}', sysRC=0) return number except ValueError: pass _printValueError(sectionName, itemName, value, f'{Msg.EXPECTED}: {integerLimits(minVal, maxVal)}') return 0 def _getCfgHeaderFilter(sectionName, itemName): value = GM.Globals[GM.PARSER].get(sectionName, itemName) headerFilters = [] if not value or (len(value) == 2 and _stringInQuotes(value)): return headerFilters splitStatus, filters = shlexSplitListStatus(value) if splitStatus: for filterStr in filters: try: headerFilters.append(re.compile(filterStr, re.IGNORECASE)) except re.error as e: _printValueError(sectionName, itemName, f'"{filterStr}"', f'{Msg.INVALID_RE}: {e}') else: _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.INVALID_LIST}: {filters}') return headerFilters def _getCfgHeaderFilterFromForce(sectionName, itemName): headerFilters = [] for filterStr in GC.Values[itemName]: try: headerFilters.append(re.compile(fr'^{filterStr}$')) 
except re.error as e: _printValueError(sectionName, itemName, f'"{filterStr}"', f'{Msg.INVALID_RE}: {e}') return headerFilters ROW_FILTER_ANY_ALL_PATTERN = re.compile(r'^(any:|all:)(.+)$', re.IGNORECASE) ROW_FILTER_COMP_PATTERN = re.compile(r'^(date|time|count|length)\s*([<>]=?|=|!=)(.+)$', re.IGNORECASE) ROW_FILTER_RANGE_PATTERN = re.compile(r'^(daterange|timerange|countrange|lengthrange)(=|!=)(\S+)/(\S+)$', re.IGNORECASE) ROW_FILTER_TIMEOFDAYRANGE_PATTERN = re.compile(r'^(timeofdayrange)(=|!=)(\d\d):(\d\d)/(\d\d):(\d\d)$', re.IGNORECASE) ROW_FILTER_BOOL_PATTERN = re.compile(r'^(boolean):(.+)$', re.IGNORECASE) ROW_FILTER_TEXT_PATTERN = re.compile(r'^(text)([<>]=?|=|!=)(.*)$', re.IGNORECASE) ROW_FILTER_TEXTRANGE_PATTERN = re.compile(r'^(textrange)(=|!=)(.*)/(.*)$', re.IGNORECASE) ROW_FILTER_RE_PATTERN = re.compile(r'^(regex|regexcs|notregex|notregexcs):(.*)$', re.IGNORECASE) ROW_FILTER_DATA_PATTERN = re.compile(r'^(data|notdata):(list|file|csvfile) +(.+)$', re.IGNORECASE) REGEX_CHARS = '^$*+|$[{(' def _getCfgRowFilter(sectionName, itemName): value = GM.Globals[GM.PARSER].get(sectionName, itemName) rowFilters = [] if not value: return rowFilters if value.startswith('{'): try: filterDict = json.loads(value.encode('unicode-escape').decode(UTF8)) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.FAILED_TO_PARSE_AS_JSON}: {str(e)}') return rowFilters else: filterDict = {} status, filterList = shlexSplitListStatus(value) if not status: _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.FAILED_TO_PARSE_AS_LIST}: {str(filterList)}') return rowFilters for filterVal in filterList: if not filterVal: continue try: filterTokens = shlexSplitList(filterVal, ':') column = filterTokens[0] filterStr = ':'.join(filterTokens[1:]) except ValueError: _printValueError(sectionName, itemName, f'"{filterVal}"', f'{Msg.EXPECTED}: column:filter') continue filterDict[column] = filterStr for column, filterStr in iter(filterDict.items()): for c in REGEX_CHARS: if c in column: columnPat = column break else: columnPat = f'^{column}$' try: columnPat = re.compile(columnPat, re.IGNORECASE) except re.error as e: _printValueError(sectionName, itemName, f'"{column}"', f'{Msg.INVALID_RE}: {e}') continue anyMatch = True mg = ROW_FILTER_ANY_ALL_PATTERN.match(filterStr) if mg: anyMatch = mg.group(1).lower() == 'any:' filterStr = mg.group(2) mg = ROW_FILTER_COMP_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() if filterType in {'date', 'time'}: if filterType == 'date': valid, filterValue = getRowFilterDateOrDeltaFromNow(mg.group(3)) else: valid, filterValue = getRowFilterTimeOrDeltaFromNow(mg.group(3)) if valid: rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), filterValue)) else: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: {filterValue}') else: # filterType in {'count', 'length'}: if mg.group(3).isdigit(): rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), int(mg.group(3)))) else: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: ') continue mg = ROW_FILTER_TEXT_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), mg.group(3))) continue mg = ROW_FILTER_TEXTRANGE_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), mg.group(3), mg.group(4))) continue mg = 
ROW_FILTER_RANGE_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() if filterType in {'daterange', 'timerange'}: if filterType == 'daterange': valid1, filterValue1 = getRowFilterDateOrDeltaFromNow(mg.group(3)) valid2, filterValue2 = getRowFilterDateOrDeltaFromNow(mg.group(4)) else: valid1, filterValue1 = getRowFilterTimeOrDeltaFromNow(mg.group(3)) valid2, filterValue2 = getRowFilterTimeOrDeltaFromNow(mg.group(4)) if valid1 and valid2: rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), filterValue1, filterValue2)) else: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: {filterValue1}/{filterValue2}') else: #countrange|lengthrange if mg.group(3).isdigit() and mg.group(4).isdigit(): rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), int(mg.group(3)), int(mg.group(4)))) else: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: /') continue mg = ROW_FILTER_TIMEOFDAYRANGE_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() startHour = int(mg.group(3)) startMinute = int(mg.group(4)) endHour = int(mg.group(5)) endMinute = int(mg.group(6)) if startHour > 23 or startMinute > 59 or endHour > 23 or endMinute > 59 or \ endHour < startHour or (endHour == startHour and endMinute < startMinute): Cmd.Backup() usageErrorExit(Msg.INVALID_TIMEOFDAY_RANGE.format(f'{startHour:02d}:{startMinute:02d}', f'{endHour:02d}:{endMinute:02d}')) rowFilters.append((columnPat, anyMatch, filterType, mg.group(2), f'{startHour:02d}:{startMinute:02d}', f'{endHour:02d}:{endMinute:02d}')) continue mg = ROW_FILTER_BOOL_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() filterValue = mg.group(2).lower() if filterValue in TRUE_VALUES: rowFilters.append((columnPat, anyMatch, filterType, True)) elif filterValue in FALSE_VALUES: rowFilters.append((columnPat, anyMatch, filterType, False)) else: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: ') continue mg = ROW_FILTER_RE_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() try: if filterType.endswith('cs'): filterType = filterType[0:-2] flags = 0 else: flags = re.IGNORECASE rowFilters.append((columnPat, anyMatch, filterType, re.compile(mg.group(2), flags))) except re.error as e: _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.INVALID_RE}: {e}') continue mg = ROW_FILTER_DATA_PATTERN.match(filterStr) if mg: filterType = mg.group(1).lower() filterSubType = mg.group(2).lower() if filterSubType == 'list': rowFilters.append((columnPat, anyMatch, filterType, set(shlexSplitList(mg.group(3))))) continue Cmd.MergeArguments(shlexSplitList(mg.group(3), ' ')) if filterSubType == 'file': rowFilters.append((columnPat, anyMatch, filterType, getEntitiesFromFile(False, returnSet=True))) else: #elif filterSubType == 'csvfile': rowFilters.append((columnPat, anyMatch, filterType, getEntitiesFromCSVFile(False, returnSet=True))) Cmd.RestoreArguments() continue _printValueError(sectionName, itemName, f'"{column}": "{filterStr}"', f'{Msg.EXPECTED}: ') return rowFilters def _getCfgSection(sectionName, itemName): value = _stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName)) if (not value) or (value.upper() == configparser.DEFAULTSECT): return configparser.DEFAULTSECT if GM.Globals[GM.PARSER].has_section(value): return value _printValueError(sectionName, itemName, value, Msg.NOT_FOUND) return configparser.DEFAULTSECT def _getCfgPassword(sectionName, itemName): value = 
GM.Globals[GM.PARSER].get(sectionName, itemName) if isinstance(value, bytes): return value value = _stripStringQuotes(value) if value.startswith("b'") and value.endswith("'"): return bytes(value[2:-1], UTF8) if value: return value return '' def _validateLicenseSKUs(sectionName, itemName, skuList): GM.Globals[GM.LICENSE_SKUS] = [] for sku in skuList.split(','): if '/' not in sku: productId, sku = SKU.getProductAndSKU(sku) if not productId: _printValueError(sectionName, itemName, sku, f'{Msg.EXPECTED}: {",".join(SKU.getSortedSKUList())}') else: (productId, sku) = sku.split('/') if (productId, sku) not in GM.Globals[GM.LICENSE_SKUS]: GM.Globals[GM.LICENSE_SKUS].append((productId, sku)) def _getCfgString(sectionName, itemName): value = _stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName)) if itemName == GC.DOMAIN: value = value.strip() minLen, maxLen = GC.VAR_INFO[itemName].get(GC.VAR_LIMITS, (None, None)) if ((minLen is None) or (len(value) >= minLen)) and ((maxLen is None) or (len(value) <= maxLen)): if itemName == GC.LICENSE_SKUS and value: _validateLicenseSKUs(sectionName, itemName, value) return value _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.EXPECTED}: {integerLimits(minLen, maxLen, Msg.STRING_LENGTH)}') return '' def _getCfgStringList(sectionName, itemName): value = GM.Globals[GM.PARSER].get(sectionName, itemName) stringlist = [] if not value or (len(value) == 2 and _stringInQuotes(value)): return stringlist splitStatus, stringlist = shlexSplitListStatus(value) if not splitStatus: _printValueError(sectionName, itemName, f'"{value}"', f'{Msg.INVALID_LIST}: {stringlist}') return stringlist def _getCfgTimezone(sectionName, itemName): value = _stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName).lower()) if value == 'utc': GM.Globals[GM.CONVERT_TO_LOCAL_TIME] = False return iso8601.UTC GM.Globals[GM.CONVERT_TO_LOCAL_TIME] = True if value == 'local': return iso8601.Local try: return iso8601.parse_timezone_str(value) except (iso8601.ParseError, OverflowError): _printValueError(sectionName, itemName, value, f'{Msg.EXPECTED}: {TIMEZONE_FORMAT_REQUIRED}') GM.Globals[GM.CONVERT_TO_LOCAL_TIME] = False return iso8601.UTC def _getCfgDirectory(sectionName, itemName): dirPath = os.path.expanduser(_stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName))) if (not dirPath) and (itemName in {GC.GMAIL_CSE_INCERT_DIR, GC.GMAIL_CSE_INKEY_DIR}): return dirPath if (not dirPath) or (not os.path.isabs(dirPath) and dirPath != '.'): if (sectionName != configparser.DEFAULTSECT) and (GM.Globals[GM.PARSER].has_option(sectionName, itemName)): dirPath = os.path.join(os.path.expanduser(_stripStringQuotes(GM.Globals[GM.PARSER].get(configparser.DEFAULTSECT, itemName))), dirPath) if not os.path.isabs(dirPath): dirPath = os.path.join(GM.Globals[GM.GAM_CFG_PATH], dirPath) return dirPath def _getCfgFile(sectionName, itemName): value = os.path.expanduser(_stripStringQuotes(GM.Globals[GM.PARSER].get(sectionName, itemName))) if value and not os.path.isabs(value): value = os.path.expanduser(os.path.join(_getCfgDirectory(sectionName, GC.CONFIG_DIR), value)) elif not value and itemName == GC.CACERTS_PEM: if hasattr(sys, '_MEIPASS'): value = os.path.join(sys._MEIPASS, GC.FN_CACERTS_PEM) #pylint: disable=no-member else: value = os.path.join(GM.Globals[GM.GAM_PATH], GC.FN_CACERTS_PEM) return value def _readGamCfgFile(config, fileName): try: with open(fileName, DEFAULT_FILE_READ_MODE, encoding=GM.Globals[GM.SYS_ENCODING]) as f: config.read_file(f) except 
(configparser.DuplicateOptionError, configparser.DuplicateSectionError, configparser.MissingSectionHeaderError, configparser.ParsingError) as e: systemErrorExit(CONFIG_ERROR_RC, formatKeyValueList('', [Ent.Singular(Ent.CONFIG_FILE), fileName, Msg.INVALID, str(e)], '')) except IOError as e: systemErrorExit(FILE_ERROR_RC, fileErrorMessage(fileName, e, Ent.CONFIG_FILE)) def _writeGamCfgFile(config, fileName, action): GM.Globals[GM.SECTION] = None # No need to save section for inner gams try: with open(fileName, DEFAULT_FILE_WRITE_MODE, encoding=GM.Globals[GM.SYS_ENCODING]) as f: config.write(f) printKeyValueList([Ent.Singular(Ent.CONFIG_FILE), fileName, Act.PerformedName(action)]) except IOError as e: stderrErrorMsg(fileErrorMessage(fileName, e, Ent.CONFIG_FILE)) def _verifyValues(sectionName, inputFilterSectionName, outputFilterSectionName): printKeyValueList([Ent.Singular(Ent.SECTION), sectionName]) # Do not use printEntity Ind.Increment() for itemName, itemEntry in iter(GC.VAR_INFO.items()): sectName = sectionName if itemName in GC.CSV_INPUT_ROW_FILTER_ITEMS: if inputFilterSectionName: sectName = inputFilterSectionName elif itemName in GC.CSV_OUTPUT_ROW_FILTER_ITEMS: if outputFilterSectionName: sectName = outputFilterSectionName cfgValue = GM.Globals[GM.PARSER].get(sectName, itemName) varType = itemEntry[GC.VAR_TYPE] if varType == GC.TYPE_CHOICE: for choice, value in iter(itemEntry[GC.VAR_CHOICES].items()): if cfgValue == value: cfgValue = choice break elif varType not in [GC.TYPE_BOOLEAN, GC.TYPE_INTEGER, GC.TYPE_FLOAT, GC.TYPE_PASSWORD]: cfgValue = _quoteStringIfLeadingTrailingBlanks(cfgValue) if varType == GC.TYPE_FILE: expdValue = _getCfgFile(sectName, itemName) if cfgValue not in ("''", expdValue): cfgValue = f'{cfgValue} ; {expdValue}' elif varType == GC.TYPE_DIRECTORY: expdValue = _getCfgDirectory(sectName, itemName) if cfgValue not in ("''", expdValue): cfgValue = f'{cfgValue} ; {expdValue}' elif (itemName == GC.SECTION) and (sectName != configparser.DEFAULTSECT): continue printLine(f'{Ind.Spaces()}{itemName} = {cfgValue}') Ind.Decrement() def _chkCfgDirectories(sectionName): for itemName, itemEntry in iter(GC.VAR_INFO.items()): if itemEntry[GC.VAR_TYPE] == GC.TYPE_DIRECTORY: dirPath = GC.Values[itemName] if (not dirPath) and (itemName in {GC.GMAIL_CSE_INCERT_DIR, GC.GMAIL_CSE_INKEY_DIR}): return if (itemName != GC.CACHE_DIR or not GC.Values[GC.NO_CACHE]) and not os.path.isdir(dirPath): writeStderr(formatKeyValueList(WARNING_PREFIX, [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.SECTION), sectionName, Ent.Singular(Ent.ITEM), itemName, Ent.Singular(Ent.VALUE), dirPath, Msg.INVALID_PATH], '\n')) def _chkCfgFiles(sectionName): for itemName, itemEntry in iter(GC.VAR_INFO.items()): if itemEntry[GC.VAR_TYPE] == GC.TYPE_FILE: fileName = GC.Values[itemName] if (not fileName) and (itemName in {GC.EXTRA_ARGS, GC.CMDLOG}): continue if itemName == GC.CLIENT_SECRETS_JSON: # Added 6.57.01 continue if GC.Values[GC.ENABLE_DASA] and itemName == GC.OAUTH2_TXT: continue if not os.path.isfile(fileName): writeStderr(formatKeyValueList([WARNING_PREFIX, ERROR_PREFIX][itemName == GC.CACERTS_PEM], [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.SECTION), sectionName, Ent.Singular(Ent.ITEM), itemName, Ent.Singular(Ent.VALUE), fileName, Msg.NOT_FOUND], '\n')) if itemName == GC.CACERTS_PEM: status['errors'] = True elif not os.access(fileName, itemEntry[GC.VAR_ACCESS]): if itemEntry[GC.VAR_ACCESS] == os.R_OK | os.W_OK: accessMsg = 
Msg.NEED_READ_WRITE_ACCESS elif itemEntry[GC.VAR_ACCESS] == os.R_OK: accessMsg = Msg.NEED_READ_ACCESS else: accessMsg = Msg.NEED_WRITE_ACCESS writeStderr(formatKeyValueList(ERROR_PREFIX, [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.SECTION), sectionName, Ent.Singular(Ent.ITEM), itemName, Ent.Singular(Ent.VALUE), fileName, accessMsg], '\n')) status['errors'] = True def _setCSVFile(fileName, mode, encoding, writeHeader, multi): if fileName != '-': fileName = setFilePath(fileName) GM.Globals[GM.CSVFILE][GM.REDIRECT_NAME] = fileName GM.Globals[GM.CSVFILE][GM.REDIRECT_MODE] = mode GM.Globals[GM.CSVFILE][GM.REDIRECT_ENCODING] = encoding GM.Globals[GM.CSVFILE][GM.REDIRECT_WRITE_HEADER] = writeHeader GM.Globals[GM.CSVFILE][GM.REDIRECT_MULTIPROCESS] = multi GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE] = None def _setSTDFile(stdtype, fileName, mode, multi): if stdtype == GM.STDOUT: GM.Globals[GM.SAVED_STDOUT] = None GM.Globals[stdtype][GM.REDIRECT_STD] = False if fileName == 'null': GM.Globals[stdtype][GM.REDIRECT_FD] = open(os.devnull, mode, encoding=UTF8) elif fileName == '-': GM.Globals[stdtype][GM.REDIRECT_STD] = True if stdtype == GM.STDOUT: GM.Globals[stdtype][GM.REDIRECT_FD] = os.fdopen(os.dup(sys.stdout.fileno()), mode, encoding=GM.Globals[GM.SYS_ENCODING]) else: GM.Globals[stdtype][GM.REDIRECT_FD] = os.fdopen(os.dup(sys.stderr.fileno()), mode, encoding=GM.Globals[GM.SYS_ENCODING]) else: fileName = setFilePath(fileName) if multi and mode == DEFAULT_FILE_WRITE_MODE: deleteFile(fileName) mode = DEFAULT_FILE_APPEND_MODE GM.Globals[stdtype][GM.REDIRECT_FD] = openFile(fileName, mode) GM.Globals[stdtype][GM.REDIRECT_MULTI_FD] = GM.Globals[stdtype][GM.REDIRECT_FD] if not multi else StringIOobject() if (stdtype == GM.STDOUT) and (GC.Values[GC.DEBUG_LEVEL] > 0): GM.Globals[GM.SAVED_STDOUT] = sys.stdout sys.stdout = GM.Globals[stdtype][GM.REDIRECT_MULTI_FD] GM.Globals[stdtype][GM.REDIRECT_NAME] = fileName GM.Globals[stdtype][GM.REDIRECT_MODE] = mode GM.Globals[stdtype][GM.REDIRECT_MULTIPROCESS] = multi GM.Globals[stdtype][GM.REDIRECT_QUEUE] = 'stdout' if stdtype == GM.STDOUT else 'stderr' MULTIPROCESS_EXIT_COMP_PATTERN = re.compile(r'^rc([<>]=?|=|!=)(.+)$', re.IGNORECASE) MULTIPROCESS_EXIT_RANGE_PATTERN = re.compile(r'^rcrange(=|!=)(\S+)/(\S+)$', re.IGNORECASE) def _setMultiprocessExit(): rcStr = getString(Cmd.OB_STRING) mg = MULTIPROCESS_EXIT_COMP_PATTERN.match(rcStr) if mg: if not mg.group(2).isdigit(): usageErrorExit(f'{Msg.EXPECTED}: rc') GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION] = {'comp': mg.group(1), 'value': int(mg.group(2))} return mg = MULTIPROCESS_EXIT_RANGE_PATTERN.match(rcStr) if mg: if not mg.group(2).isdigit() or not mg.group(3).isdigit(): usageErrorExit(f'{Msg.EXPECTED}: rcrange/Value>') GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION] = {'range': mg.group(1), 'low': int(mg.group(2)), 'high': int(mg.group(3))} return usageErrorExit(f'{Msg.EXPECTED}: (rc)|(rcrange/Value>)') if not GM.Globals[GM.PARSER]: homePath = os.path.expanduser('~') GM.Globals[GM.GAM_CFG_PATH] = os.environ.get(EV_GAMCFGDIR, None) if GM.Globals[GM.GAM_CFG_PATH]: GM.Globals[GM.GAM_CFG_PATH] = os.path.expanduser(GM.Globals[GM.GAM_CFG_PATH]) else: GM.Globals[GM.GAM_CFG_PATH] = os.path.join(homePath, '.gam') GC.Defaults[GC.CONFIG_DIR] = GM.Globals[GM.GAM_CFG_PATH] GC.Defaults[GC.CACHE_DIR] = os.path.join(GM.Globals[GM.GAM_CFG_PATH], 'gamcache') GC.Defaults[GC.DRIVE_DIR] = os.path.join(homePath, 'Downloads') GM.Globals[GM.GAM_CFG_FILE] = os.path.join(GM.Globals[GM.GAM_CFG_PATH], FN_GAM_CFG) if 
not os.path.isfile(GM.Globals[GM.GAM_CFG_FILE]): for itemName, itemEntry in iter(GC.VAR_INFO.items()): if itemEntry[GC.VAR_TYPE] == GC.TYPE_DIRECTORY: _getDefault(itemName, itemEntry, None) oldGamPath = os.environ.get(EV_OLDGAMPATH, GC.Defaults[GC.CONFIG_DIR]) for itemName, itemEntry in iter(GC.VAR_INFO.items()): if itemEntry[GC.VAR_TYPE] != GC.TYPE_DIRECTORY: _getDefault(itemName, itemEntry, oldGamPath) GM.Globals[GM.PARSER] = configparser.RawConfigParser(defaults=collections.OrderedDict(sorted(list(GC.Defaults.items()), key=lambda t: t[0]))) _checkMakeDir(GC.CONFIG_DIR) _checkMakeDir(GC.CACHE_DIR) _checkMakeDir(GC.DRIVE_DIR) for itemName, itemEntry in iter(GC.VAR_INFO.items()): if itemEntry[GC.VAR_TYPE] == GC.TYPE_FILE: srcFile = os.path.expanduser(_stripStringQuotes(GM.Globals[GM.PARSER].get(configparser.DEFAULTSECT, itemName))) _copyCfgFile(srcFile, GC.CONFIG_DIR, oldGamPath) _writeGamCfgFile(GM.Globals[GM.PARSER], GM.Globals[GM.GAM_CFG_FILE], Act.INITIALIZE) else: GM.Globals[GM.PARSER] = configparser.RawConfigParser(defaults=collections.OrderedDict(sorted(list(GC.Defaults.items()), key=lambda t: t[0]))) _readGamCfgFile(GM.Globals[GM.PARSER], GM.Globals[GM.GAM_CFG_FILE]) status = {'errors': False} inputFilterSectionName = outputFilterSectionName = None GM.Globals[GM.GAM_CFG_SECTION] = os.environ.get(EV_GAMCFGSECTION, None) if GM.Globals[GM.GAM_CFG_SECTION]: sectionName = GM.Globals[GM.GAM_CFG_SECTION] GM.Globals[GM.SECTION] = sectionName # Save section for inner gams if not GM.Globals[GM.PARSER].has_section(sectionName): usageErrorExit(formatKeyValueList('', [EV_GAMCFGSECTION, sectionName, Msg.NOT_FOUND], '')) if checkArgumentPresent(Cmd.SELECT_CMD): Cmd.Backup() usageErrorExit(formatKeyValueList('', [EV_GAMCFGSECTION, sectionName, 'select', Msg.NOT_ALLOWED], '')) else: sectionName = _getCfgSection(configparser.DEFAULTSECT, GC.SECTION) # select [save] [verify] if checkArgumentPresent(Cmd.SELECT_CMD): sectionName = _selectSection() GM.Globals[GM.SECTION] = sectionName # Save section for inner gams while Cmd.ArgumentsRemaining(): if checkArgumentPresent('save'): GM.Globals[GM.PARSER].set(configparser.DEFAULTSECT, GC.SECTION, sectionName) _writeGamCfgFile(GM.Globals[GM.PARSER], GM.Globals[GM.GAM_CFG_FILE], Act.SAVE) elif checkArgumentPresent('verify'): _verifyValues(sectionName, inputFilterSectionName, outputFilterSectionName) else: break GM.Globals[GM.GAM_CFG_SECTION_NAME] = sectionName # showsections if checkArgumentPresent(Cmd.SHOWSECTIONS_CMD): _showSections() # selectfilter|selectoutputfilter|selectinputfilter while True: filterCommand = getChoice([Cmd.SELECTFILTER_CMD, Cmd.SELECTOUTPUTFILTER_CMD, Cmd.SELECTINPUTFILTER_CMD], defaultChoice=None) if filterCommand is None: break if filterCommand != Cmd.SELECTINPUTFILTER_CMD: outputFilterSectionName = _selectSection() else: inputFilterSectionName = _selectSection() # Handle todrive_nobrowser and todrive_noemail if not present value = GM.Globals[GM.PARSER].get(configparser.DEFAULTSECT, GC.TODRIVE_NOBROWSER) if value == '': GM.Globals[GM.PARSER].set(configparser.DEFAULTSECT, GC.TODRIVE_NOBROWSER, str(_getCfgBoolean(configparser.DEFAULTSECT, GC.NO_BROWSER)).lower()) value = GM.Globals[GM.PARSER].get(configparser.DEFAULTSECT, GC.TODRIVE_NOEMAIL) if value == '': GM.Globals[GM.PARSER].set(configparser.DEFAULTSECT, GC.TODRIVE_NOEMAIL, str(not _getCfgBoolean(configparser.DEFAULTSECT, GC.NO_BROWSER)).lower()) # Handle todrive_sheet_timestamp and todrive_sheet_timeformat if not present for section in [sectionName, configparser.DEFAULTSECT]: value 
= GM.Globals[GM.PARSER].get(section, GC.TODRIVE_SHEET_TIMESTAMP) if value == 'copy': GM.Globals[GM.PARSER].set(section, GC.TODRIVE_SHEET_TIMESTAMP, str(_getCfgBoolean(section, GC.TODRIVE_TIMESTAMP)).lower()) value = GM.Globals[GM.PARSER].get(section, GC.TODRIVE_SHEET_TIMEFORMAT) if value == 'copy': GM.Globals[GM.PARSER].set(section, GC.TODRIVE_SHEET_TIMEFORMAT, _getCfgString(section, GC.TODRIVE_TIMEFORMAT)) # Fix mistyped keyword cmdlog_max__backups for section in [configparser.DEFAULTSECT, sectionName]: if GM.Globals[GM.PARSER].has_option(section, GC.CMDLOG_MAX__BACKUPS): GM.Globals[GM.PARSER].set(section, GC.CMDLOG_MAX_BACKUPS, GM.Globals[GM.PARSER].get(section, GC.CMDLOG_MAX__BACKUPS)) GM.Globals[GM.PARSER].remove_option(section, GC.CMDLOG_MAX__BACKUPS) # config ( [=] )* [save] [verify] if checkArgumentPresent(Cmd.CONFIG_CMD): while Cmd.ArgumentsRemaining(): if checkArgumentPresent('save'): _writeGamCfgFile(GM.Globals[GM.PARSER], GM.Globals[GM.GAM_CFG_FILE], Act.SAVE) elif checkArgumentPresent('verify'): _verifyValues(sectionName, inputFilterSectionName, outputFilterSectionName) else: itemName = getChoice(GC.VAR_INFO, defaultChoice=None) if itemName is None: break itemEntry = GC.VAR_INFO[itemName] checkArgumentPresent('=') varType = itemEntry[GC.VAR_TYPE] if varType == GC.TYPE_BOOLEAN: value = TRUE if getBoolean(None) else FALSE elif varType == GC.TYPE_CHARACTER: value = getCharacter() elif varType == GC.TYPE_CHOICE: value = getChoice(itemEntry[GC.VAR_CHOICES]) elif varType == GC.TYPE_INTEGER: minVal, maxVal = itemEntry[GC.VAR_LIMITS] value = str(getInteger(minVal=minVal, maxVal=maxVal)) elif varType == GC.TYPE_FLOAT: minVal, maxVal = itemEntry[GC.VAR_LIMITS] value = str(getFloat(minVal=minVal, maxVal=maxVal)) elif varType == GC.TYPE_LOCALE: value = getLanguageCode(LOCALE_CODES_MAP) elif varType == GC.TYPE_PASSWORD: minLen, maxLen = itemEntry[GC.VAR_LIMITS] value = getString(Cmd.OB_STRING, checkBlank=True, minLen=minLen, maxLen=maxLen) if value and value.startswith("b'") and value.endswith("'"): value = bytes(value[2:-1], UTF8) elif varType == GC.TYPE_TIMEZONE: value = getString(Cmd.OB_STRING, checkBlank=True) else: # GC.TYPE_STRING, GC.TYPE_STRINGLIST minLen, maxLen = itemEntry.get(GC.VAR_LIMITS, (0, None)) value = _quoteStringIfLeadingTrailingBlanks(getString(Cmd.OB_STRING, minLen=minLen, maxLen=maxLen)) GM.Globals[GM.PARSER].set(sectionName, itemName, value) prevExtraArgsTxt = GC.Values.get(GC.EXTRA_ARGS, None) prevOauth2serviceJson = GC.Values.get(GC.OAUTH2SERVICE_JSON, None) # Assign global variables, directories, timezone first as other variables depend on them for itemName, itemEntry in sorted(iter(GC.VAR_INFO.items())): varType = itemEntry[GC.VAR_TYPE] if varType == GC.TYPE_DIRECTORY: GC.Values[itemName] = _getCfgDirectory(sectionName, itemName) elif varType == GC.TYPE_TIMEZONE: GC.Values[itemName] = _getCfgTimezone(sectionName, itemName) GM.Globals[GM.DATETIME_NOW] = datetime.datetime.now(GC.Values[GC.TIMEZONE]) # Everything else except row filters for itemName, itemEntry in sorted(iter(GC.VAR_INFO.items())): varType = itemEntry[GC.VAR_TYPE] if varType == GC.TYPE_BOOLEAN: GC.Values[itemName] = _getCfgBoolean(sectionName, itemName) elif varType == GC.TYPE_CHARACTER: GC.Values[itemName] = _getCfgCharacter(sectionName, itemName) elif varType == GC.TYPE_CHOICE: GC.Values[itemName] = _getCfgChoice(sectionName, itemName) elif varType in [GC.TYPE_INTEGER, GC.TYPE_FLOAT]: GC.Values[itemName] = _getCfgNumber(sectionName, itemName) elif varType == GC.TYPE_HEADERFILTER: 
GC.Values[itemName] = _getCfgHeaderFilter(sectionName, itemName) elif varType == GC.TYPE_LOCALE: GC.Values[itemName] = _getCfgLocale(sectionName, itemName) elif varType == GC.TYPE_PASSWORD: GC.Values[itemName] = _getCfgPassword(sectionName, itemName) elif varType == GC.TYPE_STRING: GC.Values[itemName] = _getCfgString(sectionName, itemName) elif varType in {GC.TYPE_STRINGLIST, GC.TYPE_HEADERFORCE, GC.TYPE_HEADERORDER}: GC.Values[itemName] = _getCfgStringList(sectionName, itemName) elif varType == GC.TYPE_FILE: GC.Values[itemName] = _getCfgFile(sectionName, itemName) # Row filters for itemName, itemEntry in sorted(iter(GC.VAR_INFO.items())): varType = itemEntry[GC.VAR_TYPE] if varType == GC.TYPE_ROWFILTER: GC.Values[itemName] = _getCfgRowFilter(sectionName, itemName) # Process selectfilter|selectoutputfilter|selectinputfilter if inputFilterSectionName: GC.Values[GC.CSV_INPUT_ROW_FILTER] = _getCfgRowFilter(inputFilterSectionName, GC.CSV_INPUT_ROW_FILTER) GC.Values[GC.CSV_INPUT_ROW_FILTER_MODE] = _getCfgChoice(inputFilterSectionName, GC.CSV_INPUT_ROW_FILTER_MODE) GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER] = _getCfgRowFilter(inputFilterSectionName, GC.CSV_INPUT_ROW_DROP_FILTER) GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER_MODE] = _getCfgChoice(inputFilterSectionName, GC.CSV_INPUT_ROW_DROP_FILTER_MODE) GC.Values[GC.CSV_INPUT_ROW_LIMIT] = _getCfgNumber(inputFilterSectionName, GC.CSV_INPUT_ROW_LIMIT) if outputFilterSectionName: GC.Values[GC.CSV_OUTPUT_HEADER_FORCE] = _getCfgStringList(outputFilterSectionName, GC.CSV_OUTPUT_HEADER_FORCE) if GC.Values[GC.CSV_OUTPUT_HEADER_FORCE]: GC.Values[GC.CSV_OUTPUT_HEADER_FILTER] = _getCfgHeaderFilterFromForce(outputFilterSectionName, GC.CSV_OUTPUT_HEADER_FORCE) else: GC.Values[GC.CSV_OUTPUT_HEADER_FILTER] = _getCfgHeaderFilter(outputFilterSectionName, GC.CSV_OUTPUT_HEADER_FILTER) GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER] = _getCfgHeaderFilter(outputFilterSectionName, GC.CSV_OUTPUT_HEADER_DROP_FILTER) GC.Values[GC.CSV_OUTPUT_HEADER_ORDER] = _getCfgStringList(outputFilterSectionName, GC.CSV_OUTPUT_HEADER_ORDER) GC.Values[GC.CSV_OUTPUT_ROW_FILTER] = _getCfgRowFilter(outputFilterSectionName, GC.CSV_OUTPUT_ROW_FILTER) GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE] = _getCfgChoice(outputFilterSectionName, GC.CSV_OUTPUT_ROW_FILTER_MODE) GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER] = _getCfgRowFilter(outputFilterSectionName, GC.CSV_OUTPUT_ROW_DROP_FILTER) GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE] = _getCfgChoice(outputFilterSectionName, GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE) GC.Values[GC.CSV_OUTPUT_ROW_LIMIT] = _getCfgNumber(outputFilterSectionName, GC.CSV_OUTPUT_ROW_LIMIT) GC.Values[GC.CSV_OUTPUT_SORT_HEADERS] = _getCfgStringList(outputFilterSectionName, GC.CSV_OUTPUT_SORT_HEADERS) elif GC.Values[GC.CSV_OUTPUT_HEADER_FORCE]: GC.Values[GC.CSV_OUTPUT_HEADER_FILTER] = _getCfgHeaderFilterFromForce(sectionName, GC.CSV_OUTPUT_HEADER_FORCE) if status['errors']: sys.exit(CONFIG_ERROR_RC) # Global values cleanup GC.Values[GC.DOMAIN] = GC.Values[GC.DOMAIN].lower() if not GC.Values[GC.SMTP_FQDN]: GC.Values[GC.SMTP_FQDN] = None # Inherit debug_level, output_dateformat, output_timeformat if not locally defined if GM.Globals[GM.PID] != 0: if GC.Values[GC.DEBUG_LEVEL] == 0: GC.Values[GC.DEBUG_LEVEL] = GM.Globals[GM.DEBUG_LEVEL] if not GC.Values[GC.OUTPUT_DATEFORMAT]: GC.Values[GC.OUTPUT_DATEFORMAT] = GM.Globals[GM.OUTPUT_DATEFORMAT] if not GC.Values[GC.OUTPUT_TIMEFORMAT]: GC.Values[GC.OUTPUT_TIMEFORMAT] = GM.Globals[GM.OUTPUT_TIMEFORMAT] # Define lockfile: oauth2.txt.lock 
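# The lock file sits next to the selected oauth2.txt (for example
# ~/.gam/oauth2.txt.lock with the default configuration directory) and is taken
# with filelock.FileLock in getClientCredentials() below, roughly:
#   with FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK]):
#     ... read oauth2.txt, refresh the token if expired, write it back ...
# so parallel gam processes do not refresh and rewrite the same token file at once.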
GM.Globals[GM.OAUTH2_TXT_LOCK] = f'{GC.Values[GC.OAUTH2_TXT]}.lock' # Override httplib2 settings httplib2.debuglevel = GC.Values[GC.DEBUG_LEVEL] # Reset global variables if required if prevExtraArgsTxt != GC.Values[GC.EXTRA_ARGS]: GM.Globals[GM.EXTRA_ARGS_LIST] = [('prettyPrint', GC.Values[GC.DEBUG_LEVEL] > 0)] if GC.Values[GC.EXTRA_ARGS]: ea_config = configparser.ConfigParser() ea_config.optionxform = str ea_config.read(GC.Values[GC.EXTRA_ARGS]) GM.Globals[GM.EXTRA_ARGS_LIST].extend(ea_config.items('extra-args')) if prevOauth2serviceJson != GC.Values[GC.OAUTH2SERVICE_JSON]: GM.Globals[GM.OAUTH2SERVICE_JSON_DATA] = {} GM.Globals[GM.OAUTH2SERVICE_CLIENT_ID] = None Cmd.SetEncoding(GM.Globals[GM.SYS_ENCODING]) # multiprocessexit (rc)|(rcrange=/)|(rcrange!=/) if checkArgumentPresent(Cmd.MULTIPROCESSEXIT_CMD): _setMultiprocessExit() # redirect csv [multiprocess] [append] [noheader] [charset ] # [columndelimiter ] [quotechar ]] [noescapechar []] # [sortheaders ] [timestampcolumn ] [transpose []] # [todrive *] # redirect stdout [multiprocess] [append] # redirect stdout null # redirect stderr [multiprocess] [append] # redirect stderr stdout # redirect stderr null while checkArgumentPresent(Cmd.REDIRECT_CMD): myarg = getChoice(['csv', 'stdout', 'stderr']) filename = re.sub(r'{{Section}}', sectionName, getString(Cmd.OB_FILE_NAME, checkBlank=True)) if myarg == 'csv': multi = checkArgumentPresent('multiprocess') mode = DEFAULT_FILE_APPEND_MODE if checkArgumentPresent('append') else DEFAULT_FILE_WRITE_MODE writeHeader = not checkArgumentPresent('noheader') encoding = getCharSet() if checkArgumentPresent('columndelimiter'): GM.Globals[GM.CSV_OUTPUT_COLUMN_DELIMITER] = GC.Values[GC.CSV_OUTPUT_COLUMN_DELIMITER] = getCharacter() if checkArgumentPresent('quotechar'): GM.Globals[GM.CSV_OUTPUT_QUOTE_CHAR] = GC.Values[GC.CSV_OUTPUT_QUOTE_CHAR] = getCharacter() if checkArgumentPresent('noescapechar'): GM.Globals[GM.CSV_OUTPUT_NO_ESCAPE_CHAR] = GC.Values[GC.CSV_OUTPUT_NO_ESCAPE_CHAR] = getBoolean() if checkArgumentPresent('sortheaders'): GM.Globals[GM.CSV_OUTPUT_SORT_HEADERS] = GC.Values[GC.CSV_OUTPUT_SORT_HEADERS] = getString(Cmd.OB_STRING_LIST, minLen=0).replace(',', ' ').split() if checkArgumentPresent('timestampcolumn'): GM.Globals[GM.CSV_OUTPUT_TIMESTAMP_COLUMN] = GC.Values[GC.CSV_OUTPUT_TIMESTAMP_COLUMN] = getString(Cmd.OB_STRING, minLen=0) if checkArgumentPresent('transpose'): GM.Globals[GM.CSV_OUTPUT_TRANSPOSE] = getBoolean() _setCSVFile(filename, mode, encoding, writeHeader, multi) GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE_CSVPF] = CSVPrintFile() if checkArgumentPresent('todrive'): GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE_CSVPF].GetTodriveParameters() GM.Globals[GM.CSV_TODRIVE] = GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE_CSVPF].todrive.copy() elif myarg == 'stdout': if filename.lower() == 'null': multi = checkArgumentPresent('multiprocess') _setSTDFile(GM.STDOUT, 'null', DEFAULT_FILE_WRITE_MODE, multi) else: multi = checkArgumentPresent('multiprocess') mode = DEFAULT_FILE_APPEND_MODE if checkArgumentPresent('append') else DEFAULT_FILE_WRITE_MODE _setSTDFile(GM.STDOUT, filename, mode, multi) else: # myarg == 'stderr' if filename.lower() == 'null': multi = checkArgumentPresent('multiprocess') _setSTDFile(GM.STDERR, 'null', DEFAULT_FILE_WRITE_MODE, multi) elif filename.lower() != 'stdout': multi = checkArgumentPresent('multiprocess') mode = DEFAULT_FILE_APPEND_MODE if checkArgumentPresent('append') else DEFAULT_FILE_WRITE_MODE _setSTDFile(GM.STDERR, filename, mode, multi) else: multi = 
checkArgumentPresent('multiprocess') if not GM.Globals[GM.STDOUT]: _setSTDFile(GM.STDOUT, '-', DEFAULT_FILE_WRITE_MODE, multi) GM.Globals[GM.STDERR] = GM.Globals[GM.STDOUT].copy() GM.Globals[GM.STDERR][GM.REDIRECT_NAME] = 'stdout' if not GM.Globals[GM.STDOUT]: _setSTDFile(GM.STDOUT, '-', DEFAULT_FILE_WRITE_MODE, False) if not GM.Globals[GM.STDERR]: _setSTDFile(GM.STDERR, '-', DEFAULT_FILE_WRITE_MODE, False) # If both csv and stdout are redirected to - with same multiprocess setting and csv doesn't have any todrive parameters, collapse csv onto stdout if (GM.Globals[GM.PID] == 0 and GM.Globals[GM.CSVFILE] and GM.Globals[GM.CSVFILE][GM.REDIRECT_NAME] == '-' and GM.Globals[GM.STDOUT][GM.REDIRECT_NAME] == '-' and GM.Globals[GM.CSVFILE][GM.REDIRECT_MULTIPROCESS] == GM.Globals[GM.STDOUT][GM.REDIRECT_MULTIPROCESS] and GM.Globals[GM.CSVFILE].get(GM.REDIRECT_QUEUE_CSVPF) and not GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE_CSVPF].todrive): _setCSVFile('-', GM.Globals[GM.STDOUT].get(GM.REDIRECT_MODE, DEFAULT_FILE_WRITE_MODE), GC.Values[GC.CHARSET], GM.Globals[GM.CSVFILE].get(GM.REDIRECT_WRITE_HEADER, True), GM.Globals[GM.STDOUT][GM.REDIRECT_MULTIPROCESS]) elif not GM.Globals[GM.CSVFILE]: _setCSVFile('-', GM.Globals[GM.STDOUT].get(GM.REDIRECT_MODE, DEFAULT_FILE_WRITE_MODE), GC.Values[GC.CHARSET], True, False) initAPICallsRateCheck() # Main process # Clear input row filters/limit from parser, children can define but shouldn't inherit global value # Clear output header/row filters/limit from parser, children can define or they will inherit global value if not defined if GM.Globals[GM.PID] == 0: for itemName, itemEntry in sorted(iter(GC.VAR_INFO.items())): varType = itemEntry[GC.VAR_TYPE] if varType in {GC.TYPE_HEADERFILTER, GC.TYPE_HEADERFORCE, GC.TYPE_HEADERORDER, GC.TYPE_ROWFILTER}: GM.Globals[GM.PARSER].set(sectionName, itemName, '') elif (varType == GC.TYPE_INTEGER) and itemName in {GC.CSV_INPUT_ROW_LIMIT, GC.CSV_OUTPUT_ROW_LIMIT}: GM.Globals[GM.PARSER].set(sectionName, itemName, '0') # Child process # Inherit main process output header/row filters/limit, print defaults if not locally defined else: if not GC.Values[GC.CSV_OUTPUT_HEADER_FILTER]: GC.Values[GC.CSV_OUTPUT_HEADER_FILTER] = GM.Globals[GM.CSV_OUTPUT_HEADER_FILTER][:] if not GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER]: GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER] = GM.Globals[GM.CSV_OUTPUT_HEADER_DROP_FILTER][:] if not GC.Values[GC.CSV_OUTPUT_HEADER_FORCE]: GC.Values[GC.CSV_OUTPUT_HEADER_FORCE] = GM.Globals[GM.CSV_OUTPUT_HEADER_FORCE][:] if not GC.Values[GC.CSV_OUTPUT_HEADER_ORDER]: GC.Values[GC.CSV_OUTPUT_HEADER_ORDER] = GM.Globals[GM.CSV_OUTPUT_HEADER_ORDER][:] if not GC.Values[GC.CSV_OUTPUT_ROW_FILTER]: GC.Values[GC.CSV_OUTPUT_ROW_FILTER] = GM.Globals[GM.CSV_OUTPUT_ROW_FILTER][:] GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE] = GM.Globals[GM.CSV_OUTPUT_ROW_FILTER_MODE] if not GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER]: GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER] = GM.Globals[GM.CSV_OUTPUT_ROW_DROP_FILTER][:] GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE] = GM.Globals[GM.CSV_OUTPUT_ROW_DROP_FILTER_MODE] if not GC.Values[GC.CSV_OUTPUT_ROW_LIMIT]: GC.Values[GC.CSV_OUTPUT_ROW_LIMIT] = GM.Globals[GM.CSV_OUTPUT_ROW_LIMIT] if not GC.Values[GC.PRINT_AGU_DOMAINS]: GC.Values[GC.PRINT_AGU_DOMAINS] = GM.Globals[GM.PRINT_AGU_DOMAINS] if not GC.Values[GC.PRINT_CROS_OUS]: GC.Values[GC.PRINT_CROS_OUS] = GM.Globals[GM.PRINT_CROS_OUS] if not GC.Values[GC.PRINT_CROS_OUS_AND_CHILDREN]: GC.Values[GC.PRINT_CROS_OUS_AND_CHILDREN] = GM.Globals[GM.PRINT_CROS_OUS_AND_CHILDREN] 
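# Child processes (GM.PID != 0) take this branch: any csv_output_* header/row
# filter, filter mode or row limit they do not define in their own section is
# inherited here from the values the parent saved in GM.Globals, so the rows
# written by the children of a "redirect csv ... multiprocess" run are filtered
# the same way as the parent's output.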
GC.Values[GC.SHOW_GETTINGS] = GM.Globals[GM.SHOW_GETTINGS] GC.Values[GC.SHOW_GETTINGS_GOT_NL] = GM.Globals[GM.SHOW_GETTINGS_GOT_NL] # customer_id, domain and admin_email must be set when enable_dasa = true if GC.Values[GC.ENABLE_DASA]: errors = 0 for itemName in [GC.CUSTOMER_ID, GC.DOMAIN, GC.ADMIN_EMAIL]: if not GC.Values[itemName] or (itemName == GC.CUSTOMER_ID and GC.Values[itemName] == GC.MY_CUSTOMER): stderrErrorMsg(formatKeyValueList('', [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.SECTION), sectionName, itemName, GC.Values[itemName] or '""', GC.ENABLE_DASA, GC.Values[GC.ENABLE_DASA], Msg.NOT_COMPATIBLE], '\n')) errors += 1 if errors: sys.exit(USAGE_ERROR_RC) # If no select/options commands were executed or some were and there are more arguments on the command line, # warn if the json files are missing and return True if (Cmd.Location() == 1) or (Cmd.ArgumentsRemaining()): _chkCfgDirectories(sectionName) _chkCfgFiles(sectionName) if status['errors']: sys.exit(CONFIG_ERROR_RC) if GC.Values[GC.NO_CACHE]: GM.Globals[GM.CACHE_DIR] = None GM.Globals[GM.CACHE_DISCOVERY_ONLY] = False else: GM.Globals[GM.CACHE_DIR] = GC.Values[GC.CACHE_DIR] GM.Globals[GM.CACHE_DISCOVERY_ONLY] = GC.Values[GC.CACHE_DISCOVERY_ONLY] # Set environment variables so GData API can find cacerts.pem os.environ['REQUESTS_CA_BUNDLE'] = GC.Values[GC.CACERTS_PEM] os.environ['DEFAULT_CA_BUNDLE_PATH'] = GC.Values[GC.CACERTS_PEM] os.environ['HTTPLIB2_CA_CERTS'] = GC.Values[GC.CACERTS_PEM] os.environ['SSL_CERT_FILE'] = GC.Values[GC.CACERTS_PEM] httplib2.CA_CERTS = GC.Values[GC.CACERTS_PEM] # Needs to be set so oauthlib doesn't puke when Google changes our scopes os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = 'true' # Set up command logging at top level only if (GM.Globals[GM.PID] == 0) and GC.Values[GC.CMDLOG]: openGAMCommandLog(GM.Globals, 'mainlog') return True # We're done, nothing else to do return False def handleServerError(e): errMsg = str(e) if 'setting tls' not in errMsg: systemErrorExit(NETWORK_ERROR_RC, errMsg) stderrErrorMsg(errMsg) writeStderr(Msg.DISABLE_TLS_MIN_MAX) systemErrorExit(NETWORK_ERROR_RC, None) def getHttpObj(cache=None, timeout=None, override_min_tls=None, override_max_tls=None): tls_minimum_version = override_min_tls if override_min_tls else GC.Values[GC.TLS_MIN_VERSION] if GC.Values[GC.TLS_MIN_VERSION] else None tls_maximum_version = override_max_tls if override_max_tls else GC.Values[GC.TLS_MAX_VERSION] if GC.Values[GC.TLS_MAX_VERSION] else None httpObj = httplib2.Http(cache=cache, timeout=timeout, ca_certs=GC.Values[GC.CACERTS_PEM], disable_ssl_certificate_validation=GC.Values[GC.NO_VERIFY_SSL], tls_maximum_version=tls_maximum_version, tls_minimum_version=tls_minimum_version) httpObj.redirect_codes = set(httpObj.redirect_codes) - {308} return httpObj def _force_user_agent(user_agent): """Creates a decorator which can force a user agent in HTTP headers.""" def decorator(request_method): """Wraps a request method to insert a user-agent in HTTP headers.""" def wrapped_request_method(*args, **kwargs): """Modifies HTTP headers to include a specified user-agent.""" if kwargs.get('headers') is not None: if kwargs['headers'].get('user-agent'): if user_agent not in kwargs['headers']['user-agent']: # Save the existing user-agent header and tack on our own. 
kwargs['headers']['user-agent'] = f'{user_agent} {kwargs["headers"]["user-agent"]}' else: kwargs['headers']['user-agent'] = user_agent else: kwargs['headers'] = {'user-agent': user_agent} return request_method(*args, **kwargs) return wrapped_request_method return decorator class transportAgentRequest(google_auth_httplib2.Request): """A Request which forces a user agent.""" @_force_user_agent(GAM_USER_AGENT) def __call__(self, *args, **kwargs): #pylint: disable=arguments-differ """Inserts the GAM user-agent header in requests.""" return super().__call__(*args, **kwargs) class transportAuthorizedHttp(google_auth_httplib2.AuthorizedHttp): """An AuthorizedHttp which forces a user agent during requests.""" @_force_user_agent(GAM_USER_AGENT) def request(self, *args, **kwargs): #pylint: disable=arguments-differ """Inserts the GAM user-agent header in requests.""" return super().request(*args, **kwargs) def transportCreateRequest(httpObj=None): """Creates a uniform Request object with a default http, if not provided. Args: httpObj: Optional httplib2.Http compatible object to be used with the request. If not provided, a default HTTP will be used. Returns: Request: A google_auth_httplib2.Request compatible Request. """ if not httpObj: httpObj = getHttpObj() return transportAgentRequest(httpObj) def doGAMCheckForUpdates(forceCheck): def _gamLatestVersionNotAvailable(): if forceCheck: systemErrorExit(NETWORK_ERROR_RC, Msg.GAM_LATEST_VERSION_NOT_AVAILABLE) try: _, c = getHttpObj(timeout=10).request(GAM_LATEST_RELEASE, 'GET', headers={'Accept': 'application/vnd.github.v3.text+json'}) try: release_data = json.loads(c) except (IndexError, KeyError, SyntaxError, TypeError, ValueError): _gamLatestVersionNotAvailable() return if not isinstance(release_data, dict) or 'tag_name' not in release_data: _gamLatestVersionNotAvailable() return current_version = __version__ latest_version = release_data['tag_name'] if latest_version[0].lower() == 'v': latest_version = latest_version[1:] printKeyValueList(['Version Check', None]) Ind.Increment() printKeyValueList(['Current', current_version]) printKeyValueList([' Latest', latest_version]) Ind.Decrement() if forceCheck < 0: setSysExitRC(1 if latest_version > current_version else 0) return except (httplib2.HttpLib2Error, httplib2.ServerNotFoundError, google.auth.exceptions.TransportError, RuntimeError, ConnectionError, OSError) as e: if forceCheck: handleServerError(e) _DEFAULT_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds class signjwtJWTCredentials(google.auth.jwt.Credentials): ''' Class used for DASA ''' def _make_jwt(self): now = datetime.datetime.utcnow() lifetime = datetime.timedelta(seconds=self._token_lifetime) expiry = now + lifetime payload = { "iat": google.auth._helpers.datetime_to_secs(now), "exp": google.auth._helpers.datetime_to_secs(expiry), "iss": self._issuer, "sub": self._subject, } if self._audience: payload["aud"] = self._audience payload.update(self._additional_claims) jwt = self._signer.sign(payload) return jwt, expiry # Some Workforce Identity Federation endpoints such as GitHub Actions # only allow TLS 1.2 as of April 2023. 
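# getTLSv1_2Request() below builds its Request on getHttpObj(override_min_tls='TLSv1_2');
# in getHttpObj() the override takes precedence over GC.Values[GC.TLS_MIN_VERSION],
# so these token requests negotiate at least TLS 1.2 even if the configured
# minimum TLS version is lower.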
def getTLSv1_2Request(): httpc = getHttpObj(override_min_tls='TLSv1_2') return transportCreateRequest(httpc) class signjwtCredentials(google.oauth2.service_account.Credentials): ''' Class used for DwD ''' def _make_authorization_grant_assertion(self): now = datetime.datetime.utcnow() lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS) expiry = now + lifetime payload = { "iat": google.auth._helpers.datetime_to_secs(now), "exp": google.auth._helpers.datetime_to_secs(expiry), "iss": self._service_account_email, "aud": API.GOOGLE_OAUTH2_TOKEN_ENDPOINT, "scope": google.auth._helpers.scopes_to_string(self._scopes or ()), } payload.update(self._additional_claims) # The subject can be a user email for domain-wide delegation. if self._subject: payload.setdefault("sub", self._subject) token = self._signer(payload) return token class signjwtSignJwt(google.auth.crypt.Signer): ''' Signer class for SignJWT ''' def __init__(self, service_account_info): self.service_account_email = service_account_info['client_email'] self.name = f'projects/-/serviceAccounts/{self.service_account_email}' self._key_id = None @property # type: ignore def key_id(self): return self._key_id def sign(self, message): ''' Call IAM Credentials SignJWT API to get our signed JWT ''' try: credentials, _ = google.auth.default(scopes=[API.IAM_SCOPE], request=getTLSv1_2Request()) except (google.auth.exceptions.DefaultCredentialsError, google.auth.exceptions.RefreshError) as e: systemErrorExit(API_ACCESS_DENIED_RC, str(e)) httpObj = transportAuthorizedHttp(credentials, http=getHttpObj(override_min_tls='TLSv1_2')) iamc = getService(API.IAM_CREDENTIALS, httpObj) response = callGAPI(iamc.projects().serviceAccounts(), 'signJwt', name=self.name, body={'payload': json.dumps(message)}) signed_jwt = response.get('signedJwt') return signed_jwt def handleOAuthTokenError(e, softErrors, displayError=False, i=0, count=0): errMsg = str(e).replace('.', '') if ((errMsg in API.OAUTH2_TOKEN_ERRORS) or errMsg.startswith('Invalid response') or errMsg.startswith('invalid_request: Invalid impersonation "sub" field')): if not GM.Globals[GM.CURRENT_SVCACCT_USER]: ClientAPIAccessDeniedExit() if softErrors: entityActionFailedWarning([Ent.USER, GM.Globals[GM.CURRENT_SVCACCT_USER], Ent.USER, None], errMsg, i, count) return None systemErrorExit(SERVICE_NOT_APPLICABLE_RC, Msg.SERVICE_NOT_APPLICABLE_THIS_ADDRESS.format(GM.Globals[GM.CURRENT_SVCACCT_USER])) if errMsg in API.OAUTH2_UNAUTHORIZED_ERRORS: if not GM.Globals[GM.CURRENT_SVCACCT_USER]: ClientAPIAccessDeniedExit() if softErrors: if displayError: apiOrScopes = API.getAPIName(GM.Globals[GM.CURRENT_SVCACCT_API]) if GM.Globals[GM.CURRENT_SVCACCT_API] else ','.join(sorted(GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES])) userServiceNotEnabledWarning(GM.Globals[GM.CURRENT_SVCACCT_USER], apiOrScopes, i, count) return None SvcAcctAPIAccessDeniedExit() if errMsg in API.REFRESH_PERM_ERRORS: if softErrors: return None if not GM.Globals[GM.CURRENT_SVCACCT_USER]: expiredRevokedOauth2TxtExit() stderrErrorMsg(f'Authentication Token Error - {errMsg}') APIAccessDeniedExit() def getOauth2TxtCredentials(exitOnError=True, api=None, noDASA=False, refreshOnly=False, noScopes=False): if not noDASA and GC.Values[GC.ENABLE_DASA]: jsonData = readFile(GC.Values[GC.OAUTH2SERVICE_JSON], continueOnError=True, displayError=False) if jsonData: try: if api in API.APIS_NEEDING_ACCESS_TOKEN: return (False, getSvcAcctCredentials(API.APIS_NEEDING_ACCESS_TOKEN[api], userEmail=None, forceOauth=True)) jsonDict = json.loads(jsonData) api, _, 
_ = API.getVersion(api) audience = f'https://{api}.googleapis.com/' key_type = jsonDict.get('key_type', 'default') if key_type == 'default': return (True, JWTCredentials.from_service_account_info(jsonDict, audience=audience)) if key_type == 'yubikey': yksigner = yubikey.YubiKey(jsonDict) return (True, JWTCredentials._from_signer_and_info(yksigner, jsonDict, audience=audience)) if key_type == 'signjwt': sjsigner = signjwtSignJwt(jsonDict) return (True, signjwtJWTCredentials._from_signer_and_info(sjsigner, jsonDict, audience=audience)) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidOauth2serviceJsonExit(str(e)) invalidOauth2serviceJsonExit(Msg.NO_DATA) jsonData = readFile(GC.Values[GC.OAUTH2_TXT], continueOnError=True, displayError=False) if jsonData: try: jsonDict = json.loads(jsonData) if noScopes: jsonDict['scopes'] = [] if 'client_id' in jsonDict: if not refreshOnly: if set(jsonDict.get('scopes', API.REQUIRED_SCOPES)) == API.REQUIRED_SCOPES_SET: if exitOnError: systemErrorExit(OAUTH2_TXT_REQUIRED_RC, Msg.NO_CLIENT_ACCESS_ALLOWED) return (False, None) else: GM.Globals[GM.CREDENTIALS_SCOPES] = set(jsonDict.pop('scopes', API.REQUIRED_SCOPES)) token_expiry = jsonDict.get('token_expiry', REFRESH_EXPIRY) if GC.Values[GC.TRUNCATE_CLIENT_ID]: # chop off .apps.googleusercontent.com suffix as it's not needed and we need to keep things short for the Auth URL. jsonDict['client_id'] = re.sub(r'\.apps\.googleusercontent\.com$', '', jsonDict['client_id']) creds = google.oauth2.credentials.Credentials.from_authorized_user_info(jsonDict) if 'id_token_jwt' not in jsonDict: creds.token = jsonDict['token'] creds._id_token = jsonDict['id_token'] GM.Globals[GM.DECODED_ID_TOKEN] = jsonDict['decoded_id_token'] else: creds.token = jsonDict['access_token'] creds._id_token = jsonDict['id_token_jwt'] GM.Globals[GM.DECODED_ID_TOKEN] = jsonDict['id_token'] creds.expiry = datetime.datetime.strptime(token_expiry, YYYYMMDDTHHMMSSZ_FORMAT) return (not noScopes, creds) if jsonDict and exitOnError: invalidOauth2TxtExit(Msg.INVALID) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: if exitOnError: invalidOauth2TxtExit(str(e)) if exitOnError: systemErrorExit(OAUTH2_TXT_REQUIRED_RC, Msg.NO_CLIENT_ACCESS_ALLOWED) return (False, None) def _getValueFromOAuth(field, credentials=None): if not GM.Globals[GM.DECODED_ID_TOKEN]: request = transportCreateRequest() if credentials is None: credentials = getClientCredentials(refreshOnly=True) elif credentials.expired: credentials.refresh(request) try: GM.Globals[GM.DECODED_ID_TOKEN] = google.oauth2.id_token.verify_oauth2_token(credentials.id_token, request, clock_skew_in_seconds=GC.Values[GC.CLOCK_SKEW_IN_SECONDS]) except ValueError as e: if 'Token used too early' in str(e): stderrErrorMsg(Msg.PLEASE_CORRECT_YOUR_SYSTEM_TIME) systemErrorExit(SYSTEM_ERROR_RC, str(e)) return GM.Globals[GM.DECODED_ID_TOKEN].get(field, UNKNOWN) def _getAdminEmail(): if GC.Values[GC.ADMIN_EMAIL]: return GC.Values[GC.ADMIN_EMAIL] return _getValueFromOAuth('email') def writeClientCredentials(creds, filename): creds_data = { 'client_id': creds.client_id, 'client_secret': creds.client_secret, 'id_token': creds.id_token, 'refresh_token': creds.refresh_token, 'scopes': sorted(creds.scopes or GM.Globals[GM.CREDENTIALS_SCOPES]), 'token': creds.token, 'token_expiry': creds.expiry.strftime(YYYYMMDDTHHMMSSZ_FORMAT), 'token_uri': creds.token_uri, } expected_iss = ['https://accounts.google.com', 'accounts.google.com'] if _getValueFromOAuth('iss', creds) not in 
expected_iss: systemErrorExit(OAUTH2_TXT_REQUIRED_RC, f'Wrong OAuth 2.0 credentials issuer. Got {_getValueFromOAuth("iss", creds)} expected one of {", ".join(expected_iss)}') request = transportCreateRequest() try: creds_data['decoded_id_token'] = google.oauth2.id_token.verify_oauth2_token(creds.id_token, request, clock_skew_in_seconds=GC.Values[GC.CLOCK_SKEW_IN_SECONDS]) except ValueError as e: if 'Token used too early' in str(e): stderrErrorMsg(Msg.PLEASE_CORRECT_YOUR_SYSTEM_TIME) systemErrorExit(SYSTEM_ERROR_RC, str(e)) GM.Globals[GM.DECODED_ID_TOKEN] = creds_data['decoded_id_token'] if filename != '-': writeFile(filename, json.dumps(creds_data, indent=2, sort_keys=True)+'\n') else: writeStdout(json.dumps(creds_data, ensure_ascii=False, sort_keys=True, indent=2)+'\n') URL_SHORTENER_ENDPOINT = 'https://gam-shortn.appspot.com/create' def shortenURL(long_url): if GC.Values[GC.NO_SHORT_URLS]: return long_url httpObj = getHttpObj(timeout=10) try: payload = json.dumps({'long_url': long_url}) resp, content = httpObj.request(URL_SHORTENER_ENDPOINT, 'POST', payload, headers={'Content-Type': 'application/json', 'User-Agent': GAM_USER_AGENT}) except: return long_url if resp.status != 200: return long_url try: if isinstance(content, bytes): content = content.decode() return json.loads(content).get('short_url', long_url) except: return long_url def runSqliteQuery(db_file, query): conn = sqlite3.connect(db_file) curr = conn.cursor() curr.execute(query) return curr.fetchone()[0] def refreshCredentialsWithReauth(credentials): def gcloudError(): writeStderr(f'Failed to run gcloud as {admin_email}. Please make sure it\'s setup') e = Msg.REAUTHENTICATION_IS_NEEDED handleOAuthTokenError(e, False) writeStderr(Msg.CALLING_GCLOUD_FOR_REAUTH) if 'termios' in sys.modules: old_settings = termios.tcgetattr(sys.stdin) admin_email = _getAdminEmail() # First makes sure gcloud has a valid access token and thus # should also have a valid RAPT token try: devnull = open(os.devnull, 'w', encoding=UTF8) subprocess.run(['gcloud', 'auth', 'print-identity-token', '--no-user-output-enabled'], stderr=devnull, check=False) devnull.close() # now determine gcloud's config path and token file gcloud_path_result = subprocess.run(['gcloud', 'info', '--format=value(config.paths.global_config_dir)'], capture_output=True, check=False) except KeyboardInterrupt as e: # avoids loss of terminal echo on *nix if 'termios' in sys.modules: termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings) printBlankLine() raise KeyboardInterrupt from e token_path = gcloud_path_result.stdout.decode().strip() if not token_path: gcloudError() token_file = f'{token_path}/access_tokens.db' try: credentials._rapt_token = runSqliteQuery(token_file, f'SELECT rapt_token FROM access_tokens WHERE account_id = "{admin_email}"') except TypeError: gcloudError() if not credentials._rapt_token: systemErrorExit(SYSTEM_ERROR_RC, 'Failed to retrieve reauth token from gcloud. You may need to wait until gcloud is also prompted for reauth.') def getClientCredentials(forceRefresh=False, forceWrite=False, filename=None, api=None, noDASA=False, refreshOnly=False, noScopes=False): """Gets OAuth2 credentials which are guaranteed to be fresh and valid. Locks during read and possible write so that only one process will attempt refresh/write when running in parallel. 
""" lock = FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK]) with lock: writeCreds, credentials = getOauth2TxtCredentials(api=api, noDASA=noDASA, refreshOnly=refreshOnly, noScopes=noScopes) if not credentials: invalidOauth2TxtExit('') if credentials.expired or forceRefresh: triesLimit = 3 for n in range(1, triesLimit+1): try: credentials.refresh(transportCreateRequest()) if writeCreds or forceWrite: writeClientCredentials(credentials, filename or GC.Values[GC.OAUTH2_TXT]) break except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if n != triesLimit: waitOnFailure(n, triesLimit, NETWORK_ERROR_RC, str(e)) continue handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = e.args[0] if 'Reauthentication is needed' in str(e): if GC.Values[GC.ENABLE_GCLOUD_REAUTH]: refreshCredentialsWithReauth(credentials) continue e = Msg.REAUTHENTICATION_IS_NEEDED handleOAuthTokenError(e, False) return credentials def waitOnFailure(n, triesLimit, error_code, error_message): delta = min(2 ** n, 60)+float(random.randint(1, 1000))/1000 if n > 3: writeStderr(f'Temporary error: {error_code} - {error_message}, Backing off: {int(delta)} seconds, Retry: {n}/{triesLimit}\n') flushStderr() time.sleep(delta) if GC.Values[GC.SHOW_API_CALLS_RETRY_DATA]: incrAPICallsRetryData(error_message, delta) def clearServiceCache(service): if hasattr(service._http, 'http') and hasattr(service._http.http, 'cache'): if service._http.http.cache is None: return False service._http.http.cache = None return True if hasattr(service._http, 'cache'): if service._http.cache is None: return False service._http.cache = None return True return False DISCOVERY_URIS = [googleapiclient.discovery.V1_DISCOVERY_URI, googleapiclient.discovery.V2_DISCOVERY_URI] # Used for API.CLOUDRESOURCEMANAGER, API.SERVICEUSAGE, API.IAM def getAPIService(api, httpObj): api, version, v2discovery = API.getVersion(api) return googleapiclient.discovery.build(api, version, http=httpObj, cache_discovery=False, discoveryServiceUrl=DISCOVERY_URIS[v2discovery], static_discovery=False) def getService(api, httpObj): ### Drive v3beta # mapDriveURL = api == API.DRIVE3 and GC.Values[GC.DRIVE_V3_BETA] hasLocalJSON = API.hasLocalJSON(api) api, version, v2discovery = API.getVersion(api) if api in GM.Globals[GM.CURRENT_API_SERVICES] and version in GM.Globals[GM.CURRENT_API_SERVICES][api]: service = googleapiclient.discovery.build_from_document(GM.Globals[GM.CURRENT_API_SERVICES][api][version], http=httpObj) if GM.Globals[GM.CACHE_DISCOVERY_ONLY]: clearServiceCache(service) return service if not hasLocalJSON: triesLimit = 3 for n in range(1, triesLimit+1): try: service = googleapiclient.discovery.build(api, version, http=httpObj, cache_discovery=False, discoveryServiceUrl=DISCOVERY_URIS[v2discovery], static_discovery=False) GM.Globals[GM.CURRENT_API_SERVICES].setdefault(api, {}) GM.Globals[GM.CURRENT_API_SERVICES][api][version] = service._rootDesc.copy() ### Drive v3beta # if mapDriveURL: # setattr(service, '_baseUrl', getattr(service, '_baseUrl').replace('/v3/', '/v3beta/')) if GM.Globals[GM.CACHE_DISCOVERY_ONLY]: clearServiceCache(service) return service except googleapiclient.errors.UnknownApiNameOrVersion as e: systemErrorExit(GOOGLE_API_ERROR_RC, Msg.UNKNOWN_API_OR_VERSION.format(str(e), __author__)) except (googleapiclient.errors.InvalidJsonError, KeyError, ValueError) as e: if n != triesLimit: waitOnFailure(n, triesLimit, INVALID_JSON_RC, str(e)) continue systemErrorExit(INVALID_JSON_RC, str(e)) except 
(http_client.ResponseNotReady, OSError, googleapiclient.errors.HttpError) as e: errMsg = f'Connection error: {str(e) or repr(e)}' if n != triesLimit: waitOnFailure(n, triesLimit, SOCKET_ERROR_RC, errMsg) continue systemErrorExit(SOCKET_ERROR_RC, errMsg) except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if n != triesLimit: httpObj.connections = {} waitOnFailure(n, triesLimit, NETWORK_ERROR_RC, str(e)) continue handleServerError(e) disc_file, discovery = readDiscoveryFile(f'{api}-{version}') try: service = googleapiclient.discovery.build_from_document(discovery, http=httpObj) GM.Globals[GM.CURRENT_API_SERVICES].setdefault(api, {}) GM.Globals[GM.CURRENT_API_SERVICES][api][version] = service._rootDesc.copy() if GM.Globals[GM.CACHE_DISCOVERY_ONLY]: clearServiceCache(service) return service except (googleapiclient.errors.InvalidJsonError, KeyError, ValueError) as e: invalidDiscoveryJsonExit(disc_file, str(e)) except IOError as e: systemErrorExit(FILE_ERROR_RC, str(e)) def defaultSvcAcctScopes(): scopesList = API.getSvcAcctScopesList(GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY], False) saScopes = {} for scope in scopesList: saScopes.setdefault(scope['api'], []) saScopes[scope['api']].append(scope['scope']) saScopes[API.DRIVEACTIVITY].append(API.DRIVE_SCOPE) saScopes[API.DRIVE2] = saScopes[API.DRIVE3] saScopes[API.DRIVETD] = saScopes[API.DRIVE3] saScopes[API.SHEETSTD] = saScopes[API.SHEETS] return saScopes def _getSvcAcctData(): if not GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]: jsonData = readFile(GC.Values[GC.OAUTH2SERVICE_JSON], continueOnError=True, displayError=True) if not jsonData: invalidOauth2serviceJsonExit(Msg.NO_DATA) try: GM.Globals[GM.OAUTH2SERVICE_JSON_DATA] = json.loads(jsonData) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidOauth2serviceJsonExit(str(e)) if not GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]: systemErrorExit(OAUTH2SERVICE_JSON_REQUIRED_RC, Msg.NO_SVCACCT_ACCESS_ALLOWED) requiredFields = ['client_email', 'client_id', 'project_id', 'token_uri'] key_type = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA].get('key_type', 'default') if key_type == 'default': requiredFields.extend(['private_key', 'private_key_id']) missingFields = [] for field in requiredFields: if field not in GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]: missingFields.append(field) if missingFields: invalidOauth2serviceJsonExit(Msg.MISSING_FIELDS.format(','.join(missingFields))) # Some old oauth2service.json files have: 'https://accounts.google.com/o/oauth2/auth' which no longer works if GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['token_uri'] == 'https://accounts.google.com/o/oauth2/auth': GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['token_uri'] = API.GOOGLE_OAUTH2_TOKEN_ENDPOINT if API.OAUTH2SA_SCOPES not in GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]: GM.Globals[GM.SVCACCT_SCOPES_DEFINED] = False GM.Globals[GM.SVCACCT_SCOPES] = defaultSvcAcctScopes() else: GM.Globals[GM.SVCACCT_SCOPES_DEFINED] = True GM.Globals[GM.SVCACCT_SCOPES] = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA].pop(API.OAUTH2SA_SCOPES) def getSvcAcctCredentials(scopesOrAPI, userEmail, softErrors=False, forceOauth=False): _getSvcAcctData() if isinstance(scopesOrAPI, str): GM.Globals[GM.CURRENT_SVCACCT_API] = scopesOrAPI if scopesOrAPI not in API.JWT_APIS: GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES] = GM.Globals[GM.SVCACCT_SCOPES].get(scopesOrAPI, []) else: GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES] = API.JWT_APIS[scopesOrAPI] if scopesOrAPI != API.CHAT_EVENTS and not GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES]: if 
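# Minimal sketch of the oauth2service.json fields validated by _getSvcAcctData above; values
# are placeholders. private_key/private_key_id are only required for the default key_type;
# the yubikey and signjwt key types keep no private key in the file.
# {
#   "key_type": "default",                # or "yubikey" / "signjwt"
#   "client_email": "sa-name@project-id.iam.gserviceaccount.com",
#   "client_id": "...",
#   "project_id": "project-id",
#   "token_uri": "...",                   # legacy .../o/oauth2/auth values are rewritten
#   "private_key": "-----BEGIN PRIVATE KEY-----\n...",
#   "private_key_id": "..."
# }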
softErrors: return None SvcAcctAPIAccessDeniedExit() if scopesOrAPI in {API.PEOPLE, API.PEOPLE_DIRECTORY, API.PEOPLE_OTHERCONTACTS}: GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES].append(API.USERINFO_PROFILE_SCOPE) if scopesOrAPI in {API.PEOPLE_OTHERCONTACTS}: GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES].append(API.PEOPLE_SCOPE) elif scopesOrAPI == API.CHAT_EVENTS: for chatAPI in [API.CHAT_SPACES, API.CHAT_MEMBERSHIPS, API.CHAT_MESSAGES]: GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES].extend(GM.Globals[GM.SVCACCT_SCOPES].get(chatAPI, [])) else: GM.Globals[GM.CURRENT_SVCACCT_API] = '' GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES] = scopesOrAPI key_type = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA].get('key_type', 'default') if not GM.Globals[GM.CURRENT_SVCACCT_API] or scopesOrAPI not in API.JWT_APIS or forceOauth: try: if key_type == 'default': credentials = google.oauth2.service_account.Credentials.from_service_account_info(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) elif key_type == 'yubikey': yksigner = yubikey.YubiKey(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) credentials = google.oauth2.service_account.Credentials._from_signer_and_info(yksigner, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) elif key_type == 'signjwt': sjsigner = signjwtSignJwt(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) credentials = signjwtCredentials._from_signer_and_info(sjsigner.sign, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) except (ValueError, IndexError, KeyError) as e: if softErrors: return None invalidOauth2serviceJsonExit(str(e)) credentials = credentials.with_scopes(GM.Globals[GM.CURRENT_SVCACCT_API_SCOPES]) else: audience = f'https://{scopesOrAPI}.googleapis.com/' try: if key_type == 'default': credentials = JWTCredentials.from_service_account_info(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], audience=audience) elif key_type == 'yubikey': yksigner = yubikey.YubiKey(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) credentials = JWTCredentials._from_signer_and_info(yksigner, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], audience=audience) elif key_type == 'signjwt': sjsigner = signjwtSignJwt(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) credentials = signjwtJWTCredentials._from_signer_and_info(sjsigner, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], audience=audience) credentials.project_id = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['project_id'] except (ValueError, IndexError, KeyError) as e: if softErrors: return None invalidOauth2serviceJsonExit(str(e)) GM.Globals[GM.CURRENT_SVCACCT_USER] = userEmail if userEmail: credentials = credentials.with_subject(userEmail) GM.Globals[GM.ADMIN] = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_email'] GM.Globals[GM.OAUTH2SERVICE_CLIENT_ID] = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_id'] return credentials def getGDataOAuthToken(gdataObj, credentials=None): if not credentials: credentials = getClientCredentials(refreshOnly=True) try: credentials.refresh(transportCreateRequest()) except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = e.args[0] handleOAuthTokenError(e, False) gdataObj.additional_headers['Authorization'] = f'Bearer {credentials.token}' if not GC.Values[GC.DOMAIN]: GC.Values[GC.DOMAIN] = GM.Globals[GM.DECODED_ID_TOKEN].get('hd', 'UNKNOWN').lower() if not GC.Values[GC.CUSTOMER_ID]: GC.Values[GC.CUSTOMER_ID] = GC.MY_CUSTOMER GM.Globals[GM.ADMIN] = GM.Globals[GM.DECODED_ID_TOKEN].get('email', 'UNKNOWN').lower() GM.Globals[GM.OAUTH2_CLIENT_ID] = credentials.client_id gdataObj.domain = 
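# Illustrative sketch, never called; the user email is a placeholder. getSvcAcctCredentials
# above accepts either an API name (scopes come from oauth2service.json, and APIs listed in
# API.JWT_APIS can use the lighter self-signed JWT flow) or an explicit scope list (always
# the OAuth service-account flow); passing userEmail enables domain-wide delegation.
def _example_drive_svcacct_credentials():
  return getSvcAcctCredentials(API.DRIVE3, 'user@example.com')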
GC.Values[GC.DOMAIN] gdataObj.source = GAM_USER_AGENT return True def checkGDataError(e, service): error = e.args reason = error[0].get('reason', '') body = error[0].get('body', '').decode(UTF8) # First check for errors that need special handling if reason in ['Token invalid - Invalid token: Stateless token expired', 'Token invalid - Invalid token: Token not found', 'gone']: keep_domain = service.domain getGDataOAuthToken(service) service.domain = keep_domain return (GDATA.TOKEN_EXPIRED, reason) error_code = getattr(e, 'error_code', 600) if GC.Values[GC.DEBUG_LEVEL] > 0: writeStdout(f'{ERROR_PREFIX} {error_code}: {reason}, {body}\n') if error_code == 600: if (body.startswith('Quota exceeded for the current request') or body.startswith('Quota exceeded for quota metric') or body.startswith('Request rate higher than configured')): return (GDATA.QUOTA_EXCEEDED, body) if (body.startswith('Photo delete failed') or body.startswith('Upload photo failed') or body.startswith('Photo query failed')): return (GDATA.NOT_FOUND, body) if body.startswith(GDATA.API_DEPRECATED_MSG): return (GDATA.API_DEPRECATED, body) if reason == 'Too Many Requests': return (GDATA.QUOTA_EXCEEDED, reason) if reason == 'Bad Gateway': return (GDATA.BAD_GATEWAY, reason) if reason == 'Gateway Timeout': return (GDATA.GATEWAY_TIMEOUT, reason) if reason == 'Service Unavailable': return (GDATA.SERVICE_UNAVAILABLE, reason) if reason == 'Service disabled by G Suite admin.': return (GDATA.FORBIDDEN, reason) if reason == 'Internal Server Error': return (GDATA.INTERNAL_SERVER_ERROR, reason) if reason == 'Token invalid - Invalid token: Token disabled, revoked, or expired.': return (GDATA.TOKEN_INVALID, 'Token disabled, revoked, or expired. Please delete and re-create oauth.txt') if reason == 'Token invalid - AuthSub token has wrong scope': return (GDATA.INSUFFICIENT_PERMISSIONS, reason) if reason.startswith('Only administrators can request entries belonging to'): return (GDATA.INSUFFICIENT_PERMISSIONS, reason) if reason == 'You are not authorized to access this API': return (GDATA.INSUFFICIENT_PERMISSIONS, reason) if reason == 'Invalid domain.': return (GDATA.INVALID_DOMAIN, reason) if reason.startswith('You are not authorized to perform operations on the domain'): return (GDATA.INVALID_DOMAIN, reason) if reason == 'Bad Request': if 'already exists' in body: return (GDATA.ENTITY_EXISTS, Msg.DUPLICATE) return (GDATA.BAD_REQUEST, body) if reason == 'Forbidden': return (GDATA.FORBIDDEN, body) if reason == 'Not Found': return (GDATA.NOT_FOUND, Msg.DOES_NOT_EXIST) if reason == 'Not Implemented': return (GDATA.NOT_IMPLEMENTED, body) if reason == 'Precondition Failed': return (GDATA.PRECONDITION_FAILED, reason) elif error_code == 602: if body.startswith(GDATA.API_DEPRECATED_MSG): return (GDATA.API_DEPRECATED, body) if reason == 'Bad Request': return (GDATA.BAD_REQUEST, body) elif error_code == 610: if reason == 'Service disabled by G Suite admin.': return (GDATA.FORBIDDEN, reason) # We got a "normal" error, define the mapping below error_code_map = { 1000: reason, 1001: reason, 1002: 'Unauthorized and forbidden', 1100: 'User deleted recently', 1200: 'Domain user limit exceeded', 1201: 'Domain alias limit exceeded', 1202: 'Domain suspended', 1203: 'Domain feature unavailable', 1300: f'Entity {getattr(e, "invalidInput", "")} exists', 1301: f'Entity {getattr(e, "invalidInput", "")} Does Not Exist', 1302: 'Entity Name Is Reserved', 1303: f'Entity {getattr(e, "invalidInput", "")} name not valid', 1306: f'{getattr(e, "invalidInput", "")} has members. 
Cannot delete.', 1317: f'Invalid input {getattr(e, "invalidInput", "")}, reason {getattr(e, "reason", "")}', 1400: 'Invalid Given Name', 1401: 'Invalid Family Name', 1402: 'Invalid Password', 1403: 'Invalid Username', 1404: 'Invalid Hash Function Name', 1405: 'Invalid Hash Digest Length', 1406: 'Invalid Email Address', 1407: 'Invalid Query Parameter Value', 1408: 'Invalid SSO Signing Key', 1409: 'Invalid Encryption Public Key', 1410: 'Feature Unavailable For User', 1411: 'Invalid Encryption Public Key Format', 1500: 'Too Many Recipients On Email List', 1501: 'Too Many Aliases For User', 1502: 'Too Many Delegates For User', 1601: 'Duplicate Destinations', 1602: 'Too Many Destinations', 1603: 'Invalid Route Address', 1700: 'Group Cannot Contain Cycle', 1800: 'Group Cannot Contain Cycle', 1801: f'Invalid value {getattr(e, "invalidInput", "")}', } return (error_code, error_code_map.get(error_code, f'Unknown Error: {str(e)}')) def callGData(service, function, bailOnInternalServerError=False, softErrors=False, throwErrors=None, retryErrors=None, triesLimit=0, **kwargs): if throwErrors is None: throwErrors = [] if retryErrors is None: retryErrors = [] if triesLimit == 0: triesLimit = GC.Values[GC.API_CALLS_TRIES_LIMIT] allRetryErrors = GDATA.NON_TERMINATING_ERRORS+retryErrors method = getattr(service, function) if GC.Values[GC.API_CALLS_RATE_CHECK]: checkAPICallsRate() for n in range(1, triesLimit+1): try: return method(**kwargs) except (gdata.service.RequestError, gdata.apps.service.AppsForYourDomainException) as e: error_code, error_message = checkGDataError(e, service) if (n != triesLimit) and (error_code in allRetryErrors): if (error_code == GDATA.INTERNAL_SERVER_ERROR and bailOnInternalServerError and n == GC.Values[GC.BAIL_ON_INTERNAL_ERROR_TRIES]): raise GDATA.ERROR_CODE_EXCEPTION_MAP[error_code](error_message) waitOnFailure(n, triesLimit, error_code, error_message) continue if error_code in throwErrors: if error_code in GDATA.ERROR_CODE_EXCEPTION_MAP: raise GDATA.ERROR_CODE_EXCEPTION_MAP[error_code](error_message) raise if softErrors: stderrErrorMsg(f'{error_code} - {error_message}{["", ": Giving up."][n > 1]}') return None if error_code == GDATA.INSUFFICIENT_PERMISSIONS: APIAccessDeniedExit() systemErrorExit(GOOGLE_API_ERROR_RC, f'{error_code} - {error_message}') except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if n != triesLimit: waitOnFailure(n, triesLimit, NETWORK_ERROR_RC, str(e)) continue handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = e.args[0] handleOAuthTokenError(e, GDATA.SERVICE_NOT_APPLICABLE in throwErrors) raise GDATA.ERROR_CODE_EXCEPTION_MAP[GDATA.SERVICE_NOT_APPLICABLE](str(e)) except (http_client.ResponseNotReady, OSError) as e: errMsg = f'Connection error: {str(e) or repr(e)}' if n != triesLimit: waitOnFailure(n, triesLimit, SOCKET_ERROR_RC, errMsg) continue if softErrors: writeStderr(f'\n{ERROR_PREFIX}{errMsg} - Giving up.\n') return None systemErrorExit(SOCKET_ERROR_RC, errMsg) def writeGotMessage(msg): if GC.Values[GC.SHOW_GETTINGS_GOT_NL]: writeStderr(msg) else: writeStderr('\r') msgLen = len(msg) if msgLen < GM.Globals[GM.LAST_GOT_MSG_LEN]: writeStderr(msg+' '*(GM.Globals[GM.LAST_GOT_MSG_LEN]-msgLen)) else: writeStderr(msg) GM.Globals[GM.LAST_GOT_MSG_LEN] = msgLen flushStderr() def callGDataPages(service, function, pageMessage=None, softErrors=False, throwErrors=None, retryErrors=None, uri=None, **kwargs): if throwErrors is None: throwErrors = [] if retryErrors is None: 
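# Usage sketch for callGData above (hypothetical service object, method name and kwargs;
# never executed): error codes listed in retryErrors back off via waitOnFailure, codes in
# throwErrors re-raise as the matching GDATA.ERROR_CODE_EXCEPTION_MAP exception, and
# anything else warns (softErrors) or exits.
#
#   result = callGData(gdataService, 'SomeGDataMethod',
#                      throwErrors=[GDATA.INVALID_DOMAIN],
#                      retryErrors=[GDATA.INTERNAL_SERVER_ERROR],
#                      **methodKwargs)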
retryErrors = [] nextLink = None allResults = [] totalItems = 0 while True: this_page = callGData(service, function, softErrors=softErrors, throwErrors=throwErrors, retryErrors=retryErrors, uri=uri, **kwargs) if this_page: nextLink = this_page.GetNextLink() pageItems = len(this_page.entry) if pageItems == 0: nextLink = None totalItems += pageItems allResults.extend(this_page.entry) else: nextLink = None pageItems = 0 if pageMessage: show_message = pageMessage.replace(TOTAL_ITEMS_MARKER, str(totalItems)) writeGotMessage(show_message.format(Ent.ChooseGetting(totalItems))) if nextLink is None: if pageMessage and (pageMessage[-1] != '\n'): writeStderr('\r\n') flushStderr() return allResults uri = nextLink.href if 'url_params' in kwargs: kwargs['url_params'].pop('start-index', None) def checkGAPIError(e, softErrors=False, retryOnHttpError=False, mapNotFound=True): def makeErrorDict(code, reason, message): return {'error': {'code': code, 'errors': [{'reason': reason, 'message': message}]}} try: error = json.loads(e.content.decode(UTF8)) if GC.Values[GC.DEBUG_LEVEL] > 0: writeStdout(f'{ERROR_PREFIX} JSON: {str(error)}\n') except (IndexError, KeyError, SyntaxError, TypeError, ValueError): eContent = e.content.decode(UTF8) if isinstance(e.content, bytes) else e.content lContent = eContent.lower() if GC.Values[GC.DEBUG_LEVEL] > 0: writeStdout(f'{ERROR_PREFIX} HTTP: {str(eContent)}\n') if eContent[0:15] != '<!DOCTYPE html>': if (e.resp['status'] == '403') and (lContent.startswith('request rate higher than configured')): return (e.resp['status'], GAPI.QUOTA_EXCEEDED, eContent) if (e.resp['status'] == '429') and (lContent.startswith('quota exceeded for quota metric')): return (e.resp['status'], GAPI.QUOTA_EXCEEDED, eContent) if (e.resp['status'] == '502') and ('bad gateway' in lContent): return (e.resp['status'], GAPI.BAD_GATEWAY, eContent) if (e.resp['status'] == '503') and (lContent.startswith('quota exceeded for the current request')): return (e.resp['status'], GAPI.QUOTA_EXCEEDED, eContent) if (e.resp['status'] == '504') and ('gateway timeout' in lContent): return (e.resp['status'], GAPI.GATEWAY_TIMEOUT, eContent) else: tg = HTML_TITLE_PATTERN.match(lContent) lContent = tg.group(1) if tg else 'bad request' if (e.resp['status'] == '403') and ('invalid domain.' in lContent): error = makeErrorDict(403, GAPI.NOT_FOUND, 'Domain not found') elif (e.resp['status'] == '403') and ('domain cannot use apis.' 
in lContent): error = makeErrorDict(403, GAPI.DOMAIN_CANNOT_USE_APIS, 'Domain cannot use apis') elif (e.resp['status'] == '400') and ('invalidssosigningkey' in lContent): error = makeErrorDict(400, GAPI.INVALID, 'InvalidSsoSigningKey') elif (e.resp['status'] == '400') and ('unknownerror' in lContent): error = makeErrorDict(400, GAPI.INVALID, 'UnknownError') elif (e.resp['status'] == '400') and ('featureunavailableforuser' in lContent): error = makeErrorDict(400, GAPI.SERVICE_NOT_AVAILABLE, 'Feature Unavailable For User') elif (e.resp['status'] == '400') and ('entitydoesnotexist' in lContent): error = makeErrorDict(400, GAPI.NOT_FOUND, 'Entity Does Not Exist') elif (e.resp['status'] == '400') and ('entitynamenotvalid' in lContent): error = makeErrorDict(400, GAPI.INVALID_INPUT, 'Entity Name Not Valid') elif (e.resp['status'] == '400') and ('failed to parse Content-Range header' in lContent): error = makeErrorDict(400, GAPI.BAD_REQUEST, 'Failed to parse Content-Range header') elif (e.resp['status'] == '400') and ('request contains an invalid argument' in lContent): error = makeErrorDict(400, GAPI.INVALID_ARGUMENT, 'Request contains an invalid argument') elif (e.resp['status'] == '404') and ('not found' in lContent): error = makeErrorDict(404, GAPI.NOT_FOUND, lContent) elif (e.resp['status'] == '404') and ('bad request' in lContent): error = makeErrorDict(404, GAPI.BAD_REQUEST, lContent) elif retryOnHttpError: return (-1, None, eContent) elif softErrors: stderrErrorMsg(eContent) return (0, None, None) else: systemErrorExit(HTTP_ERROR_RC, eContent) if 'error' in error: http_status = error['error']['code'] reason = '' if 'errors' in error['error'] and 'message' in error['error']['errors'][0]: message = error['error']['errors'][0]['message'] if 'reason' in error['error']['errors'][0]: reason = error['error']['errors'][0]['reason'] elif 'errors' in error['error'] and 'Unknown Error' in error['error']['message'] and 'reason' in error['error']['errors'][0]: message = error['error']['errors'][0]['reason'] else: message = error['error']['message'] status = error['error'].get('status', '') lmessage = message.lower() if message is not None else '' if http_status == 500: if not lmessage or status == 'UNKNOWN': if not lmessage: message = Msg.UNKNOWN error = makeErrorDict(http_status, GAPI.UNKNOWN_ERROR, message) elif 'backend error' in lmessage: error = makeErrorDict(http_status, GAPI.BACKEND_ERROR, message) elif 'internal error encountered' in lmessage: error = makeErrorDict(http_status, GAPI.INTERNAL_ERROR, message) elif 'role assignment exists: roleassignment' in lmessage: error = makeErrorDict(http_status, GAPI.DUPLICATE, message) elif 'role assignment exists: roleid' in lmessage: error = makeErrorDict(http_status, GAPI.DUPLICATE, message) elif 'operation not supported' in lmessage: error = makeErrorDict(http_status, GAPI.OPERATION_NOT_SUPPORTED, message) elif 'failed status in update settings response' in lmessage: error = makeErrorDict(http_status, GAPI.INVALID_INPUT, message) elif 'cannot delete a field in use.resource.fields' in lmessage: error = makeErrorDict(http_status, GAPI.FIELD_IN_USE, message) elif status == 'INTERNAL': error = makeErrorDict(http_status, GAPI.INTERNAL_ERROR, message) elif http_status == 502: if 'bad gateway' in lmessage: error = makeErrorDict(http_status, GAPI.BAD_GATEWAY, message) elif http_status == 503: if message.startswith('quota exceeded for the current request'): error = makeErrorDict(http_status, GAPI.QUOTA_EXCEEDED, message) elif status == 'UNAVAILABLE' or 'the 
service is currently unavailable' in lmessage: error = makeErrorDict(http_status, GAPI.SERVICE_NOT_AVAILABLE, message) elif http_status == 504: if 'gateway timeout' in lmessage: error = makeErrorDict(http_status, GAPI.GATEWAY_TIMEOUT, message) elif http_status == 400: if '@attachmentnotvisible' in lmessage: error = makeErrorDict(http_status, GAPI.BAD_REQUEST, message) elif status == 'INVALID_ARGUMENT': error = makeErrorDict(http_status, GAPI.INVALID_ARGUMENT, message) elif status == 'FAILED_PRECONDITION' or 'precondition check failed' in lmessage: error = makeErrorDict(http_status, GAPI.FAILED_PRECONDITION, message) elif 'does not match' in lmessage or 'invalid' in lmessage: error = makeErrorDict(http_status, GAPI.INVALID, message) elif http_status == 401: if 'active session is invalid' in lmessage and reason == 'authError': # message += ' Drive SDK API access disabled' # message = Msg.SERVICE_NOT_ENABLED.format('Drive') error = makeErrorDict(http_status, GAPI.AUTH_ERROR, message) elif status == 'PERMISSION_DENIED': error = makeErrorDict(http_status, GAPI.PERMISSION_DENIED, message) elif status == 'UNAUTHENTICATED': error = makeErrorDict(http_status, GAPI.AUTH_ERROR, message) elif http_status == 403: if 'quota exceeded for quota metric' in lmessage: error = makeErrorDict(http_status, GAPI.QUOTA_EXCEEDED, message) elif 'the authenticated user cannot access this service' in lmessage: error = makeErrorDict(http_status, GAPI.SERVICE_NOT_AVAILABLE, message) elif status == 'PERMISSION_DENIED' or 'the caller does not have permission' in lmessage or 'permission iam.serviceaccountkeys' in lmessage: error = makeErrorDict(http_status, GAPI.PERMISSION_DENIED, message) elif http_status == 404: if status == 'NOT_FOUND' or 'requested entity was not found' in lmessage or 'does not exist' in lmessage: error = makeErrorDict(http_status, GAPI.NOT_FOUND, message) elif http_status == 409: if status == 'ALREADY_EXISTS' or 'requested entity already exists' in lmessage: error = makeErrorDict(http_status, GAPI.ALREADY_EXISTS, message) elif status == 'ABORTED' or 'the operation was aborted' in lmessage: error = makeErrorDict(http_status, GAPI.ABORTED, message) elif http_status == 412: if 'insufficient archived user licenses' in lmessage: error = makeErrorDict(http_status, GAPI.INSUFFICIENT_ARCHIVED_USER_LICENSES, message) elif http_status == 413: if 'request too large' in lmessage: error = makeErrorDict(http_status, GAPI.UPLOAD_TOO_LARGE, message) elif http_status == 429: if status == 'RESOURCE_EXHAUSTED' or 'quota exceeded' in lmessage or 'insufficient quota' in lmessage: error = makeErrorDict(http_status, GAPI.QUOTA_EXCEEDED, message) else: if 'error_description' in error: if error['error_description'] == 'Invalid Value': message = error['error_description'] http_status = 400 error = makeErrorDict(http_status, GAPI.INVALID, message) else: systemErrorExit(GOOGLE_API_ERROR_RC, str(error)) else: systemErrorExit(GOOGLE_API_ERROR_RC, str(error)) try: reason = error['error']['errors'][0]['reason'] for messageItem in GAPI.REASON_MESSAGE_MAP.get(reason, []): if messageItem[0] in message: if reason in [GAPI.NOT_FOUND, GAPI.RESOURCE_NOT_FOUND] and mapNotFound: message = Msg.DOES_NOT_EXIST reason = messageItem[1] break if reason == GAPI.INVALID_SHARING_REQUEST: loc = message.find('User message: ') if loc != -1: message = message[loc+15:] else: loc = message.find('User message: ""') if loc != -1: message = message[:loc+14]+f'"{reason}"' except KeyError: reason = f'{http_status}' return (http_status, reason, message) def 
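# checkGAPIError above normalizes every googleapiclient.errors.HttpError into a
# (http_status, reason, message) triple; HTML error bodies are first converted to the same
# dict shape via makeErrorDict. A hypothetical result for a lookup of a deleted user would
# be roughly (404, GAPI.NOT_FOUND, Msg.DOES_NOT_EXIST), or a more specific reason when
# GAPI.REASON_MESSAGE_MAP matches the message text.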
callGAPI(service, function, bailOnInternalError=False, bailOnTransientError=False, bailOnInvalidError=False, softErrors=False, mapNotFound=True, throwReasons=None, retryReasons=None, triesLimit=0, **kwargs): if throwReasons is None: throwReasons = [] if retryReasons is None: retryReasons = [] if triesLimit == 0: triesLimit = GC.Values[GC.API_CALLS_TRIES_LIMIT] allRetryReasons = GAPI.DEFAULT_RETRY_REASONS+retryReasons method = getattr(service, function) svcparms = dict(list(kwargs.items())+GM.Globals[GM.EXTRA_ARGS_LIST]) if GC.Values[GC.API_CALLS_RATE_CHECK]: checkAPICallsRate() for n in range(1, triesLimit+1): try: return method(**svcparms).execute() except googleapiclient.errors.HttpError as e: http_status, reason, message = checkGAPIError(e, softErrors=softErrors, retryOnHttpError=n < 3, mapNotFound=mapNotFound) if http_status == -1: # The error detail indicated that we should retry this request # We'll refresh credentials and make another pass try: # service._http.credentials.refresh(getHttpObj()) service._http.credentials.refresh(transportCreateRequest()) except TypeError: systemErrorExit(HTTP_ERROR_RC, message) continue if http_status == 0: return None if (n != triesLimit) and ((reason in allRetryReasons) or (GC.Values[GC.RETRY_API_SERVICE_NOT_AVAILABLE] and (reason == GAPI.SERVICE_NOT_AVAILABLE))): if (reason in [GAPI.INTERNAL_ERROR, GAPI.BACKEND_ERROR] and bailOnInternalError and n == GC.Values[GC.BAIL_ON_INTERNAL_ERROR_TRIES]): raise GAPI.REASON_EXCEPTION_MAP[reason](message) if (reason in [GAPI.INVALID] and bailOnInvalidError and n == GC.Values[GC.BAIL_ON_INTERNAL_ERROR_TRIES]): raise GAPI.REASON_EXCEPTION_MAP[reason](message) waitOnFailure(n, triesLimit, reason, message) if reason == GAPI.TRANSIENT_ERROR and bailOnTransientError: raise GAPI.REASON_EXCEPTION_MAP[reason](message) continue if reason in throwReasons: if reason in GAPI.REASON_EXCEPTION_MAP: raise GAPI.REASON_EXCEPTION_MAP[reason](message) raise e if softErrors: stderrErrorMsg(f'{http_status}: {reason} - {message}{["", ": Giving up."][n > 1]}') return None if reason == GAPI.INSUFFICIENT_PERMISSIONS: APIAccessDeniedExit() systemErrorExit(HTTP_ERROR_RC, formatHTTPError(http_status, reason, message)) except googleapiclient.errors.MediaUploadSizeError as e: raise e except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if n != triesLimit: service._http.connections = {} waitOnFailure(n, triesLimit, NETWORK_ERROR_RC, str(e)) continue handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = e.args[0] handleOAuthTokenError(e, GAPI.SERVICE_NOT_AVAILABLE in throwReasons) raise GAPI.REASON_EXCEPTION_MAP[GAPI.SERVICE_NOT_AVAILABLE](str(e)) except (http_client.ResponseNotReady, OSError) as e: errMsg = f'Connection error: {str(e) or repr(e)}' if n != triesLimit: waitOnFailure(n, triesLimit, SOCKET_ERROR_RC, errMsg) continue if softErrors: writeStderr(f'\n{ERROR_PREFIX}{errMsg} - Giving up.\n') return None systemErrorExit(SOCKET_ERROR_RC, errMsg) except ValueError as e: if clearServiceCache(service): continue systemErrorExit(GOOGLE_API_ERROR_RC, str(e)) except TypeError as e: systemErrorExit(GOOGLE_API_ERROR_RC, str(e)) def _showGAPIpagesResult(results, pageItems, totalItems, pageMessage, messageAttribute, entityType): showMessage = pageMessage.replace(TOTAL_ITEMS_MARKER, str(totalItems)) if pageItems: if messageAttribute: firstItem = results[0] if pageItems > 0 else {} lastItem = results[-1] if pageItems > 1 else firstItem if 
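# Typical callGAPI usage elsewhere in this module (see getUserEmailFromID further below);
# the email address here is a placeholder. Keyword arguments are passed to the discovery
# method after GM.EXTRA_ARGS_LIST is merged in, and reasons listed in throwReasons are
# re-raised as the matching GAPI.* exception instead of retrying or exiting:
#
#   result = callGAPI(cd.users(), 'get',
#                     throwReasons=GAPI.USER_GET_THROW_REASONS,
#                     userKey='user@example.com', fields='primaryEmail')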
isinstance(messageAttribute, str): firstItem = str(firstItem.get(messageAttribute, '')) lastItem = str(lastItem.get(messageAttribute, '')) else: for attr in messageAttribute: firstItem = firstItem.get(attr, {}) lastItem = lastItem.get(attr, {}) firstItem = str(firstItem) lastItem = str(lastItem) showMessage = showMessage.replace(FIRST_ITEM_MARKER, firstItem) showMessage = showMessage.replace(LAST_ITEM_MARKER, lastItem) else: showMessage = showMessage.replace(FIRST_ITEM_MARKER, '') showMessage = showMessage.replace(LAST_ITEM_MARKER, '') writeGotMessage(showMessage.replace('{0}', str(Ent.Choose(entityType, totalItems)))) def _processGAPIpagesResult(results, items, allResults, totalItems, pageMessage, messageAttribute, entityType): if results: pageToken = results.get('nextPageToken') if items in results: pageItems = len(results[items]) totalItems += pageItems if allResults is not None: allResults.extend(results[items]) else: results = {items: []} pageItems = 0 else: pageToken = None results = {items: []} pageItems = 0 if pageMessage: _showGAPIpagesResult(results[items], pageItems, totalItems, pageMessage, messageAttribute, entityType) return (pageToken, totalItems) def _finalizeGAPIpagesResult(pageMessage): if pageMessage and (pageMessage[-1] != '\n'): writeStderr('\r\n') flushStderr() def callGAPIpages(service, function, items, pageMessage=None, messageAttribute=None, maxItems=0, noFinalize=False, throwReasons=None, retryReasons=None, pageArgsInBody=False, **kwargs): if throwReasons is None: throwReasons = [] if retryReasons is None: retryReasons = [] allResults = [] totalItems = 0 maxArg = '' if maxItems: maxResults = kwargs.get('maxResults', 0) if maxResults: maxArg = 'maxResults' else: maxResults = kwargs.get('pageSize', 0) if maxResults: maxArg = 'pageSize' if pageArgsInBody: kwargs.setdefault('body', {}) entityType = Ent.Getting() if pageMessage else None while True: if maxArg and maxItems-totalItems < maxResults: kwargs[maxArg] = maxItems-totalItems results = callGAPI(service, function, throwReasons=throwReasons, retryReasons=retryReasons, **kwargs) pageToken, totalItems = _processGAPIpagesResult(results, items, allResults, totalItems, pageMessage, messageAttribute, entityType) if not pageToken or (maxItems and totalItems >= maxItems): if not noFinalize: _finalizeGAPIpagesResult(pageMessage) return allResults if pageArgsInBody: kwargs['body']['pageToken'] = pageToken else: kwargs['pageToken'] = pageToken def yieldGAPIpages(service, function, items, pageMessage=None, messageAttribute=None, maxItems=0, noFinalize=False, throwReasons=None, retryReasons=None, pageArgsInBody=False, **kwargs): if throwReasons is None: throwReasons = [] if retryReasons is None: retryReasons = [] totalItems = 0 maxArg = '' if maxItems: maxResults = kwargs.get('maxResults', 0) if maxResults: maxArg = 'maxResults' else: maxResults = kwargs.get('pageSize', 0) if maxResults: maxArg = 'pageSize' if pageArgsInBody: kwargs.setdefault('body', {}) entityType = Ent.Getting() if pageMessage else None while True: if maxArg and maxItems-totalItems < maxResults: kwargs[maxArg] = maxItems-totalItems results = callGAPI(service, function, throwReasons=throwReasons, retryReasons=retryReasons, **kwargs) if results: pageToken = results.get('nextPageToken') if items in results: pageItems = len(results[items]) totalItems += pageItems else: results = {items: []} pageItems = 0 else: pageToken = None results = {items: []} pageItems = 0 if pageMessage: _showGAPIpagesResult(results[items], pageItems, totalItems, pageMessage, 
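# Pagination sketch for callGAPIpages above (hypothetical group email, never executed):
# each page is fetched with callGAPI, nextPageToken is fed back via kwargs (or body when
# pageArgsInBody is set), and maxResults/pageSize is trimmed so no more than maxItems
# items are retrieved in total.
#
#   members = callGAPIpages(cd.members(), 'list', 'members',
#                           groupKey='group@example.com',
#                           fields='nextPageToken,members(email,type,status)',
#                           maxResults=GC.Values[GC.MEMBER_MAX_RESULTS])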
messageAttribute, entityType) yield results[items] if not pageToken or (maxItems and totalItems >= maxItems): if not noFinalize: _finalizeGAPIpagesResult(pageMessage) return if pageArgsInBody: kwargs['body']['pageToken'] = pageToken else: kwargs['pageToken'] = pageToken def callGAPIitems(service, function, items, throwReasons=None, retryReasons=None, **kwargs): if throwReasons is None: throwReasons = [] if retryReasons is None: retryReasons = [] results = callGAPI(service, function, throwReasons=throwReasons, retryReasons=retryReasons, **kwargs) if results: return results.get(items, []) return [] def readDiscoveryFile(api_version): disc_filename = f'{api_version}.json' disc_file = os.path.join(GM.Globals[GM.GAM_PATH], disc_filename) if hasattr(sys, '_MEIPASS'): json_string = readFile(os.path.join(sys._MEIPASS, disc_filename), continueOnError=True, displayError=True) #pylint: disable=no-member elif os.path.isfile(disc_file): json_string = readFile(disc_file, continueOnError=True, displayError=True) else: json_string = None if not json_string: invalidDiscoveryJsonExit(disc_file, Msg.NO_DATA) try: discovery = json.loads(json_string) return (disc_file, discovery) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidDiscoveryJsonExit(disc_file, str(e)) def buildGAPIObject(api, credentials=None): if credentials is None: credentials = getClientCredentials(api=api, refreshOnly=True) httpObj = transportAuthorizedHttp(credentials, http=getHttpObj(cache=GM.Globals[GM.CACHE_DIR])) service = getService(api, httpObj) if not GC.Values[GC.ENABLE_DASA]: try: API_Scopes = set(list(service._rootDesc['auth']['oauth2']['scopes'])) except KeyError: API_Scopes = set(API.VAULT_SCOPES) if api == API.VAULT else set() GM.Globals[GM.CURRENT_CLIENT_API] = api GM.Globals[GM.CURRENT_CLIENT_API_SCOPES] = API_Scopes.intersection(GM.Globals[GM.CREDENTIALS_SCOPES]) if api not in API.SCOPELESS_APIS and not GM.Globals[GM.CURRENT_CLIENT_API_SCOPES]: systemErrorExit(NO_SCOPES_FOR_API_RC, Msg.NO_SCOPES_FOR_API.format(API.getAPIName(api))) if not GC.Values[GC.DOMAIN]: GC.Values[GC.DOMAIN] = GM.Globals[GM.DECODED_ID_TOKEN].get('hd', 'UNKNOWN').lower() if not GC.Values[GC.CUSTOMER_ID]: GC.Values[GC.CUSTOMER_ID] = GC.MY_CUSTOMER GM.Globals[GM.ADMIN] = GM.Globals[GM.DECODED_ID_TOKEN].get('email', 'UNKNOWN').lower() GM.Globals[GM.OAUTH2_CLIENT_ID] = credentials.client_id return service def getSaUser(user): currentClientAPI = GM.Globals[GM.CURRENT_CLIENT_API] currentClientAPIScopes = GM.Globals[GM.CURRENT_CLIENT_API_SCOPES] userEmail = convertUIDtoEmailAddress(user) if user else None GM.Globals[GM.CURRENT_CLIENT_API] = currentClientAPI GM.Globals[GM.CURRENT_CLIENT_API_SCOPES] = currentClientAPIScopes return userEmail def buildGAPIServiceObject(api, user, i=0, count=0, displayError=True): userEmail = getSaUser(user) httpObj = getHttpObj(cache=GM.Globals[GM.CACHE_DIR]) service = getService(api, httpObj) if api == API.MEET_BETA: api = API.MEET credentials = getSvcAcctCredentials(api, userEmail) request = transportCreateRequest(httpObj) triesLimit = 3 for n in range(1, triesLimit+1): try: credentials.refresh(request) service._http = transportAuthorizedHttp(credentials, http=httpObj) return (userEmail, service) except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: if n != triesLimit: httpObj.connections = {} waitOnFailure(n, triesLimit, NETWORK_ERROR_RC, str(e)) continue handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = 
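# buildGAPIObject authorizes with the admin's client credentials (oauth2.txt), while
# buildGAPIServiceObject impersonates a user with the service account (oauth2service.json)
# and returns (userEmail, service), or (userEmail, None) after warning on failure.
# Hypothetical example, never executed:
#
#   cd = buildGAPIObject(API.DIRECTORY)
#   user, drive = buildGAPIServiceObject(API.DRIVE3, 'user@example.com')
#   if drive is None:
#     pass  # impersonation failed; a warning was already printed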
e.args[0] if n < triesLimit: if isinstance(e, str): eContent = e else: eContent = e.content.decode(UTF8) if isinstance(e.content, bytes) else e.content if eContent[0:15] == '<!DOCTYPE html>': if GC.Values[GC.DEBUG_LEVEL] > 0: writeStdout(f'{ERROR_PREFIX} HTTP: {str(eContent)}\n') lContent = eContent.lower() tg = HTML_TITLE_PATTERN.match(lContent) lContent = tg.group(1) if tg else '' if lContent.startswith('error 502 (server error)'): time.sleep(30) continue handleOAuthTokenError(e, True, displayError, i, count) return (userEmail, None) def buildGAPIObjectNoAuthentication(api): httpObj = getHttpObj(cache=GM.Globals[GM.CACHE_DIR]) service = getService(api, httpObj) return service def initGDataObject(gdataObj, api): GM.Globals[GM.CURRENT_CLIENT_API] = api credentials = getClientCredentials(noDASA=True, refreshOnly=True) GM.Globals[GM.CURRENT_CLIENT_API_SCOPES] = API.getClientScopesSet(api).intersection(GM.Globals[GM.CREDENTIALS_SCOPES]) if not GM.Globals[GM.CURRENT_CLIENT_API_SCOPES]: systemErrorExit(NO_SCOPES_FOR_API_RC, Msg.NO_SCOPES_FOR_API.format(API.getAPIName(api))) getGDataOAuthToken(gdataObj, credentials) if GC.Values[GC.DEBUG_LEVEL] > 0: gdataObj.debug = True return gdataObj def getGDataUserCredentials(api, user, i, count): userEmail = getSaUser(user) credentials = getSvcAcctCredentials(api, userEmail) request = transportCreateRequest() try: credentials.refresh(request) return (userEmail, credentials) except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: handleServerError(e) except google.auth.exceptions.RefreshError as e: if isinstance(e.args, tuple): e = e.args[0] handleOAuthTokenError(e, True, True, i, count) return (userEmail, None) def getContactsObject(contactFeed): contactsObject = initGDataObject(gdata.apps.contacts.service.ContactsService(contactFeed=contactFeed), API.CONTACTS) return (GC.Values[GC.DOMAIN], contactsObject) def getContactsQuery(**kwargs): if GC.Values[GC.NO_VERIFY_SSL]: ssl._create_default_https_context = ssl._create_unverified_context return gdata.apps.contacts.service.ContactsQuery(**kwargs) def getEmailAuditObject(): return initGDataObject(gdata.apps.audit.service.AuditService(), API.EMAIL_AUDIT) def getUserEmailFromID(uid, cd): try: result = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=uid, fields='primaryEmail') return result.get('primaryEmail') except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): return None def getGroupEmailFromID(uid, cd): try: result = callGAPI(cd.groups(), 'get', throwReasons=GAPI.GROUP_GET_THROW_REASONS, groupKey=uid, fields='email') return result.get('email') except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest): return None def getServiceAccountEmailFromID(account_id, sal=None): if sal is None: sal = buildGAPIObject(API.SERVICEACCOUNTLOOKUP) try: certs = callGAPI(sal.serviceaccounts(), 'lookup', throwReasons = [GAPI.BAD_REQUEST, GAPI.NOT_FOUND, GAPI.RESOURCE_NOT_FOUND, GAPI.INVALID_ARGUMENT], account=account_id) except (GAPI.badRequest, GAPI.notFound, GAPI.resourceNotFound, GAPI.invalidArgument): return None sa_cn_rx = r'CN=(.+)\.(.+)\.iam\.gservice.*' sa_emails = [] for _, raw_cert in certs.items(): cert = x509.load_pem_x509_certificate(raw_cert.encode(), default_backend()) # suppress cryptography warning due to long service account email with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='.*Attribute\'s 
length.*') mg = re.match(sa_cn_rx, cert.issuer.rfc4514_string()) if mg: sa_email = f'{mg.group(1)}@{mg.group(2)}.iam.gserviceaccount.com' if sa_email not in sa_emails: sa_emails.append(sa_email) return GC.Values[GC.CSV_OUTPUT_FIELD_DELIMITER].join(sa_emails) # Convert UID to email address and type def convertUIDtoEmailAddressWithType(emailAddressOrUID, cd=None, sal=None, emailTypes=None, checkForCustomerId=False, ciGroupsAPI=False, aliasAllowed=True): if emailTypes is None: emailTypes = ['user'] elif not isinstance(emailTypes, list): emailTypes = [emailTypes] if emailTypes != 'any' else ['user', 'group'] if checkForCustomerId and (emailAddressOrUID == GC.Values[GC.CUSTOMER_ID]): return (emailAddressOrUID, 'email') normalizedEmailAddressOrUID = normalizeEmailAddressOrUID(emailAddressOrUID, ciGroupsAPI=ciGroupsAPI) if ciGroupsAPI and emailAddressOrUID.startswith('groups/'): return emailAddressOrUID if normalizedEmailAddressOrUID.find('@') > 0 and aliasAllowed: return (normalizedEmailAddressOrUID, 'email') if cd is None: cd = buildGAPIObject(API.DIRECTORY) if 'user' in emailTypes and 'group' in emailTypes: # Google User IDs *TEND* to be integers while groups tend to have letters # thus we can optimize which check we try first. We'll still check # both since there is no guarantee this will always be true. if normalizedEmailAddressOrUID.isdigit(): uid = getUserEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'user') uid = getGroupEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'group') else: uid = getGroupEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'group') uid = getUserEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'user') elif 'user' in emailTypes: uid = getUserEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'user') elif 'group' in emailTypes: uid = getGroupEmailFromID(normalizedEmailAddressOrUID, cd) if uid: return (uid, 'group') if 'resource' in emailTypes: try: result = callGAPI(cd.resources().calendars(), 'get', throwReasons=[GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], calendarResourceId=normalizedEmailAddressOrUID, customer=GC.Values[GC.CUSTOMER_ID], fields='resourceEmail') if 'resourceEmail' in result: return (result['resourceEmail'].lower(), 'resource') except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): pass if 'serviceaccount' in emailTypes: uid = getServiceAccountEmailFromID(normalizedEmailAddressOrUID, sal) if uid: return (uid, 'serviceaccount') return (normalizedEmailAddressOrUID, 'unknown') NON_EMAIL_MEMBER_PREFIXES = ( "cbcm-browser.", "chrome-os-device.", ) # Convert UID to email address def convertUIDtoEmailAddress(emailAddressOrUID, cd=None, emailTypes=None, checkForCustomerId=False, ciGroupsAPI=False, aliasAllowed=True): if ciGroupsAPI: if emailAddressOrUID.startswith(NON_EMAIL_MEMBER_PREFIXES): return emailAddressOrUID normalizedEmailAddressOrUID = normalizeEmailAddressOrUID(emailAddressOrUID, ciGroupsAPI=ciGroupsAPI) if normalizedEmailAddressOrUID.startswith(NON_EMAIL_MEMBER_PREFIXES): return normalizedEmailAddressOrUID email, _ = convertUIDtoEmailAddressWithType(emailAddressOrUID, cd=cd, emailTypes=emailTypes, checkForCustomerId=checkForCustomerId, ciGroupsAPI=ciGroupsAPI, aliasAllowed=aliasAllowed) return email # Convert email address to User/Group UID; called immediately after getting email address from command line def convertEmailAddressToUID(emailAddressOrUID, cd=None, emailType='user', savedLocation=None): normalizedEmailAddressOrUID = normalizeEmailAddressOrUID(emailAddressOrUID) if 
normalizedEmailAddressOrUID.find('@') == -1: return normalizedEmailAddressOrUID if cd is None: cd = buildGAPIObject(API.DIRECTORY) if emailType != 'group': try: return callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=normalizedEmailAddressOrUID, fields='id')['id'] except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): if emailType == 'user': if savedLocation is not None: Cmd.SetLocation(savedLocation) entityDoesNotExistExit(Ent.USER, normalizedEmailAddressOrUID, errMsg=getPhraseDNEorSNA(normalizedEmailAddressOrUID)) try: return callGAPI(cd.groups(), 'get', throwReasons=GAPI.GROUP_GET_THROW_REASONS, retryReasons=GAPI.GROUP_GET_RETRY_REASONS, groupKey=normalizedEmailAddressOrUID, fields='id')['id'] except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.systemError): if savedLocation is not None: Cmd.SetLocation(savedLocation) entityDoesNotExistExit([Ent.USER, Ent.GROUP][emailType == 'group'], normalizedEmailAddressOrUID, errMsg=getPhraseDNEorSNA(normalizedEmailAddressOrUID)) # Convert User UID from API call to email address def convertUserIDtoEmail(uid, cd=None): primaryEmail = GM.Globals[GM.MAP_USER_ID_TO_NAME].get(uid) if not primaryEmail: if cd is None: cd = buildGAPIObject(API.DIRECTORY) try: primaryEmail = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=uid, fields='primaryEmail')['primaryEmail'] except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): primaryEmail = f'uid:{uid}' GM.Globals[GM.MAP_USER_ID_TO_NAME][uid] = primaryEmail return primaryEmail # Convert UID to split email address # Return (foo@bar.com, foo, bar.com) def splitEmailAddressOrUID(emailAddressOrUID): normalizedEmailAddressOrUID = normalizeEmailAddressOrUID(emailAddressOrUID) atLoc = normalizedEmailAddressOrUID.find('@') if atLoc > 0: return (normalizedEmailAddressOrUID, normalizedEmailAddressOrUID[:atLoc], normalizedEmailAddressOrUID[atLoc+1:]) try: cd = buildGAPIObject(API.DIRECTORY) result = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=normalizedEmailAddressOrUID, fields='primaryEmail') if 'primaryEmail' in result: normalizedEmailAddressOrUID = result['primaryEmail'].lower() atLoc = normalizedEmailAddressOrUID.find('@') return (normalizedEmailAddressOrUID, normalizedEmailAddressOrUID[:atLoc], normalizedEmailAddressOrUID[atLoc+1:]) except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): pass return (normalizedEmailAddressOrUID, normalizedEmailAddressOrUID, GC.Values[GC.DOMAIN]) # Convert Org Unit Id to Org Unit Path def convertOrgUnitIDtoPath(cd, orgUnitId): if orgUnitId.lower().startswith('orgunits/'): orgUnitId = f'id:{orgUnitId[9:]}' orgUnitPath = GM.Globals[GM.MAP_ORGUNIT_ID_TO_NAME].get(orgUnitId) if not orgUnitPath: if cd is None: cd = buildGAPIObject(API.DIRECTORY) try: orgUnitPath = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=orgUnitId, fields='orgUnitPath')['orgUnitPath'] except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError, GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): orgUnitPath = orgUnitId GM.Globals[GM.MAP_ORGUNIT_ID_TO_NAME][orgUnitId] = orgUnitPath return 
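# The conversion helpers above cache their results where possible (GM.MAP_USER_ID_TO_NAME,
# GM.MAP_ORGUNIT_ID_TO_NAME) so repeated lookups of the same id avoid extra API calls.
# Hypothetical round trip, never executed; addresses and ids are placeholders:
#
#   uid = convertEmailAddressToUID('user@example.com', cd)  # Directory id, e.g. '1234567890'
#   email = convertUserIDtoEmail(uid, cd)                   # 'user@example.com'
#   splitEmailAddressOrUID('user@example.com')              # ('user@example.com', 'user', 'example.com')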
orgUnitPath def shlexSplitList(entity, dataDelimiter=' ,'): lexer = shlex.shlex(entity, posix=True) lexer.whitespace = dataDelimiter lexer.whitespace_split = True try: return list(lexer) except ValueError as e: Cmd.Backup() usageErrorExit(str(e)) def shlexSplitListStatus(entity, dataDelimiter=' ,'): lexer = shlex.shlex(entity, posix=True) lexer.whitespace = dataDelimiter lexer.whitespace_split = True try: return (True, list(lexer)) except ValueError as e: return (False, str(e)) def getQueries(myarg): if myarg in {'query', 'filter'}: return [getString(Cmd.OB_QUERY)] return shlexSplitList(getString(Cmd.OB_QUERY_LIST)) def convertEntityToList(entity, shlexSplit=False, nonListEntityType=False): if not entity: return [] if isinstance(entity, (list, set, dict)): return list(entity) if nonListEntityType: return [entity.strip()] if not shlexSplit: return entity.replace(',', ' ').split() return shlexSplitList(entity) GROUP_ROLES_MAP = { 'owner': Ent.ROLE_OWNER, 'owners': Ent.ROLE_OWNER, 'manager': Ent.ROLE_MANAGER, 'managers': Ent.ROLE_MANAGER, 'member': Ent.ROLE_MEMBER, 'members': Ent.ROLE_MEMBER, } ALL_GROUP_ROLES = {Ent.ROLE_MANAGER, Ent.ROLE_MEMBER, Ent.ROLE_OWNER} def _getRoleVerification(memberRoles, fields): if memberRoles and memberRoles.find(Ent.ROLE_MEMBER) != -1: return (set(memberRoles.split(',')), None, fields if fields.find('role') != -1 else fields[:-1]+',role)') return (set(), memberRoles, fields) def _getCIRoleVerification(memberRoles): if memberRoles: return set(memberRoles.split(',')) return set() def _checkMemberStatusIsSuspendedIsArchived(memberStatus, isSuspended, isArchived): if isSuspended is None and isArchived is None: return True if isSuspended is not None and isArchived is not None: if isSuspended == isArchived: if not isSuspended: return memberStatus not in {'SUSPENDED', 'ARCHIVED'} return memberStatus in {'SUSPENDED', 'ARCHIVED'} if isSuspended: return memberStatus == 'SUSPENDED' return memberStatus == 'ARCHIVED' if isSuspended is not None: if (not isSuspended and memberStatus != 'SUSPENDED') or (isSuspended and memberStatus == 'SUSPENDED'): return True if isArchived is not None: if (not isArchived and memberStatus != 'ARCHIVED') or (isArchived and memberStatus == 'ARCHIVED'): return True return False def _checkMemberIsSuspendedIsArchived(member, isSuspended, isArchived): return _checkMemberStatusIsSuspendedIsArchived(member.get('status', 'UNKNOWN'), isSuspended, isArchived) def _checkMemberRole(member, validRoles): return not validRoles or member.get('role', Ent.ROLE_MEMBER) in validRoles def _checkMemberRoleIsSuspendedIsArchived(member, validRoles, isSuspended, isArchived): return _checkMemberRole(member, validRoles) and _checkMemberIsSuspendedIsArchived(member, isSuspended, isArchived) def _checkMemberCategory(member, memberDisplayOptions): member_email = member.get('email', member.get('id', '')) if member_email.find('@') > 0: _, domain = member_email.lower().split('@', 1) category = 'internal' if domain in memberDisplayOptions['internalDomains'] else 'external' else: category = 'internal' if memberDisplayOptions[category]: member['category'] = category return True return False def _checkCIMemberCategory(member, memberDisplayOptions): member_email = member.get('preferredMemberKey', {}).get('id', '') if member_email.find('@') > 0: _, domain = member_email.lower().split('@', 1) category = 'internal' if domain in memberDisplayOptions['internalDomains'] else 'external' else: category = 'internal' if memberDisplayOptions[category]: member['category'] = category return 
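# shlexSplitList above splits on spaces and commas but honors shell-style quoting, so a
# delimiter can be protected inside quotes. Illustrative values (not executed):
#
#   shlexSplitList('a,b c')       -> ['a', 'b', 'c']
#   shlexSplitList('"a, b",c d')  -> ['a, b', 'c', 'd']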
True return False def getCIGroupMemberRoleFixType(member): ''' fixes missing type and returns the highest role of member ''' if 'type' not in member: if member['preferredMemberKey']['id'] == GC.Values[GC.CUSTOMER_ID]: member['type'] = Ent.TYPE_CUSTOMER else: member['type'] = Ent.TYPE_OTHER roles = {} memberRoles = member.get('roles', [{'name': Ent.MEMBER}]) for role in memberRoles: roles[role['name']] = role for a_role in [Ent.ROLE_OWNER, Ent.ROLE_MANAGER, Ent.ROLE_MEMBER]: if a_role in roles: member['role'] = a_role if 'expiryDetail' in roles[a_role]: member['expireTime'] = roles[a_role]['expiryDetail']['expireTime'] return member['role'] = memberRoles[0]['name'] def getCIGroupTransitiveMemberRoleFixType(groupName, tmember): ''' map transitive member to normal member ''' tid = tmember['preferredMemberKey'][0].get('id', GC.Values[GC.CUSTOMER_ID]) if tmember['preferredMemberKey'] else '' ttype, tname = tmember['member'].split('/') member = {'name': f'{groupName}/membershipd/{tname}', 'preferredMemberKey': {'id': tid}} if 'type' not in tmember: if tid == GC.Values[GC.CUSTOMER_ID]: member['type'] = Ent.TYPE_CUSTOMER elif ttype == 'users': member['type'] = Ent.TYPE_USER if not tid.endswith('.iam.gserviceaccount.com') else Ent.TYPE_SERVICE_ACCOUNT elif ttype == 'groups': member['type'] = Ent.TYPE_GROUP elif tid.startswith('cbcm-browser.'): member['type'] = Ent.TYPE_CBCM_BROWSER else: member['type'] = Ent.TYPE_OTHER else: member['type'] = tmember['type'] if 'roles' in tmember: memberRoles = [] for trole in tmember['roles']: if 'role' in trole: trole['name'] = trole.pop('role') if trole['name'] == 'ADMIN': trole['name'] = Ent.ROLE_MANAGER memberRoles.append(trole) else: memberRoles = [{'name': Ent.MEMBER}] roles = {} for role in memberRoles: roles[role['name']] = role for a_role in [Ent.ROLE_OWNER, Ent.ROLE_MANAGER, Ent.ROLE_MEMBER]: if a_role in roles: member['role'] = a_role if 'expiryDetail' in roles[a_role]: member['expireTime'] = roles[a_role]['expiryDetail']['expireTime'] break else: member['role'] = memberRoles[0]['name'] return member def convertGroupCloudIDToEmail(ci, group, i=0, count=0): if not group.startswith('groups/'): group = normalizeEmailAddressOrUID(group, ciGroupsAPI=True) if not group.startswith('groups/'): return (ci, None, group) if not ci: ci = buildGAPIObject(API.CLOUDIDENTITY_GROUPS) try: ciGroup = callGAPI(ci.groups(), 'get', throwReasons=GAPI.CIGROUP_GET_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, name=group, fields='groupKey(id)') return (ci, None, ciGroup['groupKey']['id']) except (GAPI.notFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.systemError, GAPI.permissionDenied, GAPI.serviceNotAvailable) as e: action = Act.Get() Act.Set(Act.LOOKUP) entityActionFailedWarning([Ent.CLOUD_IDENTITY_GROUP, group, Ent.GROUP, None], str(e), i, count) Act.Set(action) return (ci, None, None) def convertGroupEmailToCloudID(ci, group, i=0, count=0): group = normalizeEmailAddressOrUID(group, ciGroupsAPI=True) if not group.startswith('groups/') and group.find('@') == -1: group = 'groups/'+group if group.startswith('groups/'): ci, _, groupEmail = convertGroupCloudIDToEmail(ci, group, i, count) return (ci, group, groupEmail) if not ci: ci = buildGAPIObject(API.CLOUDIDENTITY_GROUPS) try: ciGroup = callGAPI(ci.groups(), 'lookup', throwReasons=GAPI.CIGROUP_GET_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, groupKey_id=group, fields='name') return (ci, ciGroup['name'], group) except (GAPI.notFound, 
GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.systemError, GAPI.failedPrecondition, GAPI.permissionDenied, GAPI.serviceNotAvailable) as e: action = Act.Get() Act.Set(Act.LOOKUP) entityActionFailedWarning([Ent.GROUP, group, Ent.CLOUD_IDENTITY_GROUP, None], str(e), i, count) Act.Set(action) return (ci, None, None) CIGROUP_DISCUSSION_FORUM_LABEL = 'cloudidentity.googleapis.com/groups.discussion_forum' CIGROUP_DYNAMIC_LABEL = 'cloudidentity.googleapis.com/groups.dynamic' CIGROUP_SECURITY_LABEL = 'cloudidentity.googleapis.com/groups.security' CIGROUP_LOCKED_LABEL = 'cloudidentity.googleapis.com/groups.locked' def getCIGroupMembershipGraph(ci, member): if not ci: ci = buildGAPIObject(API.CLOUDIDENTITY_GROUPS) parent = 'groups/-' try: result = callGAPI(ci.groups().memberships(), 'getMembershipGraph', throwReasons=GAPI.CIGROUP_LIST_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, parent=parent, query=f"member_key_id == '{member}' && '{CIGROUP_DISCUSSION_FORUM_LABEL}' in labels") return (ci, result.get('response', {})) except (GAPI.resourceNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.systemError, GAPI.permissionDenied, GAPI.serviceNotAvailable) as e: action = Act.Get() Act.Set(Act.LOOKUP) entityActionFailedWarning([Ent.CLOUD_IDENTITY_GROUP, parent], str(e)) Act.Set(action) return (ci, None) def checkGroupExists(cd, ci, ciGroupsAPI, group, i=0, count=0): group = normalizeEmailAddressOrUID(group, ciGroupsAPI=ciGroupsAPI) if not ciGroupsAPI: if not group.startswith('groups/'): try: result = callGAPI(cd.groups(), 'get', throwReasons=GAPI.GROUP_GET_THROW_REASONS, retryReasons=GAPI.GROUP_GET_RETRY_REASONS, groupKey=group, fields='email') return (ci, None, result['email']) except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.systemError): entityUnknownWarning(Ent.GROUP, group, i, count) return (ci, None, None) else: ci, _, groupEmail = convertGroupCloudIDToEmail(ci, group, i, count) return (ci, None, groupEmail) else: if not group.startswith('groups/') and group.find('@') == -1: group = 'groups/'+group if group.startswith('groups/'): try: result = callGAPI(ci.groups(), 'get', throwReasons=GAPI.CIGROUP_GET_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, name=group, fields='name,groupKey(id)') return (ci, result['name'], result['groupKey']['id']) except (GAPI.notFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.systemError, GAPI.permissionDenied, GAPI.serviceNotAvailable): entityUnknownWarning(Ent.CLOUD_IDENTITY_GROUP, group, i, count) return (ci, None, None) else: return convertGroupEmailToCloudID(ci, group, i, count) # Turn the entity into a list of Users/CrOS devices def getItemsToModify(entityType, entity, memberRoles=None, isSuspended=None, isArchived=None, groupMemberType=Ent.TYPE_USER, noListConversion=False, recursive=False, noCLArgs=False): def _incrEntityDoesNotExist(entityType): entityError['entityType'] = entityType entityError[ENTITY_ERROR_DNE] += 1 def _showInvalidEntity(entityType, entityName): entityError['entityType'] = entityType entityError[ENTITY_ERROR_INVALID] += 1 printErrorMessage(INVALID_ENTITY_RC, formatKeyValueList('', [Ent.Singular(entityType), entityName, Msg.INVALID], '')) def _addGroupUsersToUsers(group, domains, recursive, includeDerivedMembership): printGettingAllEntityItemsForWhom(memberRoles if
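# Illustrative sketch (hypothetical member address): with the label constant interpolated,
# the membership graph query sent to the Cloud Identity API reads
#   member_key_id == 'user@example.com' && 'cloudidentity.googleapis.com/groups.discussion_forum' in labels
# which limits the graph to discussion-forum (Google Groups) memberships.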
memberRoles else Ent.ROLE_MANAGER_MEMBER_OWNER, group, entityType=Ent.GROUP) validRoles, listRoles, listFields = _getRoleVerification(memberRoles, 'nextPageToken,members(email,type,status)') try: result = callGAPIpages(cd.members(), 'list', 'members', pageMessage=getPageMessageForWhom(), throwReasons=GAPI.MEMBERS_THROW_REASONS, retryReasons=GAPI.MEMBERS_RETRY_REASONS, includeDerivedMembership=includeDerivedMembership, groupKey=group, roles=listRoles, fields=listFields, maxResults=GC.Values[GC.MEMBER_MAX_RESULTS]) except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.invalid, GAPI.forbidden, GAPI.serviceNotAvailable): entityUnknownWarning(Ent.GROUP, group) _incrEntityDoesNotExist(Ent.GROUP) return for member in result: if member['type'] == Ent.TYPE_USER: email = member['email'].lower() if email in entitySet: continue if _checkMemberRoleIsSuspendedIsArchived(member, validRoles, isSuspended, isArchived): if domains: _, domain = splitEmailAddress(email) if domain not in domains: continue entitySet.add(email) entityList.append(email) elif recursive and member['type'] == Ent.TYPE_GROUP: _addGroupUsersToUsers(member['email'], domains, recursive, includeDerivedMembership) def _addCIGroupUsersToUsers(groupName, groupEmail, recursive): printGettingAllEntityItemsForWhom(memberRoles if memberRoles else Ent.ROLE_MANAGER_MEMBER_OWNER, groupEmail, entityType=Ent.CLOUD_IDENTITY_GROUP) validRoles = _getCIRoleVerification(memberRoles) try: result = callGAPIpages(ci.groups().memberships(), 'list', 'memberships', pageMessage=getPageMessageForWhom(), throwReasons=GAPI.CIGROUP_LIST_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, parent=groupName, view='FULL', fields='nextPageToken,memberships(name,preferredMemberKey(id),roles(name),type)', pageSize=GC.Values[GC.MEMBER_MAX_RESULTS]) except (GAPI.resourceNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.systemError, GAPI.permissionDenied, GAPI.serviceNotAvailable): entityUnknownWarning(Ent.CLOUD_IDENTITY_GROUP, groupEmail) _incrEntityDoesNotExist(Ent.CLOUD_IDENTITY_GROUP) return for member in result: getCIGroupMemberRoleFixType(member) if member['type'] == Ent.TYPE_USER: email = member.get('preferredMemberKey', {}).get('id', '') if (email and _checkMemberRole(member, validRoles) and email not in entitySet): entitySet.add(email) entityList.append(email) elif recursive and member['type'] == Ent.TYPE_GROUP and _checkMemberRole(member, validRoles): _, gname = member['name'].rsplit('/', 1) _addCIGroupUsersToUsers(f'groups/{gname}', f'groups/{gname}', recursive) GM.Globals[GM.CLASSROOM_SERVICE_NOT_AVAILABLE] = False ENTITY_ERROR_DNE = 'doesNotExist' ENTITY_ERROR_INVALID = 'invalid' entityError = {'entityType': None, ENTITY_ERROR_DNE: 0, ENTITY_ERROR_INVALID: 0} entityList = [] entitySet = set() entityLocation = Cmd.Location() if entityType in {Cmd.ENTITY_USER, Cmd.ENTITY_USERS}: if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] and not GC.Values[GC.DOMAIN]: buildGAPIObject(API.DIRECTORY) result = convertEntityToList(entity, nonListEntityType=entityType == Cmd.ENTITY_USER) for user in result: if validateEmailAddressOrUID(user): if user not in entitySet: entitySet.add(user) entityList.append(user) else: _showInvalidEntity(Ent.USER, user) if GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY]: return entityList elif entityType in {Cmd.ENTITY_ALL_USERS, Cmd.ENTITY_ALL_USERS_NS, Cmd.ENTITY_ALL_USERS_NS_SUSP, Cmd.ENTITY_ALL_USERS_SUSP}: cd = 
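# Illustrative sketch (plain strings, mirrors _getRoleVerification above): role filtering is
# delegated to the Directory API unless MEMBER is among the requested roles; in that case GAM
# filters locally and splices 'role' into the members() field mask:
#   fields = 'nextPageToken,members(email,type,status)'
#   fields[:-1] + ',role)'  # 'nextPageToken,members(email,type,status,role)'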
buildGAPIObject(API.DIRECTORY) if entityType == Cmd.ENTITY_ALL_USERS and isSuspended is not None: query = f'isSuspended={isSuspended}' else: query = Cmd.ALL_USERS_QUERY_MAP[entityType] printGettingAllAccountEntities(Ent.USER) try: result = callGAPIpages(cd.users(), 'list', 'users', pageMessage=getPageMessage(), throwReasons=[GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], customer=GC.Values[GC.CUSTOMER_ID], query=query, orderBy='email', fields='nextPageToken,users(primaryEmail,archived)', maxResults=GC.Values[GC.USER_MAX_RESULTS]) except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): accessErrorExit(cd) entityList = [user['primaryEmail'] for user in result if isArchived is None or isArchived == user['archived']] printGotAccountEntities(len(entityList)) elif entityType in {Cmd.ENTITY_DOMAINS, Cmd.ENTITY_DOMAINS_NS, Cmd.ENTITY_DOMAINS_SUSP}: if entityType == Cmd.ENTITY_DOMAINS_NS: query = 'isSuspended=False' elif entityType == Cmd.ENTITY_DOMAINS_SUSP: query = 'isSuspended=True' elif isSuspended is not None: query = f'isSuspended={isSuspended}' else: query = None cd = buildGAPIObject(API.DIRECTORY) domains = convertEntityToList(entity) for domain in domains: printGettingAllEntityItemsForWhom(Ent.USER, domain, entityType=Ent.DOMAIN) try: result = callGAPIpages(cd.users(), 'list', 'users', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.DOMAIN_NOT_FOUND, GAPI.FORBIDDEN], domain=domain, query=query, orderBy='email', fields='nextPageToken,users(primaryEmail,archived)', maxResults=GC.Values[GC.USER_MAX_RESULTS]) except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.domainNotFound, GAPI.forbidden): checkEntityDNEorAccessErrorExit(cd, Ent.DOMAIN, domain) _incrEntityDoesNotExist(Ent.DOMAIN) continue entityList = [user['primaryEmail'] for user in result if isArchived is None or isArchived == user['archived']] printGotAccountEntities(len(entityList)) elif entityType in {Cmd.ENTITY_GROUP, Cmd.ENTITY_GROUPS, Cmd.ENTITY_GROUP_NS, Cmd.ENTITY_GROUPS_NS, Cmd.ENTITY_GROUP_SUSP, Cmd.ENTITY_GROUPS_SUSP, Cmd.ENTITY_GROUP_INDE, Cmd.ENTITY_GROUPS_INDE}: if entityType in {Cmd.ENTITY_GROUP_NS, Cmd.ENTITY_GROUPS_NS}: isSuspended = False elif entityType in {Cmd.ENTITY_GROUP_SUSP, Cmd.ENTITY_GROUPS_SUSP}: isSuspended = True includeDerivedMembership = entityType in {Cmd.ENTITY_GROUP_INDE, Cmd.ENTITY_GROUPS_INDE} cd = buildGAPIObject(API.DIRECTORY) groups = convertEntityToList(entity, nonListEntityType=entityType in {Cmd.ENTITY_GROUP, Cmd.ENTITY_GROUP_NS, Cmd.ENTITY_GROUP_SUSP, Cmd.ENTITY_GROUP_INDE}) for group in groups: if validateEmailAddressOrUID(group, checkPeople=False): group = normalizeEmailAddressOrUID(group) printGettingAllEntityItemsForWhom(memberRoles if memberRoles else Ent.ROLE_MANAGER_MEMBER_OWNER, group, entityType=Ent.GROUP) validRoles, listRoles, listFields = _getRoleVerification(memberRoles, 'nextPageToken,members(email,id,type,status)') try: result = callGAPIpages(cd.members(), 'list', 'members', pageMessage=getPageMessageForWhom(), throwReasons=GAPI.MEMBERS_THROW_REASONS, retryReasons=GAPI.MEMBERS_RETRY_REASONS, includeDerivedMembership=includeDerivedMembership, groupKey=group, roles=listRoles, fields=listFields, maxResults=GC.Values[GC.MEMBER_MAX_RESULTS]) except (GAPI.groupNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.invalid, GAPI.forbidden, GAPI.serviceNotAvailable): entityUnknownWarning(Ent.GROUP, group) _incrEntityDoesNotExist(Ent.GROUP) continue for member in result: email = member['email'].lower() if 
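# Illustrative sketch (hypothetical flag values): the suspension filter is pushed into the
# Directory users.list query, while archived status is filtered from the returned rows:
#   isSuspended = False
#   f'isSuspended={isSuspended}'  # 'isSuspended=False'
# afterwards only users whose 'archived' value matches isArchived are kept.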
member['type'] != Ent.TYPE_CUSTOMER else member['id'] if ((groupMemberType in ('ALL', member['type'])) and (not includeDerivedMembership or (member['type'] == Ent.TYPE_USER)) and _checkMemberRoleIsSuspendedIsArchived(member, validRoles, isSuspended, isArchived) and email not in entitySet): entitySet.add(email) entityList.append(email) else: _showInvalidEntity(Ent.GROUP, group) elif entityType in {Cmd.ENTITY_GROUP_USERS, Cmd.ENTITY_GROUP_USERS_NS, Cmd.ENTITY_GROUP_USERS_SUSP, Cmd.ENTITY_GROUP_USERS_SELECT}: if entityType == Cmd.ENTITY_GROUP_USERS_NS: isSuspended = False elif entityType == Cmd.ENTITY_GROUP_USERS_SUSP: isSuspended = True cd = buildGAPIObject(API.DIRECTORY) groups = convertEntityToList(entity) includeDerivedMembership = False domains = [] rolesSet = set() if not noCLArgs: while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in GROUP_ROLES_MAP: rolesSet.add(GROUP_ROLES_MAP[myarg]) elif myarg == 'primarydomain': domains.append(GC.Values[GC.DOMAIN]) elif myarg == 'domains': domains.extend(getEntityList(Cmd.OB_DOMAIN_NAME_ENTITY)) elif myarg == 'recursive': recursive = True includeDerivedMembership = False elif myarg == 'includederivedmembership': includeDerivedMembership = True recursive = False elif entityType == Cmd.ENTITY_GROUP_USERS_SELECT and myarg in SUSPENDED_ARGUMENTS: isSuspended = _getIsSuspended(myarg) elif entityType == Cmd.ENTITY_GROUP_USERS_SELECT and myarg in ARCHIVED_ARGUMENTS: isArchived = _getIsArchived(myarg) elif myarg == 'end': break else: Cmd.Backup() missingArgumentExit('end') if rolesSet: memberRoles = ','.join(sorted(rolesSet)) for group in groups: if validateEmailAddressOrUID(group, checkPeople=False): _addGroupUsersToUsers(normalizeEmailAddressOrUID(group), domains, recursive, includeDerivedMembership) else: _showInvalidEntity(Ent.GROUP, group) elif entityType in {Cmd.ENTITY_CIGROUP, Cmd.ENTITY_CIGROUPS}: ci = buildGAPIObject(API.CLOUDIDENTITY_GROUPS) groups = convertEntityToList(entity, nonListEntityType=entityType in {Cmd.ENTITY_CIGROUP}) for group in groups: if validateEmailAddressOrUID(group, checkPeople=False, ciGroupsAPI=True): _, name, groupEmail = convertGroupEmailToCloudID(ci, group) printGettingAllEntityItemsForWhom(memberRoles if memberRoles else Ent.ROLE_MANAGER_MEMBER_OWNER, groupEmail, entityType=Ent.CLOUD_IDENTITY_GROUP) validRoles = _getCIRoleVerification(memberRoles) try: result = callGAPIpages(ci.groups().memberships(), 'list', 'memberships', pageMessage=getPageMessageForWhom(), throwReasons=GAPI.CIGROUP_LIST_THROW_REASONS, retryReasons=GAPI.CIGROUP_RETRY_REASONS, parent=name, view='FULL', fields='nextPageToken,memberships(preferredMemberKey(id),roles(name),type)', pageSize=GC.Values[GC.MEMBER_MAX_RESULTS]) except (GAPI.resourceNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.systemError, GAPI.permissionDenied, GAPI.serviceNotAvailable): entityUnknownWarning(Ent.CLOUD_IDENTITY_GROUP, groupEmail) _incrEntityDoesNotExist(Ent.CLOUD_IDENTITY_GROUP) continue for member in result: getCIGroupMemberRoleFixType(member) email = member.get('preferredMemberKey', {}).get('id', '') if (email and (groupMemberType in ('ALL', member['type'])) and _checkMemberRole(member, validRoles) and email not in entitySet): entitySet.add(email) entityList.append(email) else: _showInvalidEntity(Ent.CLOUD_IDENTITY_GROUP, groupEmail) elif entityType in {Cmd.ENTITY_CIGROUP_USERS}: ci = buildGAPIObject(API.CLOUDIDENTITY_GROUPS) groups = convertEntityToList(entity) rolesSet = 
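# Illustrative sketch (hypothetical command fragment): the loop above consumes the optional
# group_users sub-arguments up to the required 'end' keyword, e.g.
#   group_users sales@example.com managers owners recursive end
# 'recursive' and 'includederivedmembership' are mutually exclusive; choosing one clears the other.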
set() if not noCLArgs: while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in GROUP_ROLES_MAP: rolesSet.add(GROUP_ROLES_MAP[myarg]) elif myarg == 'recursive': recursive = True elif myarg == 'end': break else: Cmd.Backup() missingArgumentExit('end') if rolesSet: memberRoles = ','.join(sorted(rolesSet)) for group in groups: _, name, groupEmail = convertGroupEmailToCloudID(ci, group) if name: _addCIGroupUsersToUsers(name, groupEmail, recursive) else: _showInvalidEntity(Ent.GROUP, group) elif entityType in {Cmd.ENTITY_OU, Cmd.ENTITY_OUS, Cmd.ENTITY_OU_AND_CHILDREN, Cmd.ENTITY_OUS_AND_CHILDREN, Cmd.ENTITY_OU_NS, Cmd.ENTITY_OUS_NS, Cmd.ENTITY_OU_AND_CHILDREN_NS, Cmd.ENTITY_OUS_AND_CHILDREN_NS, Cmd.ENTITY_OU_SUSP, Cmd.ENTITY_OUS_SUSP, Cmd.ENTITY_OU_AND_CHILDREN_SUSP, Cmd.ENTITY_OUS_AND_CHILDREN_SUSP}: if entityType in {Cmd.ENTITY_OU_NS, Cmd.ENTITY_OUS_NS, Cmd.ENTITY_OU_AND_CHILDREN_NS, Cmd.ENTITY_OUS_AND_CHILDREN_NS}: isSuspended = False elif entityType in {Cmd.ENTITY_OU_SUSP, Cmd.ENTITY_OUS_SUSP, Cmd.ENTITY_OU_AND_CHILDREN_SUSP, Cmd.ENTITY_OUS_AND_CHILDREN_SUSP}: isSuspended = True cd = buildGAPIObject(API.DIRECTORY) ous = convertEntityToList(entity, shlexSplit=True, nonListEntityType=entityType in {Cmd.ENTITY_OU, Cmd.ENTITY_OU_AND_CHILDREN, Cmd.ENTITY_OU_NS, Cmd.ENTITY_OU_AND_CHILDREN_NS, Cmd.ENTITY_OU_SUSP, Cmd.ENTITY_OU_AND_CHILDREN_SUSP}) directlyInOU = entityType in {Cmd.ENTITY_OU, Cmd.ENTITY_OUS, Cmd.ENTITY_OU_NS, Cmd.ENTITY_OUS_NS, Cmd.ENTITY_OU_SUSP, Cmd.ENTITY_OUS_SUSP} qualifier = Msg.DIRECTLY_IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)) if directlyInOU else Msg.IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)) fields = 'nextPageToken,users(primaryEmail,orgUnitPath,archived)' if directlyInOU else 'nextPageToken,users(primaryEmail,archived)' prevLen = 0 for ou in ous: ou = makeOrgUnitPathAbsolute(ou) if ou.startswith('id:'): try: ou = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=ou, fields='orgUnitPath')['orgUnitPath'] except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError, GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired): checkEntityDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, ou) _incrEntityDoesNotExist(Ent.ORGANIZATIONAL_UNIT) continue ouLower = ou.lower() printGettingAllEntityItemsForWhom(Ent.USER, ou, qualifier=Msg.IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)), entityType=Ent.ORGANIZATIONAL_UNIT) pageMessage = getPageMessageForWhom() usersInOU = 0 try: feed = yieldGAPIpages(cd.users(), 'list', 'users', pageMessage=pageMessage, messageAttribute='primaryEmail', throwReasons=[GAPI.INVALID_ORGUNIT, GAPI.ORGUNIT_NOT_FOUND, GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], customer=GC.Values[GC.CUSTOMER_ID], query=orgUnitPathQuery(ou, isSuspended), orderBy='email', fields=fields, maxResults=GC.Values[GC.USER_MAX_RESULTS]) for users in feed: if directlyInOU: for user in users: if ouLower == user.get('orgUnitPath', '').lower() and (isArchived is None or isArchived == user['archived']): usersInOU += 1 entityList.append(user['primaryEmail']) elif isArchived is None: entityList.extend([user['primaryEmail'] for user in users]) usersInOU += len(users) else: for user in users: if isArchived == user['archived']: usersInOU += 1 entityList.append(user['primaryEmail']) setGettingAllEntityItemsForWhom(Ent.USER, ou, qualifier=qualifier) printGotEntityItemsForWhom(usersInOU) except (GAPI.invalidInput, GAPI.invalidOrgunit, 
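# Illustrative sketch (hypothetical user record): the users.list query matches an OU and its
# descendants, so "directly in" selections re-check the exact path client side:
#   user = {'primaryEmail': 'jane@example.com', 'orgUnitPath': '/Sales/West'}
#   '/sales' == user.get('orgUnitPath', '').lower()  # False -> not directly in /Sales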
GAPI.orgunitNotFound, GAPI.backendError, GAPI.badRequest, GAPI.invalidCustomerId, GAPI.loginRequired, GAPI.resourceNotFound, GAPI.forbidden): checkEntityDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, ou) _incrEntityDoesNotExist(Ent.ORGANIZATIONAL_UNIT) elif entityType in {Cmd.ENTITY_QUERY, Cmd.ENTITY_QUERIES}: cd = buildGAPIObject(API.DIRECTORY) queries = convertEntityToList(entity, shlexSplit=True, nonListEntityType=entityType == Cmd.ENTITY_QUERY) prevLen = 0 for query in queries: printGettingAllAccountEntities(Ent.USER, query) try: result = callGAPIpages(cd.users(), 'list', 'users', pageMessage=getPageMessage(), throwReasons=[GAPI.INVALID_ORGUNIT, GAPI.ORGUNIT_NOT_FOUND, GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], customer=GC.Values[GC.CUSTOMER_ID], query=query, orderBy='email', fields='nextPageToken,users(primaryEmail,suspended,archived)', maxResults=GC.Values[GC.USER_MAX_RESULTS]) except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.invalidInput): Cmd.Backup() usageErrorExit(Msg.INVALID_QUERY) except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): accessErrorExit(cd) for user in result: email = user['primaryEmail'] if ((isSuspended is None or isSuspended == user['suspended']) and (isArchived is None or isArchived == user['archived']) and email not in entitySet): entitySet.add(email) entityList.append(email) totalLen = len(entityList) printGotAccountEntities(totalLen-prevLen) prevLen = totalLen elif entityType == Cmd.ENTITY_LICENSES: skusList = [] for item in entity.split(','): productId, sku = SKU.getProductAndSKU(item) if not productId: _incrEntityDoesNotExist(Ent.SKU) elif (productId, sku) not in skusList: skusList.append((productId, sku)) if skusList: entityList = doPrintLicenses(returnFields=['userId'], skus=skusList) elif entityType in {Cmd.ENTITY_COURSEPARTICIPANTS, Cmd.ENTITY_TEACHERS, Cmd.ENTITY_STUDENTS}: croom = buildGAPIObject(API.CLASSROOM) if not noListConversion: courseIdList = convertEntityToList(entity) else: courseIdList = [entity] _, _, coursesInfo = _getCoursesOwnerInfo(croom, courseIdList, GC.Values[GC.USE_COURSE_OWNER_ACCESS]) for courseId, courseInfo in coursesInfo.items(): try: if entityType in {Cmd.ENTITY_COURSEPARTICIPANTS, Cmd.ENTITY_TEACHERS}: printGettingAllEntityItemsForWhom(Ent.TEACHER, removeCourseIdScope(courseId), entityType=Ent.COURSE) result = callGAPIpages(courseInfo['croom'].courses().teachers(), 'list', 'teachers', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.NOT_FOUND, GAPI.FORBIDDEN, GAPI.BAD_REQUEST, GAPI.SERVICE_NOT_AVAILABLE], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, courseId=courseId, fields='nextPageToken,teachers/profile/emailAddress', pageSize=GC.Values[GC.CLASSROOM_MAX_RESULTS]) for teacher in result: email = teacher['profile'].get('emailAddress', None) if email and (email not in entitySet): entitySet.add(email) entityList.append(email) if entityType in {Cmd.ENTITY_COURSEPARTICIPANTS, Cmd.ENTITY_STUDENTS}: printGettingAllEntityItemsForWhom(Ent.STUDENT, removeCourseIdScope(courseId), entityType=Ent.COURSE) result = callGAPIpages(courseInfo['croom'].courses().students(), 'list', 'students', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.NOT_FOUND, GAPI.FORBIDDEN, GAPI.BAD_REQUEST, GAPI.SERVICE_NOT_AVAILABLE], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, courseId=courseId, fields='nextPageToken,students/profile/emailAddress', pageSize=GC.Values[GC.CLASSROOM_MAX_RESULTS]) for student in result: email = student['profile'].get('emailAddress', None) 
if email and (email not in entitySet): entitySet.add(email) entityList.append(email) except GAPI.notFound: entityDoesNotExistWarning(Ent.COURSE, removeCourseIdScope(courseId)) _incrEntityDoesNotExist(Ent.COURSE) except GAPI.serviceNotAvailable as e: entityActionNotPerformedWarning([Ent.COURSE, removeCourseIdScope(courseId)], str(e)) GM.Globals[GM.CLASSROOM_SERVICE_NOT_AVAILABLE] = True break except (GAPI.forbidden, GAPI.badRequest): ClientAPIAccessDeniedExit() elif entityType == Cmd.ENTITY_CROS: buildGAPIObject(API.DIRECTORY) result = convertEntityToList(entity) for deviceId in result: if deviceId not in entitySet: entitySet.add(deviceId) entityList.append(deviceId) elif entityType == Cmd.ENTITY_ALL_CROS: cd = buildGAPIObject(API.DIRECTORY) printGettingAllAccountEntities(Ent.CROS_DEVICE) try: result = callGAPIpages(cd.chromeosdevices(), 'list', 'chromeosdevices', pageMessage=getPageMessage(), throwReasons=[GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], fields='nextPageToken,chromeosdevices(deviceId)', maxResults=GC.Values[GC.DEVICE_MAX_RESULTS]) except (GAPI.invalidInput, GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): accessErrorExit(cd) entityList = [device['deviceId'] for device in result] elif entityType in {Cmd.ENTITY_CROS_QUERY, Cmd.ENTITY_CROS_QUERIES, Cmd.ENTITY_CROS_SN}: cd = buildGAPIObject(API.DIRECTORY) queries = convertEntityToList(entity, shlexSplit=entityType == Cmd.ENTITY_CROS_QUERIES, nonListEntityType=entityType == Cmd.ENTITY_CROS_QUERY) if entityType == Cmd.ENTITY_CROS_SN: queries = [f'id:{query}' for query in queries] prevLen = 0 for query in queries: printGettingAllAccountEntities(Ent.CROS_DEVICE, query) try: result = callGAPIpages(cd.chromeosdevices(), 'list', 'chromeosdevices', pageMessage=getPageMessage(), throwReasons=[GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], query=query, fields='nextPageToken,chromeosdevices(deviceId)', maxResults=GC.Values[GC.DEVICE_MAX_RESULTS]) except GAPI.invalidInput: Cmd.Backup() usageErrorExit(Msg.INVALID_QUERY) except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): accessErrorExit(cd) for device in result: deviceId = device['deviceId'] if deviceId not in entitySet: entitySet.add(deviceId) entityList.append(deviceId) totalLen = len(entityList) printGotAccountEntities(totalLen-prevLen) prevLen = totalLen elif entityType in {Cmd.ENTITY_CROS_OU, Cmd.ENTITY_CROS_OU_AND_CHILDREN, Cmd.ENTITY_CROS_OUS, Cmd.ENTITY_CROS_OUS_AND_CHILDREN, Cmd.ENTITY_CROS_OU_QUERY, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OUS_QUERY, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OU_QUERIES, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERIES, Cmd.ENTITY_CROS_OUS_QUERIES, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERIES}: cd = buildGAPIObject(API.DIRECTORY) ous = convertEntityToList(entity, shlexSplit=True, nonListEntityType=entityType in {Cmd.ENTITY_CROS_OU, Cmd.ENTITY_CROS_OU_AND_CHILDREN, Cmd.ENTITY_CROS_OU_QUERY, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OU_QUERIES, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERIES}) numOus = len(ous) includeChildOrgunits = entityType in {Cmd.ENTITY_CROS_OU_AND_CHILDREN, Cmd.ENTITY_CROS_OUS_AND_CHILDREN, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERIES, 
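# Illustrative sketch (hypothetical serial number): cros_sn entities become Directory API
# device queries by prefixing 'id:':
#   [f'id:{serial}' for serial in ['5CD012ABCD']]  # ['id:5CD012ABCD']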
Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERIES} allQualifier = Msg.DIRECTLY_IN_THE.format(Ent.Choose(Ent.ORGANIZATIONAL_UNIT, numOus)) if not includeChildOrgunits else Msg.IN_THE.format(Ent.Choose(Ent.ORGANIZATIONAL_UNIT, numOus)) if entityType in {Cmd.ENTITY_CROS_OU_QUERY, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OUS_QUERY, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERY}: queries = getQueries('query') elif entityType in {Cmd.ENTITY_CROS_OU_QUERIES, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERIES, Cmd.ENTITY_CROS_OUS_QUERIES, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERIES}: queries = getQueries('queries') else: queries = [None] for ou in ous: ou = makeOrgUnitPathAbsolute(ou) oneQualifier = Msg.DIRECTLY_IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)) if not includeChildOrgunits else Msg.IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)) for query in queries: printGettingAllEntityItemsForWhom(Ent.CROS_DEVICE, ou, query=query, qualifier=oneQualifier, entityType=Ent.ORGANIZATIONAL_UNIT) try: result = callGAPIpages(cd.chromeosdevices(), 'list', 'chromeosdevices', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.INVALID_INPUT, GAPI.INVALID_ORGUNIT, GAPI.ORGUNIT_NOT_FOUND, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], query=query, orgUnitPath=ou, includeChildOrgunits=includeChildOrgunits, fields='nextPageToken,chromeosdevices(deviceId)', maxResults=GC.Values[GC.DEVICE_MAX_RESULTS]) except GAPI.invalidInput: Cmd.Backup() usageErrorExit(Msg.INVALID_QUERY) except (GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden): checkEntityDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, ou) _incrEntityDoesNotExist(Ent.ORGANIZATIONAL_UNIT) continue if query is None: entityList.extend([device['deviceId'] for device in result]) else: for device in result: deviceId = device['deviceId'] if deviceId not in entitySet: entitySet.add(deviceId) entityList.append(deviceId) Ent.SetGettingQualifier(Ent.CROS_DEVICE, allQualifier) Ent.SetGettingForWhom(','.join(ous)) printGotEntityItemsForWhom(len(entityList)) elif entityType == Cmd.ENTITY_BROWSER: result = convertEntityToList(entity) for deviceId in result: if deviceId not in entitySet: entitySet.add(deviceId) entityList.append(deviceId) elif entityType in {Cmd.ENTITY_BROWSER_OU, Cmd.ENTITY_BROWSER_OUS}: cbcm = buildGAPIObject(API.CBCM) customerId = _getCustomerIdNoC() ous = convertEntityToList(entity, shlexSplit=True, nonListEntityType=entityType == Cmd.ENTITY_BROWSER_OU) numOus = len(ous) allQualifier = Msg.DIRECTLY_IN_THE.format(Ent.Choose(Ent.ORGANIZATIONAL_UNIT, numOus)) oneQualifier = Msg.DIRECTLY_IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)) for ou in ous: ou = makeOrgUnitPathAbsolute(ou) printGettingAllEntityItemsForWhom(Ent.CHROME_BROWSER, ou, qualifier=oneQualifier, entityType=Ent.ORGANIZATIONAL_UNIT) try: result = callGAPIpages(cbcm.chromebrowsers(), 'list', 'browsers', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.BAD_REQUEST, GAPI.INVALID_ORGUNIT, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customer=customerId, orgUnitPath=ou, projection='BASIC', orderBy='id', sortOrder='ASCENDING', fields='nextPageToken,browsers(deviceId)') except (GAPI.badRequest, GAPI.invalidOrgunit, GAPI.forbidden): checkEntityDNEorAccessErrorExit(None, Ent.ORGANIZATIONAL_UNIT, ou) _incrEntityDoesNotExist(Ent.ORGANIZATIONAL_UNIT) continue entityList.extend([browser['deviceId'] for browser in 
result]) Ent.SetGettingQualifier(Ent.CHROME_BROWSER, allQualifier) Ent.SetGettingForWhom(','.join(ous)) printGotEntityItemsForWhom(len(entityList)) elif entityType in {Cmd.ENTITY_BROWSER_QUERY, Cmd.ENTITY_BROWSER_QUERIES}: cbcm = buildGAPIObject(API.CBCM) customerId = _getCustomerIdNoC() queries = convertEntityToList(entity, shlexSplit=entityType == Cmd.ENTITY_BROWSER_QUERIES, nonListEntityType=entityType == Cmd.ENTITY_BROWSER_QUERY) prevLen = 0 for query in queries: printGettingAllAccountEntities(Ent.CHROME_BROWSER, query) try: result = callGAPIpages(cbcm.chromebrowsers(), 'list', 'browsers', pageMessage=getPageMessage(), throwReasons=[GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customer=customerId, query=query, projection='BASIC', orderBy='id', sortOrder='ASCENDING', fields='nextPageToken,browsers(deviceId)') except GAPI.invalidInput: Cmd.Backup() usageErrorExit(Msg.INVALID_QUERY) except (GAPI.badRequest, GAPI.resourceNotFound, GAPI.forbidden) as e: accessErrorExitNonDirectory(API.CBCM, str(e)) for device in result: deviceId = device['deviceId'] if deviceId not in entitySet: entitySet.add(deviceId) entityList.append(deviceId) totalLen = len(entityList) printGotAccountEntities(totalLen-prevLen) prevLen = totalLen else: systemErrorExit(UNKNOWN_ERROR_RC, 'getItemsToModify coding error') for errorType in [ENTITY_ERROR_DNE, ENTITY_ERROR_INVALID]: if entityError[errorType] > 0: Cmd.SetLocation(entityLocation-1) writeStderr(Cmd.CommandLineWithBadArgumentMarked(False)) count = entityError[errorType] if errorType == ENTITY_ERROR_DNE: stderrErrorMsg(Msg.BAD_ENTITIES_IN_SOURCE.format(count, Ent.Choose(entityError['entityType'], count), Msg.DO_NOT_EXIST if count != 1 else Msg.DOES_NOT_EXIST)) sys.exit(ENTITY_DOES_NOT_EXIST_RC) else: stderrErrorMsg(Msg.BAD_ENTITIES_IN_SOURCE.format(count, Msg.INVALID, Ent.Choose(entityError['entityType'], count))) sys.exit(INVALID_ENTITY_RC) return entityList def splitEntityList(entity, dataDelimiter): if not entity: return [] if not dataDelimiter: return [entity] return entity.split(dataDelimiter) def splitEntityListShlex(entity, dataDelimiter): if not entity: return (True, []) if not dataDelimiter: return (True, [entity]) return shlexSplitListStatus(entity, dataDelimiter) def fileDataErrorExit(filename, row, itemName, value, errMessage): if itemName: systemErrorExit(DATA_ERROR_RC, formatKeyValueList('', [Ent.Singular(Ent.FILE), filename, Ent.Singular(Ent.ROW), row, Ent.Singular(Ent.ITEM), itemName, Ent.Singular(Ent.VALUE), value, errMessage], '')) else: systemErrorExit(DATA_ERROR_RC, formatKeyValueList('', [Ent.Singular(Ent.FILE), filename, Ent.Singular(Ent.ROW), row, Ent.Singular(Ent.VALUE), value, errMessage], '')) # def getEntitiesFromFile(shlexSplit, returnSet=False): filename = getString(Cmd.OB_FILE_NAME) filenameLower = filename.lower() if filenameLower not in {'gcsv', 'gdoc', 'gcscsv', 'gcsdoc'}: encoding = getCharSet() f = openFile(filename, encoding=encoding, stripUTFBOM=True) elif filenameLower in {'gcsv', 'gdoc'}: f = getGDocData(filenameLower) getCharSet() else: #filenameLower in {'gcscsv', 'gcsdoc'}: f = getStorageFileData(filenameLower) getCharSet() dataDelimiter = getDelimiter() entitySet = set() entityList = [] i = 0 for row in f: i += 1 if shlexSplit: splitStatus, itemList = splitEntityListShlex(row.strip(), dataDelimiter) if not splitStatus: fileDataErrorExit(filename, i, None, row.strip(), f'{Msg.INVALID_LIST}: {itemList}') else: itemList = 
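# Illustrative sketch (hypothetical file contents): getEntitiesFromFile() keeps first-seen
# order while dropping duplicates, so a file read with delimiter ',' containing
#   a@example.com
#   b@example.com,a@example.com
# yields ['a@example.com', 'b@example.com'].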
splitEntityList(row.strip(), dataDelimiter) for item in itemList: item = item.strip() if item and (item not in entitySet): entitySet.add(item) entityList.append(item) closeFile(f) return entityList if not returnSet else entitySet # def getEntitiesFromCSVFile(shlexSplit, returnSet=False): fileFieldName = getString(Cmd.OB_FILE_NAME_FIELD_NAME) if platform.system() == 'Windows' and not fileFieldName.startswith('-:'): drive, fileFieldName = os.path.splitdrive(fileFieldName) else: drive = '' if fileFieldName.find(':') == -1: Cmd.Backup() invalidArgumentExit(Cmd.OB_FILE_NAME_FIELD_NAME) fileFieldNameList = fileFieldName.split(':') filename = drive+fileFieldNameList[0] f, csvFile, fieldnames = openCSVFileReader(filename) for fieldName in fileFieldNameList[1:]: if fieldName not in fieldnames: csvFieldErrorExit(fieldName, fieldnames, backupArg=True, checkForCharset=True) matchFields, skipFields = getMatchSkipFields(fieldnames) dataDelimiter = getDelimiter() entitySet = set() entityList = [] i = 1 for row in csvFile: i += 1 if checkMatchSkipFields(row, None, matchFields, skipFields): for fieldName in fileFieldNameList[1:]: if shlexSplit: splitStatus, itemList = splitEntityListShlex(row[fieldName].strip(), dataDelimiter) if not splitStatus: fileDataErrorExit(filename, i, fieldName, row[fieldName].strip(), f'{Msg.INVALID_LIST}: {itemList}') else: itemList = splitEntityList(row[fieldName].strip(), dataDelimiter) for item in itemList: item = item.strip() if item and (item not in entitySet): entitySet.add(item) entityList.append(item) closeFile(f) return entityList if not returnSet else entitySet # # keyfield [keypattern ] [keyvalue ] [delimiter ] # subkeyfield [keypattern ] [keyvalue ] [delimiter ] # (matchfield|skipfield )* # [datafield (:)* [delimiter ]] def getEntitiesFromCSVbyField(): def getKeyFieldInfo(keyword, required, globalKeyField): if not checkArgumentPresent(keyword, required=required): GM.Globals[globalKeyField] = None return (None, None, None, None) keyField = GM.Globals[globalKeyField] = getString(Cmd.OB_FIELD_NAME) if keyField not in fieldnames: csvFieldErrorExit(keyField, fieldnames, backupArg=True) if checkArgumentPresent('keypattern'): keyPattern = getREPattern() else: keyPattern = None if checkArgumentPresent('keyvalue'): keyValue = getString(Cmd.OB_STRING) else: keyValue = keyField keyDelimiter = getDelimiter() return (keyField, keyPattern, keyValue, keyDelimiter) def getKeyList(row, keyField, keyPattern, keyValue, keyDelimiter, matchFields, skipFields): item = row[keyField].strip() if not item: return [] if not checkMatchSkipFields(row, None, matchFields, skipFields): return [] if keyPattern: keyList = [keyPattern.sub(keyValue, keyItem.strip()) for keyItem in splitEntityList(item, keyDelimiter)] else: keyList = [re.sub(keyField, keyItem.strip(), keyValue) for keyItem in splitEntityList(item, keyDelimiter)] return [key for key in keyList if key] filename = getString(Cmd.OB_FILE_NAME) f, csvFile, fieldnames = openCSVFileReader(filename) mainKeyField, mainKeyPattern, mainKeyValue, mainKeyDelimiter = getKeyFieldInfo('keyfield', True, GM.CSV_KEY_FIELD) subKeyField, subKeyPattern, subKeyValue, subKeyDelimiter = getKeyFieldInfo('subkeyfield', False, GM.CSV_SUBKEY_FIELD) matchFields, skipFields = getMatchSkipFields(fieldnames) if checkArgumentPresent('datafield'): if GM.Globals[GM.CSV_DATA_DICT]: csvDataAlreadySavedErrorExit() GM.Globals[GM.CSV_DATA_FIELD] = getString(Cmd.OB_FIELD_NAME, checkBlank=True) dataFields = GM.Globals[GM.CSV_DATA_FIELD].split(':') for dataField in dataFields: if 
dataField not in fieldnames: csvFieldErrorExit(dataField, fieldnames, backupArg=True) dataDelimiter = getDelimiter() else: GM.Globals[GM.CSV_DATA_FIELD] = None dataFields = [] dataDelimiter = None entitySet = set() entityList = [] csvDataKeys = {} GM.Globals[GM.CSV_DATA_DICT] = {} if not subKeyField: for row in csvFile: mainKeyList = getKeyList(row, mainKeyField, mainKeyPattern, mainKeyValue, mainKeyDelimiter, matchFields, skipFields) if not mainKeyList: continue for mainKey in mainKeyList: if mainKey not in entitySet: entitySet.add(mainKey) entityList.append(mainKey) if GM.Globals[GM.CSV_DATA_FIELD]: csvDataKeys[mainKey] = set() GM.Globals[GM.CSV_DATA_DICT][mainKey] = [] for dataField in dataFields: if dataField in row: dataList = splitEntityList(row[dataField].strip(), dataDelimiter) for dataValue in dataList: dataValue = dataValue.strip() if not dataValue: continue for mainKey in mainKeyList: if dataValue not in csvDataKeys[mainKey]: csvDataKeys[mainKey].add(dataValue) GM.Globals[GM.CSV_DATA_DICT][mainKey].append(dataValue) else: csvSubKeys = {} for row in csvFile: mainKeyList = getKeyList(row, mainKeyField, mainKeyPattern, mainKeyValue, mainKeyDelimiter, matchFields, skipFields) if not mainKeyList: continue for mainKey in mainKeyList: if mainKey not in entitySet: entitySet.add(mainKey) entityList.append(mainKey) csvSubKeys[mainKey] = set() csvDataKeys[mainKey] = {} GM.Globals[GM.CSV_DATA_DICT][mainKey] = {} subKeyList = getKeyList(row, subKeyField, subKeyPattern, subKeyValue, subKeyDelimiter, {}, {}) if not subKeyList: continue for mainKey in mainKeyList: for subKey in subKeyList: if subKey not in csvSubKeys[mainKey]: csvSubKeys[mainKey].add(subKey) if GM.Globals[GM.CSV_DATA_FIELD]: csvDataKeys[mainKey][subKey] = set() GM.Globals[GM.CSV_DATA_DICT][mainKey][subKey] = [] for dataField in dataFields: if dataField in row: dataList = splitEntityList(row[dataField].strip(), dataDelimiter) for dataValue in dataList: dataValue = dataValue.strip() if not dataValue: continue for mainKey in mainKeyList: for subKey in subKeyList: if dataValue not in csvDataKeys[mainKey][subKey]: csvDataKeys[mainKey][subKey].add(dataValue) GM.Globals[GM.CSV_DATA_DICT][mainKey][subKey].append(dataValue) closeFile(f) return entityList # Typically used to map courseparticipants to students or teachers def mapEntityType(entityType, typeMap): if (typeMap is not None) and (entityType in typeMap): return typeMap[entityType] return entityType def getEntityArgument(entityList): if entityList is None: return (0, 0, entityList) if isinstance(entityList, dict): clLoc = Cmd.Location() Cmd.SetLocation(GM.Globals[GM.ENTITY_CL_DELAY_START]) entityList = getItemsToModify(**entityList) Cmd.SetLocation(clLoc) return (0, len(entityList), entityList) def getEntityToModify(defaultEntityType=None, browserAllowed=False, crosAllowed=False, userAllowed=True, typeMap=None, isSuspended=None, isArchived=None, groupMemberType=Ent.TYPE_USER, delayGet=False): if GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY]: crosAllowed = False selectorChoices = Cmd.SERVICE_ACCOUNT_ONLY_ENTITY_SELECTORS[:] else: selectorChoices = Cmd.BASE_ENTITY_SELECTORS[:] if userAllowed: selectorChoices += Cmd.USER_ENTITY_SELECTORS+Cmd.USER_CSVDATA_ENTITY_SELECTORS if crosAllowed: selectorChoices += Cmd.CROS_ENTITY_SELECTORS+Cmd.CROS_CSVDATA_ENTITY_SELECTORS if browserAllowed: selectorChoices = Cmd.BROWSER_ENTITY_SELECTORS entitySelector = getChoice(selectorChoices, defaultChoice=None) if entitySelector: choices = [] if entitySelector == Cmd.ENTITY_SELECTOR_ALL: if 
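# Illustrative sketch (hypothetical CSV, no subkeyfield): given groups.csv
#   group,user
#   staff,a@example.com b@example.com
# a selection like: csvkmd groups.csv keyfield group datafield user delimiter " "
# produces entityList ['staff'] and GM.Globals[GM.CSV_DATA_DICT]
# {'staff': ['a@example.com', 'b@example.com']}.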
userAllowed: choices += Cmd.USER_ENTITY_SELECTOR_ALL_SUBTYPES if crosAllowed: choices += Cmd.CROS_ENTITY_SELECTOR_ALL_SUBTYPES entityType = Cmd.ENTITY_SELECTOR_ALL_SUBTYPES_MAP[getChoice(choices)] if not delayGet: return (Cmd.ENTITY_USERS if entityType != Cmd.ENTITY_ALL_CROS else Cmd.ENTITY_CROS, getItemsToModify(entityType, None)) GM.Globals[GM.ENTITY_CL_DELAY_START] = Cmd.Location() buildGAPIObject(API.DIRECTORY) return (Cmd.ENTITY_USERS if entityType != Cmd.ENTITY_ALL_CROS else Cmd.ENTITY_CROS, {'entityType': entityType, 'entity': None}) if userAllowed: if entitySelector == Cmd.ENTITY_SELECTOR_FILE: return (Cmd.ENTITY_USERS, getItemsToModify(Cmd.ENTITY_USERS, getEntitiesFromFile(False))) if entitySelector in [Cmd.ENTITY_SELECTOR_CSV, Cmd.ENTITY_SELECTOR_CSVFILE]: return (Cmd.ENTITY_USERS, getItemsToModify(Cmd.ENTITY_USERS, getEntitiesFromCSVFile(False))) if crosAllowed: if entitySelector == Cmd.ENTITY_SELECTOR_CROSFILE: return (Cmd.ENTITY_CROS, getEntitiesFromFile(False)) if entitySelector in [Cmd.ENTITY_SELECTOR_CROSCSV, Cmd.ENTITY_SELECTOR_CROSCSVFILE]: return (Cmd.ENTITY_CROS, getEntitiesFromCSVFile(False)) if entitySelector == Cmd.ENTITY_SELECTOR_CROSFILE_SN: return (Cmd.ENTITY_CROS, getItemsToModify(Cmd.ENTITY_CROS_SN, getEntitiesFromFile(False))) if entitySelector in [Cmd.ENTITY_SELECTOR_CROSCSV_SN, Cmd.ENTITY_SELECTOR_CROSCSVFILE_SN]: return (Cmd.ENTITY_CROS, getItemsToModify(Cmd.ENTITY_CROS_SN, getEntitiesFromCSVFile(False))) if browserAllowed: if entitySelector == Cmd.ENTITY_SELECTOR_FILE: return (Cmd.ENTITY_BROWSER, getEntitiesFromFile(False)) if entitySelector in [Cmd.ENTITY_SELECTOR_CSV, Cmd.ENTITY_SELECTOR_CSVFILE]: return (Cmd.ENTITY_BROWSER, getEntitiesFromCSVFile(False)) if entitySelector == Cmd.ENTITY_SELECTOR_DATAFILE: if userAllowed: choices += Cmd.USER_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] else [Cmd.ENTITY_USERS] if crosAllowed: choices += Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES entityType = mapEntityType(getChoice(choices), typeMap) return (Cmd.ENTITY_USERS if entityType not in Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES else Cmd.ENTITY_CROS, getItemsToModify(entityType, getEntitiesFromFile(shlexSplit=entityType in [Cmd.ENTITY_OUS, Cmd.ENTITY_OUS_AND_CHILDREN, Cmd.ENTITY_OUS_NS, Cmd.ENTITY_OUS_AND_CHILDREN_NS, Cmd.ENTITY_OUS_SUSP, Cmd.ENTITY_OUS_AND_CHILDREN_SUSP, Cmd.ENTITY_CROS_OUS, Cmd.ENTITY_CROS_OUS_AND_CHILDREN]))) if entitySelector == Cmd.ENTITY_SELECTOR_CSVDATAFILE: if userAllowed: choices += Cmd.USER_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] else [Cmd.ENTITY_USERS] if crosAllowed: choices += Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES entityType = mapEntityType(getChoice(choices), typeMap) return (Cmd.ENTITY_USERS if entityType not in Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES else Cmd.ENTITY_CROS, getItemsToModify(entityType, getEntitiesFromCSVFile(shlexSplit=entityType in [Cmd.ENTITY_OUS, Cmd.ENTITY_OUS_AND_CHILDREN, Cmd.ENTITY_OUS_NS, Cmd.ENTITY_OUS_AND_CHILDREN_NS, Cmd.ENTITY_OUS_SUSP, Cmd.ENTITY_OUS_AND_CHILDREN_SUSP, Cmd.ENTITY_CROS_OUS, Cmd.ENTITY_CROS_OUS_AND_CHILDREN]))) if entitySelector == Cmd.ENTITY_SELECTOR_CSVKMD: if userAllowed: choices += Cmd.USER_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] else [Cmd.ENTITY_USERS] if crosAllowed: choices += Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES entityType = mapEntityType(getChoice(choices, 
choiceAliases=Cmd.ENTITY_ALIAS_MAP), typeMap) return (Cmd.ENTITY_USERS if entityType not in Cmd.CROS_ENTITY_SELECTOR_DATAFILE_CSVKMD_SUBTYPES else Cmd.ENTITY_CROS, getItemsToModify(entityType, getEntitiesFromCSVbyField())) if entitySelector in [Cmd.ENTITY_SELECTOR_CSVDATA, Cmd.ENTITY_SELECTOR_CROSCSVDATA]: checkDataField() return (Cmd.ENTITY_USERS if entitySelector == Cmd.ENTITY_SELECTOR_CSVDATA else Cmd.ENTITY_CROS, GM.Globals[GM.CSV_DATA_DICT]) entityChoices = [] if userAllowed: entityChoices += Cmd.USER_ENTITIES if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] else [Cmd.ENTITY_USER, Cmd.ENTITY_USERS] if crosAllowed: entityChoices += Cmd.CROS_ENTITIES if browserAllowed: entityChoices += Cmd.BROWSER_ENTITIES entityType = mapEntityType(getChoice(entityChoices, choiceAliases=Cmd.ENTITY_ALIAS_MAP, defaultChoice=defaultEntityType), typeMap) if not entityType: invalidChoiceExit(Cmd.Current(), selectorChoices+entityChoices, False) if entityType not in Cmd.CROS_ENTITIES+Cmd.BROWSER_ENTITIES: entityClass = Cmd.ENTITY_USERS if entityType == Cmd.ENTITY_OAUTHUSER: return (entityClass, [_getAdminEmail()]) entityItem = getString(Cmd.OB_USER_ENTITY, minLen=0) elif entityType in Cmd.CROS_ENTITIES: entityClass = Cmd.ENTITY_CROS entityItem = getString(Cmd.OB_CROS_ENTITY, minLen=0) else: entityClass = Cmd.ENTITY_BROWSER entityItem = getString(Cmd.OB_BROWSER_ENTITY, minLen=0) if not delayGet: if entityClass == Cmd.ENTITY_USERS: return (entityClass, getItemsToModify(entityType, entityItem, isSuspended=isSuspended, isArchived=isArchived, groupMemberType=groupMemberType)) return (entityClass, getItemsToModify(entityType, entityItem)) GM.Globals[GM.ENTITY_CL_DELAY_START] = Cmd.Location() if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY]: buildGAPIObject(API.DIRECTORY) if entityClass == Cmd.ENTITY_USERS: if entityType in [Cmd.ENTITY_GROUP_USERS, Cmd.ENTITY_GROUP_USERS_NS, Cmd.ENTITY_GROUP_USERS_SUSP, Cmd.ENTITY_GROUP_USERS_SELECT, Cmd.ENTITY_CIGROUP_USERS]: # Skip over sub-arguments while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in GROUP_ROLES_MAP or myarg in {'primarydomain', 'recursive', 'includederivedmembership'}: pass elif myarg == 'domains': Cmd.Advance() elif ((entityType == Cmd.ENTITY_GROUP_USERS_SELECT) and (myarg in SUSPENDED_ARGUMENTS) or (myarg in ARCHIVED_ARGUMENTS)): if myarg in {'issuspended', 'isarchived'}: if Cmd.PeekArgumentPresent(TRUE_VALUES) or Cmd.PeekArgumentPresent(FALSE_VALUES): Cmd.Advance() elif myarg == 'end': break else: Cmd.Backup() missingArgumentExit('end') return (entityClass, {'entityType': entityType, 'entity': entityItem, 'isSuspended': isSuspended, 'isArchived': isArchived, 'groupMemberType': groupMemberType}) if entityClass == Cmd.ENTITY_CROS: if entityType in {Cmd.ENTITY_CROS_OU_QUERY, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OUS_QUERY, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERY, Cmd.ENTITY_CROS_OU_QUERIES, Cmd.ENTITY_CROS_OU_AND_CHILDREN_QUERIES, Cmd.ENTITY_CROS_OUS_QUERIES, Cmd.ENTITY_CROS_OUS_AND_CHILDREN_QUERIES}: Cmd.Advance() return (entityClass, {'entityType': entityType, 'entity': entityItem}) def getEntitySelector(): return getChoice(Cmd.ENTITY_LIST_SELECTORS, defaultChoice=None) def getEntitySelection(entitySelector, shlexSplit): if entitySelector in [Cmd.ENTITY_SELECTOR_FILE]: return getEntitiesFromFile(shlexSplit) if entitySelector in [Cmd.ENTITY_SELECTOR_CSV, Cmd.ENTITY_SELECTOR_CSVFILE]: return getEntitiesFromCSVFile(shlexSplit) if entitySelector == Cmd.ENTITY_SELECTOR_CSVKMD: return getEntitiesFromCSVbyField() if 
entitySelector in [Cmd.ENTITY_SELECTOR_CSVSUBKEY]: checkSubkeyField() return GM.Globals[GM.CSV_DATA_DICT] if entitySelector in [Cmd.ENTITY_SELECTOR_CSVDATA]: checkDataField() return GM.Globals[GM.CSV_DATA_DICT] return [] def getEntityList(item, shlexSplit=False): entitySelector = getEntitySelector() if entitySelector: return getEntitySelection(entitySelector, shlexSplit) return convertEntityToList(getString(item, minLen=0), shlexSplit=shlexSplit) def getNormalizedEmailAddressEntity(shlexSplit=False, noUid=True, noLower=False): return [normalizeEmailAddressOrUID(emailAddress, noUid=noUid, noLower=noLower) for emailAddress in getEntityList(Cmd.OB_EMAIL_ADDRESS_ENTITY, shlexSplit)] def getUserObjectEntity(clObject, itemType, shlexSplit=False): entity = {'item': itemType, 'list': getEntityList(clObject, shlexSplit), 'dict': None} if isinstance(entity['list'], dict): entity['dict'] = entity['list'] return entity def _validateUserGetObjectList(user, i, count, entity, api=API.GMAIL, showAction=True): if entity['dict']: entityList = entity['dict'][user] else: entityList = entity['list'] user, svc = buildGAPIServiceObject(api, user, i, count) if not svc: return (user, None, [], 0) jcount = len(entityList) if showAction: entityPerformActionNumItems([Ent.USER, user], jcount, entity['item'], i, count) if jcount == 0: setSysExitRC(NO_ENTITIES_FOUND_RC) return (user, svc, entityList, jcount) def _validateUserGetMessageIds(user, i, count, entity): if entity: if entity['dict']: entityList = entity['dict'][user] else: entityList = entity['list'] else: entityList = [] user, gmail = buildGAPIServiceObject(API.GMAIL, user, i, count) if not gmail: return (user, None, None) return (user, gmail, entityList) def checkUserExists(cd, user, entityType=Ent.USER, i=0, count=0): user = normalizeEmailAddressOrUID(user) try: return callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=user, fields='primaryEmail')['primaryEmail'] except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): entityUnknownWarning(entityType, user, i, count) return None def checkUserSuspended(cd, user, entityType=Ent.USER, i=0, count=0): user = normalizeEmailAddressOrUID(user) try: return callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=user, fields='suspended')['suspended'] except (GAPI.userNotFound, GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.badRequest, GAPI.backendError, GAPI.systemError): entityUnknownWarning(entityType, user, i, count) return None # Add attachements to an email message def _addAttachmentsToMessage(message, attachments): for attachment in attachments: try: attachFilename = attachment[0] attachContentType, attachEncoding = mimetypes.guess_type(attachFilename) if attachContentType is None or attachEncoding is not None: attachContentType = 'application/octet-stream' main_type, sub_type = attachContentType.split('/', 1) if main_type == 'text': msg = MIMEText(readFile(attachFilename, 'r', attachment[1]), _subtype=sub_type, _charset=UTF8) elif main_type == 'image': msg = MIMEImage(readFile(attachFilename, 'rb'), _subtype=sub_type) elif main_type == 'audio': msg = MIMEAudio(readFile(attachFilename, 'rb'), _subtype=sub_type) elif main_type == 'application': msg = MIMEApplication(readFile(attachFilename, 'rb'), _subtype=sub_type) else: msg = MIMEBase(main_type, sub_type) msg.set_payload(readFile(attachFilename, 'rb')) msg.add_header('Content-Disposition', 'attachment', 
filename=os.path.basename(attachFilename)) message.attach(msg) except (IOError, UnicodeDecodeError) as e: usageErrorExit(f'{attachFilename}: {str(e)}') # Add embedded images to an email message def _addEmbeddedImagesToMessage(message, embeddedImages): for embeddedImage in embeddedImages: try: imageFilename = embeddedImage[0] imageContentType, imageEncoding = mimetypes.guess_type(imageFilename) if imageContentType is None or imageEncoding is not None: imageContentType = 'application/octet-stream' main_type, sub_type = imageContentType.split('/', 1) if main_type == 'image': msg = MIMEImage(readFile(imageFilename, 'rb'), _subtype=sub_type) else: msg = MIMEBase(main_type, sub_type) msg.set_payload(readFile(imageFilename, 'rb')) msg.add_header('Content-Disposition', 'attachment', filename=os.path.basename(imageFilename)) msg.add_header('Content-ID', f'<{embeddedImage[1]}>') message.attach(msg) except (IOError, UnicodeDecodeError) as e: usageErrorExit(f'{imageFilename}: {str(e)}') NAME_EMAIL_ADDRESS_PATTERN = re.compile(r'^.*<(.+)>$') # Send an email def send_email(msgSubject, msgBody, msgTo, i=0, count=0, clientAccess=False, msgFrom=None, msgReplyTo=None, html=False, charset=UTF8, attachments=None, embeddedImages=None, msgHeaders=None, ccRecipients=None, bccRecipients=None, mailBox=None): def checkResult(entityType, recipients): if not recipients: return toSent = set(recipients.split(',')) toFailed = {} for addr, err in iter(result.items()): if addr in toSent: toSent.remove(addr) toFailed[addr] = f'{err[0]}: {err[1]}' if toSent: entityActionPerformed([entityType, ','.join(toSent), Ent.MESSAGE, msgSubject], i, count) for addr, errMsg in iter(toFailed.items()): entityActionFailedWarning([entityType, addr, Ent.MESSAGE, msgSubject], errMsg, i, count) def cleanAddr(emailAddr): match = NAME_EMAIL_ADDRESS_PATTERN.match(emailAddr) if match: return match.group(1) return emailAddr if msgFrom is None: msgFrom = _getAdminEmail() # Force ASCII for RFC compliance # xmlcharref seems to work to display at least # some unicode in HTML body and is ignored in # plain text body. 
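# Illustrative sketch (standard codec behavior): 'xmlcharrefreplace' keeps the payload ASCII
# while preserving non-ASCII characters as character references:
#   'Résumé'.encode('ascii', 'xmlcharrefreplace').decode('UTF-8')  # 'R&#233;sum&#233;'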
# msgBody = msgBody.encode('ascii', 'xmlcharrefreplace').decode(UTF8) if not attachments and not embeddedImages: message = MIMEText(msgBody, ['plain', 'html'][html], charset) else: message = MIMEMultipart() msg = MIMEText(msgBody, ['plain', 'html'][html], charset) message.attach(msg) if attachments: _addAttachmentsToMessage(message, attachments) if embeddedImages: _addEmbeddedImagesToMessage(message, embeddedImages) message['Subject'] = msgSubject message['From'] = msgFrom if msgReplyTo is not None: message['Reply-To'] = msgReplyTo if ccRecipients: message['Cc'] = ccRecipients.lower() if bccRecipients: message['Bcc'] = bccRecipients.lower() if msgHeaders: for header, value in iter(msgHeaders.items()): if header not in {'Subject', 'From', 'To', 'Reply-To', 'Cc', 'Bcc'}: message[header] = value if mailBox is None: mailBox = msgFrom mailBoxAddr = normalizeEmailAddressOrUID(cleanAddr(mailBox), noUid=True, noLower=True) action = Act.Get() Act.Set(Act.SENDEMAIL) if not GC.Values[GC.SMTP_HOST]: if not clientAccess: userId, gmail = buildGAPIServiceObject(API.GMAIL, mailBoxAddr) if not gmail: return else: userId = mailBoxAddr gmail = buildGAPIObject(API.GMAIL) message['To'] = msgTo if msgTo else userId try: result = callGAPI(gmail.users().messages(), 'send', throwReasons=[GAPI.SERVICE_NOT_AVAILABLE, GAPI.AUTH_ERROR, GAPI.DOMAIN_POLICY, GAPI.INVALID, GAPI.INVALID_ARGUMENT, GAPI.FORBIDDEN, GAPI.PERMISSION_DENIED], userId=userId, body={'raw': base64.urlsafe_b64encode(message.as_bytes()).decode()}, fields='id') entityActionPerformedMessage([Ent.RECIPIENT, msgTo, Ent.MESSAGE, msgSubject], f"{result['id']}", i, count) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy, GAPI.invalid, GAPI.invalidArgument, GAPI.forbidden, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.RECIPIENT, msgTo, Ent.MESSAGE, msgSubject], str(e), i, count) else: message['To'] = msgTo if msgTo else mailBoxAddr server = None try: server = smtplib.SMTP(GC.Values[GC.SMTP_HOST], 587, GC.Values[GC.SMTP_FQDN]) if GC.Values[GC.DEBUG_LEVEL] > 0: server.set_debuglevel(1) server.starttls(context=ssl.create_default_context(cafile=GC.Values[GC.CACERTS_PEM])) if GC.Values[GC.SMTP_USERNAME] and GC.Values[GC.SMTP_PASSWORD]: if isinstance(GC.Values[GC.SMTP_PASSWORD], bytes): server.login(GC.Values[GC.SMTP_USERNAME], base64.b64decode(GC.Values[GC.SMTP_PASSWORD]).decode(UTF8)) else: server.login(GC.Values[GC.SMTP_USERNAME], GC.Values[GC.SMTP_PASSWORD]) result = server.send_message(message) checkResult(Ent.RECIPIENT, message['To']) checkResult(Ent.RECIPIENT_CC, ccRecipients) checkResult(Ent.RECIPIENT_BCC, bccRecipients) except smtplib.SMTPException as e: entityActionFailedWarning([Ent.RECIPIENT, msgTo, Ent.MESSAGE, msgSubject], str(e), i, count) if server: try: server.quit() except Exception: pass Act.Set(action) def addFieldToFieldsList(fieldName, fieldsChoiceMap, fieldsList): fields = fieldsChoiceMap[fieldName.lower()] if isinstance(fields, list): fieldsList.extend(fields) else: fieldsList.append(fields) def _getFieldsList(): return getString(Cmd.OB_FIELD_NAME_LIST).lower().replace('_', '').replace(',', ' ').split() def _getRawFields(requiredField=None): rawFields = getString(Cmd.OB_FIELDS) if requiredField is None or requiredField in rawFields: return rawFields return f'{requiredField},{rawFields}' def CheckInputRowFilterHeaders(titlesList, rowFilter, rowDropFilter): status = True for filterVal in rowFilter: columns = [t for t in titlesList if filterVal[0].match(t)] if not columns: 
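# Illustrative sketch (hypothetical gam.cfg values): send_email() uses the Gmail API
# (users.messages.send) when smtp_host is unset; setting it switches to SMTP with STARTTLS:
#   smtp_host = smtp.example.com
#   smtp_username = relay@example.com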
stderrErrorMsg(Msg.COLUMN_DOES_NOT_MATCH_ANY_INPUT_COLUMNS.format(GC.CSV_INPUT_ROW_FILTER, filterVal[0].pattern)) status = False for filterVal in rowDropFilter: columns = [t for t in titlesList if filterVal[0].match(t)] if not columns: stderrErrorMsg(Msg.COLUMN_DOES_NOT_MATCH_ANY_INPUT_COLUMNS.format(GC.CSV_INPUT_ROW_DROP_FILTER, filterVal[0].pattern)) status = False if not status: sys.exit(USAGE_ERROR_RC) def RowFilterMatch(row, titlesList, rowFilter, rowFilterModeAll, rowDropFilter, rowDropFilterModeAll): def rowRegexFilterMatch(filterPattern): if anyMatch: for column in columns: if filterPattern.search(str(row.get(column, ''))): return True return False for column in columns: if not filterPattern.search(str(row.get(column, ''))): return False return True def rowNotRegexFilterMatch(filterPattern): if anyMatch: for column in columns: if filterPattern.search(str(row.get(column, ''))): return False return True for column in columns: if not filterPattern.search(str(row.get(column, ''))): return True return False def stripTimeFromDateTime(rowDate): if YYYYMMDD_PATTERN.match(rowDate): try: rowTime = datetime.datetime.strptime(rowDate, YYYYMMDD_FORMAT) except ValueError: return None else: try: rowTime, _ = iso8601.parse_date(rowDate) except (iso8601.ParseError, OverflowError): return None return ISOformatTimeStamp(datetime.datetime(rowTime.year, rowTime.month, rowTime.day, tzinfo=iso8601.UTC)) def rowDateTimeFilterMatch(dateMode, op, filterDate): def checkMatch(rowDate): if not rowDate or not isinstance(rowDate, str): return False if rowDate == GC.Values[GC.NEVER_TIME]: rowDate = NEVER_TIME if dateMode: rowDate = stripTimeFromDateTime(rowDate) if not rowDate: return False if op == '<': return rowDate < filterDate if op == '<=': return rowDate <= filterDate if op == '>': return rowDate > filterDate if op == '>=': return rowDate >= filterDate if op == '!=': return rowDate != filterDate return rowDate == filterDate if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def rowDateTimeRangeFilterMatch(dateMode, op, filterDateL, filterDateR): def checkMatch(rowDate): if not rowDate or not isinstance(rowDate, str): return False if rowDate == GC.Values[GC.NEVER_TIME]: rowDate = NEVER_TIME if dateMode: rowDate = stripTimeFromDateTime(rowDate) if not rowDate: return False if op == '!=': return not filterDateL <= rowDate <= filterDateR return filterDateL <= rowDate <= filterDateR if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def getHourMinuteFromDateTime(rowDate): if YYYYMMDD_PATTERN.match(rowDate): return None try: rowTime, _ = iso8601.parse_date(rowDate) except (iso8601.ParseError, OverflowError): return None return f'{rowTime.hour:02d}:{rowTime.minute:02d}' def rowTimeOfDayRangeFilterMatch(op, startHourMinute, endHourMinute): def checkMatch(rowDate): if not rowDate or not isinstance(rowDate, str) or rowDate == GC.Values[GC.NEVER_TIME]: return False rowHourMinute = getHourMinuteFromDateTime(rowDate) if not rowHourMinute: return False if op == '!=': return not startHourMinute <= rowHourMinute <= endHourMinute return startHourMinute <= rowHourMinute <= endHourMinute if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False 
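# Illustrative sketch (hypothetical timestamps): stripTimeFromDateTime() reduces both bare
# dates and full timestamps to midnight UTC so date-only row filters compare consistently:
#   '2024-07-01'                 -> '2024-07-01T00:00:00+00:00'
#   '2024-07-01T15:42:10-07:00'  -> '2024-07-01T00:00:00+00:00'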
return True def rowCountFilterMatch(op, filterCount): def checkMatch(rowCount): if isinstance(rowCount, str): ##### Blank = 0 if not rowCount: rowCount = '0' elif not rowCount.isdigit(): return False rowCount = int(rowCount) elif not isinstance(rowCount, int): return False if op == '<': return rowCount < filterCount if op == '<=': return rowCount <= filterCount if op == '>': return rowCount > filterCount if op == '>=': return rowCount >= filterCount if op == '!=': return rowCount != filterCount return rowCount == filterCount if anyMatch: for column in columns: if checkMatch(row.get(column, 0)): return True return False for column in columns: if not checkMatch(row.get(column, 0)): return False return True def rowCountRangeFilterMatch(op, filterCountL, filterCountR): def checkMatch(rowCount): if isinstance(rowCount, str): if not rowCount.isdigit(): return False rowCount = int(rowCount) elif not isinstance(rowCount, int): return False if op == '!=': return not filterCountL <= rowCount <= filterCountR return filterCountL <= rowCount <= filterCountR if anyMatch: for column in columns: if checkMatch(row.get(column, 0)): return True return False for column in columns: if not checkMatch(row.get(column, 0)): return False return True def rowLengthFilterMatch(op, filterLength): def checkMatch(rowString): if not isinstance(rowString, str): return False rowLength = len(rowString) if op == '<': return rowLength < filterLength if op == '<=': return rowLength <= filterLength if op == '>': return rowLength > filterLength if op == '>=': return rowLength >= filterLength if op == '!=': return rowLength != filterLength return rowLength == filterLength if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def rowLengthRangeFilterMatch(op, filterLengthL, filterLengthR): def checkMatch(rowString): if not isinstance(rowString, str): return False rowLength = len(rowString) if op == '!=': return not filterLengthL <= rowLength <= filterLengthR return filterLengthL <= rowLength <= filterLengthR if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def rowBooleanFilterMatch(filterBoolean): def checkMatch(rowBoolean): if isinstance(rowBoolean, bool): return rowBoolean == filterBoolean if isinstance(rowBoolean, str): if rowBoolean.lower() in TRUE_FALSE: return rowBoolean.capitalize() == str(filterBoolean) ##### Blank = False if not rowBoolean: return not filterBoolean return False if anyMatch: for column in columns: if checkMatch(row.get(column, False)): return True return False for column in columns: if not checkMatch(row.get(column, False)): return False return True def rowDataFilterMatch(filterData): if anyMatch: for column in columns: if str(row.get(column, '')) in filterData: return True return False for column in columns: if not str(row.get(column, '')) in filterData: return False return True def rowNotDataFilterMatch(filterData): if anyMatch: for column in columns: if str(row.get(column, '')) in filterData: return False return True for column in columns: if not str(row.get(column, '')) in filterData: return True return False def rowTextFilterMatch(op, filterText): def checkMatch(rowText): if not isinstance(rowText, str): rowText = str(rowText) if op == '<': return rowText < filterText if op == '<=': return rowText <= filterText if op == '>': return rowText 
> filterText if op == '>=': return rowText >= filterText if op == '!=': return rowText != filterText return rowText == filterText if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def rowTextRangeFilterMatch(op, filterTextL, filterTextR): def checkMatch(rowText): if not isinstance(rowText, str): rowText = str(rowText) if op == '!=': return not filterTextL <= rowText <= filterTextR return filterTextL <= rowText <= filterTextR if anyMatch: for column in columns: if checkMatch(row.get(column, '')): return True return False for column in columns: if not checkMatch(row.get(column, '')): return False return True def filterMatch(filterVal): if filterVal[2] == 'regex': if rowRegexFilterMatch(filterVal[3]): return True elif filterVal[2] == 'notregex': if rowNotRegexFilterMatch(filterVal[3]): return True elif filterVal[2] in {'date', 'time'}: if rowDateTimeFilterMatch(filterVal[2] == 'date', filterVal[3], filterVal[4]): return True elif filterVal[2] in {'daterange', 'timerange'}: if rowDateTimeRangeFilterMatch(filterVal[2] == 'date', filterVal[3], filterVal[4], filterVal[5]): return True elif filterVal[2] == 'timeofdayrange': if rowTimeOfDayRangeFilterMatch(filterVal[3], filterVal[4], filterVal[5]): return True elif filterVal[2] == 'count': if rowCountFilterMatch(filterVal[3], filterVal[4]): return True elif filterVal[2] == 'countrange': if rowCountRangeFilterMatch(filterVal[3], filterVal[4], filterVal[5]): return True elif filterVal[2] == 'length': if rowLengthFilterMatch(filterVal[3], filterVal[4]): return True elif filterVal[2] == 'lengthrange': if rowLengthRangeFilterMatch(filterVal[3], filterVal[4], filterVal[5]): return True elif filterVal[2] == 'boolean': if rowBooleanFilterMatch(filterVal[3]): return True elif filterVal[2] == 'data': if rowDataFilterMatch(filterVal[3]): return True elif filterVal[2] == 'notdata': if rowNotDataFilterMatch(filterVal[3]): return True elif filterVal[2] == 'text': if rowTextFilterMatch(filterVal[3], filterVal[4]): return True elif filterVal[2] == 'textrange': if rowTextRangeFilterMatch(filterVal[3], filterVal[4], filterVal[5]): return True return False if rowFilter: anyMatches = False for filterVal in rowFilter: columns = [t for t in titlesList if filterVal[0].match(t)] if not columns: columns = [None] anyMatch = filterVal[1] if filterMatch(filterVal): if not rowFilterModeAll: # Any - any match selects anyMatches = True break else: if rowFilterModeAll: # All - any match failure doesn't select return False if not rowFilterModeAll and not anyMatches: # Any - no matches doesn't select return False if rowDropFilter: allMatches = True for filterVal in rowDropFilter: columns = [t for t in titlesList if filterVal[0].match(t)] if not columns: columns = [None] anyMatch = filterVal[1] if filterMatch(filterVal): if not rowDropFilterModeAll: # Any - any match drops return False else: if rowDropFilterModeAll: # All - any match failure doesn't drop allMatches = False break if rowDropFilterModeAll and allMatches: # All - all matches drops return False return True # myarg is command line argument # fieldChoiceMap maps myarg to API field names #FIELD_CHOICE_MAP = { # 'foo': 'foo', # 'foobar': 'fooBar', # } # fieldsList is the list of API fields def getFieldsList(myarg, fieldsChoiceMap, fieldsList, initialField=None, fieldsArg='fields', onlyFieldsArg=False): def addInitialField(): if isinstance(initialField, list): fieldsList.extend(initialField) 
else: fieldsList.append(initialField) def addMappedFields(mappedFields): if isinstance(mappedFields, list): fieldsList.extend(mappedFields) else: fieldsList.append(mappedFields) if not onlyFieldsArg and myarg in fieldsChoiceMap: if not fieldsList and initialField is not None: addInitialField() addMappedFields(fieldsChoiceMap[myarg]) elif myarg == fieldsArg: if not fieldsList and initialField is not None: addInitialField() for field in _getFieldsList(): if field in fieldsChoiceMap: addMappedFields(fieldsChoiceMap[field]) else: invalidChoiceExit(field, fieldsChoiceMap, True) else: return False return True def getFieldsFromFieldsList(fieldsList): if fieldsList: return ','.join(set(fieldsList)).replace('.', '/') return None def getItemFieldsFromFieldsList(item, fieldsList, returnItemIfNoneList=False): if fieldsList: return f'nextPageToken,{item}({",".join(set(fieldsList))})'.replace('.', '/') if not returnItemIfNoneList: return None return f'nextPageToken,{item}' class CSVPrintFile(): def __init__(self, titles=None, sortTitles=None, indexedTitles=None): self.rows = [] self.rowCount = 0 self.outputTranspose = GM.Globals[GM.CSV_OUTPUT_TRANSPOSE] self.todrive = GM.Globals[GM.CSV_TODRIVE] self.titlesSet = set() self.titlesList = [] self.JSONtitlesSet = set() self.JSONtitlesList = [] self.sortHeaders = [] self.SetHeaderForce(GC.Values[GC.CSV_OUTPUT_HEADER_FORCE]) if not self.headerForce and titles is not None: self.SetTitles(titles) self.SetJSONTitles(titles) self.SetHeaderOrder(GC.Values[GC.CSV_OUTPUT_HEADER_ORDER]) if GM.Globals.get(GM.CSV_OUTPUT_COLUMN_DELIMITER) is None: GM.Globals[GM.CSV_OUTPUT_COLUMN_DELIMITER] = GC.Values.get(GC.CSV_OUTPUT_COLUMN_DELIMITER, ',') self.SetColumnDelimiter(GM.Globals[GM.CSV_OUTPUT_COLUMN_DELIMITER]) if GM.Globals.get(GM.CSV_OUTPUT_QUOTE_CHAR) is None: GM.Globals[GM.CSV_OUTPUT_QUOTE_CHAR] = GC.Values.get(GC.CSV_OUTPUT_QUOTE_CHAR, '"') if GM.Globals.get(GM.CSV_OUTPUT_NO_ESCAPE_CHAR) is None: GM.Globals[GM.CSV_OUTPUT_NO_ESCAPE_CHAR] = GC.Values.get(GC.CSV_OUTPUT_NO_ESCAPE_CHAR, False) self.SetNoEscapeChar(GM.Globals[GM.CSV_OUTPUT_NO_ESCAPE_CHAR]) self.SetQuoteChar(GM.Globals[GM.CSV_OUTPUT_QUOTE_CHAR]) # if GM.Globals.get(GM.CSV_OUTPUT_SORT_HEADERS) is None: if not GM.Globals.get(GM.CSV_OUTPUT_SORT_HEADERS): GM.Globals[GM.CSV_OUTPUT_SORT_HEADERS] = GC.Values.get(GC.CSV_OUTPUT_SORT_HEADERS, []) self.SetSortHeaders(GM.Globals[GM.CSV_OUTPUT_SORT_HEADERS]) # if GM.Globals.get(GM.CSV_OUTPUT_TIMESTAMP_COLUMN) is None: if not GM.Globals.get(GM.CSV_OUTPUT_TIMESTAMP_COLUMN): GM.Globals[GM.CSV_OUTPUT_TIMESTAMP_COLUMN] = GC.Values.get(GC.CSV_OUTPUT_TIMESTAMP_COLUMN, '') self.SetTimestampColumn(GM.Globals[GM.CSV_OUTPUT_TIMESTAMP_COLUMN]) self.SetFormatJSON(False) self.SetMapDrive3Titles(False) self.SetNodataFields(False, None, None, None, False) self.SetFixPaths(False) self.SetShowPermissionsLast(False) self.sortTitlesSet = set() self.sortTitlesList = [] if sortTitles is not None: if not isinstance(sortTitles, str) or sortTitles != 'sortall': self.SetSortTitles(sortTitles) else: self.SetSortAllTitles() self.SetIndexedTitles(indexedTitles if indexedTitles is not None else []) self.SetHeaderFilter(GC.Values[GC.CSV_OUTPUT_HEADER_FILTER]) self.SetHeaderDropFilter(GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER]) self.SetRowFilter(GC.Values[GC.CSV_OUTPUT_ROW_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE]) self.SetRowDropFilter(GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE]) self.SetRowLimit(GC.Values[GC.CSV_OUTPUT_ROW_LIMIT]) 
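    # By this point __init__ has absorbed the csv_output_* settings (GM.Globals overrides
    # falling back to GC.Values from gam.cfg): column delimiter, quote/escape characters,
    # sort headers, timestamp column, and the header/row filters and row limit configured
    # just above. AppendRow() stamps the timestamp column and enforces the row limit;
    # WriteRow()/WriteRowTitles() consult RowFilterMatch() (defined above) before accepting a row.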
self.SetZeroBlankMimeTypeCounts(False) def AddTitle(self, title): self.titlesSet.add(title) self.titlesList.append(title) def AddTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title not in self.titlesSet: self.AddTitle(title) def SetTitles(self, titles): self.titlesSet = set() self.titlesList = [] self.AddTitles(titles) def RemoveTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title in self.titlesSet: self.titlesSet.remove(title) self.titlesList.remove(title) def MoveTitlesToEnd(self, titles): self.RemoveTitles(titles) self.AddTitles(titles) def MapTitles(self, ov, nv): if ov in self.titlesSet: self.titlesSet.remove(ov) self.titlesSet.add(nv) for i, v in enumerate(self.titlesList): if v == ov: self.titlesList[i] = nv break def AddSortTitle(self, title): self.sortTitlesSet.add(title) self.sortTitlesList.append(title) def AddSortTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title not in self.sortTitlesSet: self.AddSortTitle(title) def SetSortTitles(self, titles): self.sortTitlesSet = set() self.sortTitlesList = [] self.AddSortTitles(titles) def SetSortAllTitles(self): self.sortTitlesList = self.titlesList[:] self.sortTitlesSet = set(self.sortTitlesList) def SetMapDrive3Titles(self, mapDrive3Titles): self.mapDrive3Titles = mapDrive3Titles def MapDrive3TitlesToDrive2(self): _mapDrive3TitlesToDrive2(self.titlesList, API.DRIVE3_TO_DRIVE2_FILES_FIELDS_MAP) self.titlesSet = set(self.titlesList) def AddJSONTitle(self, title): self.JSONtitlesSet.add(title) self.JSONtitlesList.append(title) def AddJSONTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title not in self.JSONtitlesSet: self.AddJSONTitle(title) def RemoveJSONTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title in self.JSONtitlesSet: self.JSONtitlesSet.remove(title) self.JSONtitlesList.remove(title) def MoveJSONTitlesToEnd(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title in self.JSONtitlesList: self.JSONtitlesList.remove(title) self.JSONtitlesList.append(title) def SetJSONTitles(self, titles): self.JSONtitlesSet = set() self.JSONtitlesList = [] self.AddJSONTitles(titles) # fieldName is command line argument # fieldNameMap maps fieldName to API field names; CSV file header will be API field name #ARGUMENT_TO_PROPERTY_MAP = { # 'admincreated': 'adminCreated', # 'aliases': ['aliases', 'nonEditableAliases'], # } # fieldsList is the list of API fields def AddField(self, fieldName, fieldNameMap, fieldsList): fields = fieldNameMap[fieldName.lower()] if isinstance(fields, list): for field in fields: if field not in fieldsList: fieldsList.append(field) self.AddTitles(field.replace('.', GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER])) elif fields not in fieldsList: fieldsList.append(fields) self.AddTitles(fields.replace('.', GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER])) def addInitialField(self, initialField, fieldsChoiceMap, fieldsList): if isinstance(initialField, list): for field in initialField: self.AddField(field, fieldsChoiceMap, fieldsList) else: self.AddField(initialField, fieldsChoiceMap, fieldsList) def GetFieldsListTitles(self, fieldName, fieldsChoiceMap, fieldsList, initialField=None): if fieldName in fieldsChoiceMap: if not fieldsList and initialField is not None: self.addInitialField(initialField, fieldsChoiceMap, fieldsList) self.AddField(fieldName, fieldsChoiceMap, fieldsList) elif fieldName == 
'fields': if not fieldsList and initialField is not None: self.addInitialField(initialField, fieldsChoiceMap, fieldsList) for field in _getFieldsList(): if field in fieldsChoiceMap: self.AddField(field, fieldsChoiceMap, fieldsList) else: invalidChoiceExit(field, fieldsChoiceMap, True) else: return False return True TDSHEET_ENTITY_MAP = {'tdsheet': 'sheetEntity', 'tdbackupsheet': 'backupSheetEntity', 'tdcopysheet': 'copySheetEntity'} TDSHARE_ACL_ROLES_MAP = { 'commenter': 'commenter', 'contributor': 'writer', 'editor': 'writer', 'read': 'reader', 'reader': 'reader', 'viewer': 'reader', 'writer': 'writer', } def GetTodriveParameters(self): def invalidTodriveFileIdExit(entityValueList, message, location): Cmd.SetLocation(location-1) usageErrorExit(formatKeyValueList('', Ent.FormatEntityValueList([Ent.DRIVE_FILE_ID, self.todrive['fileId']]+entityValueList)+[message], '')) def invalidTodriveParentExit(entityType, message): Cmd.SetLocation(tdparentLocation-1) if not localParent: usageErrorExit(Msg.INVALID_ENTITY.format(Ent.Singular(entityType), formatKeyValueList('', [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.ITEM), GC.TODRIVE_PARENT, Ent.Singular(Ent.VALUE), self.todrive['parent'], message], ''))) else: usageErrorExit(Msg.INVALID_ENTITY.format(Ent.Singular(entityType), message)) def invalidTodriveUserExit(entityType, message): Cmd.SetLocation(tduserLocation-1) if not localUser: usageErrorExit(Msg.INVALID_ENTITY.format(Ent.Singular(entityType), formatKeyValueList('', [Ent.Singular(Ent.CONFIG_FILE), GM.Globals[GM.GAM_CFG_FILE], Ent.Singular(Ent.ITEM), GC.TODRIVE_USER, Ent.Singular(Ent.VALUE), self.todrive['user'], message], ''))) else: usageErrorExit(Msg.INVALID_ENTITY.format(Ent.Singular(entityType), message)) def getDriveObject(): if not GC.Values[GC.TODRIVE_CLIENTACCESS]: _, drive = buildGAPIServiceObject(API.DRIVETD, self.todrive['user']) if not drive: invalidTodriveUserExit(Ent.USER, Msg.NOT_FOUND) else: drive = buildGAPIObject(API.DRIVE3) return drive CELL_WRAP_MAP = {'clip': 'CLIP', 'overflow': 'OVERFLOW_CELL', 'overflowcell': 'OVERFLOW_CELL', 'wrap': 'WRAP'} CELL_NUMBER_FORMAT_MAP = {'text': 'TEXT', 'number': 'NUMBER'} localUser = localParent = False tdfileidLocation = tdparentLocation = tdaddsheetLocation = tdupdatesheetLocation = tduserLocation = Cmd.Location() tdsheetLocation = {} for sheetEntity in iter(self.TDSHEET_ENTITY_MAP.values()): tdsheetLocation[sheetEntity] = Cmd.Location() self.todrive = {'user': GC.Values[GC.TODRIVE_USER], 'title': None, 'description': None, 'sheetEntity': None, 'addsheet': False, 'updatesheet': False, 'sheettitle': None, 'cellwrap': None, 'cellnumberformat': None, 'clearfilter': GC.Values[GC.TODRIVE_CLEARFILTER], 'backupSheetEntity': None, 'copySheetEntity': None, 'locale': GC.Values[GC.TODRIVE_LOCALE], 'timeZone': GC.Values[GC.TODRIVE_TIMEZONE], 'timestamp': GC.Values[GC.TODRIVE_TIMESTAMP], 'timeformat': GC.Values[GC.TODRIVE_TIMEFORMAT], 'noescapechar': GC.Values[GC.TODRIVE_NO_ESCAPE_CHAR], 'daysoffset': None, 'hoursoffset': None, 'sheettimestamp': GC.Values[GC.TODRIVE_SHEET_TIMESTAMP], 'sheettimeformat': GC.Values[GC.TODRIVE_SHEET_TIMEFORMAT], 'sheetdaysoffset': None, 'sheethoursoffset': None, 'fileId': None, 'parentId': None, 'parent': GC.Values[GC.TODRIVE_PARENT], 'retaintitle': False, 'localcopy': GC.Values[GC.TODRIVE_LOCALCOPY], 'uploadnodata': GC.Values[GC.TODRIVE_UPLOAD_NODATA], 'nobrowser': GC.Values[GC.TODRIVE_NOBROWSER], 'noemail': GC.Values[GC.TODRIVE_NOEMAIL], 'returnidonly': False, 'alert': [], 'share': [], 
'notify': False, 'subject': None, 'from': None} while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'tduser': self.todrive['user'] = getString(Cmd.OB_EMAIL_ADDRESS) tduserLocation = Cmd.Location() localUser = True elif myarg == 'tdtitle': self.todrive['title'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tddescription': self.todrive['description'] = getString(Cmd.OB_STRING) elif myarg in self.TDSHEET_ENTITY_MAP: sheetEntity = self.TDSHEET_ENTITY_MAP[myarg] tdsheetLocation[sheetEntity] = Cmd.Location() self.todrive[sheetEntity] = getSheetEntity(True) elif myarg == 'tdaddsheet': tdaddsheetLocation = Cmd.Location() self.todrive['addsheet'] = getBoolean() if self.todrive['addsheet']: self.todrive['updatesheet'] = False elif myarg == 'tdupdatesheet': tdupdatesheetLocation = Cmd.Location() self.todrive['updatesheet'] = getBoolean() if self.todrive['updatesheet']: self.todrive['addsheet'] = False elif myarg == 'tdcellwrap': self.todrive['cellwrap'] = getChoice(CELL_WRAP_MAP, mapChoice=True) elif myarg == 'tdcellnumberformat': self.todrive['cellnumberformat'] = getChoice(CELL_NUMBER_FORMAT_MAP, mapChoice=True) elif myarg == 'tdclearfilter': self.todrive['clearfilter'] = getBoolean() elif myarg == 'tdlocale': self.todrive['locale'] = getLanguageCode(LOCALE_CODES_MAP) elif myarg == 'tdtimezone': self.todrive['timeZone'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tdtimestamp': self.todrive['timestamp'] = getBoolean() elif myarg == 'tdtimeformat': self.todrive['timeformat'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tdsheettitle': self.todrive['sheettitle'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tdsheettimestamp': self.todrive['sheettimestamp'] = getBoolean() elif myarg == 'tdsheettimeformat': self.todrive['sheettimeformat'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tddaysoffset': self.todrive['daysoffset'] = getInteger(minVal=0) elif myarg == 'tdhoursoffset': self.todrive['hoursoffset'] = getInteger(minVal=0) elif myarg == 'tdsheetdaysoffset': self.todrive['sheetdaysoffset'] = getInteger(minVal=0) elif myarg == 'tdsheethoursoffset': self.todrive['sheethoursoffset'] = getInteger(minVal=0) elif myarg == 'tdfileid': self.todrive['fileId'] = getString(Cmd.OB_DRIVE_FILE_ID) tdfileidLocation = Cmd.Location() elif myarg == 'tdretaintitle': self.todrive['retaintitle'] = getBoolean() elif myarg == 'tdparent': self.todrive['parent'] = escapeDriveFileName(getString(Cmd.OB_DRIVE_FOLDER_NAME, minLen=0)) tdparentLocation = Cmd.Location() localParent = True elif myarg == 'tdlocalcopy': self.todrive['localcopy'] = getBoolean() elif myarg == 'tduploadnodata': self.todrive['uploadnodata'] = getBoolean() elif myarg == 'tdnobrowser': self.todrive['nobrowser'] = getBoolean() elif myarg == 'tdnoemail': self.todrive['noemail'] = getBoolean() elif myarg == 'tdreturnidonly': self.todrive['returnidonly'] = getBoolean() elif myarg == 'tdnoescapechar': self.todrive['noescapechar'] = getBoolean() elif myarg == 'tdalert': self.todrive['alert'].append({'emailAddress': normalizeEmailAddressOrUID(getString(Cmd.OB_EMAIL_ADDRESS))}) elif myarg == 'tdshare': self.todrive['share'].append({'emailAddress': normalizeEmailAddressOrUID(getString(Cmd.OB_EMAIL_ADDRESS)), 'type': 'user', 'role': getChoice(self.TDSHARE_ACL_ROLES_MAP, mapChoice=True)}) elif myarg == 'tdnotify': self.todrive['notify'] = getBoolean() elif myarg == 'tdsubject': self.todrive['subject'] = getString(Cmd.OB_STRING, minLen=0) elif myarg == 'tdfrom': self.todrive['from'] = getString(Cmd.OB_EMAIL_ADDRESS) else: 
Cmd.Backup() break if self.todrive['addsheet']: if not self.todrive['fileId']: Cmd.SetLocation(tdaddsheetLocation-1) missingArgumentExit('tdfileid') if self.todrive['sheetEntity'] and self.todrive['sheetEntity']['sheetId']: Cmd.SetLocation(tdsheetLocation[sheetEntity]-1) invalidArgumentExit('tdsheet ') if self.todrive['updatesheet'] and (not self.todrive['fileId'] or not self.todrive['sheetEntity']): Cmd.SetLocation(tdupdatesheetLocation-1) missingArgumentExit('tdfileid and tdsheet') if self.todrive['sheetEntity'] and self.todrive['sheetEntity']['sheetId'] and (not self.todrive['fileId'] or not self.todrive['updatesheet']): Cmd.SetLocation(tdsheetLocation['sheetEntity']-1) missingArgumentExit('tdfileid and tdupdatesheet') if not self.todrive['user'] or GC.Values[GC.TODRIVE_CLIENTACCESS]: self.todrive['user'] = _getAdminEmail() if not GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY] and not GC.Values[GC.TODRIVE_CLIENTACCESS]: user = checkUserExists(buildGAPIObject(API.DIRECTORY), self.todrive['user']) if not user: invalidTodriveUserExit(Ent.USER, Msg.NOT_FOUND) self.todrive['user'] = user else: self.todrive['user'] = normalizeEmailAddressOrUID(self.todrive['user']) if self.todrive['fileId']: drive = getDriveObject() try: result = callGAPI(drive.files(), 'get', throwReasons=GAPI.DRIVE_GET_THROW_REASONS, fileId=self.todrive['fileId'], fields='id,mimeType,capabilities(canEdit)', supportsAllDrives=True) if result['mimeType'] == MIMETYPE_GA_FOLDER: invalidTodriveFileIdExit([], Msg.NOT_AN_ENTITY.format(Ent.Singular(Ent.DRIVE_FILE)), tdfileidLocation) if not result['capabilities']['canEdit']: invalidTodriveFileIdExit([], Msg.NOT_WRITABLE, tdfileidLocation) if self.todrive['sheetEntity']: if result['mimeType'] != MIMETYPE_GA_SPREADSHEET: invalidTodriveFileIdExit([], f'{Msg.NOT_A} {Ent.Singular(Ent.SPREADSHEET)}', tdfileidLocation) if not GC.Values[GC.TODRIVE_CLIENTACCESS]: _, sheet = buildGAPIServiceObject(API.SHEETSTD, self.todrive['user']) if sheet is None: invalidTodriveUserExit(Ent.USER, Msg.NOT_FOUND) else: sheet = buildGAPIObject(API.SHEETS) try: spreadsheet = callGAPI(sheet.spreadsheets(), 'get', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=self.todrive['fileId'], fields='spreadsheetUrl,sheets(properties(sheetId,title),protectedRanges(range(sheetId),requestingUserCanEdit))') for sheetEntity in iter(self.TDSHEET_ENTITY_MAP.values()): if self.todrive[sheetEntity]: sheetId = getSheetIdFromSheetEntity(spreadsheet, self.todrive[sheetEntity]) if sheetId is None: if not self.todrive['addsheet'] and ((sheetEntity != 'sheetEntity') or (self.todrive[sheetEntity]['sheetType'] == Ent.SHEET_ID)): invalidTodriveFileIdExit([self.todrive[sheetEntity]['sheetType'], self.todrive[sheetEntity]['sheetValue']], Msg.NOT_FOUND, tdsheetLocation[sheetEntity]) else: if self.todrive['addsheet']: invalidTodriveFileIdExit([self.todrive[sheetEntity]['sheetType'], self.todrive[sheetEntity]['sheetValue']], Msg.ALREADY_EXISTS, tdsheetLocation[sheetEntity]) if protectedSheetId(spreadsheet, sheetId): invalidTodriveFileIdExit([self.todrive[sheetEntity]['sheetType'], self.todrive[sheetEntity]['sheetValue']], Msg.NOT_WRITABLE, tdsheetLocation[sheetEntity]) self.todrive[sheetEntity]['sheetId'] = sheetId except (GAPI.notFound, GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.failedPrecondition) as e: invalidTodriveFileIdExit([], str(e), tdfileidLocation) except GAPI.fileNotFound: invalidTodriveFileIdExit([], 
Msg.NOT_FOUND, tdfileidLocation) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: invalidTodriveUserExit(Ent.USER, str(e)) elif not self.todrive['parent'] or self.todrive['parent'] == ROOT: self.todrive['parentId'] = ROOT else: drive = getDriveObject() if self.todrive['parent'].startswith('id:'): try: result = callGAPI(drive.files(), 'get', throwReasons=GAPI.DRIVE_USER_THROW_REASONS+[GAPI.FILE_NOT_FOUND, GAPI.INVALID], fileId=self.todrive['parent'][3:], fields='id,mimeType,capabilities(canEdit)', supportsAllDrives=True) except GAPI.fileNotFound: invalidTodriveParentExit(Ent.DRIVE_FOLDER_ID, Msg.NOT_FOUND) except GAPI.invalid as e: invalidTodriveParentExit(Ent.DRIVE_FOLDER_ID, str(e)) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: invalidTodriveUserExit(Ent.USER, str(e)) if result['mimeType'] != MIMETYPE_GA_FOLDER: invalidTodriveParentExit(Ent.DRIVE_FOLDER_ID, Msg.NOT_AN_ENTITY.format(Ent.Singular(Ent.DRIVE_FOLDER))) if not result['capabilities']['canEdit']: invalidTodriveParentExit(Ent.DRIVE_FOLDER_ID, Msg.NOT_WRITABLE) self.todrive['parentId'] = result['id'] else: try: results = callGAPIpages(drive.files(), 'list', 'files', throwReasons=GAPI.DRIVE_USER_THROW_REASONS+[GAPI.INVALID_QUERY], retryReasons=[GAPI.UNKNOWN_ERROR], q=f"name = '{self.todrive['parent']}'", fields='nextPageToken,files(id,mimeType,capabilities(canEdit))', pageSize=1, supportsAllDrives=True) except GAPI.invalidQuery: invalidTodriveParentExit(Ent.DRIVE_FOLDER_NAME, Msg.NOT_FOUND) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: invalidTodriveUserExit(Ent.USER, str(e)) if not results: invalidTodriveParentExit(Ent.DRIVE_FOLDER_NAME, Msg.NOT_FOUND) if results[0]['mimeType'] != MIMETYPE_GA_FOLDER: invalidTodriveParentExit(Ent.DRIVE_FOLDER_NAME, Msg.NOT_AN_ENTITY.format(Ent.Singular(Ent.DRIVE_FOLDER))) if not results[0]['capabilities']['canEdit']: invalidTodriveParentExit(Ent.DRIVE_FOLDER_NAME, Msg.NOT_WRITABLE) self.todrive['parentId'] = results[0]['id'] def SortTitles(self): if not self.sortTitlesList: return restoreTitles = [] for title in self.sortTitlesList: if title in self.titlesSet: self.titlesList.remove(title) restoreTitles.append(title) self.titlesList.sort() for title in restoreTitles[::-1]: self.titlesList.insert(0, title) def RemoveIndexedTitles(self, titles): for title in titles if isinstance(titles, list) else [titles]: if title in self.indexedTitles: self.indexedTitles.remove(title) def SetIndexedTitles(self, indexedTitles): self.indexedTitles = indexedTitles def SortIndexedTitles(self, titlesList): for field in self.indexedTitles: fieldDotN = re.compile(fr'({field}){GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}(\d+)(.*)') indexes = [] subtitles = [] for i, v in enumerate(titlesList): mg = fieldDotN.match(v) if mg: indexes.append(i) subtitles.append(mg.groups('')) for i, ii in enumerate(indexes): titlesList[ii] = [f'{subtitle[0]}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subtitle[1]}{subtitle[2]}' for subtitle in sorted(subtitles, key=lambda k: (int(k[1]), k[2]))][i] @staticmethod def FixPathsTitles(titlesList): # Put paths before path.0 try: index = titlesList.index(f'path{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0') titlesList.remove('paths') titlesList.insert(index, 'paths') except ValueError: pass def FixNodataTitles(self): if self.mapNodataFields: titles = [] addPermissionsTitle = not self.oneItemPerRow for field in self.nodataFields: if field.find('(') != -1: field, subFields = field.split('(', 1) if field in 
self.driveListFields: if field != 'permissions': titles.append(field) elif addPermissionsTitle: titles.append(field) addPermissionsTitle = False titles.extend([f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}' for subField in subFields[:-1].split(',') if subField]) else: titles.extend([f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}' for subField in subFields[:-1].split(',') if subField]) elif field.find('.') != -1: field, subField = field.split('.', 1) if field in self.driveListFields: if field != 'permissions': titles.append(field) elif addPermissionsTitle: titles.append(field) addPermissionsTitle = False titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}') else: titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}') elif field.lower() in self.driveSubfieldsChoiceMap: if field in self.driveListFields: if field != 'permissions': titles.append(field) elif addPermissionsTitle: titles.append(field) addPermissionsTitle = False for subField in iter(self.driveSubfieldsChoiceMap[field.lower()].values()): if not isinstance(subField, list): titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}') else: titles.extend([f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subSubField}' for subSubField in subField]) else: for subField in iter(self.driveSubfieldsChoiceMap[field.lower()].values()): if not isinstance(subField, list): titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}') else: titles.extend([f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subSubField}' for subSubField in subField]) else: titles.append(field) if self.oneItemPerRow: for i, title in enumerate(titles): if title.startswith('permissions.0'): titles[i] = title.replace('permissions.0', 'permission') if not self.formatJSON: self.SetTitles(titles) self.SetSortTitles(['Owner', 'id', 'name', 'title']) self.SortTitles() else: self.SetJSONTitles(titles) else: self.SetTitles(self.nodataFields) self.SetJSONTitles(self.nodataFields) def MovePermsToEnd(self): # Put permissions at end of titles try: last = len(self.titlesList) start = end = self.titlesList.index('permissions') while end < last and self.titlesList[end].startswith('permissions'): end += 1 self.titlesList = self.titlesList[:start]+self.titlesList[end:]+self.titlesList[start:end] except ValueError: pass def SetColumnDelimiter(self, columnDelimiter): self.columnDelimiter = columnDelimiter def SetNoEscapeChar(self, noEscapeChar): self.noEscapeChar = noEscapeChar def SetQuoteChar(self, quoteChar): self.quoteChar = quoteChar def SetTimestampColumn(self, timestampColumn): self.timestampColumn = timestampColumn if not GC.Values[GC.OUTPUT_TIMEFORMAT]: self.todaysTime = ISOformatTimeStamp(todaysTime()) else: self.todaysTime = todaysTime().strftime(GC.Values[GC.OUTPUT_TIMEFORMAT]) def SetSortHeaders(self, sortHeaders): self.sortHeaders = sortHeaders def SetFormatJSON(self, formatJSON): self.formatJSON = formatJSON def SetNodataFields(self, mapNodataFields, nodataFields, driveListFields, driveSubfieldsChoiceMap, oneItemPerRow): self.mapNodataFields = mapNodataFields self.nodataFields = nodataFields self.driveListFields = driveListFields self.driveSubfieldsChoiceMap = driveSubfieldsChoiceMap self.oneItemPerRow = oneItemPerRow def SetFixPaths(self, fixPaths): 
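    # When fixPaths is True, writeCSVfile() calls FixPathsTitles() so the aggregate 'paths'
    # column is positioned immediately before the first indexed 'path.0'-style column in the header.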
self.fixPaths = fixPaths def SetShowPermissionsLast(self, showPermissionsLast): self.showPermissionsLast = showPermissionsLast def FixCourseAliasesTitles(self): # Put Aliases.* after Aliases try: aliasesIndex = self.sortTitlesList.index('Aliases') index = self.titlesList.index('Aliases.0') tempSortTitlesList = self.sortTitlesList[:] self.SetSortTitles(tempSortTitlesList[:aliasesIndex+1]) while self.titlesList[index].startswith('Aliases.'): self.AddSortTitle(self.titlesList[index]) index += 1 self.AddSortTitles(tempSortTitlesList[aliasesIndex+1:]) except ValueError: pass def RearrangeCourseTitles(self, ttitles, stitles): # Put teachers and students after courseMaterialSets if present, otherwise at end for title in ttitles['list']: if title in self.titlesList: self.titlesList.remove(title) for title in stitles['list']: if title in self.titlesList: self.titlesList.remove(title) try: cmsIndex = self.titlesList.index('courseMaterialSets') self.titlesList = self.titlesList[:cmsIndex]+ttitles['list']+stitles['list']+self.titlesList[cmsIndex:] except ValueError: self.titlesList.extend(ttitles['list']) self.titlesList.extend(stitles['list']) def SortRows(self, title, reverse): if title in self.titlesSet: self.rows.sort(key=lambda k: k[title], reverse=reverse) def SortRowsTwoTitles(self, title1, title2, reverse): if title1 in self.titlesSet and title2 in self.titlesSet: self.rows.sort(key=lambda k: (k[title1], k[title2]), reverse=reverse) def SetRowFilter(self, rowFilter, rowFilterMode): self.rowFilter = rowFilter self.rowFilterMode = rowFilterMode def SetRowDropFilter(self, rowDropFilter, rowDropFilterMode): self.rowDropFilter = rowDropFilter self.rowDropFilterMode = rowDropFilterMode def SetRowLimit(self, rowLimit): self.rowLimit = rowLimit def AppendRow(self, row): if self.timestampColumn: row[self.timestampColumn] = self.todaysTime if not self.rowLimit or self.rowCount < self.rowLimit: self.rowCount +=1 self.rows.append(row) def WriteRowNoFilter(self, row): self.AppendRow(row) def WriteRow(self, row): if RowFilterMatch(row, self.titlesList, self.rowFilter, self.rowFilterMode, self.rowDropFilter, self.rowDropFilterMode): self.AppendRow(row) def WriteRowTitles(self, row): for title in row: if title not in self.titlesSet: self.AddTitle(title) if RowFilterMatch(row, self.titlesList, self.rowFilter, self.rowFilterMode, self.rowDropFilter, self.rowDropFilterMode): self.AppendRow(row) def WriteRowTitlesNoFilter(self, row): for title in row: if title not in self.titlesSet: self.AddTitle(title) self.AppendRow(row) def WriteRowTitlesJSONNoFilter(self, row): for title in row: if title not in self.JSONtitlesSet: self.AddJSONTitle(title) self.AppendRow(row) def CheckRowTitles(self, row): if not self.rowFilter and not self.rowDropFilter: return True for title in row: if title not in self.titlesSet: self.AddTitle(title) return RowFilterMatch(row, self.titlesList, self.rowFilter, self.rowFilterMode, self.rowDropFilter, self.rowDropFilterMode) def UpdateMimeTypeCounts(self, row, mimeTypeInfo, sizeField): saveList = self.titlesList[:] saveSet = set(self.titlesSet) for title in row: if title not in self.titlesSet: self.AddTitle(title) if RowFilterMatch(row, self.titlesList, self.rowFilter, self.rowFilterMode, self.rowDropFilter, self.rowDropFilterMode): mimeTypeInfo.setdefault(row['mimeType'], {'count': 0, 'size': 0}) mimeTypeInfo[row['mimeType']]['count'] += 1 mimeTypeInfo[row['mimeType']]['size'] += int(row.get(sizeField, '0')) self.titlesList = saveList[:] self.titlesSet = set(saveSet) def 
SetZeroBlankMimeTypeCounts(self, zeroBlankMimeTypeCounts): self.zeroBlankMimeTypeCounts = zeroBlankMimeTypeCounts def ZeroBlankMimeTypeCounts(self): for row in self.rows: for title in self.titlesList: if title not in self.sortTitlesSet and title not in row: row[title] = 0 def CheckOutputRowFilterHeaders(self): for filterVal in self.rowFilter: columns = [t for t in self.titlesList if filterVal[0].match(t)] if not columns: stderrWarningMsg(Msg.COLUMN_DOES_NOT_MATCH_ANY_OUTPUT_COLUMNS.format(GC.CSV_OUTPUT_ROW_FILTER, filterVal[0].pattern)) for filterVal in self.rowDropFilter: columns = [t for t in self.titlesList if filterVal[0].match(t)] if not columns: stderrWarningMsg(Msg.COLUMN_DOES_NOT_MATCH_ANY_OUTPUT_COLUMNS.format(GC.CSV_OUTPUT_ROW_DROP_FILTER, filterVal[0].pattern)) def SetHeaderFilter(self, headerFilter): self.headerFilter = headerFilter def SetHeaderDropFilter(self, headerDropFilter): self.headerDropFilter = headerDropFilter def SetHeaderForce(self, headerForce): self.headerForce = headerForce self.SetTitles(headerForce) self.SetJSONTitles(headerForce) def SetHeaderOrder(self, headerOrder): self.headerOrder = headerOrder def orderHeaders(self, titlesList): for title in self.headerOrder: if title in titlesList: titlesList.remove(title) return self.headerOrder+titlesList @staticmethod def HeaderFilterMatch(filters, title): for filterStr in filters: if filterStr.match(title): return True return False def FilterHeaders(self): if self.headerDropFilter: self.titlesList = [t for t in self.titlesList if not self.HeaderFilterMatch(self.headerDropFilter, t)] if self.headerFilter: self.titlesList = [t for t in self.titlesList if self.HeaderFilterMatch(self.headerFilter, t)] self.titlesSet = set(self.titlesList) if not self.titlesSet: systemErrorExit(USAGE_ERROR_RC, Msg.NO_COLUMNS_SELECTED_WITH_CSV_OUTPUT_HEADER_FILTER.format(GC.CSV_OUTPUT_HEADER_FILTER, GC.CSV_OUTPUT_HEADER_DROP_FILTER)) def FilterJSONHeaders(self): if self.headerDropFilter: self.JSONtitlesList = [t for t in self.JSONtitlesList if not self.HeaderFilterMatch(self.headerDropFilter, t)] if self.headerFilter: self.JSONtitlesList = [t for t in self.JSONtitlesList if self.HeaderFilterMatch(self.headerFilter, t)] self.JSONtitlesSet = set(self.JSONtitlesList) if not self.JSONtitlesSet: systemErrorExit(USAGE_ERROR_RC, Msg.NO_COLUMNS_SELECTED_WITH_CSV_OUTPUT_HEADER_FILTER.format(GC.CSV_OUTPUT_HEADER_FILTER, GC.CSV_OUTPUT_HEADER_DROP_FILTER)) def writeCSVfile(self, list_type): def todriveCSVErrorExit(entityValueList, errMsg): systemErrorExit(ACTION_FAILED_RC, formatKeyValueList(Ind.Spaces(), Ent.FormatEntityValueList(entityValueList)+[Act.NotPerformed(), errMsg], currentCountNL(0, 0))) @staticmethod def itemgetter(*items): if len(items) == 1: item = items[0] def g(obj): return obj.get(item, '') else: def g(obj): return tuple(obj.get(item, '') for item in items) return g def writeCSVData(writer): try: if GM.Globals[GM.CSVFILE][GM.REDIRECT_WRITE_HEADER]: writer.writerow(dict((item, item) for item in writer.fieldnames)) if not self.sortHeaders: writer.writerows(self.rows) else: for row in sorted(self.rows, key=itemgetter(*self.sortHeaders)): writer.writerow(row) return True except IOError as e: stderrErrorMsg(e) return False def setDialect(lineterminator, noEscapeChar): writerDialect = { 'delimiter': self.columnDelimiter, 'doublequote': True, 'escapechar': '\\' if not noEscapeChar else None, 'lineterminator': lineterminator, 'quotechar': self.quoteChar, 'quoting': csv.QUOTE_MINIMAL, 'skipinitialspace': False, 'strict': False} return 
writerDialect def normalizeSortHeaders(): if self.sortHeaders: writerKeyMap = {} for k in titlesList: writerKeyMap[k.lower()] = k self.sortHeaders = [writerKeyMap[k.lower()] for k in self.sortHeaders if k.lower() in writerKeyMap] def writeCSVToStdout(): csvFile = StringIOobject() writerDialect = setDialect('\n', self.noEscapeChar) writer = csv.DictWriter(csvFile, titlesList, extrasaction=extrasaction, **writerDialect) if writeCSVData(writer): try: GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].write(csvFile.getvalue()) except IOError as e: stderrErrorMsg(fdErrorMessage(GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD], 'stdout', e)) setSysExitRC(FILE_ERROR_RC) closeFile(csvFile) def writeCSVToFile(): csvFile = openFile(GM.Globals[GM.CSVFILE][GM.REDIRECT_NAME], GM.Globals[GM.CSVFILE][GM.REDIRECT_MODE], newline='', encoding=GM.Globals[GM.CSVFILE][GM.REDIRECT_ENCODING], errors='backslashreplace', continueOnError=True) if csvFile: writerDialect = setDialect(str(GC.Values[GC.CSV_OUTPUT_LINE_TERMINATOR]), self.noEscapeChar) writer = csv.DictWriter(csvFile, titlesList, extrasaction=extrasaction, **writerDialect) writeCSVData(writer) closeFile(csvFile) def writeCSVToDrive(): numRows = len(self.rows) numColumns = len(titlesList) if numRows == 0 and not self.todrive['uploadnodata']: printKeyValueList([Msg.NO_CSV_DATA_TO_UPLOAD]) setSysExitRC(NO_CSV_DATA_TO_UPLOAD_RC) return if self.todrive['addsheet'] or self.todrive['updatesheet']: csvFile = TemporaryFile(mode='w+', encoding=UTF8) else: csvFile = StringIOobject() writerDialect = setDialect('\n', self.todrive['noescapechar']) writer = csv.DictWriter(csvFile, titlesList, extrasaction=extrasaction, **writerDialect) if writeCSVData(writer): if ((self.todrive['title'] is None) or (not self.todrive['title'] and not self.todrive['timestamp'])): title = f'{GC.Values[GC.DOMAIN]} - {list_type}' else: title = self.todrive['title'] if ((self.todrive['sheettitle'] is None) or (not self.todrive['sheettitle'] and not self.todrive['sheettimestamp'])): if ((self.todrive['sheetEntity'] is None) or (not self.todrive['sheetEntity']['sheetTitle'])): sheetTitle = title else: sheetTitle = self.todrive['sheetEntity']['sheetTitle'] else: sheetTitle = self.todrive['sheettitle'] tdbasetime = tdtime = datetime.datetime.now(GC.Values[GC.TIMEZONE]) if self.todrive['daysoffset'] is not None or self.todrive['hoursoffset'] is not None: tdtime = tdbasetime+relativedelta(days=-self.todrive['daysoffset'] if self.todrive['daysoffset'] is not None else 0, hours=-self.todrive['hoursoffset'] if self.todrive['hoursoffset'] is not None else 0) if self.todrive['timestamp']: if title: title += ' - ' if not self.todrive['timeformat']: title += ISOformatTimeStamp(tdtime) else: title += tdtime.strftime(self.todrive['timeformat']) if self.todrive['sheettimestamp']: if self.todrive['sheetdaysoffset'] is not None or self.todrive['sheethoursoffset'] is not None: tdtime = tdbasetime+relativedelta(days=-self.todrive['sheetdaysoffset'] if self.todrive['sheetdaysoffset'] is not None else 0, hours=-self.todrive['sheethoursoffset'] if self.todrive['sheethoursoffset'] is not None else 0) if sheetTitle: sheetTitle += ' - ' if not self.todrive['sheettimeformat']: sheetTitle += ISOformatTimeStamp(tdtime) else: sheetTitle += tdtime.strftime(self.todrive['sheettimeformat']) action = Act.Get() if not GC.Values[GC.TODRIVE_CLIENTACCESS]: user, drive = buildGAPIServiceObject(API.DRIVETD, self.todrive['user']) if not drive: closeFile(csvFile) return else: user = self.todrive['user'] drive = buildGAPIObject(API.DRIVE3) 
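      # The CSV data is now staged in csvFile. The code below either pastes it into an existing
      # spreadsheet sheet (tdfileid with tdaddsheet/tdupdatesheet) via the Sheets API, or
      # creates/updates a Drive file, converting it to a Google Sheet when todrive_conversion
      # is set; the Sheets paths first check the cell-count and import-size limits.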
importSize = csvFile.tell() # Add/Update sheet try: if self.todrive['addsheet'] or self.todrive['updatesheet']: Act.Set(Act.CREATE if self.todrive['addsheet'] else Act.UPDATE) result = callGAPI(drive.about(), 'get', throwReasons=GAPI.DRIVE_USER_THROW_REASONS, fields='maxImportSizes') if numRows*numColumns > MAX_GOOGLE_SHEET_CELLS or importSize > int(result['maxImportSizes'][MIMETYPE_GA_SPREADSHEET]): todriveCSVErrorExit([Ent.USER, user], Msg.RESULTS_TOO_LARGE_FOR_GOOGLE_SPREADSHEET) fields = ','.join(['id', 'mimeType', 'webViewLink', 'name', 'capabilities(canEdit)']) body = {'description': self.todrive['description']} if body['description'] is None: body['description'] = Cmd.QuotedArgumentList(Cmd.AllArguments()) if not self.todrive['retaintitle']: body['name'] = title result = callGAPI(drive.files(), 'update', throwReasons=GAPI.DRIVE_USER_THROW_REASONS+[GAPI.INSUFFICIENT_PERMISSIONS, GAPI.INSUFFICIENT_PARENT_PERMISSIONS, GAPI.FILE_NOT_FOUND, GAPI.UNKNOWN_ERROR], fileId=self.todrive['fileId'], body=body, fields=fields, supportsAllDrives=True) entityValueList = [Ent.USER, user, Ent.DRIVE_FILE_ID, self.todrive['fileId']] if not result['capabilities']['canEdit']: todriveCSVErrorExit(entityValueList, Msg.NOT_WRITABLE) if result['mimeType'] != MIMETYPE_GA_SPREADSHEET: todriveCSVErrorExit(entityValueList, f'{Msg.NOT_A} {Ent.Singular(Ent.SPREADSHEET)}') if not GC.Values[GC.TODRIVE_CLIENTACCESS]: _, sheet = buildGAPIServiceObject(API.SHEETSTD, user) if sheet is None: return else: sheet = buildGAPIObject(API.SHEETS) csvFile.seek(0) spreadsheet = None if self.todrive['updatesheet']: for sheetEntity in iter(self.TDSHEET_ENTITY_MAP.values()): if self.todrive[sheetEntity]: entityValueList = [Ent.USER, user, Ent.SPREADSHEET, title, self.todrive[sheetEntity]['sheetType'], self.todrive[sheetEntity]['sheetValue']] if spreadsheet is None: spreadsheet = callGAPI(sheet.spreadsheets(), 'get', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=self.todrive['fileId'], fields='spreadsheetUrl,sheets(properties(sheetId,title),protectedRanges(range(sheetId),requestingUserCanEdit))') sheetId = getSheetIdFromSheetEntity(spreadsheet, self.todrive[sheetEntity]) if sheetId is None: if ((sheetEntity != 'sheetEntity') or (self.todrive[sheetEntity]['sheetType'] == Ent.SHEET_ID)): todriveCSVErrorExit(entityValueList, Msg.NOT_FOUND) self.todrive['addsheet'] = True else: if protectedSheetId(spreadsheet, sheetId): todriveCSVErrorExit(entityValueList, Msg.NOT_WRITABLE) self.todrive[sheetEntity]['sheetId'] = sheetId if self.todrive['addsheet']: body = {'requests': [{'addSheet': {'properties': {'title': sheetTitle, 'sheetType': 'GRID'}}}]} try: addresult = callGAPI(sheet.spreadsheets(), 'batchUpdate', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=self.todrive['fileId'], body=body) self.todrive['sheetEntity'] = {'sheetId': addresult['replies'][0]['addSheet']['properties']['sheetId']} except (GAPI.notFound, GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.insufficientParentPermissions, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.failedPrecondition) as e: todriveCSVErrorExit(entityValueList, str(e)) body = {'requests': []} if not self.todrive['addsheet']: if self.todrive['backupSheetEntity']: body['requests'].append({'copyPaste': {'source': {'sheetId': self.todrive['sheetEntity']['sheetId']}, 'destination': {'sheetId': self.todrive['backupSheetEntity']['sheetId']}, 'pasteType': 'PASTE_NORMAL'}}) if self.todrive['clearfilter']: 
body['requests'].append({'clearBasicFilter': {'sheetId': self.todrive['sheetEntity']['sheetId']}}) if self.todrive['sheettitle']: body['requests'].append({'updateSheetProperties': {'properties': {'sheetId': self.todrive['sheetEntity']['sheetId'], 'title': sheetTitle}, 'fields': 'title'}}) body['requests'].append({'updateCells': {'range': {'sheetId': self.todrive['sheetEntity']['sheetId']}, 'fields': '*'}}) if self.todrive['cellwrap']: body['requests'].append({'repeatCell': {'range': {'sheetId': self.todrive['sheetEntity']['sheetId']}, 'fields': 'userEnteredFormat.wrapStrategy', 'cell': {'userEnteredFormat': {'wrapStrategy': self.todrive['cellwrap']}}}}) if self.todrive['cellnumberformat']: body['requests'].append({'repeatCell': {'range': {'sheetId': self.todrive['sheetEntity']['sheetId']}, 'fields': 'userEnteredFormat.numberFormat', 'cell': {'userEnteredFormat': {'numberFormat': {'type': self.todrive['cellnumberformat']}}}}}) body['requests'].append({'pasteData': {'coordinate': {'sheetId': self.todrive['sheetEntity']['sheetId'], 'rowIndex': '0', 'columnIndex': '0'}, 'data': csvFile.read(), 'type': 'PASTE_NORMAL', 'delimiter': self.columnDelimiter}}) if self.todrive['copySheetEntity']: body['requests'].append({'copyPaste': {'source': {'sheetId': self.todrive['sheetEntity']['sheetId']}, 'destination': {'sheetId': self.todrive['copySheetEntity']['sheetId']}, 'pasteType': 'PASTE_NORMAL'}}) try: callGAPI(sheet.spreadsheets(), 'batchUpdate', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=self.todrive['fileId'], body=body) except (GAPI.notFound, GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.failedPrecondition) as e: todriveCSVErrorExit(entityValueList, str(e)) closeFile(csvFile) # Create/update file else: if GC.Values[GC.TODRIVE_CONVERSION]: result = callGAPI(drive.about(), 'get', throwReasons=GAPI.DRIVE_USER_THROW_REASONS, fields='maxImportSizes') if numRows*len(titlesList) > MAX_GOOGLE_SHEET_CELLS or importSize > int(result['maxImportSizes'][MIMETYPE_GA_SPREADSHEET]): printKeyValueList([WARNING, Msg.RESULTS_TOO_LARGE_FOR_GOOGLE_SPREADSHEET]) mimeType = 'text/csv' else: mimeType = MIMETYPE_GA_SPREADSHEET else: mimeType = 'text/csv' fields = ','.join(['id', 'mimeType', 'webViewLink']) body = {'description': self.todrive['description'], 'mimeType': mimeType} if body['description'] is None: body['description'] = Cmd.QuotedArgumentList(Cmd.AllArguments()) if not self.todrive['fileId'] or not self.todrive['retaintitle']: body['name'] = title try: if not self.todrive['fileId']: Act.Set(Act.CREATE) body['parents'] = [self.todrive['parentId']] result = callGAPI(drive.files(), 'create', bailOnInternalError=True, throwReasons=GAPI.DRIVE_USER_THROW_REASONS+[GAPI.FORBIDDEN, GAPI.INSUFFICIENT_PERMISSIONS, GAPI.INSUFFICIENT_PARENT_PERMISSIONS, GAPI.FILE_NOT_FOUND, GAPI.UNKNOWN_ERROR, GAPI.INTERNAL_ERROR, GAPI.STORAGE_QUOTA_EXCEEDED, GAPI.TEAMDRIVE_FILE_LIMIT_EXCEEDED, GAPI.TEAMDRIVE_HIERARCHY_TOO_DEEP], body=body, media_body=googleapiclient.http.MediaIoBaseUpload(io.BytesIO(csvFile.getvalue().encode()), mimetype='text/csv', resumable=True), fields=fields, supportsAllDrives=True) else: Act.Set(Act.UPDATE) result = callGAPI(drive.files(), 'update', bailOnInternalError=True, throwReasons=GAPI.DRIVE_USER_THROW_REASONS+[GAPI.INSUFFICIENT_PERMISSIONS, GAPI.INSUFFICIENT_PARENT_PERMISSIONS, GAPI.FILE_NOT_FOUND, GAPI.UNKNOWN_ERROR, GAPI.INTERNAL_ERROR], fileId=self.todrive['fileId'], body=body, 
media_body=googleapiclient.http.MediaIoBaseUpload(io.BytesIO(csvFile.getvalue().encode()), mimetype='text/csv', resumable=True), fields=fields, supportsAllDrives=True) spreadsheetId = result['id'] except GAPI.internalError as e: entityActionFailedWarning([Ent.DRIVE_FILE, body['name']], Msg.UPLOAD_CSV_FILE_INTERNAL_ERROR.format(str(e), str(numRows))) closeFile(csvFile) return closeFile(csvFile) if not self.todrive['fileId'] and self.todrive['share']: Act.Set(Act.SHARE) for share in self.todrive['share']: if share['emailAddress'] != user: try: callGAPI(drive.permissions(), 'create', bailOnInternalError=True, throwReasons=GAPI.DRIVE_ACCESS_THROW_REASONS+GAPI.DRIVE3_CREATE_ACL_THROW_REASONS, fileId=spreadsheetId, sendNotificationEmail=False, body=share, fields='', supportsAllDrives=True) entityActionPerformed([Ent.USER, user, Ent.SPREADSHEET, title, Ent.TARGET_USER, share['emailAddress'], Ent.ROLE, share['role']]) except (GAPI.badRequest, GAPI.invalid, GAPI.fileNotFound, GAPI.forbidden, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.insufficientParentPermissions, GAPI.unknownError, GAPI.ownershipChangeAcrossDomainNotPermitted, GAPI.teamDriveDomainUsersOnlyRestriction, GAPI.teamDriveTeamMembersOnlyRestriction, GAPI.targetUserRoleLimitedByLicenseRestriction, GAPI.insufficientAdministratorPrivileges, GAPI.sharingRateLimitExceeded, GAPI.publishOutNotPermitted, GAPI.shareInNotPermitted, GAPI.shareOutNotPermitted, GAPI.shareOutNotPermittedToUser, GAPI.cannotShareTeamDriveTopFolderWithAnyoneOrDomains, GAPI.cannotShareTeamDriveWithNonGoogleAccounts, GAPI.ownerOnTeamDriveItemNotSupported, GAPI.organizerOnNonTeamDriveNotSupported, GAPI.organizerOnNonTeamDriveItemNotSupported, GAPI.fileOrganizerNotYetEnabledForThisTeamDrive, GAPI.fileOrganizerOnFoldersInSharedDriveOnly, GAPI.fileOrganizerOnNonTeamDriveNotSupported, GAPI.teamDrivesFolderSharingNotSupported, GAPI.invalidLinkVisibility, GAPI.invalidSharingRequest, GAPI.fileNeverWritable, GAPI.abusiveContentRestriction) as e: entityActionFailedWarning([Ent.USER, user, Ent.SPREADSHEET, title, Ent.TARGET_USER, share['emailAddress'], Ent.ROLE, share['role']], str(e)) if ((result['mimeType'] == MIMETYPE_GA_SPREADSHEET) and (self.todrive['sheetEntity'] or self.todrive['locale'] or self.todrive['timeZone'] or self.todrive['sheettitle'] or self.todrive['cellwrap'] or self.todrive['cellnumberformat'])): if not GC.Values[GC.TODRIVE_CLIENTACCESS]: _, sheet = buildGAPIServiceObject(API.SHEETSTD, user) if sheet is None: return else: sheet = buildGAPIObject(API.SHEETS) try: body = {'requests': []} if self.todrive['sheetEntity'] or self.todrive['sheettitle'] or self.todrive['cellwrap']: spreadsheet = callGAPI(sheet.spreadsheets(), 'get', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=spreadsheetId, fields='sheets/properties') spreadsheet['sheets'][0]['properties']['title'] = sheetTitle body['requests'].append({'updateSheetProperties': {'properties': spreadsheet['sheets'][0]['properties'], 'fields': 'title'}}) if self.todrive['cellwrap']: body['requests'].append({'repeatCell': {'range': {'sheetId': spreadsheet['sheets'][0]['properties']['sheetId']}, 'fields': 'userEnteredFormat.wrapStrategy', 'cell': {'userEnteredFormat': {'wrapStrategy': self.todrive['cellwrap']}}}}) if self.todrive['locale']: body['requests'].append({'updateSpreadsheetProperties': {'properties': {'locale': self.todrive['locale']}, 'fields': 'locale'}}) if self.todrive['timeZone']: body['requests'].append({'updateSpreadsheetProperties': {'properties': {'timeZone': 
self.todrive['timeZone']}, 'fields': 'timeZone'}}) if body['requests']: callGAPI(sheet.spreadsheets(), 'batchUpdate', throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS, spreadsheetId=spreadsheetId, body=body) except (GAPI.notFound, GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError, GAPI.insufficientFilePermissions, GAPI.badRequest, GAPI.invalid, GAPI.invalidArgument, GAPI.failedPrecondition, GAPI.teamDriveFileLimitExceeded, GAPI.teamDriveHierarchyTooDeep) as e: todriveCSVErrorExit([Ent.USER, user, Ent.SPREADSHEET, title], str(e)) Act.Set(action) file_url = result['webViewLink'] msg_txt = f'{Msg.DATA_UPLOADED_TO_DRIVE_FILE}:\n{file_url}' if not self.todrive['returnidonly']: printKeyValueList([msg_txt]) else: if self.todrive['fileId']: writeStdout(f'{self.todrive["fileId"]}\n') else: writeStdout(f'{spreadsheetId}\n') if not self.todrive['subject']: subject = title else: subject = self.todrive['subject'].replace('#file#', title).replace('#sheet#', sheetTitle) if not self.todrive['noemail']: send_email(subject, msg_txt, user, clientAccess=GC.Values[GC.TODRIVE_CLIENTACCESS], msgFrom=self.todrive['from']) if self.todrive['notify']: for recipient in self.todrive['share']+self.todrive['alert']: if recipient['emailAddress'] != user: send_email(subject, msg_txt, recipient['emailAddress'], clientAccess=GC.Values[GC.TODRIVE_CLIENTACCESS], msgFrom=self.todrive['from']) if not self.todrive['nobrowser']: webbrowser.open(file_url) except (GAPI.forbidden, GAPI.insufficientPermissions): printWarningMessage(INSUFFICIENT_PERMISSIONS_RC, Msg.INSUFFICIENT_PERMISSIONS_TO_PERFORM_TASK) except (GAPI.fileNotFound, GAPI.unknownError, GAPI.internalError, GAPI.storageQuotaExceeded) as e: if not self.todrive['fileId']: entityActionFailedWarning([Ent.DRIVE_FOLDER, self.todrive['parentId']], str(e)) else: entityActionFailedWarning([Ent.DRIVE_FILE, self.todrive['fileId']], str(e)) except (GAPI.serviceNotAvailable, GAPI.authError, GAPI.domainPolicy) as e: userDriveServiceNotEnabledWarning(user, str(e), 0, 0) else: closeFile(csvFile) if GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE] is not None: GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE].put((GM.REDIRECT_QUEUE_NAME, list_type)) GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE].put((GM.REDIRECT_QUEUE_TODRIVE, self.todrive)) GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE].put((GM.REDIRECT_QUEUE_CSVPF, (self.titlesList, self.sortTitlesList, self.indexedTitles, self.formatJSON, self.JSONtitlesList, self.columnDelimiter, self.noEscapeChar, self.quoteChar, self.sortHeaders, self.timestampColumn, self.mapDrive3Titles, self.fixPaths, self.mapNodataFields, self.nodataFields, self.driveListFields, self.driveSubfieldsChoiceMap, self.oneItemPerRow, self.showPermissionsLast, self.zeroBlankMimeTypeCounts))) GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE].put((GM.REDIRECT_QUEUE_DATA, self.rows)) return if self.zeroBlankMimeTypeCounts: self.ZeroBlankMimeTypeCounts() if self.rowFilter or self.rowDropFilter: self.CheckOutputRowFilterHeaders() if self.headerFilter or self.headerDropFilter: if not self.formatJSON: self.FilterHeaders() else: self.FilterJSONHeaders() extrasaction = 'ignore' else: extrasaction = 'raise' if not self.formatJSON: if not self.headerForce: self.SortTitles() self.SortIndexedTitles(self.titlesList) if self.fixPaths: self.FixPathsTitles(self.titlesList) if self.showPermissionsLast: self.MovePermsToEnd() if not self.rows and self.nodataFields is not None: self.FixNodataTitles() if self.mapDrive3Titles: self. 
MapDrive3TitlesToDrive2() else: self.titlesList = self.headerForce if self.timestampColumn: self.AddTitle(self.timestampColumn) if self.headerOrder: self.titlesList = self.orderHeaders(self.titlesList) titlesList = self.titlesList else: if not self.headerForce: if self.fixPaths: self.FixPathsTitles(self.JSONtitlesList) if not self.rows and self.nodataFields is not None: self.FixNodataTitles() else: self.JSONtitlesList = self.headerForce if self.timestampColumn: for i, v in enumerate(self.JSONtitlesList): if v.startswith('JSON'): self.JSONtitlesList.insert(i, self.timestampColumn) self.JSONtitlesSet.add(self.timestampColumn) break else: self.AddJSONTitle(self.timestampColumn) if self.headerOrder: self.JSONtitlesList = self.orderHeaders(self.JSONtitlesList) titlesList = self.JSONtitlesList normalizeSortHeaders() if self.outputTranspose: newRows = [] pivotKey = titlesList[0] newTitlesList = [pivotKey] newTitlesSet = set(newTitlesList) for title in titlesList[1:]: newRow = {pivotKey: title} for row in self.rows: pivotValue = row[pivotKey] if pivotValue not in newTitlesSet: newTitlesSet.add(pivotValue) newTitlesList.append(pivotValue) newRow[pivotValue] = row.get(title) newRows.append(newRow) titlesList = newTitlesList self.rows = newRows if (not self.todrive) or self.todrive['localcopy']: if GM.Globals[GM.CSVFILE][GM.REDIRECT_NAME] == '-': if GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD]: writeCSVToStdout() else: GM.Globals[GM.CSVFILE][GM.REDIRECT_NAME] = GM.Globals[GM.STDOUT][GM.REDIRECT_NAME] writeCSVToFile() else: writeCSVToFile() if self.todrive: writeCSVToDrive() if GM.Globals[GM.CSVFILE][GM.REDIRECT_MODE] == DEFAULT_FILE_APPEND_MODE: GM.Globals[GM.CSVFILE][GM.REDIRECT_WRITE_HEADER] = False def writeEntityNoHeaderCSVFile(entityType, entityList): csvPF = CSVPrintFile(entityType) _, _, entityList = getEntityArgument(entityList) if entityType == Ent.USER: for entity in entityList: csvPF.WriteRowNoFilter({entityType: normalizeEmailAddressOrUID(entity)}) else: for entity in entityList: csvPF.WriteRowNoFilter({entityType: entity}) GM.Globals[GM.CSVFILE][GM.REDIRECT_WRITE_HEADER] = False csvPF.writeCSVfile(Ent.Plural(entityType)) def getTodriveOnly(csvPF): while Cmd.ArgumentsRemaining(): myarg = getArgument() if csvPF and myarg == 'todrive': csvPF.GetTodriveParameters() else: unknownArgumentExit() DEFAULT_SKIP_OBJECTS = {'kind', 'etag', 'etags', '@type'} # Clean a JSON object def cleanJSON(topStructure, listLimit=None, skipObjects=None, timeObjects=None): def _clean(structure, key, subSkipObjects): if not isinstance(structure, (dict, list)): if key not in timeObjects: if isinstance(structure, str) and GC.Values[GC.CSV_OUTPUT_CONVERT_CR_NL]: return escapeCRsNLs(structure) return structure if isinstance(structure, str) and not structure.isdigit(): return formatLocalTime(structure) return formatLocalTimestamp(structure) if isinstance(structure, list): listLen = len(structure) listLen = min(listLen, listLimit or listLen) return [_clean(v, '', DEFAULT_SKIP_OBJECTS) for v in structure[0:listLen]] return {k: _clean(v, k, DEFAULT_SKIP_OBJECTS) for k, v in sorted(iter(structure.items())) if k not in subSkipObjects} timeObjects = timeObjects or set() return _clean(topStructure, '', DEFAULT_SKIP_OBJECTS.union(skipObjects or set())) # Flatten a JSON object def flattenJSON(topStructure, flattened=None, listLimit=None, skipObjects=None, timeObjects=None, noLenObjects=None, simpleLists=None, delimiter=None): def _flatten(structure, key, path): if not isinstance(structure, (dict, list)): if key not in 
timeObjects: if isinstance(structure, str): if GC.Values[GC.CSV_OUTPUT_CONVERT_CR_NL] and (structure.find('\n') >= 0 or structure.find('\r') >= 0): flattened[path] = escapeCRsNLs(structure) else: flattened[path] = structure else: flattened[path] = structure else: if isinstance(structure, str) and not structure.isdigit(): flattened[path] = formatLocalTime(structure) else: flattened[path] = formatLocalTimestamp(structure) elif isinstance(structure, list): listLen = len(structure) listLen = min(listLen, listLimit or listLen) if key in simpleLists: flattened[path] = delimiter.join(structure[:listLen]) else: if key not in noLenObjects: flattened[path] = listLen for i in range(listLen): _flatten(structure[i], '', f'{path}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{i}') else: if structure: for k, v in sorted(iter(structure.items())): if k not in DEFAULT_SKIP_OBJECTS: _flatten(v, k, f'{path}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}') else: flattened[path] = '' flattened = flattened or {} allSkipObjects = DEFAULT_SKIP_OBJECTS.union(skipObjects or set()) timeObjects = timeObjects or set() noLenObjects = noLenObjects or set() simpleLists = simpleLists or set() for k, v in sorted(iter(topStructure.items())): if k not in allSkipObjects: _flatten(v, k, k) return flattened # Show a json object def showJSON(showName, showValue, skipObjects=None, timeObjects=None, simpleLists=None, dictObjectsKey=None, sortDictKeys=True): def _show(objectName, objectValue, subObjectKey, level, subSkipObjects): if objectName in subSkipObjects: return if objectName is not None: printJSONKey(objectName) subObjectKey = dictObjectsKey.get(objectName) if isinstance(objectValue, list): if objectName in simpleLists: printJSONValue(' '.join(objectValue)) return if len(objectValue) == 1 and isinstance(objectValue[0], (str, bool, float, int)): if objectName is not None: printJSONValue(objectValue[0]) else: printKeyValueList([objectValue[0]]) return if objectName is not None: printBlankLine() Ind.Increment() for subValue in objectValue: if isinstance(subValue, (str, bool, float, int)): printKeyValueList([subValue]) else: _show(None, subValue, subObjectKey, level+1, DEFAULT_SKIP_OBJECTS) if objectName is not None: Ind.Decrement() elif isinstance(objectValue, dict): indentAfterFirst = unindentAfterLast = False if objectName is not None: printBlankLine() Ind.Increment() elif level > 0: indentAfterFirst = unindentAfterLast = True subObjects = sorted(objectValue) if sortDictKeys else objectValue.keys() if subObjectKey and (subObjectKey in subObjects): subObjects.remove(subObjectKey) subObjects.insert(0, subObjectKey) subObjectKey = None for subObject in subObjects: if subObject not in subSkipObjects: _show(subObject, objectValue[subObject], subObjectKey, level+1, DEFAULT_SKIP_OBJECTS) if indentAfterFirst: Ind.Increment() indentAfterFirst = False if objectName is not None or ((not indentAfterFirst) and unindentAfterLast): Ind.Decrement() else: if objectName not in timeObjects: if isinstance(objectValue, str) and (objectValue.find('\n') >= 0 or objectValue.find('\r') >= 0): if GC.Values[GC.SHOW_CONVERT_CR_NL]: printJSONValue(escapeCRsNLs(objectValue)) else: printBlankLine() Ind.Increment() printKeyValueList([Ind.MultiLineText(objectValue)]) Ind.Decrement() else: printJSONValue(objectValue if objectValue is not None else '') else: if isinstance(objectValue, str) and not objectValue.isdigit(): printJSONValue(formatLocalTime(objectValue)) else: printJSONValue(formatLocalTimestamp(objectValue)) timeObjects = timeObjects or set() 
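# --- Illustrative sketch (not GAM code): the idea behind flattenJSON() above. Nested API
# responses are turned into delimiter-joined keys so they can become CSV columns; the '.'
# delimiter, the helper name and the sample data below are assumptions for the example only.
def _flatten_for_csv(value, path='', delimiter='.', out=None):
  if out is None:
    out = {}
  if isinstance(value, dict):
    for k in sorted(value):
      _flatten_for_csv(value[k], f'{path}{delimiter}{k}' if path else k, delimiter, out)
  elif isinstance(value, list):
    out[path] = len(value)  # record the list length, as flattenJSON does for non-noLen objects
    for i, v in enumerate(value):
      _flatten_for_csv(v, f'{path}{delimiter}{i}', delimiter, out)
  else:
    out[path] = value
  return out
# Example: _flatten_for_csv({'name': {'givenName': 'Jane'}, 'emails': [{'address': 'jane@example.com'}]})
# -> {'name.givenName': 'Jane', 'emails': 1, 'emails.0.address': 'jane@example.com'}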
simpleLists = simpleLists or set() dictObjectsKey = dictObjectsKey or {} _show(showName, showValue, None, 0, DEFAULT_SKIP_OBJECTS.union(skipObjects or set())) class FormatJSONQuoteChar(): def __init__(self, csvPF=None, formatJSONOnly=False): self.SetCsvPF(csvPF) self.SetFormatJSON(False) self.SetQuoteChar(GM.Globals.get(GM.CSV_OUTPUT_QUOTE_CHAR, GC.Values.get(GC.CSV_OUTPUT_QUOTE_CHAR, '"'))) if not formatJSONOnly: return while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'formatjson': self.SetFormatJSON(True) return unknownArgumentExit() def SetCsvPF(self, csvPF): self.csvPF = csvPF def SetFormatJSON(self, formatJSON): self.formatJSON = formatJSON if self.csvPF: self.csvPF.SetFormatJSON(formatJSON) def GetFormatJSON(self, myarg): if myarg == 'formatjson': self.SetFormatJSON(True) return unknownArgumentExit() def SetQuoteChar(self, quoteChar): self.quoteChar = quoteChar if self.csvPF: self.csvPF.SetQuoteChar(quoteChar) def GetQuoteChar(self, myarg): if self.csvPF and myarg == 'quotechar': self.SetQuoteChar(getCharacter()) return unknownArgumentExit() def GetFormatJSONQuoteChar(self, myarg, addTitle=False, noExit=False): if myarg == 'formatjson': self.SetFormatJSON(True) if self.csvPF and addTitle: self.csvPF.AddJSONTitles('JSON') return True if self.csvPF and myarg == 'quotechar': self.SetQuoteChar(getCharacter()) return True if noExit: return False unknownArgumentExit() # Batch processing request_id fields RI_ENTITY = 0 RI_I = 1 RI_COUNT = 2 RI_J = 3 RI_JCOUNT = 4 RI_ITEM = 5 RI_ROLE = 6 RI_OPTION = 7 def batchRequestID(entityName, i, count, j, jcount, item, role=None, option=None): if role is None and option is None: return f'{entityName}\n{i}\n{count}\n{j}\n{jcount}\n{item}' return f'{entityName}\n{i}\n{count}\n{j}\n{jcount}\n{item}\n{role}\n{option}' TIME_OFFSET_UNITS = [('day', SECONDS_PER_DAY), ('hour', SECONDS_PER_HOUR), ('minute', SECONDS_PER_MINUTE), ('second', 1)] def getLocalGoogleTimeOffset(testLocation=GOOGLE_TIMECHECK_LOCATION): # If local time is well off, it breaks https because the server certificate will be seen as too old or new and thus invalid; http doesn't have that issue. # Try with http first, if time is close (.googleapis.com so # add those domains. disc_hosts = [] for api, config in API._INFO.items(): if config.get('v2discovery') and not config.get('localdiscovery'): if mapped_api := config.get('mappedAPI'): api = mapped_api host = f'{api}.googleapis.com' if host not in disc_hosts: disc_hosts.append(host) for host in disc_hosts: check_host(host) checked_hosts = initial_hosts + api_hosts + disc_hosts # now we need to "build" each API and check it's base URL host # if we haven't already. This may not be any hosts at all but # to ensure we are checking all hosts GAM may use we should # keep this. for api in API._INFO: if api in [API.CONTACTS, API.EMAIL_AUDIT]: continue svc = getService(api, httpObj) base_url = svc._rootDesc.get('baseUrl') parsed_base_url = urlparse(base_url) base_host = parsed_base_url.netloc if base_host not in checked_hosts: print(f'checking {base_host} for {api}') check_host(base_host) checked_hosts.append(base_host) if success_count == try_count: writeStdout(createGreenText('All hosts passed!\n')) else: systemErrorExit(3, createYellowText('Some hosts failed to connect! 
Please follow the recommendations for those hosts to correct any issues and try again.')) # gam comment def doComment(): writeStdout(Cmd.QuotedArgumentList(Cmd.Remaining())+'\n') # gam version [check|checkrc|simple|extended] [timeoffset] [nooffseterror] [location ] def doVersion(checkForArgs=True): forceCheck = 0 extended = noOffsetError = timeOffset = simple = False testLocation = GOOGLE_TIMECHECK_LOCATION if checkForArgs: while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'check': forceCheck = 1 elif myarg == 'checkrc': forceCheck = -1 elif myarg == 'simple': simple = True elif myarg == 'extended': extended = timeOffset = True elif myarg == 'timeoffset': timeOffset = True elif myarg == 'nooffseterror': noOffsetError = True elif myarg == 'location': testLocation = getString(Cmd.OB_HOST_NAME) else: unknownArgumentExit() if simple: writeStdout(__version__) return writeStdout((f'{GAM} {__version__} - {GAM_URL} - {GM.Globals[GM.GAM_TYPE]}\n' f'{__author__}\n' f'Python {sys.version_info[0]}.{sys.version_info[1]}.{sys.version_info[2]} {struct.calcsize("P")*8}-bit {sys.version_info[3]}\n' f'{getOSPlatform()} {platform.machine()}\n' f'Path: {GM.Globals[GM.GAM_PATH]}\n' f'{Ent.Singular(Ent.CONFIG_FILE)}: {GM.Globals[GM.GAM_CFG_FILE]}, {Ent.Singular(Ent.SECTION)}: {GM.Globals[GM.GAM_CFG_SECTION_NAME]}, ' f'{GC.CUSTOMER_ID}: {GC.Values[GC.CUSTOMER_ID]}, {GC.DOMAIN}: {GC.Values[GC.DOMAIN]}\n' f'Time: {ISOformatTimeStamp(todaysTime())}\n' )) if sys.platform.startswith('win') and str(struct.calcsize('P')*8).find('32') != -1 and platform.machine().find('64') != -1: printKeyValueList([Msg.UPDATE_GAM_TO_64BIT]) if timeOffset: offsetSeconds, offsetFormatted = getLocalGoogleTimeOffset(testLocation) printKeyValueList([Msg.YOUR_SYSTEM_TIME_DIFFERS_FROM_GOOGLE.format(testLocation, offsetFormatted)]) if offsetSeconds > MAX_LOCAL_GOOGLE_TIME_OFFSET: if not noOffsetError: systemErrorExit(NETWORK_ERROR_RC, Msg.PLEASE_CORRECT_YOUR_SYSTEM_TIME) stderrWarningMsg(Msg.PLEASE_CORRECT_YOUR_SYSTEM_TIME) if forceCheck: doGAMCheckForUpdates(forceCheck) if extended: printKeyValueList([ssl.OPENSSL_VERSION]) tls_ver, cipher_name = _getServerTLSUsed(testLocation) for lib in glverlibs.GAM_VER_LIBS: try: writeStdout(f'{lib} {lib_version(lib)}\n') except: pass printKeyValueList([f'{testLocation} connects using {tls_ver} {cipher_name}']) # gam help def doUsage(): printBlankLine() doVersion(checkForArgs=False) writeStdout(Msg.HELP_SYNTAX.format(os.path.join(GM.Globals[GM.GAM_PATH], FN_GAMCOMMANDS_TXT))) writeStdout(Msg.HELP_WIKI.format(GAM_WIKI)) class NullHandler(logging.Handler): def emit(self, record): pass def initializeLogging(): nh = NullHandler() logging.getLogger().addHandler(nh) def saveNonPickleableValues(): savedValues = {GM.STDOUT: {}, GM.STDERR: {}, GM.SAVED_STDOUT: None, GM.CMDLOG_HANDLER: None, GM.CMDLOG_LOGGER: None} savedValues[GM.SAVED_STDOUT] = GM.Globals[GM.SAVED_STDOUT] GM.Globals[GM.SAVED_STDOUT] = None savedValues[GM.STDOUT][GM.REDIRECT_FD] = GM.Globals[GM.STDOUT].get(GM.REDIRECT_FD, None) GM.Globals[GM.STDOUT].pop(GM.REDIRECT_FD, None) savedValues[GM.STDERR][GM.REDIRECT_FD] = GM.Globals[GM.STDERR].get(GM.REDIRECT_FD, None) GM.Globals[GM.STDERR].pop(GM.REDIRECT_FD, None) savedValues[GM.STDOUT][GM.REDIRECT_MULTI_FD] = GM.Globals[GM.STDOUT].get(GM.REDIRECT_MULTI_FD, None) GM.Globals[GM.STDOUT].pop(GM.REDIRECT_MULTI_FD, None) savedValues[GM.STDERR][GM.REDIRECT_MULTI_FD] = GM.Globals[GM.STDERR].get(GM.REDIRECT_MULTI_FD, None) GM.Globals[GM.STDERR].pop(GM.REDIRECT_MULTI_FD, None) 
savedValues[GM.CMDLOG_HANDLER] = GM.Globals[GM.CMDLOG_HANDLER] GM.Globals[GM.CMDLOG_HANDLER] = None savedValues[GM.CMDLOG_LOGGER] = GM.Globals[GM.CMDLOG_LOGGER] GM.Globals[GM.CMDLOG_LOGGER] = None return savedValues def restoreNonPickleableValues(savedValues): GM.Globals[GM.SAVED_STDOUT] = savedValues[GM.SAVED_STDOUT] GM.Globals[GM.STDOUT][GM.REDIRECT_FD] = savedValues[GM.STDOUT][GM.REDIRECT_FD] GM.Globals[GM.STDERR][GM.REDIRECT_FD] = savedValues[GM.STDERR][GM.REDIRECT_FD] GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] = savedValues[GM.STDOUT][GM.REDIRECT_MULTI_FD] GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD] = savedValues[GM.STDERR][GM.REDIRECT_MULTI_FD] GM.Globals[GM.CMDLOG_HANDLER] = savedValues[GM.CMDLOG_HANDLER] GM.Globals[GM.CMDLOG_LOGGER] = savedValues[GM.CMDLOG_LOGGER] def CSVFileQueueHandler(mpQueue, mpQueueStdout, mpQueueStderr, csvPF, datetimeNow, tzinfo, output_timeformat): global Cmd def reopenSTDFile(stdtype): if GM.Globals[stdtype][GM.REDIRECT_NAME] == 'null': GM.Globals[stdtype][GM.REDIRECT_FD] = open(os.devnull, GM.Globals[stdtype][GM.REDIRECT_MODE], encoding=UTF8) elif GM.Globals[stdtype][GM.REDIRECT_NAME] == '-': GM.Globals[stdtype][GM.REDIRECT_FD] = os.fdopen(os.dup([sys.stderr.fileno(), sys.stdout.fileno()][stdtype == GM.STDOUT]), GM.Globals[stdtype][GM.REDIRECT_MODE], encoding=GM.Globals[GM.SYS_ENCODING]) elif stdtype == GM.STDERR and GM.Globals[stdtype][GM.REDIRECT_NAME] == 'stdout': GM.Globals[stdtype][GM.REDIRECT_FD] = GM.Globals[GM.STDOUT][GM.REDIRECT_FD] else: GM.Globals[stdtype][GM.REDIRECT_FD] = openFile(GM.Globals[stdtype][GM.REDIRECT_NAME], GM.Globals[stdtype][GM.REDIRECT_MODE]) if stdtype == GM.STDERR and GM.Globals[stdtype][GM.REDIRECT_NAME] == 'stdout': GM.Globals[stdtype][GM.REDIRECT_MULTI_FD] = GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] else: GM.Globals[stdtype][GM.REDIRECT_MULTI_FD] = GM.Globals[stdtype][GM.REDIRECT_FD] if not GM.Globals[stdtype][GM.REDIRECT_MULTIPROCESS] else StringIOobject() GM.Globals[GM.DATETIME_NOW] = datetimeNow GC.Values[GC.TIMEZONE] = tzinfo GC.Values[GC.OUTPUT_TIMEFORMAT] = output_timeformat # if sys.platform.startswith('win'): # signal.signal(signal.SIGINT, signal.SIG_IGN) if multiprocessing.get_start_method() == 'spawn': signal.signal(signal.SIGINT, signal.SIG_IGN) Cmd = glclargs.GamCLArgs() else: csvPF.SetColumnDelimiter(GC.Values[GC.CSV_OUTPUT_COLUMN_DELIMITER]) csvPF.SetNoEscapeChar(GC.Values[GC.CSV_OUTPUT_NO_ESCAPE_CHAR]) csvPF.SetQuoteChar(GC.Values[GC.CSV_OUTPUT_QUOTE_CHAR]) csvPF.SetSortHeaders(GC.Values[GC.CSV_OUTPUT_SORT_HEADERS]) csvPF.SetTimestampColumn(GC.Values[GC.CSV_OUTPUT_TIMESTAMP_COLUMN]) csvPF.SetHeaderFilter(GC.Values[GC.CSV_OUTPUT_HEADER_FILTER]) csvPF.SetHeaderDropFilter(GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER]) csvPF.SetRowFilter(GC.Values[GC.CSV_OUTPUT_ROW_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE]) csvPF.SetRowDropFilter(GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE]) csvPF.SetRowLimit(GC.Values[GC.CSV_OUTPUT_ROW_LIMIT]) list_type = 'CSV' while True: dataType, dataItem = mpQueue.get() if dataType == GM.REDIRECT_QUEUE_NAME: list_type = dataItem elif dataType == GM.REDIRECT_QUEUE_TODRIVE: csvPF.todrive = dataItem elif dataType == GM.REDIRECT_QUEUE_CSVPF: csvPF.AddTitles(dataItem[0]) csvPF.SetSortTitles(dataItem[1]) csvPF.SetIndexedTitles(dataItem[2]) csvPF.SetFormatJSON(dataItem[3]) csvPF.AddJSONTitles(dataItem[4]) csvPF.SetColumnDelimiter(dataItem[5]) csvPF.SetNoEscapeChar(dataItem[6]) csvPF.SetQuoteChar(dataItem[7]) csvPF.SetSortHeaders(dataItem[8]) 
csvPF.SetTimestampColumn(dataItem[9]) csvPF.SetMapDrive3Titles(dataItem[10]) csvPF.SetFixPaths(dataItem[11]) csvPF.SetNodataFields(dataItem[12], dataItem[13], dataItem[14], dataItem[15], dataItem[16]) csvPF.SetShowPermissionsLast(dataItem[17]) csvPF.SetZeroBlankMimeTypeCounts(dataItem[18]) elif dataType == GM.REDIRECT_QUEUE_DATA: csvPF.rows.extend(dataItem) elif dataType == GM.REDIRECT_QUEUE_ARGS: Cmd.InitializeArguments(dataItem) elif dataType == GM.REDIRECT_QUEUE_GLOBALS: GM.Globals = dataItem if multiprocessing.get_start_method() == 'spawn': reopenSTDFile(GM.STDOUT) reopenSTDFile(GM.STDERR) elif dataType == GM.REDIRECT_QUEUE_VALUES: GC.Values = dataItem csvPF.SetColumnDelimiter(GC.Values[GC.CSV_OUTPUT_COLUMN_DELIMITER]) csvPF.SetNoEscapeChar(GC.Values[GC.CSV_OUTPUT_NO_ESCAPE_CHAR]) csvPF.SetQuoteChar(GC.Values[GC.CSV_OUTPUT_QUOTE_CHAR]) csvPF.SetSortHeaders(GC.Values[GC.CSV_OUTPUT_SORT_HEADERS]) csvPF.SetTimestampColumn(GC.Values[GC.CSV_OUTPUT_TIMESTAMP_COLUMN]) csvPF.SetHeaderFilter(GC.Values[GC.CSV_OUTPUT_HEADER_FILTER]) csvPF.SetHeaderDropFilter(GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER]) csvPF.SetRowFilter(GC.Values[GC.CSV_OUTPUT_ROW_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE]) csvPF.SetRowDropFilter(GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE]) csvPF.SetRowLimit(GC.Values[GC.CSV_OUTPUT_ROW_LIMIT]) else: #GM.REDIRECT_QUEUE_EOF break csvPF.writeCSVfile(list_type) if mpQueueStdout: mpQueueStdout.put((0, GM.REDIRECT_QUEUE_DATA, GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].getvalue())) else: flushStdout() if mpQueueStderr and mpQueueStderr is not mpQueueStdout: mpQueueStderr.put((0, GM.REDIRECT_QUEUE_DATA, GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].getvalue())) else: flushStderr() def initializeCSVFileQueueHandler(mpManager, mpQueueStdout, mpQueueStderr): mpQueue = mpManager.Queue() mpQueueHandler = multiprocessing.Process(target=CSVFileQueueHandler, args=(mpQueue, mpQueueStdout, mpQueueStderr, GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE_CSVPF], GM.Globals[GM.DATETIME_NOW], GC.Values[GC.TIMEZONE], GC.Values[GC.OUTPUT_TIMEFORMAT])) mpQueueHandler.start() return (mpQueue, mpQueueHandler) def terminateCSVFileQueueHandler(mpQueue, mpQueueHandler): GM.Globals[GM.PARSER] = None GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE] = None if multiprocessing.get_start_method() == 'spawn': mpQueue.put((GM.REDIRECT_QUEUE_ARGS, Cmd.AllArguments())) savedValues = saveNonPickleableValues() mpQueue.put((GM.REDIRECT_QUEUE_GLOBALS, GM.Globals)) restoreNonPickleableValues(savedValues) mpQueue.put((GM.REDIRECT_QUEUE_VALUES, GC.Values)) mpQueue.put((GM.REDIRECT_QUEUE_EOF, None)) mpQueueHandler.join() def StdQueueHandler(mpQueue, stdtype, gmGlobals, gcValues): PROCESS_MSG = '{0}: {1:6d}, {2:>5s}: {3}, RC: {4:3d}, Cmd: {5}\n' def _writeData(data): fd.write(data) def _writePidData(pid, data): try: if pid != 0 and GC.Values[GC.SHOW_MULTIPROCESS_INFO]: _writeData(PROCESS_MSG.format(pidData[pid]['queue'], pid, 'Start', pidData[pid]['start'], 0, pidData[pid]['cmd'])) if data[1] is not None: _writeData(data[1]) if GC.Values[GC.SHOW_MULTIPROCESS_INFO]: _writeData(PROCESS_MSG.format(pidData[pid]['queue'], pid, 'End', currentISOformatTimeStamp(), data[0], pidData[pid]['cmd'])) fd.flush() except IOError as e: systemErrorExit(FILE_ERROR_RC, fdErrorMessage(fd, GM.Globals[stdtype][GM.REDIRECT_NAME], e)) # if sys.platform.startswith('win'): # signal.signal(signal.SIGINT, signal.SIG_IGN) if multiprocessing.get_start_method() == 'spawn': signal.signal(signal.SIGINT, signal.SIG_IGN) 
GM.Globals = gmGlobals.copy() GC.Values = gcValues.copy() pid0DataItem = [KEYBOARD_INTERRUPT_RC, None] pidData = {} if multiprocessing.get_start_method() == 'spawn': if GM.Globals[stdtype][GM.REDIRECT_NAME] == 'null': fd = open(os.devnull, GM.Globals[stdtype][GM.REDIRECT_MODE], encoding=UTF8) elif GM.Globals[stdtype][GM.REDIRECT_NAME] == '-': fd = os.fdopen(os.dup([sys.stderr.fileno(), sys.stdout.fileno()][GM.Globals[stdtype][GM.REDIRECT_QUEUE] == 'stdout']), GM.Globals[stdtype][GM.REDIRECT_MODE], encoding=GM.Globals[GM.SYS_ENCODING]) elif GM.Globals[stdtype][GM.REDIRECT_NAME] == 'stdout' and GM.Globals[stdtype][GM.REDIRECT_QUEUE] == 'stderr': fd = os.fdopen(os.dup(sys.stdout.fileno()), GM.Globals[stdtype][GM.REDIRECT_MODE], encoding=GM.Globals[GM.SYS_ENCODING]) else: fd = openFile(GM.Globals[stdtype][GM.REDIRECT_NAME], GM.Globals[stdtype][GM.REDIRECT_MODE]) else: fd = GM.Globals[stdtype][GM.REDIRECT_FD] while True: try: pid, dataType, dataItem = mpQueue.get() except (EOFError, ValueError): break if dataType == GM.REDIRECT_QUEUE_START: pidData[pid] = {'queue': GM.Globals[stdtype][GM.REDIRECT_QUEUE], 'start': currentISOformatTimeStamp(), 'cmd': Cmd.QuotedArgumentList(dataItem)} if pid == 0 and GC.Values[GC.SHOW_MULTIPROCESS_INFO]: fd.write(PROCESS_MSG.format(pidData[pid]['queue'], pid, 'Start', pidData[pid]['start'], 0, pidData[pid]['cmd'])) elif dataType == GM.REDIRECT_QUEUE_DATA: _writeData(dataItem) elif dataType == GM.REDIRECT_QUEUE_END: if pid != 0: _writePidData(pid, dataItem) del pidData[pid] else: pid0DataItem = dataItem else: #GM.REDIRECT_QUEUE_EOF break for pid in pidData: if pid != 0: _writePidData(pid, [KEYBOARD_INTERRUPT_RC, None]) _writePidData(0, pid0DataItem) if fd not in [sys.stdout, sys.stderr]: try: fd.flush() fd.close() except IOError: pass GM.Globals[stdtype][GM.REDIRECT_FD] = None def initializeStdQueueHandler(mpManager, stdtype, gmGlobals, gcValues): mpQueue = mpManager.Queue() mpQueueHandler = multiprocessing.Process(target=StdQueueHandler, args=(mpQueue, stdtype, gmGlobals, gcValues)) mpQueueHandler.start() return (mpQueue, mpQueueHandler) def batchWriteStderr(data): try: sys.stderr.write(data) sys.stderr.flush() except IOError as e: systemErrorExit(FILE_ERROR_RC, fileErrorMessage('stderr', e)) def writeStdQueueHandler(mpQueue, item): while True: try: mpQueue.put(item) return except Exception as e: time.sleep(1) batchWriteStderr(f'{currentISOformatTimeStamp()},{item[0]}/{GM.Globals[GM.NUM_BATCH_ITEMS]},Error,{str(e)}\n') def terminateStdQueueHandler(mpQueue, mpQueueHandler): mpQueue.put((0, GM.REDIRECT_QUEUE_EOF, None)) mpQueueHandler.join() def ProcessGAMCommandMulti(pid, numItems, logCmd, mpQueueCSVFile, mpQueueStdout, mpQueueStderr, debugLevel, todrive, printAguDomains, printCrosOUs, printCrosOUsAndChildren, output_dateformat, output_timeformat, csvColumnDelimiter, csvNoEscapeChar, csvQuoteChar, csvSortHeaders, csvTimestampColumn, csvHeaderFilter, csvHeaderDropFilter, csvHeaderForce, csvHeaderOrder, csvRowFilter, csvRowFilterMode, csvRowDropFilter, csvRowDropFilterMode, csvRowLimit, showGettings, showGettingsGotNL, args): global mplock with mplock: initializeLogging() # if sys.platform.startswith('win'): if multiprocessing.get_start_method() == 'spawn': signal.signal(signal.SIGINT, signal.SIG_IGN) GM.Globals[GM.API_CALLS_RETRY_DATA] = {} GM.Globals[GM.CMDLOG_LOGGER] = None GM.Globals[GM.CSVFILE] = {} GM.Globals[GM.CSV_DATA_DICT] = {} GM.Globals[GM.CSV_KEY_FIELD] = None GM.Globals[GM.CSV_SUBKEY_FIELD] = None GM.Globals[GM.CSV_DATA_FIELD] = None 
GM.Globals[GM.CSV_OUTPUT_COLUMN_DELIMITER] = csvColumnDelimiter GM.Globals[GM.CSV_OUTPUT_NO_ESCAPE_CHAR] = csvNoEscapeChar GM.Globals[GM.CSV_OUTPUT_HEADER_DROP_FILTER] = csvHeaderDropFilter[:] GM.Globals[GM.CSV_OUTPUT_HEADER_FILTER] = csvHeaderFilter[:] GM.Globals[GM.CSV_OUTPUT_HEADER_FORCE] = csvHeaderForce[:] GM.Globals[GM.CSV_OUTPUT_HEADER_ORDER] = csvHeaderOrder[:] GM.Globals[GM.CSV_OUTPUT_QUOTE_CHAR] = csvQuoteChar GM.Globals[GM.CSV_OUTPUT_ROW_DROP_FILTER] = csvRowDropFilter[:] GM.Globals[GM.CSV_OUTPUT_ROW_DROP_FILTER_MODE] = csvRowDropFilterMode GM.Globals[GM.CSV_OUTPUT_ROW_FILTER] = csvRowFilter[:] GM.Globals[GM.CSV_OUTPUT_ROW_FILTER_MODE] = csvRowFilterMode GM.Globals[GM.CSV_OUTPUT_ROW_LIMIT] = csvRowLimit GM.Globals[GM.CSV_OUTPUT_SORT_HEADERS] = csvSortHeaders[:] GM.Globals[GM.CSV_OUTPUT_TIMESTAMP_COLUMN] = csvTimestampColumn GM.Globals[GM.CSV_TODRIVE] = todrive.copy() GM.Globals[GM.DEBUG_LEVEL] = debugLevel GM.Globals[GM.OUTPUT_DATEFORMAT] = output_dateformat GM.Globals[GM.OUTPUT_TIMEFORMAT] = output_timeformat GM.Globals[GM.NUM_BATCH_ITEMS] = numItems GM.Globals[GM.PID] = pid GM.Globals[GM.PRINT_AGU_DOMAINS] = printAguDomains[:] GM.Globals[GM.PRINT_CROS_OUS] = printCrosOUs[:] GM.Globals[GM.PRINT_CROS_OUS_AND_CHILDREN] = printCrosOUsAndChildren[:] GM.Globals[GM.SAVED_STDOUT] = None GM.Globals[GM.SHOW_GETTINGS] = showGettings GM.Globals[GM.SHOW_GETTINGS_GOT_NL] = showGettingsGotNL GM.Globals[GM.SYSEXITRC] = 0 GM.Globals[GM.PARSER] = None if mpQueueCSVFile: GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE] = mpQueueCSVFile if mpQueueStdout: GM.Globals[GM.STDOUT] = {GM.REDIRECT_NAME: '', GM.REDIRECT_FD: None, GM.REDIRECT_MULTI_FD: StringIOobject()} if debugLevel: sys.stdout = GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] # mpQueueStdout.put((pid, GM.REDIRECT_QUEUE_START, args)) writeStdQueueHandler(mpQueueStdout,(pid, GM.REDIRECT_QUEUE_START, args)) else: GM.Globals[GM.STDOUT] = {} if mpQueueStderr: if mpQueueStderr is not mpQueueStdout: GM.Globals[GM.STDERR] = {GM.REDIRECT_NAME: '', GM.REDIRECT_FD: None, GM.REDIRECT_MULTI_FD: StringIOobject()} # mpQueueStderr.put((pid, GM.REDIRECT_QUEUE_START, args)) writeStdQueueHandler(mpQueueStderr, (pid, GM.REDIRECT_QUEUE_START, args)) else: GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD] = GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] else: GM.Globals[GM.STDERR] = {} sysRC = ProcessGAMCommand(args) with mplock: if mpQueueStdout: # mpQueueStdout.put((pid, GM.REDIRECT_QUEUE_END, [sysRC, GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].getvalue()])) writeStdQueueHandler(mpQueueStdout, (pid, GM.REDIRECT_QUEUE_END, [sysRC, GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].getvalue()])) GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].close() GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] = None if mpQueueStderr and mpQueueStderr is not mpQueueStdout: # mpQueueStderr.put((pid, GM.REDIRECT_QUEUE_END, [sysRC, GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].getvalue()])) writeStdQueueHandler(mpQueueStderr, (pid, GM.REDIRECT_QUEUE_END, [sysRC, GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].getvalue()])) GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].close() GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD] = None return (pid, sysRC, logCmd) ERROR_PLURAL_SINGULAR = [Msg.ERRORS, Msg.ERROR] PROCESS_PLURAL_SINGULAR = [Msg.PROCESSES, Msg.PROCESS] THREAD_PLURAL_SINGULAR = [Msg.THREADS, Msg.THREAD] def checkChildProcessRC(rc): # Comparison if 'comp' in GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]: op = GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]['comp'] value = GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]['value'] 
if op == '<': return rc < value if op == '<=': return rc <= value if op == '>': return rc > value if op == '>=': return rc >= value if op == '!=': return rc != value return rc == value # Range op = GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]['range'] low = GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]['low'] high = GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION]['high'] if op == '!=': return not low <= rc <= high return low <= rc <= high def initGamWorker(l): global mplock mplock = l def MultiprocessGAMCommands(items, showCmds): def poolCallback(result): poolProcessResults[0] -= 1 if showCmds: batchWriteStderr(f'{currentISOformatTimeStamp()},{result[0]}/{numItems},End,{result[1]},{result[2]}\n') if GM.Globals[GM.CMDLOG_LOGGER]: GM.Globals[GM.CMDLOG_LOGGER].info(f'{currentISOformatTimeStamp()},{result[1]},{result[2]}') if GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION] is not None and checkChildProcessRC(result[1]): GM.Globals[GM.MULTIPROCESS_EXIT_PROCESSING] = True def signal_handler(*_): nonlocal controlC controlC = True def handleControlC(source): nonlocal controlC batchWriteStderr(f'Control-C (Multiprocess-{source})\n') setSysExitRC(KEYBOARD_INTERRUPT_RC) batchWriteStderr(Msg.BATCH_CSV_TERMINATE_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, poolProcessResults[0], PROCESS_PLURAL_SINGULAR[poolProcessResults[0] == 1])) pool.terminate() controlC = False if not items: return GM.Globals[GM.NUM_BATCH_ITEMS] = numItems = len(items) numPoolProcesses = min(numItems, GC.Values[GC.NUM_THREADS]) if GC.Values[GC.MULTIPROCESS_POOL_LIMIT] == -1: parallelPoolProcesses = -1 elif GC.Values[GC.MULTIPROCESS_POOL_LIMIT] == 0: parallelPoolProcesses = numPoolProcesses else: parallelPoolProcesses = min(numItems, GC.Values[GC.MULTIPROCESS_POOL_LIMIT]) # origSigintHandler = signal.signal(signal.SIGINT, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) mpManager = multiprocessing.Manager() l = mpManager.Lock() try: if multiprocessing.get_start_method() == 'spawn': pool = mpManager.Pool(processes=numPoolProcesses, initializer=initGamWorker, initargs=(l,), maxtasksperchild=200) else: pool = multiprocessing.Pool(processes=numPoolProcesses, initializer=initGamWorker, initargs=(l,), maxtasksperchild=200) except IOError as e: systemErrorExit(FILE_ERROR_RC, e) except AssertionError as e: Cmd.SetLocation(0) usageErrorExit(str(e)) if multiprocessing.get_start_method() == 'spawn': savedValues = saveNonPickleableValues() if GM.Globals[GM.STDOUT][GM.REDIRECT_MULTIPROCESS]: mpQueueStdout, mpQueueHandlerStdout = initializeStdQueueHandler(mpManager, GM.STDOUT, GM.Globals, GC.Values) mpQueueStdout.put((0, GM.REDIRECT_QUEUE_START, Cmd.AllArguments())) else: mpQueueStdout = None if GM.Globals[GM.STDERR][GM.REDIRECT_MULTIPROCESS]: if GM.Globals[GM.STDERR][GM.REDIRECT_NAME] != 'stdout': mpQueueStderr, mpQueueHandlerStderr = initializeStdQueueHandler(mpManager, GM.STDERR, GM.Globals, GC.Values) mpQueueStderr.put((0, GM.REDIRECT_QUEUE_START, Cmd.AllArguments())) else: mpQueueStderr = mpQueueStdout else: mpQueueStderr = None if multiprocessing.get_start_method() == 'spawn': restoreNonPickleableValues(savedValues) if mpQueueStdout: mpQueueStdout.put((0, GM.REDIRECT_QUEUE_DATA, GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].getvalue())) GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].truncate(0) if mpQueueStderr and mpQueueStderr is not mpQueueStdout: mpQueueStderr.put((0, GM.REDIRECT_QUEUE_DATA, GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].getvalue())) GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].truncate(0) if 
GM.Globals[GM.CSVFILE][GM.REDIRECT_MULTIPROCESS]: mpQueueCSVFile, mpQueueHandlerCSVFile = initializeCSVFileQueueHandler(mpManager, mpQueueStdout, mpQueueStderr) else: mpQueueCSVFile = None # signal.signal(signal.SIGINT, origSigintHandler) controlC = False signal.signal(signal.SIGINT, signal_handler) batchWriteStderr(Msg.USING_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, numPoolProcesses, PROCESS_PLURAL_SINGULAR[numPoolProcesses == 1])) try: pid = 0 poolProcessResults = {pid: 0} for item in items: if GM.Globals[GM.MULTIPROCESS_EXIT_PROCESSING]: break if controlC: break if item[0] == Cmd.COMMIT_BATCH_CMD: batchWriteStderr(Msg.COMMIT_BATCH_WAIT_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, poolProcessResults[0], PROCESS_PLURAL_SINGULAR[poolProcessResults[0] == 1])) while poolProcessResults[0] > 0: time.sleep(1) completedProcesses = [] for p, result in iter(poolProcessResults.items()): if p != 0 and result.ready(): poolCallback(result.get()) completedProcesses.append(p) for p in completedProcesses: del poolProcessResults[p] batchWriteStderr(Msg.COMMIT_BATCH_COMPLETE.format(currentISOformatTimeStamp(), numItems, Msg.PROCESSES)) if len(item) > 1: readStdin(f'{currentISOformatTimeStamp()},0/{numItems},{Cmd.QuotedArgumentList(item[1:])}') continue if item[0] == Cmd.PRINT_CMD: batchWriteStderr(Cmd.QuotedArgumentList(item[1:])+'\n') continue if item[0] == Cmd.SLEEP_CMD: batchWriteStderr(f'{currentISOformatTimeStamp()},0/{numItems},Sleeping {item[1]} seconds\n') time.sleep(int(item[1])) continue pid += 1 if not showCmds and ((pid % 100 == 0) or (pid == numItems)): batchWriteStderr(Msg.PROCESSING_ITEM_N_OF_M.format(currentISOformatTimeStamp(), pid, numItems)) if showCmds or GM.Globals[GM.CMDLOG_LOGGER]: logCmd = Cmd.QuotedArgumentList(item) if showCmds: batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},Start,0,{logCmd}\n') else: logCmd = '' poolProcessResults[pid] = pool.apply_async(ProcessGAMCommandMulti, [pid, numItems, logCmd, mpQueueCSVFile, mpQueueStdout, mpQueueStderr, GC.Values[GC.DEBUG_LEVEL], GM.Globals[GM.CSV_TODRIVE], GC.Values[GC.PRINT_AGU_DOMAINS], GC.Values[GC.PRINT_CROS_OUS], GC.Values[GC.PRINT_CROS_OUS_AND_CHILDREN], GC.Values[GC.OUTPUT_DATEFORMAT], GC.Values[GC.OUTPUT_TIMEFORMAT], GC.Values[GC.CSV_OUTPUT_COLUMN_DELIMITER], GC.Values[GC.CSV_OUTPUT_NO_ESCAPE_CHAR], GC.Values[GC.CSV_OUTPUT_QUOTE_CHAR], GC.Values[GC.CSV_OUTPUT_SORT_HEADERS], GC.Values[GC.CSV_OUTPUT_TIMESTAMP_COLUMN], GC.Values[GC.CSV_OUTPUT_HEADER_FILTER], GC.Values[GC.CSV_OUTPUT_HEADER_DROP_FILTER], GC.Values[GC.CSV_OUTPUT_HEADER_FORCE], GC.Values[GC.CSV_OUTPUT_HEADER_ORDER], GC.Values[GC.CSV_OUTPUT_ROW_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_FILTER_MODE], GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER], GC.Values[GC.CSV_OUTPUT_ROW_DROP_FILTER_MODE], GC.Values[GC.CSV_OUTPUT_ROW_LIMIT], GC.Values[GC.SHOW_GETTINGS], GC.Values[GC.SHOW_GETTINGS_GOT_NL], item]) poolProcessResults[0] += 1 if parallelPoolProcesses > 0: while poolProcessResults[0] == parallelPoolProcesses: completedProcesses = [] for p, result in iter(poolProcessResults.items()): if p != 0 and result.ready(): poolCallback(result.get()) completedProcesses.append(p) if completedProcesses: for p in completedProcesses: del poolProcessResults[p] break time.sleep(1) processWaitStart = time.time() if not controlC: if GC.Values[GC.PROCESS_WAIT_LIMIT] > 0: waitRemaining = GC.Values[GC.PROCESS_WAIT_LIMIT] else: waitRemaining = 'unlimited' while poolProcessResults[0] > 0: 
batchWriteStderr(Msg.BATCH_CSV_WAIT_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, poolProcessResults[0], PROCESS_PLURAL_SINGULAR[poolProcessResults[0] == 1], Msg.BATCH_CSV_WAIT_LIMIT.format(waitRemaining))) completedProcesses = [] for p, result in iter(poolProcessResults.items()): if p != 0 and result.ready(): poolCallback(result.get()) completedProcesses.append(p) for p in completedProcesses: del poolProcessResults[p] if poolProcessResults[0] > 0: if controlC: handleControlC('SIG') break time.sleep(5) if GC.Values[GC.PROCESS_WAIT_LIMIT] > 0: delta = int(time.time()-processWaitStart) if delta >= GC.Values[GC.PROCESS_WAIT_LIMIT]: batchWriteStderr(Msg.BATCH_CSV_TERMINATE_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, poolProcessResults[0], PROCESS_PLURAL_SINGULAR[poolProcessResults[0] == 1])) pool.terminate() break waitRemaining = GC.Values[GC.PROCESS_WAIT_LIMIT] - delta pool.close() else: handleControlC('SIG') except KeyboardInterrupt: handleControlC('KBI') pool.join() batchWriteStderr(Msg.BATCH_CSV_PROCESSING_COMPLETE.format(currentISOformatTimeStamp(), numItems)) if mpQueueCSVFile: terminateCSVFileQueueHandler(mpQueueCSVFile, mpQueueHandlerCSVFile) if mpQueueStdout: mpQueueStdout.put((0, GM.REDIRECT_QUEUE_END, [GM.Globals[GM.SYSEXITRC], GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].getvalue()])) GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD].close() GM.Globals[GM.STDOUT][GM.REDIRECT_MULTI_FD] = None terminateStdQueueHandler(mpQueueStdout, mpQueueHandlerStdout) if mpQueueStderr and mpQueueStderr is not mpQueueStdout: mpQueueStderr.put((0, GM.REDIRECT_QUEUE_END, [GM.Globals[GM.SYSEXITRC], GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].getvalue()])) GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD].close() GM.Globals[GM.STDERR][GM.REDIRECT_MULTI_FD] = None terminateStdQueueHandler(mpQueueStderr, mpQueueHandlerStderr) def threadBatchWorker(showCmds=False, numItems=0): while True: pid, item, logCmd = GM.Globals[GM.TBATCH_QUEUE].get() try: sysRC = subprocess.call(item, stdout=GM.Globals[GM.STDOUT].get(GM.REDIRECT_MULTI_FD, sys.stdout), stderr=GM.Globals[GM.STDERR].get(GM.REDIRECT_MULTI_FD, sys.stderr)) if showCmds: batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},End,{sysRC},{logCmd}\n') if GM.Globals[GM.MULTIPROCESS_EXIT_CONDITION] is not None and checkChildProcessRC(sysRC): GM.Globals[GM.MULTIPROCESS_EXIT_PROCESSING] = True except Exception as e: batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},Error,{str(e)},{logCmd}\n') GM.Globals[GM.TBATCH_QUEUE].task_done() BATCH_COMMANDS = [Cmd.GAM_CMD, Cmd.COMMIT_BATCH_CMD, Cmd.PRINT_CMD, Cmd.SLEEP_CMD] TBATCH_COMMANDS = [Cmd.GAM_CMD, Cmd.COMMIT_BATCH_CMD, Cmd.EXECUTE_CMD, Cmd.PRINT_CMD, Cmd.SLEEP_CMD] def ThreadBatchGAMCommands(items, showCmds): if not items: return pythonCmd = [sys.executable] if not getattr(sys, 'frozen', False): # we're not frozen pythonCmd.append(os.path.realpath(Cmd.Argument(0))) GM.Globals[GM.NUM_BATCH_ITEMS] = numItems = len(items) numWorkerThreads = min(numItems, GC.Values[GC.NUM_TBATCH_THREADS]) # GM.Globals[GM.TBATCH_QUEUE].put() gets blocked when trying to create more items than there are workers GM.Globals[GM.TBATCH_QUEUE] = queue.Queue(maxsize=numWorkerThreads) batchWriteStderr(Msg.USING_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, numWorkerThreads, THREAD_PLURAL_SINGULAR[numWorkerThreads == 1])) for _ in range(numWorkerThreads): t = threading.Thread(target=threadBatchWorker, kwargs={'showCmds': showCmds, 'numItems': numItems}) t.daemon = True t.start() pid = 0 
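# --- Illustrative sketch (hypothetical commands, not GAM code): the worker pattern used by
# ThreadBatchGAMCommands() and threadBatchWorker() -- daemon threads pull (pid, command)
# items from a bounded queue.Queue, mark each with task_done(), and the producer relies on
# the maxsize bound plus join() so it never runs far ahead of the workers.
import queue
import subprocess
import threading

def demo_thread_batch(commands, num_threads=2):
  work = queue.Queue(maxsize=num_threads)
  def worker():
    while True:
      pid, cmd = work.get()
      subprocess.call(cmd)  # a real worker would also log pid and the child's return code
      work.task_done()
  for _ in range(num_threads):
    threading.Thread(target=worker, daemon=True).start()
  for pid, cmd in enumerate(commands, 1):
    work.put((pid, cmd))    # blocks once every worker slot is busy
  work.join()               # wait for every queued command to finish
# Example: demo_thread_batch([['python3', '-c', 'print(1)'], ['python3', '-c', 'print(2)']])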
numThreadsInUse = 0 for item in items: if GM.Globals[GM.MULTIPROCESS_EXIT_PROCESSING]: break if item[0] == Cmd.COMMIT_BATCH_CMD: batchWriteStderr(Msg.COMMIT_BATCH_WAIT_N_PROCESSES.format(currentISOformatTimeStamp(), numItems, numThreadsInUse, THREAD_PLURAL_SINGULAR[numThreadsInUse == 1])) GM.Globals[GM.TBATCH_QUEUE].join() batchWriteStderr(Msg.COMMIT_BATCH_COMPLETE.format(currentISOformatTimeStamp(), numItems, Msg.THREADS)) numThreadsInUse = 0 if len(item) > 1: readStdin(f'{currentISOformatTimeStamp()},0/{numItems},{Cmd.QuotedArgumentList(item[1:])}') continue if item[0] == Cmd.PRINT_CMD: batchWriteStderr(f'{currentISOformatTimeStamp()},0/{numItems},{Cmd.QuotedArgumentList(item[1:])}\n') continue if item[0] == Cmd.SLEEP_CMD: batchWriteStderr(f'{currentISOformatTimeStamp()},0/{numItems},Sleeping {item[1]} seconds\n') time.sleep(int(item[1])) continue pid += 1 if not showCmds and ((pid % 100 == 0) or (pid == numItems)): batchWriteStderr(Msg.PROCESSING_ITEM_N_OF_M.format(currentISOformatTimeStamp(), pid, numItems)) if showCmds: logCmd = Cmd.QuotedArgumentList(item) batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},Start,{Cmd.QuotedArgumentList(item)}\n') else: logCmd = '' if item[0] == Cmd.GAM_CMD: GM.Globals[GM.TBATCH_QUEUE].put((pid, pythonCmd+item[1:], logCmd)) else: GM.Globals[GM.TBATCH_QUEUE].put((pid, item[1:], logCmd)) numThreadsInUse += 1 GM.Globals[GM.TBATCH_QUEUE].join() if showCmds: batchWriteStderr(f'{currentISOformatTimeStamp()},0/{numItems},Complete\n') def _getShowCommands(): if checkArgumentPresent('showcmds'): return getBoolean() return GC.Values[GC.SHOW_COMMANDS] def _getSkipRows(): if checkArgumentPresent('skiprows'): return getInteger(minVal=0) # return GC.Values[GC.CSV_INPUT_ROW_SKIP] return 0 def _getMaxRows(): if checkArgumentPresent('maxrows'): return getInteger(minVal=0) return GC.Values[GC.CSV_INPUT_ROW_LIMIT] # gam batch [showcmds []] def doBatch(threadBatch=False): filename = getString(Cmd.OB_FILE_NAME) if (filename == '-') and (GC.Values[GC.DEBUG_LEVEL] > 0): Cmd.Backup() usageErrorExit(Msg.BATCH_CSV_LOOP_DASH_DEBUG_INCOMPATIBLE.format(Cmd.BATCH_CMD)) filenameLower = filename.lower() if filenameLower not in {'gdoc', 'gcsdoc'}: encoding = getCharSet() f = openFile(filename, encoding=encoding, stripUTFBOM=True) elif filenameLower == 'gdoc': f = getGDocData(filenameLower) getCharSet() else: #filenameLower == 'gcsdoc': f = getStorageFileData(filenameLower) getCharSet() showCmds = _getShowCommands() checkForExtraneousArguments() validCommands = BATCH_COMMANDS if not threadBatch else TBATCH_COMMANDS kwValues = {} items = [] errors = 0 try: for line in f: if line.startswith('#'): continue if kwValues: for kw, value in iter(kwValues.items()): line = line.replace(f'%{kw}%', value) try: argv = shlex.split(line) except ValueError as e: writeStderr(f'Command: >>>{line.strip()}<<<\n') writeStderr(f'{ERROR_PREFIX}{str(e)}\n') errors += 1 continue if argv: cmd = argv[0].strip().lower() if cmd == Cmd.SET_CMD: if len(argv) == 3: kwValues[argv[1]] = argv[2] else: writeStderr(f'Command: >>>{Cmd.QuotedArgumentList([argv[0]])}<<< {Cmd.QuotedArgumentList(argv[1:])}\n') writeStderr(f'{ERROR_PREFIX}{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]}: {Msg.EXPECTED} <{Cmd.SET_CMD} keyword value>)>\n') errors += 1 continue if cmd == Cmd.CLEAR_CMD: if len(argv) == 2: kwValues.pop(argv[1], None) else: writeStderr(f'Command: >>>{Cmd.QuotedArgumentList([argv[0]])}<<< {Cmd.QuotedArgumentList(argv[1:])}\n') 
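# --- Illustrative sketch (the batch line shown is invented): how doBatch() expands
# "set keyword value" definitions and splits each batch line with shlex so that quoted
# arguments survive, before checking whether the first token is a valid batch command.
import shlex

def expand_and_split(line, keywords):
  for kw, value in keywords.items():
    line = line.replace(f'%{kw}%', value)
  return shlex.split(line)
# Example: expand_and_split('gam update user %email% suspended on', {'email': 'jane@example.com'})
# -> ['gam', 'update', 'user', 'jane@example.com', 'suspended', 'on']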
writeStderr(f'{ERROR_PREFIX}{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]}: {Msg.EXPECTED} <{Cmd.CLEAR_CMD} keyword>)>\n') errors += 1 continue if cmd == Cmd.SLEEP_CMD: if len(argv) != 2 or not argv[1].isdigit(): writeStderr(f'Command: >>>{Cmd.QuotedArgumentList([argv[0]])}<<< {Cmd.QuotedArgumentList(argv[1:])}\n') writeStderr(f'{ERROR_PREFIX}{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]}: {Msg.EXPECTED} <{Cmd.SLEEP_CMD} integer>)>\n') errors += 1 continue if (not cmd) or ((len(argv) == 1) and (cmd not in [Cmd.COMMIT_BATCH_CMD, Cmd.PRINT_CMD])): continue if cmd in validCommands: items.append(argv) else: writeStderr(f'Command: >>>{Cmd.QuotedArgumentList([argv[0]])}<<< {Cmd.QuotedArgumentList(argv[1:])}\n') writeStderr(f'{ERROR_PREFIX}{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]}: {Msg.EXPECTED} <{formatChoiceList(validCommands)}>\n') errors += 1 except IOError as e: systemErrorExit(FILE_ERROR_RC, fileErrorMessage(filename, e)) closeFile(f) if errors == 0: if not threadBatch: MultiprocessGAMCommands(items, showCmds) else: ThreadBatchGAMCommands(items, showCmds) else: writeStderr(Msg.BATCH_NOT_PROCESSED_ERRORS.format(ERROR_PREFIX, filename, errors, ERROR_PLURAL_SINGULAR[errors == 1])) setSysExitRC(USAGE_ERROR_RC) # gam tbatch [showcmds []] def doThreadBatch(): adjustRedirectedSTDFilesIfNotMultiprocessing() doBatch(True) def doAutoBatch(entityType, entityList, CL_command): remaining = Cmd.Remaining() items = [] initial_argv = [Cmd.GAM_CMD] if GM.Globals[GM.SECTION] and not GM.Globals[GM.GAM_CFG_SECTION]: initial_argv.extend([Cmd.SELECT_CMD, GM.Globals[GM.SECTION]]) for entity in entityList: items.append(initial_argv+[entityType, entity, CL_command]+remaining) MultiprocessGAMCommands(items, GC.Values[GC.SHOW_COMMANDS]) # Process command line arguments, find substitutions # An argument containing instances of ~~xxx~!~pattern~!~replacement~~ has ~~...~~ replaced by re.sub(pattern, replacement, value of field xxx from the CSV file) # For example, ~~primaryEmail~!~^(.+)@(.+)$~!~\1 AT \2~~ would replace foo@bar.com (from the primaryEmail column) with foo AT bar.com # An argument containing instances of ~~xxx~~ has xxx replaced by the value of field xxx from the CSV file # An argument containing exactly ~xxx is replaced by the value of field xxx from the CSV file # Otherwise, the argument is preserved as is SUB_PATTERN = re.compile(r'~~(.+?)~~') RE_PATTERN = re.compile(r'~~(.+?)~!~(.+?)~!~(.+?)~~') SUB_TYPE = 'sub' RE_TYPE = 're' # SubFields is a dictionary; the key is the argument number, the value is a list of tuples that mark # the substition (type, fieldname, start, end). Type is 'sub' for simple substitution, 're' for regex substitution. 
# Example: update user '~User' address type work unstructured '~~Street~~, ~~City~~, ~~State~~ ~~ZIP~~' primary # {2: [('sub', 'User', 0, 5)], 7: [('sub', 'Street', 0, 10), ('sub', 'City', 12, 20), ('sub', 'State', 22, 31), ('sub', 'ZIP', 32, 39)]} def getSubFields(initial_argv, fieldNames): subFields = {} GAM_argv = initial_argv[:] GAM_argvI = len(GAM_argv) while Cmd.ArgumentsRemaining(): myarg = Cmd.Current() if not myarg: GAM_argv.append(myarg) elif SUB_PATTERN.search(myarg): pos = 0 subFields.setdefault(GAM_argvI, []) while True: submatch = SUB_PATTERN.search(myarg, pos) if not submatch: break rematch = RE_PATTERN.match(submatch.group(0)) if not rematch: fieldName = submatch.group(1) if fieldName not in fieldNames: csvFieldErrorExit(fieldName, fieldNames) subFields[GAM_argvI].append((SUB_TYPE, fieldName, submatch.start(), submatch.end())) else: fieldName = rematch.group(1) if fieldName not in fieldNames: csvFieldErrorExit(fieldName, fieldNames) try: re.compile(rematch.group(2)) subFields[GAM_argvI].append((RE_TYPE, fieldName, submatch.start(), submatch.end(), rematch.group(2), rematch.group(3))) except re.error as e: usageErrorExit(f'{Cmd.OB_RE_PATTERN} {Msg.ERROR}: {e}') pos = submatch.end() GAM_argv.append(myarg) elif myarg[0] == '~': fieldName = myarg[1:] if fieldName in fieldNames: subFields[GAM_argvI] = [(SUB_TYPE, fieldName, 0, len(myarg))] GAM_argv.append(myarg) else: csvFieldErrorExit(fieldName, fieldNames) else: GAM_argv.append(myarg) GAM_argvI += 1 Cmd.Advance() return(GAM_argv, subFields) def processSubFields(GAM_argv, row, subFields): argv = GAM_argv[:] for GAM_argvI, fields in iter(subFields.items()): oargv = argv[GAM_argvI][:] argv[GAM_argvI] = '' pos = 0 for field in fields: argv[GAM_argvI] += oargv[pos:field[2]] if field[0] == SUB_TYPE: if row[field[1]]: argv[GAM_argvI] += row[field[1]] else: if row[field[1]]: argv[GAM_argvI] += re.sub(field[4], field[5], row[field[1]]) pos = field[3] argv[GAM_argvI] += oargv[pos:] return argv # gam csv [warnifnodata] # [columndelimiter ] [quotechar ] [fields ] # (matchfield|skipfield )* [showcmds []] # [skiprows ] [maxrows ] # gam def doCSV(testMode=False): filename = getString(Cmd.OB_FILE_NAME) if (filename == '-') and (GC.Values[GC.DEBUG_LEVEL] > 0): Cmd.Backup() usageErrorExit(Msg.BATCH_CSV_LOOP_DASH_DEBUG_INCOMPATIBLE.format(Cmd.CSV_CMD)) f, csvFile, fieldnames = openCSVFileReader(filename) matchFields, skipFields = getMatchSkipFields(fieldnames) showCmds = _getShowCommands() skipRows = _getSkipRows() maxRows = _getMaxRows() checkArgumentPresent(Cmd.GAM_CMD, required=True) if not Cmd.ArgumentsRemaining(): missingArgumentExit(Cmd.OB_GAM_ARGUMENT_LIST) initial_argv = [Cmd.GAM_CMD] if GM.Globals[GM.SECTION] and not GM.Globals[GM.GAM_CFG_SECTION] and not Cmd.PeekArgumentPresent(Cmd.SELECT_CMD): initial_argv.extend([Cmd.SELECT_CMD, GM.Globals[GM.SECTION]]) GAM_argv, subFields = getSubFields(initial_argv, fieldnames) if GC.Values[GC.CSV_INPUT_ROW_FILTER] or GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]: CheckInputRowFilterHeaders(fieldnames, GC.Values[GC.CSV_INPUT_ROW_FILTER], GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]) items = [] i = 0 for row in csvFile: if checkMatchSkipFields(row, fieldnames, matchFields, skipFields): i += 1 if skipRows: if i <= skipRows: continue i = 1 skipRows = 0 items.append(processSubFields(GAM_argv, row, subFields)) if maxRows and i >= maxRows: break closeFile(f) if not testMode: MultiprocessGAMCommands(items, showCmds) else: numItems = min(len(items), 10) writeStdout(Msg.CSV_FILE_HEADERS.format(filename)) 
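# --- Illustrative sketch (a simplified stand-in for getSubFields()/processSubFields()):
# expanding ~~Field~~ markers in a command template from one CSV row. GAM additionally
# supports the ~Field short form and ~~Field~!~pattern~!~replacement~~ regex substitution,
# and it errors on unknown field names instead of substituting an empty string.
import re

_FIELD_MARKER = re.compile(r'~~(.+?)~~')

def substitute_row(argv_template, row):
  return [_FIELD_MARKER.sub(lambda m: row.get(m.group(1), ''), arg) for arg in argv_template]
# Example: substitute_row(['gam', 'update', 'user', '~~primaryEmail~~', 'org', '~~OU~~'],
#                         {'primaryEmail': 'jane@example.com', 'OU': '/Sales'})
# -> ['gam', 'update', 'user', 'jane@example.com', 'org', '/Sales']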
Ind.Increment() for field in fieldnames: writeStdout(f'{Ind.Spaces()}{field}\n') Ind.Decrement() writeStdout(Msg.CSV_SAMPLE_COMMANDS.format(numItems, GAM)) Ind.Increment() for i in range(numItems): writeStdout(f'{Ind.Spaces()}{Cmd.QuotedArgumentList(items[i])}\n') Ind.Decrement() def doCSVTest(): doCSV(testMode=True) # gam loop [warnifnodata] # [columndelimiter ] [quotechar ] [fields ] # (matchfield|skipfield )* [showcmds []] # [skiprows ] [maxrows ] # gam def doLoop(loopCmd): filename = getString(Cmd.OB_FILE_NAME) if (filename == '-') and (GC.Values[GC.DEBUG_LEVEL] > 0): Cmd.Backup() usageErrorExit(Msg.BATCH_CSV_LOOP_DASH_DEBUG_INCOMPATIBLE.format(Cmd.LOOP_CMD)) f, csvFile, fieldnames = openCSVFileReader(filename) matchFields, skipFields = getMatchSkipFields(fieldnames) showCmds = _getShowCommands() skipRows = _getSkipRows() maxRows = _getMaxRows() checkArgumentPresent(Cmd.GAM_CMD, required=True) if not Cmd.ArgumentsRemaining(): missingArgumentExit(Cmd.OB_GAM_ARGUMENT_LIST) if GC.Values[GC.CSV_INPUT_ROW_FILTER] or GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]: CheckInputRowFilterHeaders(fieldnames, GC.Values[GC.CSV_INPUT_ROW_FILTER], GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]) choice = Cmd.Current().strip().lower() if choice == Cmd.LOOP_CMD: usageErrorExit(Msg.NESTED_LOOP_CMD_NOT_ALLOWED) # gam loop ... gam redirect|select|config ... process gam.cfg on each iteration # gam redirect|select|config ... loop ... gam redirect|select|config ... process gam.cfg on each iteration # gam loop ... gam !redirect|select|config ... no further processing of gam.cfg # gam redirect|select|config ... loop ... gam !redirect|select|config ... no further processing of gam.cfg processGamCfg = choice in Cmd.GAM_META_COMMANDS GAM_argv, subFields = getSubFields([Cmd.GAM_CMD], fieldnames) multi = GM.Globals[GM.CSVFILE][GM.REDIRECT_MULTIPROCESS] if multi: mpManager = multiprocessing.Manager() mpQueue, mpQueueHandler = initializeCSVFileQueueHandler(mpManager, None, None) else: mpQueue = None GM.Globals[GM.CSVFILE][GM.REDIRECT_QUEUE] = mpQueue # Set up command logging at top level only if GM.Globals[GM.CMDLOG_LOGGER]: LoopGlobals = GM.Globals else: LoopGlobals = {GM.CMDLOG_LOGGER: None, GM.CMDLOG_HANDLER: None} if (GM.Globals[GM.PID] > 0) and GC.Values[GC.CMDLOG]: openGAMCommandLog(LoopGlobals, 'looplog') if LoopGlobals[GM.CMDLOG_LOGGER]: writeGAMCommandLog(LoopGlobals, loopCmd, '*') if not showCmds: i = 0 for row in csvFile: if checkMatchSkipFields(row, fieldnames, matchFields, skipFields): i += 1 if skipRows: if i <= skipRows: continue i = 1 skipRows = 0 item = processSubFields(GAM_argv, row, subFields) logCmd = Cmd.QuotedArgumentList(item) if i % 100 == 0: batchWriteStderr(Msg.PROCESSING_ITEM_N.format(currentISOformatTimeStamp(), i)) sysRC = ProcessGAMCommand(item, processGamCfg=processGamCfg, inLoop=True) if (GM.Globals[GM.PID] > 0) and LoopGlobals[GM.CMDLOG_LOGGER]: writeGAMCommandLog(LoopGlobals, logCmd, sysRC) if (sysRC > 0) and (GM.Globals[GM.SYSEXITRC] <= HARD_ERROR_RC): break if maxRows and i >= maxRows: break closeFile(f) else: items = [] i = 0 for row in csvFile: if checkMatchSkipFields(row, fieldnames, matchFields, skipFields): i += 1 if skipRows: if i <= skipRows: continue i = 1 skipRows = 0 items.append(processSubFields(GAM_argv, row, subFields)) if maxRows and i >= maxRows: break closeFile(f) numItems = len(items) pid = 0 for item in items: pid += 1 logCmd = Cmd.QuotedArgumentList(item) batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},Start,0,{logCmd}\n') sysRC = ProcessGAMCommand(item, 
processGamCfg=processGamCfg, inLoop=True) batchWriteStderr(f'{currentISOformatTimeStamp()},{pid}/{numItems},End,{sysRC},{logCmd}\n') if (GM.Globals[GM.PID] > 0) and LoopGlobals[GM.CMDLOG_LOGGER]: writeGAMCommandLog(LoopGlobals, logCmd, sysRC) if (sysRC > 0) and (GM.Globals[GM.SYSEXITRC] <= HARD_ERROR_RC): break if (GM.Globals[GM.PID] > 0) and LoopGlobals[GM.CMDLOG_LOGGER]: closeGAMCommandLog(LoopGlobals) if multi: terminateCSVFileQueueHandler(mpQueue, mpQueueHandler) def _doList(entityList, entityType): buildGAPIObject(API.DIRECTORY) if GM.Globals[GM.CSV_DATA_DICT]: keyField = GM.Globals[GM.CSV_KEY_FIELD] dataField = GM.Globals[GM.CSV_DATA_FIELD] else: keyField = 'Entity' dataField = 'Data' csvPF = CSVPrintFile(keyField) if checkArgumentPresent('todrive'): csvPF.GetTodriveParameters() if entityList is None: entityList = getEntityList(Cmd.OB_ENTITY) showData = checkArgumentPresent('data') if showData: if not entityType: itemType, itemList = getEntityToModify(crosAllowed=True) else: itemType = None itemList = getEntityList(Cmd.OB_ENTITY) entityItemLists = itemList if isinstance(itemList, dict) else None csvPF.AddTitle(dataField) else: entityItemLists = None dataDelimiter = getDelimiter() checkForExtraneousArguments() _, _, entityList = getEntityArgument(entityList) for entity in entityList: entityEmail = normalizeEmailAddressOrUID(entity) if showData: if entityItemLists: if entity not in entityItemLists: csvPF.WriteRow({keyField: entityEmail}) continue itemList = entityItemLists[entity] if itemType == Cmd.ENTITY_USERS: for i, item in enumerate(itemList): itemList[i] = normalizeEmailAddressOrUID(item) if dataDelimiter: csvPF.WriteRow({keyField: entityEmail, dataField: dataDelimiter.join(itemList)}) else: for item in itemList: csvPF.WriteRow({keyField: entityEmail, dataField: item}) else: csvPF.WriteRow({keyField: entityEmail}) csvPF.writeCSVfile('Entity') # gam list [todrive *] [data | [delimiter ]] def doListType(): _doList(None, None) # gam list [todrive *] [data [delimiter ]] def doListCrOS(entityList): _doList(entityList, Cmd.ENTITY_CROS) # gam list [todrive *] [data [delimiter ]] def doListUser(entityList): _doList(entityList, Cmd.ENTITY_USERS) def _showCount(entityList, entityType): buildGAPIObject(API.DIRECTORY) checkForExtraneousArguments() _, count, entityList = getEntityArgument(entityList) actionPerformedNumItems(count, entityType) # gam show count def showCountCrOS(entityList): _showCount(entityList, Ent.CHROME_DEVICE) # gam show count def showCountUser(entityList): _showCount(entityList, Ent.USER) VALIDEMAIL_PATTERN = re.compile(r'^[^@]+@[^@]+\.[^@]+$') def _getValidateLoginHint(login_hint, projectId=None): while True: if not login_hint: if not projectId: login_hint = readStdin(Msg.ENTER_GSUITE_ADMIN_EMAIL_ADDRESS).strip() else: login_hint = readStdin(Msg.ENTER_MANAGE_GCP_PROJECT_EMAIL_ADDRESS.format(projectId)).strip() if login_hint.find('@') == -1 and GC.Values[GC.DOMAIN]: login_hint = f'{login_hint}@{GC.Values[GC.DOMAIN]}' if VALIDEMAIL_PATTERN.match(login_hint): return login_hint sys.stdout.write(f'{ERROR_PREFIX}Invalid email address: {login_hint}\n') login_hint = None def getOAuthClientIDAndSecret(): cs_data = readFile(GC.Values[GC.CLIENT_SECRETS_JSON], continueOnError=True, displayError=True) if not cs_data: invalidClientSecretsJsonExit(Msg.NO_DATA) try: cs_json = json.loads(cs_data) if not cs_json: systemErrorExit(CLIENT_SECRETS_JSON_REQUIRED_RC, Msg.NO_CLIENT_ACCESS_CREATE_UPDATE_ALLOWED) return (cs_json['installed']['client_id'], cs_json['installed']['client_secret']) 
except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidClientSecretsJsonExit(str(e)) def getScopesFromUser(scopesList, clientAccess, currentScopes=None): OAUTH2_CMDS = ['s', 'u', 'e', 'c'] oauth2_menu = '' numScopes = len(scopesList) for a_scope in scopesList: oauth2_menu += f"[%%s] %2d) {a_scope['name']}" if a_scope['subscopes']: oauth2_menu += f' (supports {" and ".join(a_scope["subscopes"])})' oauth2_menu += '\n' oauth2_menu += ''' Select an unselected scope [ ] by entering a number; yields [*] For scopes that support readonly, enter a number and an 'r' to grant read-only access; yields [R] For scopes that support action, enter a number and an 'a' to grant action-only access; yields [A] Clear read-only access [R] or action-only access [A] from a scope by entering a number; yields [*] Unselect a selected scope [*] by entering a number; yields [ ] Select all default scopes by entering an 's'; yields [*] for default scopes, [ ] for others Unselect all scopes by entering a 'u'; yields [ ] for all scopes Exit without changes/authorization by entering an 'e' Continue to authorization by entering a 'c' ''' if clientAccess: oauth2_menu += ''' Note, if all scopes are selected, Google will probably generate an authorization error ''' menu = oauth2_menu % tuple(range(numScopes)) selectedScopes = ['*'] * numScopes if currentScopes is None and clientAccess: lock = FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK]) with lock: _, credentials = getOauth2TxtCredentials(exitOnError=False) if credentials and credentials.scopes is not None: currentScopes = sorted(credentials.scopes) if currentScopes is not None: if clientAccess: i = 0 for a_scope in scopesList: selectedScopes[i] = ' ' possibleScope = a_scope['scope'] for currentScope in currentScopes: if currentScope == possibleScope: selectedScopes[i] = '*' break if 'readonly' in a_scope['subscopes']: if currentScope == possibleScope+'.readonly': selectedScopes[i] = 'R' break if 'action' in a_scope['subscopes']: if currentScope == possibleScope+'.action': selectedScopes[i] = 'A' break i += 1 else: i = 0 for a_scope in scopesList: selectedScopes[i] = ' ' api = a_scope['api'] possibleScope = a_scope['scope'] if api in currentScopes: for scope in currentScopes[api]: if scope == possibleScope: selectedScopes[i] = '*' break if 'readonly' in a_scope['subscopes']: if (scope == possibleScope+'.readonly') or (scope == a_scope.get('roscope')): selectedScopes[i] = 'R' break i += 1 else: i = 0 for a_scope in scopesList: if a_scope.get('offByDefault'): selectedScopes[i] = ' ' elif a_scope.get('roByDefault'): selectedScopes[i] = 'R' else: selectedScopes[i] = '*' i += 1 prompt = f'\nPlease enter 0-{numScopes-1}[a|r] or {"|".join(OAUTH2_CMDS)}: ' while True: os.system(['clear', 'cls'][sys.platform.startswith('win')]) sys.stdout.write(menu % tuple(selectedScopes)) while True: choice = readStdin(prompt) if choice: selection = choice.lower() if selection.find('r') >= 0: mode = 'R' selection = selection.replace('r', '') elif selection.find('a') >= 0: mode = 'A' selection = selection.replace('a', '') else: mode = ' ' if selection and selection.isdigit(): selection = int(selection) if isinstance(selection, int) and selection < numScopes: if mode == 'R': if 'readonly' not in scopesList[selection]['subscopes']: sys.stdout.write(f'{ERROR_PREFIX}Scope {selection} does not support read-only mode!\n') continue elif mode == 'A': if 'action' not in scopesList[selection]['subscopes']: sys.stdout.write(f'{ERROR_PREFIX}Scope {selection} does not support action-only 
mode!\n') continue elif selectedScopes[selection] != '*': mode = '*' else: mode = ' ' selectedScopes[selection] = mode break if isinstance(selection, str) and selection in OAUTH2_CMDS: if selection == 's': i = 0 for a_scope in scopesList: selectedScopes[i] = ' ' if a_scope.get('offByDefault', False) else '*' i += 1 elif selection == 'u': for i in range(numScopes): selectedScopes[i] = ' ' elif selection == 'e': return None break sys.stdout.write(f'{ERROR_PREFIX}Invalid input "{choice}"\n') if selection == 'c': break return selectedScopes def _localhost_to_ip(): '''returns IPv4 or IPv6 loopback address which localhost resolves to. If localhost does not resolve to valid loopback IP address then returns 127.0.0.1''' # TODO gethostbyname() will only ever return ipv4 # find a way to support IPv6 here and get preferred IP # note that IPv6 may be broken on some systems also :-( # for now IPv4 should do. local_ip = socket.gethostbyname('localhost') # local_ip = socket.getaddrinfo('localhost', None)[0][-1][0] # works with ipv6, makes wsgiref fail if not ipaddress.ip_address(local_ip).is_loopback: local_ip = '127.0.0.1' return local_ip def _waitForHttpClient(d): wsgi_app = google_auth_oauthlib.flow._RedirectWSGIApp(Msg.AUTHENTICATION_FLOW_COMPLETE_CLOSE_BROWSER.format(GAM)) wsgiref.simple_server.WSGIServer.allow_reuse_address = False # Convert hostname to IP since apparently binding to the IP # reduces odds of firewall blocking us local_ip = _localhost_to_ip() for port in range(8080, 8099): try: local_server = wsgiref.simple_server.make_server( local_ip, port, wsgi_app, handler_class=wsgiref.simple_server.WSGIRequestHandler ) break except OSError: pass redirect_uri_format = "http://{}:{}/" if d['trailing_slash'] else "http://{}:{}" # provide redirect_uri to main process so it can formulate auth_url d['redirect_uri'] = redirect_uri_format.format(*local_server.server_address) # wait until main process provides auth_url # so we can open it in web browser. 
while 'auth_url' not in d: time.sleep(0.1) if d['open_browser']: webbrowser.open(d['auth_url'], new=1, autoraise=True) try: local_server.handle_request() authorization_response = wsgi_app.last_request_uri.replace("http", "https") d['code'] = authorization_response except: pass local_server.server_close() def _waitForUserInput(d): sys.stdin = open(0, DEFAULT_FILE_READ_MODE, encoding=UTF8) d['code'] = readStdin(Msg.ENTER_VERIFICATION_CODE_OR_URL) class _GamOauthFlow(google_auth_oauthlib.flow.InstalledAppFlow): def run_dual(self, **kwargs): mgr = multiprocessing.Manager() d = mgr.dict() d['trailing_slash'] = True d['open_browser'] = not GC.Values[GC.NO_BROWSER] httpClientProcess = multiprocessing.Process(target=_waitForHttpClient, args=(d,)) userInputProcess = multiprocessing.Process(target=_waitForUserInput, args=(d,)) httpClientProcess.start() # we need to wait until web server starts on avail port # so we know redirect_uri to use while 'redirect_uri' not in d: time.sleep(0.1) self.redirect_uri = d['redirect_uri'] d['auth_url'], _ = super().authorization_url(**kwargs) d['auth_url'] = shortenURL(d['auth_url']) print(Msg.OAUTH2_GO_TO_LINK_MESSAGE.format(url=d['auth_url'])) userInputProcess.start() userInput = False checkHttp = checkUser = True alive = 2 while alive > 0: time.sleep(0.1) if checkHttp and not httpClientProcess.is_alive(): if 'code' in d: if checkUser: userInputProcess.terminate() break checkHttp = False alive -= 1 if checkUser and not userInputProcess.is_alive(): userInput = True if 'code' in d: if checkHttp: httpClientProcess.terminate() break checkUser = False alive -= 1 if 'code' not in d: systemErrorExit(SYSTEM_ERROR_RC, Msg.AUTHENTICATION_FLOW_FAILED) while True: code = d['code'] if code.startswith('http'): parsed_url = urlparse(code) parsed_params = parse_qs(parsed_url.query) code = parsed_params.get('code', [None])[0] try: fetch_args = {'code': code} if GC.Values[GC.CACERTS_PEM]: fetch_args['verify'] = GC.Values[GC.CACERTS_PEM] self.fetch_token(**fetch_args) break except Exception as e: if not userInput: systemErrorExit(INVALID_TOKEN_RC, str(e)) stderrErrorMsg(str(e)) _waitForUserInput(d) print(Msg.AUTHENTICATION_FLOW_COMPLETE) return self.credentials class Credentials(google.oauth2.credentials.Credentials): """Google OAuth2.0 Credentials with GAM-specific properties and methods.""" def __init__(self, token, refresh_token=None, id_token=None, token_uri=None, client_id=None, client_secret=None, scopes=None, quota_project_id=None, expiry=None, id_token_data=None, filename=None): """A thread-safe OAuth2.0 credentials object. Credentials adds additional utility properties and methods to a standard OAuth2.0 credentials object. When used to store credentials on disk, it implements a file lock to avoid collision during writes. Args: token: Optional String, The OAuth 2.0 access token. Can be None if refresh information is provided. refresh_token: String, The OAuth 2.0 refresh token. If specified, credentials can be refreshed. id_token: String, The Open ID Connect ID Token. token_uri: String, The OAuth 2.0 authorization server's token endpoint URI. Must be specified for refresh, can be left as None if the token can not be refreshed. client_id: String, The OAuth 2.0 client ID. Must be specified for refresh, can be left as None if the token can not be refreshed. client_secret: String, The OAuth 2.0 client secret. Must be specified for refresh, can be left as None if the token can not be refreshed. scopes: Sequence[str], The scopes used to obtain authorization. 
This parameter is used by :meth:`has_scopes`. OAuth 2.0 credentials can not request additional scopes after authorization. The scopes must be derivable from the refresh token if refresh information is provided (e.g. The refresh token scopes are a superset of this or contain a wild card scope like 'https://www.googleapis.com/auth/any-api'). quota_project_id: String, The project ID used for quota and billing. This project may be different from the project used to create the credentials. expiry: datetime.datetime, The time at which the provided token will expire. id_token_data: Oauth2.0 ID Token data which was previously fetched for this access token against the google.oauth2.id_token library. filename: String, Path to a file that will be used to store the credentials. If provided, a lock file of the same name and a ".lock" extension will be created for concurrency controls. Note: New credentials are not saved to disk until write() or refresh() are called. Raises: TypeError: If id_token_data is not the required dict type. """ super().__init__(token=token, refresh_token=refresh_token, id_token=id_token, token_uri=token_uri, client_id=client_id, client_secret=client_secret, scopes=scopes, quota_project_id=quota_project_id) # Load data not restored by the super class self.expiry = expiry if id_token_data and not isinstance(id_token_data, dict): raise TypeError(f'Expected type id_token_data dict but received {type(id_token_data)}') self._id_token_data = id_token_data.copy() if id_token_data else None # If a filename is provided, use a lock file to control concurrent access # to the resource. If no filename is provided, use a thread lock that has # the same interface as FileLock in order to simplify the implementation. if filename: # Convert relative paths into absolute self._filename = os.path.abspath(filename) else: self._filename = None # Use a property to prevent external mutation of the filename. @property def filename(self): return self._filename @classmethod def from_authorized_user_info_gam(cls, info, filename=None): """Generates Credentials from JSON containing authorized user info. Args: info: Dict, authorized user info in Google format. filename: String, the filename used to store these credentials on disk. If no filename is provided, the credentials will not be saved to disk. Raises: ValueError: If missing fields are detected in the info. """ # We need all of these keys keys_needed = {'client_id', 'client_secret'} # We need 1 or more of these keys keys_need_one_of = {'refresh_token', 'auth_token', 'token'} missing = keys_needed.difference(info.keys()) has_one_of = set(info) & keys_need_one_of if missing or not has_one_of: raise ValueError( 'Authorized user info was not in the expected format, missing ' f'fields {", ".join(missing)} and one of {", ".join(keys_need_one_of)}.') expiry = info.get('token_expiry') if expiry: # Convert the raw expiry to datetime expiry = datetime.datetime.strptime(expiry, YYYYMMDDTHHMMSSZ_FORMAT) id_token_data = info.get('decoded_id_token') # Provide backwards compatibility with field names when loading from JSON. # Some field names may be different, depending on when/how the credentials # were pickled. 
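    # Specifically: 'auth_token' is accepted as a fallback for 'token', and
    # 'id_token_jwt' as a fallback for 'id_token'; 'token_expiry' (parsed above)
    # and 'decoded_id_token' are passed through as expiry and id_token_data.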
return cls(token=info.get('token', info.get('auth_token', '')), refresh_token=info.get('refresh_token', ''), id_token=info.get('id_token_jwt', info.get('id_token')), token_uri=info.get('token_uri'), client_id=info['client_id'], client_secret=info['client_secret'], scopes=info.get('scopes'), quota_project_id=info.get('quota_project_id'), expiry=expiry, id_token_data=id_token_data, filename=filename) @classmethod def from_google_oauth2_credentials(cls, credentials, filename=None): """Generates Credentials from a google.oauth2.Credentials object.""" info = json.loads(credentials.to_json()) # Add properties which are not exported with the native to_json() output. info['id_token'] = credentials.id_token if credentials.expiry: info['token_expiry'] = credentials.expiry.strftime(YYYYMMDDTHHMMSSZ_FORMAT) info['quota_project_id'] = credentials.quota_project_id return cls.from_authorized_user_info_gam(info, filename=filename) @classmethod def from_client_secrets(cls, client_id, client_secret, scopes, access_type='offline', login_hint=None, filename=None, open_browser=True): """Runs an OAuth Flow from client secrets to generate credentials. Args: client_id: String, The OAuth2.0 Client ID. client_secret: String, The OAuth2.0 Client Secret. scopes: Sequence[str], A list of scopes to include in the credentials. access_type: String, 'offline' or 'online'. Indicates whether your application can refresh access tokens when the user is not present at the browser. Valid parameter values are online, which is the default value, and offline. Set the value to offline if your application needs to refresh access tokens when the user is not present at the browser. This is the method of refreshing access tokens described later in this document. This value instructs the Google authorization server to return a refresh token and an access token the first time that your application exchanges an authorization code for tokens. login_hint: String, The email address that will be displayed on the Google login page as a hint for the user to login to the correct account. filename: String, the path to a file to use to save the credentials. open_browser: Boolean: whether or not GAM should try to open the browser automatically. Returns: Credentials """ client_config = { 'installed': { 'client_id': client_id, 'client_secret': client_secret, 'redirect_uris': ['http://localhost'], 'auth_uri': API.GOOGLE_OAUTH2_ENDPOINT, 'token_uri': API.GOOGLE_OAUTH2_TOKEN_ENDPOINT, } } flow = _GamOauthFlow.from_client_config(client_config, scopes, autogenerate_code_verifier=True) flow_kwargs = {'access_type': access_type, 'open_browser': open_browser} if login_hint: flow_kwargs['login_hint'] = login_hint flow.run_dual(**flow_kwargs) return cls.from_google_oauth2_credentials(flow.credentials, filename=filename) def to_json(self, strip=None): """Creates a JSON representation of a Credentials. Args: strip: Sequence[str], Optional list of members to exclude from the generated JSON. Returns: str: A JSON representation of this instance, suitable to pass to from_json(). """ expiry = self.expiry.strftime(YYYYMMDDTHHMMSSZ_FORMAT) if self.expiry else None prep = { 'token': self.token, 'refresh_token': self.refresh_token, 'token_uri': self.token_uri, 'client_id': self.client_id, 'client_secret': self.client_secret, 'id_token': self.id_token, # Google auth doesn't currently give us scopes back on refresh. 
# 'scopes': sorted(self.scopes), 'token_expiry': expiry, 'decoded_id_token': self._id_token_data, } # Remove empty entries prep = {k: v for k, v in prep.items() if v is not None} # Remove entries that explicitly need to be removed if strip is not None: prep = {k: v for k, v in prep.items() if k not in strip} return json.dumps(prep, indent=2, sort_keys=True) def doOAuthRequest(currentScopes, login_hint, verifyScopes=False): client_id, client_secret = getOAuthClientIDAndSecret() scopesList = API.getClientScopesList(GC.Values[GC.TODRIVE_CLIENTACCESS]) if not currentScopes or verifyScopes: selectedScopes = getScopesFromUser(scopesList, True, currentScopes) if selectedScopes is None: return False scopes = set(API.REQUIRED_SCOPES) i = 0 for scope in scopesList: if selectedScopes[i] == '*': if scope['scope']: scopes.add(scope['scope']) elif selectedScopes[i] == 'R': scopes.add(f'{scope["scope"]}.readonly') elif selectedScopes[i] == 'A': scopes.add(f'{scope["scope"]}.action') i += 1 else: scopes = set(currentScopes+API.REQUIRED_SCOPES) if API.STORAGE_READWRITE_SCOPE in scopes: scopes.discard(API.STORAGE_READONLY_SCOPE) login_hint = _getValidateLoginHint(login_hint) # Needs to be set so oauthlib doesn't puke when Google changes our scopes os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = 'true' credentials = Credentials.from_client_secrets( client_id, client_secret, scopes=list(scopes), access_type='offline', login_hint=login_hint, open_browser=not GC.Values[GC.NO_BROWSER]) lock = FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK]) with lock: writeClientCredentials(credentials, GC.Values[GC.OAUTH2_TXT]) entityActionPerformed([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]]) return True # gam oauth|oauth2 create|request [] # gam oauth|oauth2 create|request [admin ] [scope|scopes ] def doOAuthCreate(): if not Cmd.PeekArgumentPresent(['admin', 'scope', 'scopes']): login_hint = getEmailAddress(noUid=True, optional=True) scopes = None checkForExtraneousArguments() else: login_hint = None scopes = [] scopesList = API.getClientScopesList(GC.Values[GC.TODRIVE_CLIENTACCESS]) while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'admin': login_hint = getEmailAddress(noUid=True) elif myarg in {'scope', 'scopes'}: for uscope in getString(Cmd.OB_API_SCOPE_URL_LIST).lower().replace(',', ' ').split(): if uscope in {'openid', 'email', API.USERINFO_EMAIL_SCOPE, 'profile', API.USERINFO_PROFILE_SCOPE}: continue for scope in scopesList: if ((uscope == scope['scope']) or (uscope.endswith('.action') and 'action' in scope['subscopes']) or (uscope.endswith('.readonly') and 'readonly' in scope['subscopes'])): scopes.append(uscope) break else: invalidChoiceExit(uscope, API.getClientScopesURLs(GC.Values[GC.TODRIVE_CLIENTACCESS]), True) else: unknownArgumentExit() if len(scopes) == 0: scopes = None doOAuthRequest(scopes, login_hint) def exitIfNoOauth2Txt(): if not os.path.isfile(GC.Values[GC.OAUTH2_TXT]): entityActionNotPerformedWarning([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]], Msg.DOES_NOT_EXIST) sys.exit(GM.Globals[GM.SYSEXITRC]) # gam oauth|oauth2 delete|revoke def doOAuthDelete(): checkForExtraneousArguments() exitIfNoOauth2Txt() lock = FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK], timeout=10) with lock: _, credentials = getOauth2TxtCredentials(noScopes=True) if not credentials: return entityType = Ent.OAUTH2_TXT_FILE entityName = GC.Values[GC.OAUTH2_TXT] sys.stdout.write(f'{Ent.Singular(entityType)}: {entityName}, will be Deleted in 3...') sys.stdout.flush() time.sleep(1) sys.stdout.write('2...') sys.stdout.flush() 
time.sleep(1) sys.stdout.write('1...') sys.stdout.flush() time.sleep(1) sys.stdout.write('boom!\n') sys.stdout.flush() httpObj = getHttpObj() params = {'token': credentials.refresh_token} revoke_uri = f'https://accounts.google.com/o/oauth2/revoke?{urlencode(params)}' httpObj.request(revoke_uri, 'GET') deleteFile(GC.Values[GC.OAUTH2_TXT], continueOnError=True) entityActionPerformed([entityType, entityName]) # gam oauth|oauth2 info|verify [showsecret] [accesstoken idtoken ] [showdetails] def doOAuthInfo(): credentials = access_token = id_token = None showDetails = showSecret = False while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'accesstoken': access_token = getString(Cmd.OB_ACCESS_TOKEN) elif myarg == 'idtoken': id_token = getString(Cmd.OB_ID_TOKEN) elif myarg == 'showdetails': showDetails = True elif myarg == 'showsecret': showSecret = True else: unknownArgumentExit() exitIfNoOauth2Txt() if not access_token and not id_token: credentials = getClientCredentials(noScopes=True) access_token = credentials.token printEntity([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]]) oa2 = buildGAPIObject(API.OAUTH2) try: token_info = callGAPI(oa2, 'tokeninfo', throwReasons=[GAPI.INVALID], access_token=access_token, id_token=id_token) except GAPI.invalid as e: entityActionFailedExit([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]], str(e)) if 'issued_to' in token_info: printKeyValueList(['Client ID', token_info['issued_to']]) if credentials is not None and showSecret: printKeyValueList(['Secret', credentials.client_secret]) if 'scope' in token_info: scopes = token_info['scope'].split(' ') printKeyValueList(['Scopes', len(scopes)]) Ind.Increment() for scope in sorted(scopes): printKeyValueList([scope]) Ind.Decrement() if 'email' in token_info: printKeyValueList(['Google Workspace Admin', f'{token_info["email"]}']) if 'expires_in' in token_info: printKeyValueList(['Expires', ISOformatTimeStamp((datetime.datetime.now()+datetime.timedelta(seconds=token_info['expires_in'])).replace(tzinfo=GC.Values[GC.TIMEZONE]))]) if showDetails: for k, v in sorted(iter(token_info.items())): if k not in ['email', 'expires_in', 'issued_to', 'scope']: printKeyValueList([k, v]) printBlankLine() # gam oauth|oauth2 update [] # gam oauth|oauth2 update [admin ] def doOAuthUpdate(): if Cmd.PeekArgumentPresent(['admin']): Cmd.Advance() login_hint = getEmailAddress(noUid=True) else: login_hint = getEmailAddress(noUid=True, optional=True) checkForExtraneousArguments() exitIfNoOauth2Txt() lock = FileLock(GM.Globals[GM.OAUTH2_TXT_LOCK]) with lock: jsonData = readFile(GC.Values[GC.OAUTH2_TXT], continueOnError=True, displayError=False) if not jsonData: invalidOauth2TxtExit(Msg.NO_DATA) try: jsonDict = json.loads(jsonData) if 'client_id' in jsonDict: if 'scopes' in jsonDict: currentScopes = jsonDict['scopes'] else: currentScopes = API.getClientScopesURLs(GC.Values[GC.TODRIVE_CLIENTACCESS]) else: currentScopes = [] except (AttributeError, IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidOauth2TxtExit(str(e)) if not doOAuthRequest(currentScopes, login_hint, verifyScopes=True): entityActionNotPerformedWarning([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]], Msg.USER_CANCELLED) sys.exit(GM.Globals[GM.SYSEXITRC]) # gam oauth|oauth2 refresh def doOAuthRefresh(): checkForExtraneousArguments() exitIfNoOauth2Txt() getClientCredentials(forceRefresh=True, forceWrite=True, filename=GC.Values[GC.OAUTH2_TXT], refreshOnly=True) entityActionPerformed([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]]) # gam oauth|oauth2 
export [] def doOAuthExport(): if Cmd.ArgumentsRemaining(): filename = getString(Cmd.OB_FILE_NAME) checkForExtraneousArguments() else: filename = GC.Values[GC.OAUTH2_TXT] getClientCredentials(forceRefresh=True, forceWrite=True, filename=filename, refreshOnly=True) if filename != '-': entityModifierNewValueActionPerformed([Ent.OAUTH2_TXT_FILE, GC.Values[GC.OAUTH2_TXT]], Act.MODIFIER_TO, filename) def getCRMService(login_hint): scopes = [API.CLOUD_PLATFORM_SCOPE] client_id = GAM_PROJECT_CREATION_CLIENT_ID client_secret = 'qM3dP8f_4qedwzWQE1VR4zzU' credentials = Credentials.from_client_secrets( client_id, client_secret, scopes=scopes, access_type='online', login_hint=login_hint, open_browser=not GC.Values[GC.NO_BROWSER]) httpObj = transportAuthorizedHttp(credentials, http=getHttpObj()) return (httpObj, getAPIService(API.CLOUDRESOURCEMANAGER, httpObj)) def enableGAMProjectAPIs(httpObj, projectId, login_hint, checkEnabled, i=0, count=0): apis = API.PROJECT_APIS[:] projectName = f'projects/{projectId}' serveu = getAPIService(API.SERVICEUSAGE, httpObj) status = True if checkEnabled: try: services = callGAPIpages(serveu.services(), 'list', 'services', throwReasons=[GAPI.NOT_FOUND, GAPI.PERMISSION_DENIED], parent=projectName, filter='state:ENABLED', fields='nextPageToken,services(name)') Act.Set(Act.CHECK) jcount = len(services) entityPerformActionNumItems([Ent.PROJECT, projectId], jcount, Ent.API, i, count) Ind.Increment() j = 0 for service in sorted(services, key=lambda k: k['name']): j += 1 if 'name' in service: serviceName = service['name'].split('/')[-1] if serviceName in apis: printEntityKVList([Ent.API, serviceName], ['Already enabled'], j, jcount) apis.remove(serviceName) else: printEntityKVList([Ent.API, serviceName], ['Already enabled (non-GAM which is fine)'], j, jcount) Ind.Decrement() except (GAPI.notFound, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.PROJECT, projectId], str(e), i, count) status = False jcount = len(apis) if status and jcount > 0: Act.Set(Act.ENABLE) entityPerformActionNumItems([Ent.PROJECT, projectId], jcount, Ent.API, i, count) failed = 0 Ind.Increment() j = 0 for api in apis: j += 1 serviceName = f'projects/{projectId}/services/{api}' while True: try: callGAPI(serveu.services(), 'enable', throwReasons=[GAPI.FAILED_PRECONDITION, GAPI.FORBIDDEN, GAPI.PERMISSION_DENIED, GAPI.INTERNAL_ERROR], retryReasons=[GAPI.INTERNAL_ERROR], name=serviceName) entityActionPerformed([Ent.API, api], j, jcount) break except GAPI.failedPrecondition as e: entityActionFailedWarning([Ent.API, api], str(e), j, jcount) readStdin(Msg.ACCEPT_CLOUD_TOS.format(login_hint)) except (GAPI.forbidden, GAPI.permissionDenied, GAPI.internalError) as e: entityActionFailedWarning([Ent.API, api], str(e), j, jcount) failed += 1 break Ind.Decrement() if not checkEnabled: status = failed <= 2 else: status = failed == 0 return status # gam enable apis [auto|manual] def doEnableAPIs(): automatic = None while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'auto': automatic = True elif myarg == 'manual': automatic = False else: unknownArgumentExit() request = getTLSv1_2Request() try: _, projectId = google.auth.default(scopes=[API.IAM_SCOPE], request=request) except (google.auth.exceptions.DefaultCredentialsError, google.auth.exceptions.RefreshError): projectId = readStdin(Msg.WHAT_IS_YOUR_PROJECT_ID).strip() while automatic is None: a_or_m = readStdin(Msg.ENABLE_PROJECT_APIS_AUTOMATICALLY_OR_MANUALLY).strip().lower() if a_or_m.startswith('a'): automatic = True break if 
a_or_m.startswith('m'): automatic = False break writeStdout(Msg.PLEASE_ENTER_A_OR_M) if automatic: login_hint = _getValidateLoginHint(None) httpObj, _ = getCRMService(login_hint) enableGAMProjectAPIs(httpObj, projectId, login_hint, True) else: apis = API.PROJECT_APIS[:] chunk_size = 20 writeStdout('Using an account with project access, please use ALL of these URLs to enable 20 APIs at a time:\n\n') for chunk in range(0, len(apis), chunk_size): apiid = ",".join(apis[chunk:chunk+chunk_size]) url = f'https://console.cloud.google.com/apis/enableflow?apiid={apiid}&project={projectId}' writeStdout(f' {url}\n\n') def _waitForSvcAcctCompletion(i): sleep_time = i*5 if i > 3: sys.stdout.write(Msg.WAITING_FOR_ITEM_CREATION_TO_COMPLETE_SLEEPING.format(Ent.Singular(Ent.SVCACCT), sleep_time)) time.sleep(sleep_time) def _grantRotateRights(iam, projectId, service_account, account_type='serviceAccount'): body = {'policy': {'bindings': [{'role': 'roles/iam.serviceAccountKeyAdmin', 'members': [f'{account_type}:{service_account}']}]}} maxRetries = 10 kvList = [Ent.PROJECT, projectId, Ent.SVCACCT, service_account] printEntityMessage(kvList, Msg.GRANTING_RIGHTS_TO_ROTATE_ITS_OWN_PRIVATE_KEY.format('Granting')) for retry in range(1, maxRetries+1): try: callGAPI(iam.projects().serviceAccounts(), 'setIamPolicy', throwReasons=[GAPI.INVALID_ARGUMENT], resource=f'projects/{projectId}/serviceAccounts/{service_account}', body=body) printEntityMessage(kvList, Msg.GRANTING_RIGHTS_TO_ROTATE_ITS_OWN_PRIVATE_KEY.format('Granted')) return True except GAPI.invalidArgument as e: entityActionFailedWarning(kvList, str(e)) if 'does not exist' not in str(e) or retry == maxRetries: return False _waitForSvcAcctCompletion(retry) except Exception as e: entityActionFailedWarning(kvList, str(e)) return False def _createOauth2serviceJSON(httpObj, projectInfo, svcAcctInfo, create_key=True): iam = getAPIService(API.IAM, httpObj) try: service_account = callGAPI(iam.projects().serviceAccounts(), 'create', throwReasons=[GAPI.NOT_FOUND, GAPI.PERMISSION_DENIED, GAPI.ALREADY_EXISTS], name=f'projects/{projectInfo["projectId"]}', body={'accountId': svcAcctInfo['name'], 'serviceAccount': {'displayName': svcAcctInfo['displayName'], 'description': svcAcctInfo['description']}}) entityActionPerformed([Ent.PROJECT, projectInfo['projectId'], Ent.SVCACCT, service_account['name'].rsplit('/', 1)[-1]]) except (GAPI.notFound, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.PROJECT, projectInfo['projectId']], str(e)) return False except GAPI.alreadyExists as e: entityActionFailedWarning([Ent.PROJECT, projectInfo['projectId'], Ent.SVCACCT, svcAcctInfo['name']], str(e)) writeStderr(Msg.RERUN_THE_COMMAND_AND_SPECIFY_A_NEW_SANAME) return False GM.Globals[GM.SVCACCT_SCOPES_DEFINED] = False if create_key and not doProcessSvcAcctKeys(mode='retainexisting', iam=iam, projectId=service_account['projectId'], clientEmail=service_account['email'], clientId=service_account['uniqueId']): return False sa_email = service_account['name'].rsplit('/', 1)[-1] return _grantRotateRights(iam, projectInfo['projectId'], sa_email) def _createClientSecretsOauth2service(httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key=True): def _checkClientAndSecret(csHttpObj, client_id, client_secret): post_data = {'client_id': client_id, 'client_secret': client_secret, 'code': 'ThisIsAnInvalidCodeOnlyBeingUsedToTestIfClientAndSecretAreValid', 'redirect_uri': 'http://127.0.0.1:8080', 'grant_type': 'authorization_code'} _, content = 
csHttpObj.request(API.GOOGLE_OAUTH2_TOKEN_ENDPOINT, 'POST', urlencode(post_data), headers={'Content-type': 'application/x-www-form-urlencoded'}) try: content = json.loads(content) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: sys.stderr.write(f'{str(e)}: {content}') return False if not 'error' in content or not 'error_description' in content: sys.stderr.write(f'Unknown error: {content}\n') return False if content['error'] == 'invalid_grant': return True if content['error_description'] == 'The OAuth client was not found.': sys.stderr.write(Msg.IS_NOT_A_VALID_CLIENT_ID.format(client_id)) return False if content['error_description'] == 'Unauthorized': sys.stderr.write(Msg.IS_NOT_A_VALID_CLIENT_SECRET.format(client_secret)) return False sys.stderr.write(f'Unknown error: {content}\n') return False if not enableGAMProjectAPIs(httpObj, projectInfo['projectId'], login_hint, False): return sys.stdout.write(Msg.SETTING_GAM_PROJECT_CONSENT_SCREEN_CREATING_CLIENT) console_url = f'https://console.cloud.google.com/auth/clients?project={projectInfo["projectId"]}&authuser={login_hint}' csHttpObj = getHttpObj() while True: sys.stdout.write(Msg.CREATE_CLIENT_INSTRUCTIONS.format(console_url, appInfo['applicationTitle'], appInfo['supportEmail'])) client_id = readStdin(Msg.ENTER_YOUR_CLIENT_ID).strip() if not client_id: client_id = readStdin('').strip() client_secret = readStdin(Msg.ENTER_YOUR_CLIENT_SECRET).strip() if not client_secret: client_secret = readStdin('').strip() client_valid = _checkClientAndSecret(csHttpObj, client_id, client_secret) if client_valid: break sys.stdout.write('\n') cs_data = f'''{{ "installed": {{ "auth_provider_x509_cert_url": "{API.GOOGLE_AUTH_PROVIDER_X509_CERT_URL}", "auth_uri": "{API.GOOGLE_OAUTH2_ENDPOINT}", "client_id": "{client_id}", "client_secret": "{client_secret}", "created_by": "{login_hint}", "project_id": "{projectInfo['projectId']}", "token_uri": "{API.GOOGLE_OAUTH2_TOKEN_ENDPOINT}" }} }}''' writeFile(GC.Values[GC.CLIENT_SECRETS_JSON], cs_data, continueOnError=False) sys.stdout.write(Msg.TRUST_GAM_CLIENT_ID.format(GAM, client_id)) readStdin('') if not _createOauth2serviceJSON(httpObj, projectInfo, svcAcctInfo, create_key): return sys.stdout.write(Msg.YOUR_GAM_PROJECT_IS_CREATED_AND_READY_TO_USE) def _getProjects(crm, pfilter, returnNF=False): try: projects = callGAPIpages(crm.projects(), 'search', 'projects', throwReasons=[GAPI.BAD_REQUEST, GAPI.INVALID_ARGUMENT, GAPI.PERMISSION_DENIED], query=pfilter) if projects: return projects if (not pfilter) or pfilter == GAM_PROJECT_FILTER: return [] if pfilter.startswith('id:'): projects = [callGAPI(crm.projects(), 'get', throwReasons=[GAPI.BAD_REQUEST, GAPI.INVALID_ARGUMENT, GAPI.PERMISSION_DENIED], name=f'projects/{pfilter[3:]}')] if projects or not returnNF: return projects return [] except (GAPI.badRequest, GAPI.invalidArgument) as e: entityActionFailedExit([Ent.PROJECT, pfilter], str(e)) except GAPI.permissionDenied: if (not pfilter) or (not pfilter.startswith('id:')) or (not returnNF): return [] return [{'projectId': pfilter[3:], 'state': 'NF'}] def _checkProjectFound(project, i, count): if project.get('state', '') != 'NF': return True entityActionFailedWarning([Ent.PROJECT, project['projectId']], Msg.DOES_NOT_EXIST, i, count) return False def convertGCPFolderNameToID(parent, crm): folders = callGAPIpages(crm.folders(), 'search', 'folders', query=f'displayName="{parent}"') if not folders: entityActionFailedExit([Ent.PROJECT_FOLDER, parent], Msg.NOT_FOUND) jcount = len(folders) if jcount > 1: 
entityActionNotPerformedWarning([Ent.PROJECT_FOLDER, parent], Msg.PLEASE_SELECT_ENTITY_TO_PROCESS.format(jcount, Ent.Plural(Ent.PROJECT_FOLDER), 'use in create', 'parent ')) Ind.Increment() j = 0 for folder in folders: j += 1 printKeyValueListWithCount(['Name', folder['name'], 'ID', folder['displayName']], j, jcount) Ind.Decrement() systemErrorExit(MULTIPLE_PROJECT_FOLDERS_FOUND_RC, None) return folders[0]['name'] PROJECTID_PATTERN = re.compile(r'^[a-z][a-z0-9-]{4,28}[a-z0-9]$') PROJECTID_FORMAT_REQUIRED = '[a-z][a-z0-9-]{4,28}[a-z0-9]' def _checkProjectId(projectId): if not PROJECTID_PATTERN.match(projectId): Cmd.Backup() invalidArgumentExit(PROJECTID_FORMAT_REQUIRED) PROJECTNAME_PATTERN = re.compile('^[a-zA-Z0-9 '+"'"+'"!-]{4,30}$') PROJECTNAME_FORMAT_REQUIRED = '[a-zA-Z0-9 \'"!-]{4,30}' def _checkProjectName(projectName): if not PROJECTNAME_PATTERN.match(projectName): Cmd.Backup() invalidArgumentExit(PROJECTNAME_FORMAT_REQUIRED) def _getSvcAcctInfo(myarg, svcAcctInfo): if myarg == 'saname': svcAcctInfo['name'] = getString(Cmd.OB_STRING, minLen=6, maxLen=30) _checkProjectId(svcAcctInfo['name']) elif myarg == 'sadisplayname': svcAcctInfo['displayName'] = getString(Cmd.OB_STRING, maxLen=100) elif myarg == 'sadescription': svcAcctInfo['description'] = getString(Cmd.OB_STRING, maxLen=256) else: return False return True def _getAppInfo(myarg, appInfo): if myarg == 'appname': appInfo['applicationTitle'] = getString(Cmd.OB_STRING) elif myarg == 'supportemail': appInfo['supportEmail'] = getEmailAddress(noUid=True) else: return False return True def _generateProjectSvcAcctId(prefix): return f'{prefix}-{"".join(random.choice(LOWERNUMERIC_CHARS) for _ in range(5))}' def _getLoginHintProjectInfo(createCmd): login_hint = None create_key = True appInfo = {'applicationTitle': '', 'supportEmail': ''} projectInfo = {'projectId': '', 'parent': '', 'name': ''} svcAcctInfo = {'name': '', 'displayName': '', 'description': ''} if not Cmd.PeekArgumentPresent(['admin', 'appname', 'supportemail', 'project', 'parent', 'projectname', 'saname', 'sadisplayname', 'sadescription', 'algorithm', 'localkeysize', 'validityhours', 'yubikey', 'nokey']): login_hint = getString(Cmd.OB_EMAIL_ADDRESS, optional=True) if login_hint and login_hint.find('@') == -1: Cmd.Backup() login_hint = None projectInfo['projectId'] = getString(Cmd.OB_STRING, optional=True, minLen=6, maxLen=30).strip() if projectInfo['projectId']: _checkProjectId(projectInfo['projectId']) checkForExtraneousArguments() else: while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'admin': login_hint = getEmailAddress(noUid=True) elif myarg == 'nokey': create_key = False elif myarg == 'project': projectInfo['projectId'] = getString(Cmd.OB_STRING, minLen=6, maxLen=30) _checkProjectId(projectInfo['projectId']) elif createCmd and myarg == 'parent': projectInfo['parent'] = getString(Cmd.OB_STRING) elif myarg == 'projectname': projectInfo['name'] = getString(Cmd.OB_STRING, minLen=4, maxLen=30) _checkProjectName(projectInfo['name']) elif _getSvcAcctInfo(myarg, svcAcctInfo): pass elif _getAppInfo(myarg, appInfo): pass elif myarg in {'algorithm', 'localkeysize', 'validityhours', 'yubikey'}: Cmd.Backup() break else: unknownArgumentExit() if not projectInfo['projectId']: if createCmd: projectInfo['projectId'] = _generateProjectSvcAcctId('gam-project') else: projectInfo['projectId'] = readStdin(Msg.WHAT_IS_YOUR_PROJECT_ID).strip() if not PROJECTID_PATTERN.match(projectInfo['projectId']): systemErrorExit(USAGE_ERROR_RC, 
f'{Cmd.ARGUMENT_ERROR_NAMES[Cmd.ARGUMENT_INVALID][1]} {Cmd.OB_PROJECT_ID}: {Msg.EXPECTED} <{PROJECTID_FORMAT_REQUIRED}>') if not projectInfo['name']: projectInfo['name'] = 'GAM Project' if not GC.Values[GC.USE_PROJECTID_AS_NAME] else projectInfo['projectId'] if not svcAcctInfo['name']: svcAcctInfo['name'] = projectInfo['projectId'] if not svcAcctInfo['displayName']: svcAcctInfo['displayName'] = projectInfo['name'] if not svcAcctInfo['description']: svcAcctInfo['description'] = svcAcctInfo['displayName'] login_hint = _getValidateLoginHint(login_hint, projectInfo['projectId']) if not appInfo['applicationTitle']: appInfo['applicationTitle'] = 'GAM' if not GC.Values[GC.USE_PROJECTID_AS_NAME] else projectInfo['projectId'] if not appInfo['supportEmail']: appInfo['supportEmail'] = login_hint httpObj, crm = getCRMService(login_hint) if projectInfo['parent'] and not projectInfo['parent'].startswith('organizations/') and not projectInfo['parent'].startswith('folders/'): projectInfo['parent'] = convertGCPFolderNameToID(projectInfo['parent'], crm) projects = _getProjects(crm, f'id:{projectInfo["projectId"]}') if not createCmd: if not projects: entityActionFailedExit([Ent.USER, login_hint, Ent.PROJECT, projectInfo['projectId']], Msg.DOES_NOT_EXIST) if projects[0]['state'] != 'ACTIVE': entityActionFailedExit([Ent.USER, login_hint, Ent.PROJECT, projectInfo['projectId']], Msg.NOT_ACTIVE) else: if projects: entityActionFailedExit([Ent.USER, login_hint, Ent.PROJECT, projectInfo['projectId']], Msg.DUPLICATE) return (crm, httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key) def _getCurrentProjectId(): jsonData = readFile(GC.Values[GC.OAUTH2SERVICE_JSON], continueOnError=True, displayError=False) if jsonData: try: return json.loads(jsonData)['project_id'] except (IndexError, KeyError, SyntaxError, TypeError, ValueError): pass jsonData = readFile(GC.Values[GC.CLIENT_SECRETS_JSON], continueOnError=True, displayError=True) if not jsonData: invalidClientSecretsJsonExit(Msg.NO_DATA) try: return json.loads(jsonData)['installed']['project_id'] except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidClientSecretsJsonExit(str(e)) GAM_PROJECT_FILTER = 'id:gam-project-*' PROJECTID_FILTER_REQUIRED = '' PROJECTS_CREATESVCACCT_OPTIONS = {'saname', 'sadisplayname', 'sadescription'} PROJECTS_DELETESVCACCT_OPTIONS = {'saemail', 'saname', 'sauniqueid'} PROJECTS_PRINTSHOW_OPTIONS = {'showsakeys', 'showiampolicies', 'onememberperrow', 'states', 'todrive', 'delimiter', 'formatjson', 'quotechar'} def _getLoginHintProjects(createSvcAcctCmd=False, deleteSvcAcctCmd=False, printShowCmd=False, readOnly=False): if checkArgumentPresent(['admin']): login_hint = getEmailAddress(noUid=True) else: login_hint = getString(Cmd.OB_EMAIL_ADDRESS, optional=True) if login_hint and login_hint.find('@') == -1: Cmd.Backup() login_hint = None if readOnly and login_hint and login_hint != _getAdminEmail(): readOnly = False projectIds = None pfilter = getString(Cmd.OB_STRING, optional=True) if not pfilter: pfilter = 'current' if not printShowCmd else GAM_PROJECT_FILTER elif printShowCmd and pfilter in PROJECTS_PRINTSHOW_OPTIONS: pfilter = GAM_PROJECT_FILTER Cmd.Backup() elif createSvcAcctCmd and pfilter in PROJECTS_CREATESVCACCT_OPTIONS: pfilter = 'current' Cmd.Backup() elif deleteSvcAcctCmd and pfilter in PROJECTS_DELETESVCACCT_OPTIONS: pfilter = 'current' Cmd.Backup() elif printShowCmd and pfilter.lower() == 'all': pfilter = None elif pfilter.lower() == 'current': pfilter = 'current' elif pfilter.lower() == 'gam': 
pfilter = GAM_PROJECT_FILTER elif pfilter.lower() == 'filter': pfilter = getString(Cmd.OB_STRING) elif pfilter.lower() == 'select': projectIds = getEntityList(Cmd.OB_PROJECT_ID_ENTITY, False) projectId = None elif PROJECTID_PATTERN.match(pfilter): pfilter = f'id:{pfilter}' elif pfilter.startswith('id:') and PROJECTID_PATTERN.match(pfilter[3:]): pass else: Cmd.Backup() invalidArgumentExit(['', 'all|'][printShowCmd]+PROJECTID_FILTER_REQUIRED) if not printShowCmd and not createSvcAcctCmd and not deleteSvcAcctCmd: checkForExtraneousArguments() if projectIds is None: if pfilter in {'current', 'id:current'}: projectId = _getCurrentProjectId() else: projectId = f'filter {pfilter or "all"}' login_hint = _getValidateLoginHint(login_hint, projectId) crm = None if readOnly: _, crm = buildGAPIServiceObject(API.CLOUDRESOURCEMANAGER, None) if crm: httpObj = crm._http if not crm: httpObj, crm = getCRMService(login_hint) if projectIds is None: if pfilter in {'current', 'id:current'}: if not printShowCmd: projects = [{'projectId': projectId}] else: projects = _getProjects(crm, f'id:{projectId}', returnNF=True) else: projects = _getProjects(crm, pfilter, returnNF=printShowCmd) else: projects = [] for projectId in projectIds: projects.extend(_getProjects(crm, f'id:{projectId}', returnNF=True)) return (crm, httpObj, login_hint, projects) def _checkForExistingProjectFiles(projectFiles): for a_file in projectFiles: if os.path.exists(a_file): systemErrorExit(JSON_ALREADY_EXISTS_RC, Msg.AUTHORIZATION_FILE_ALREADY_EXISTS.format(a_file, Act.ToPerform())) def getGCPOrg(crm, login_hint, login_domain): try: getorg = callGAPI(crm.organizations(), 'search', throwReasons=[GAPI.INVALID_ARGUMENT, GAPI.PERMISSION_DENIED], query=f'domain:{login_domain}') except (GAPI.invalidArgument, GAPI.permissionDenied) as e: entityActionFailedExit([Ent.USER, login_hint, Ent.DOMAIN, login_domain], str(e)) try: organization = getorg['organizations'][0]['name'] sys.stdout.write(Msg.YOUR_ORGANIZATION_NAME_IS.format(organization)) return organization except (KeyError, IndexError): systemErrorExit(3, Msg.YOU_HAVE_NO_RIGHTS_TO_CREATE_PROJECTS_AND_YOU_ARE_NOT_A_SUPER_ADMIN) # gam create gcpfolder # gam create gcpfolder [admin def doCreateGCPFolder(): login_hint = None if not Cmd.PeekArgumentPresent(['admin', 'folder']): name = getString(Cmd.OB_STRING) checkForExtraneousArguments() else: name = '' while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'admin': login_hint = getEmailAddress(noUid=True) elif myarg == 'folder': name = getString(Cmd.OB_STRING) else: unknownArgumentExit() if not name: missingChoiceExit('folder') login_hint = _getValidateLoginHint(login_hint) login_domain = getEmailAddressDomain(login_hint) _, crm = getCRMService(login_hint) organization = getGCPOrg(crm, login_hint, login_domain) try: result = callGAPI(crm.folders(), 'create', throwReasons=[GAPI.INVALID_ARGUMENT, GAPI.PERMISSION_DENIED], body={'parent': organization, 'displayName': name}) except (GAPI.invalidArgument, GAPI.permissionDenied) as e: entityActionFailedExit([Ent.USER, login_hint, Ent.GCP_FOLDER, name], str(e)) entityActionPerformed([Ent.USER, login_hint, Ent.GCP_FOLDER, name, Ent.GCP_FOLDER_NAME, result['name']]) # gam create project [] [] # gam create project [admin ] [project ] # [appname ] [supportemail ] # [projectname ] [parent ] # [saname ] [sadisplayname ] [sadescription ] # [(algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION 
yubikey_serialnumber )| # nokey] def doCreateProject(): _checkForExistingProjectFiles([GC.Values[GC.OAUTH2SERVICE_JSON], GC.Values[GC.CLIENT_SECRETS_JSON]]) sys.stdout.write(Msg.TRUST_GAM_CLIENT_ID.format(GAM_PROJECT_CREATION, GAM_PROJECT_CREATION_CLIENT_ID)) readStdin('') crm, httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key = _getLoginHintProjectInfo(True) login_domain = getEmailAddressDomain(login_hint) body = {'projectId': projectInfo['projectId'], 'displayName': projectInfo['name']} if projectInfo['parent']: body['parent'] = projectInfo['parent'] while True: create_again = False sys.stdout.write(Msg.CREATING_PROJECT.format(body['displayName'])) try: create_operation = callGAPI(crm.projects(), 'create', throwReasons=[GAPI.BAD_REQUEST, GAPI.ALREADY_EXISTS, GAPI.FAILED_PRECONDITION, GAPI.PERMISSION_DENIED], body=body) except (GAPI.badRequest, GAPI.alreadyExists, GAPI.failedPrecondition, GAPI.permissionDenied) as e: entityActionFailedExit([Ent.USER, login_hint, Ent.PROJECT, projectInfo['projectId']], str(e)) operation_name = create_operation['name'] time.sleep(5) # Google recommends always waiting at least 5 seconds for i in range(1, 10): sys.stdout.write(Msg.CHECKING_PROJECT_CREATION_STATUS) status = callGAPI(crm.operations(), 'get', name=operation_name) if 'error' in status: if status['error'].get('message', '') == 'No permission to create project in organization': sys.stdout.write(Msg.NO_RIGHTS_GOOGLE_CLOUD_ORGANIZATION) organization = getGCPOrg(crm, login_hint, login_domain) org_policy = callGAPI(crm.organizations(), 'getIamPolicy', resource=organization) if 'bindings' not in org_policy: org_policy['bindings'] = [] sys.stdout.write(Msg.LOOKS_LIKE_NO_ONE_HAS_RIGHTS_TO_YOUR_GOOGLE_CLOUD_ORGANIZATION_ATTEMPTING_TO_GIVE_YOU_CREATE_RIGHTS) else: sys.stdout.write(Msg.THE_FOLLOWING_RIGHTS_SEEM_TO_EXIST) for a_policy in org_policy['bindings']: if 'role' in a_policy: sys.stdout.write(f' Role: {a_policy["role"]}\n') if 'members' in a_policy: sys.stdout.write(' Members:\n') for member in a_policy['members']: sys.stdout.write(f' {member}\n') my_role = 'roles/resourcemanager.projectCreator' sys.stdout.write(Msg.GIVING_LOGIN_HINT_THE_CREATOR_ROLE.format(login_hint, my_role)) org_policy['bindings'].append({'role': my_role, 'members': [f'user:{login_hint}']}) callGAPI(crm.organizations(), 'setIamPolicy', resource=organization, body={'policy': org_policy}) create_again = True break try: if status['error']['details'][0]['violations'][0]['description'] == 'Callers must accept Terms of Service': readStdin(Msg.ACCEPT_CLOUD_TOS.format(login_hint)) create_again = True break except (IndexError, KeyError): pass systemErrorExit(1, str(status)+'\n') if status.get('done', False): break sleep_time = min(2 ** i, 60) sys.stdout.write(Msg.PROJECT_STILL_BEING_CREATED_SLEEPING.format(sleep_time)) time.sleep(sleep_time) if create_again: continue if not status.get('done', False): systemErrorExit(1, Msg.FAILED_TO_CREATE_PROJECT.format(status)) elif 'error' in status: systemErrorExit(2, status['error']+'\n') break # Try to set policy on project to allow Service Account Key Upload # orgp = getAPIService(API.ORGPOLICY, httpObj) # projectParent = f"projects/{projectInfo['projectId']}" # policyName = f'{projectParent}/policies/iam.managed.disableServiceAccountKeyUpload' # try: # result = callGAPI(orgp.projects().policies(), 'get', # throwReasons=[GAPI.NOT_FOUND, GAPI.FAILED_PRECONDITION, GAPI.PERMISSION_DENIED], # name=policyName) # if result['spec']['rules'][0]['enforce']: # 
callGAPI(orgp.projects().policies(), 'patch', # throwReasons=[GAPI.FAILED_PRECONDITION, GAPI.PERMISSION_DENIED], # name=policyName, body={'spec': {'rules': [{'enforce': False}]}}, updateMask='policy.spec') # except GAPI.notFound: # callGAPI(orgp.projects().policies(), 'create', # throwReasons=[GAPI.BAD_REQUEST, GAPI.FAILED_PRECONDITION, GAPI.PERMISSION_DENIED], # parent=projectParent, body={'name': policyName, 'spec': {'rules': [{'enforce': False}]}}) # except (GAPI.badRequest, GAPI.failedPrecondition, GAPI.permissionDenied): # pass # Create client_secrets.json and oauth2service.json _createClientSecretsOauth2service(httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key) # gam use project [] [] # gam use project [admin ] [project ] # [appname ] [supportemail ] # [saname ] [sadisplayname ] [sadescription ] # [(algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber )] def doUseProject(): _checkForExistingProjectFiles([GC.Values[GC.OAUTH2SERVICE_JSON], GC.Values[GC.CLIENT_SECRETS_JSON]]) _, httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key = _getLoginHintProjectInfo(False) _createClientSecretsOauth2service(httpObj, login_hint, appInfo, projectInfo, svcAcctInfo, create_key) # gam update project [[admin] ] [] def doUpdateProject(): _, httpObj, login_hint, projects = _getLoginHintProjects() count = len(projects) entityPerformActionNumItems([Ent.USER, login_hint], count, Ent.PROJECT) Ind.Increment() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue projectId = project['projectId'] Act.Set(Act.UPDATE) if not enableGAMProjectAPIs(httpObj, projectId, login_hint, True, i, count): continue iam = getAPIService(API.IAM, httpObj) _getSvcAcctData() # needed to read in GM.OAUTH2SERVICE_JSON_DATA _grantRotateRights(iam, projectId, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_email']) Ind.Decrement() # gam delete project [[admin] ] [] def doDeleteProject(): crm, _, login_hint, projects = _getLoginHintProjects() count = len(projects) entityPerformActionNumItems([Ent.USER, login_hint], count, Ent.PROJECT) Ind.Increment() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue projectId = project['projectId'] try: callGAPI(crm.projects(), 'delete', throwReasons=[GAPI.FORBIDDEN, GAPI.PERMISSION_DENIED], name=project['name']) entityActionPerformed([Ent.PROJECT, projectId]) except (GAPI.forbidden, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.PROJECT, projectId], str(e)) Ind.Decrement() PROJECT_TIMEOBJECTS = ['createTime'] PROJECT_STATE_CHOICE_MAP = { 'all': {'ACTIVE', 'DELETE_REQUESTED'}, 'active': {'ACTIVE'}, 'deleterequested': {'DELETE_REQUESTED'} } # gam print projects [[admin] ] [all|] [todrive *] # [states all|active|deleterequested] [showiampolicies 0|1|3 [onememberperrow]] # [delimiter ] [formatjson [quotechar ]] # gam show projects [[admin] ] [all|] # [states all|active|deleterequested] [showiampolicies 0|1|3] def doPrintShowProjects(): def _getProjectPolicies(crm, project, policyBody, i, count): try: policy = callGAPI(crm.projects(), 'getIamPolicy', throwReasons=[GAPI.FORBIDDEN, GAPI.PERMISSION_DENIED], resource=project['name'], body=policyBody) return policy except (GAPI.forbidden, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.PROJECT, project['projectId'], Ent.IAM_POLICY, None], str(e), i, count) return {} readOnly = not 
Cmd.ArgumentIsAhead('showiampolicies') crm, _, login_hint, projects = _getLoginHintProjects(printShowCmd=True, readOnly=readOnly) csvPF = CSVPrintFile(['User', 'projectId']) if Act.csvFormat() else None FJQC = FormatJSONQuoteChar(csvPF) oneMemberPerRow = False showIAMPolicies = -1 lifecycleStates = PROJECT_STATE_CHOICE_MAP['active'] policy = None delimiter = GC.Values[GC.CSV_OUTPUT_FIELD_DELIMITER] while Cmd.ArgumentsRemaining(): myarg = getArgument() if csvPF and myarg == 'todrive': csvPF.GetTodriveParameters() elif csvPF and myarg == 'onememberperrow': oneMemberPerRow = True elif myarg == 'states': lifecycleStates = getChoice(PROJECT_STATE_CHOICE_MAP, mapChoice=True) elif myarg == 'showiampolicies': showIAMPolicies = int(getChoice(['0', '1', '3'])) policyBody = {'options': {"requestedPolicyVersion": showIAMPolicies}} elif myarg == 'delimiter': delimiter = getCharacter() else: FJQC.GetFormatJSONQuoteChar(myarg, True) if not csvPF: count = len(projects) entityPerformActionNumItems([Ent.USER, login_hint], count, Ent.PROJECT) Ind.Increment() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue if project['state'] not in lifecycleStates: continue projectId = project['projectId'] if showIAMPolicies >= 0: policy = _getProjectPolicies(crm, project, policyBody, i, count) printEntity([Ent.PROJECT, projectId], i, count) Ind.Increment() printKeyValueList(['name', project['name']]) printKeyValueList(['displayName', project['displayName']]) for field in ['createTime', 'updateTime', 'deleteTime']: if field in project: printKeyValueList([field, formatLocalTime(project[field])]) printKeyValueList(['state', project['state']]) jcount = len(project.get('labels', [])) if jcount > 0: printKeyValueList(['labels', jcount]) Ind.Increment() for k, v in iter(project['labels'].items()): printKeyValueList([k, v]) Ind.Decrement() if 'parent' in project: printKeyValueList(['parent', project['parent']]) if policy: printKeyValueList([Ent.Singular(Ent.IAM_POLICY), '']) Ind.Increment() bindings = policy.get('bindings', []) jcount = len(bindings) printKeyValueList(['version', policy['version']]) printKeyValueList(['bindings', jcount]) Ind.Increment() j = 0 for binding in bindings: j += 1 printKeyValueListWithCount(['role', binding['role']], j, jcount) Ind.Increment() for member in binding.get('members', []): printKeyValueList(['member', member]) if 'condition' in binding: printKeyValueList(['condition', '']) Ind.Increment() for k, v in iter(binding['condition'].items()): printKeyValueList([k, v]) Ind.Decrement() Ind.Decrement() Ind.Decrement() Ind.Decrement() Ind.Decrement() Ind.Decrement() else: if not FJQC.formatJSON: csvPF.AddTitles(['projectId', 'name', 'displayName', 'createTime', 'updateTime', 'deleteTime', 'state']) csvPF.SetSortAllTitles() count = len(projects) i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue if project['state'] not in lifecycleStates: continue projectId = project['projectId'] if showIAMPolicies >= 0: policy = _getProjectPolicies(crm, project, policyBody, i, count) if FJQC.formatJSON: if policy is not None: project['policy'] = policy row = flattenJSON(project, flattened={'User': login_hint}, timeObjects=PROJECT_TIMEOBJECTS) if csvPF.CheckRowTitles(row): csvPF.WriteRowNoFilter({'User': login_hint, 'projectId': projectId, 'JSON': json.dumps(cleanJSON(project), ensure_ascii=False, sort_keys=True)}) continue row = flattenJSON(project, flattened={'User': login_hint}, timeObjects=PROJECT_TIMEOBJECTS) if not policy: 
csvPF.WriteRowTitles(row) continue row[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}version'] = policy['version'] for binding in policy.get('bindings', []): prow = row.copy() prow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}role'] = binding['role'] if 'condition' in binding: for k, v in iter(binding['condition'].items()): prow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}condition{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = v members = binding.get('members', []) if not oneMemberPerRow: prow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}members'] = delimiter.join(members) csvPF.WriteRowTitles(prow) else: for member in members: mrow = prow.copy() mrow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}member'] = member csvPF.WriteRowTitles(mrow) csvPF.writeCSVfile('Projects') # gam info currentprojectid def doInfoCurrentProjectId(): checkForExtraneousArguments() printEntity([Ent.PROJECT_ID, _getCurrentProjectId()]) # gam create svcacct [[admin] ] [] # [saname ] [sadisplayname ] [sadescription ] # [(algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber )] def doCreateSvcAcct(): _checkForExistingProjectFiles([GC.Values[GC.OAUTH2SERVICE_JSON]]) _, httpObj, login_hint, projects = _getLoginHintProjects(createSvcAcctCmd=True) svcAcctInfo = {'name': '', 'displayName': '', 'description': ''} while Cmd.ArgumentsRemaining(): myarg = getArgument() if _getSvcAcctInfo(myarg, svcAcctInfo): pass else: unknownArgumentExit() if not svcAcctInfo['name']: svcAcctInfo['name'] = _generateProjectSvcAcctId('gam-svcacct') if not svcAcctInfo['displayName']: svcAcctInfo['displayName'] = svcAcctInfo['name'] if not svcAcctInfo['description']: svcAcctInfo['description'] = svcAcctInfo['displayName'] count = len(projects) entityPerformActionSubItemModifierNumItems([Ent.USER, login_hint], Ent.SVCACCT, Act.MODIFIER_TO, count, Ent.PROJECT) Ind.Increment() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue projectInfo = {'projectId': project['projectId']} _createOauth2serviceJSON(httpObj, projectInfo, svcAcctInfo) Ind.Decrement() # gam delete svcacct [[admin] ] [] # (saemail )|(saname )|(sauniqueid ) def doDeleteSvcAcct(): _, httpObj, login_hint, projects = _getLoginHintProjects(deleteSvcAcctCmd=True) iam = getAPIService(API.IAM, httpObj) clientEmail = clientId = clientName = None while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'saemail': clientEmail = getEmailAddress(noUid=True) clientName = clientId = None elif myarg == 'saname': clientName = getString(Cmd.OB_STRING, minLen=6, maxLen=30).strip() _checkProjectId(clientName) clientEmail = clientId = None elif myarg == 'sauniqueid': clientId = getInteger(minVal=0) clientEmail = clientName = None else: unknownArgumentExit() if not clientEmail and not clientId and not clientName: missingArgumentExit('email|name|uniqueid') count = len(projects) entityPerformActionSubItemModifierNumItems([Ent.USER, login_hint], Ent.SVCACCT, Act.MODIFIER_FROM, count, Ent.PROJECT) Ind.Increment() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue projectId = project['projectId'] try: if clientEmail: saName = clientEmail elif clientName: saName = f'{clientName}@{projectId}.iam.gserviceaccount.com' else: #clientId saName = clientId callGAPI(iam.projects().serviceAccounts(), 'delete', throwReasons=[GAPI.NOT_FOUND, GAPI.BAD_REQUEST], 
name=f'projects/{projectId}/serviceAccounts/{saName}') entityActionPerformed([Ent.PROJECT, projectId, Ent.SVCACCT, saName], i, count) except (GAPI.notFound, GAPI.badRequest) as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, saName], str(e), i, count) Ind.Decrement() def _getSvcAcctKeyProjectClientFields(): return (GM.Globals[GM.OAUTH2SERVICE_JSON_DATA].get('private_key_id', ''), GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['project_id'], GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_email'], GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_id']) # gam check serviceaccount (scope|scopes )* [usecolor] # gam update serviceaccount (scope|scopes )* [usecolor] def checkServiceAccount(users): def printMessage(message): writeStdout(Ind.Spaces()+message+'\n') def printPassFail(description, result): writeStdout(Ind.Spaces()+f'{description:73} {result}'+'\n') def authorizeScopes(message): long_url = ('https://admin.google.com/ac/owl/domainwidedelegation' f'?clientScopeToAdd={",".join(checkScopes)}' f'&clientIdToAdd={service_account}&overwriteClientId=true') if GC.Values[GC.DOMAIN]: long_url += f'&dn={GC.Values[GC.DOMAIN]}' long_url += f'&authuser={_getAdminEmail()}' short_url = shortenURL(long_url) printLine(message.format('', short_url)) credentials = getSvcAcctCredentials([API.USERINFO_EMAIL_SCOPE], None, forceOauth=True) allScopes = API.getSvcAcctScopes(GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY], Act.Get() == Act.UPDATE) checkScopesSet = set() saScopes = {} useColor = False while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in {'scope', 'scopes'}: for scope in getString(Cmd.OB_API_SCOPE_URL_LIST).lower().replace(',', ' ').split(): api = API.getSvcAcctScopeAPI(scope) if api is not None: saScopes.setdefault(api, []) saScopes[api].append(scope) checkScopesSet.add(scope) else: invalidChoiceExit(scope, allScopes, True) elif myarg == 'usecolor': useColor = True else: unknownArgumentExit() if useColor: testPass = createGreenText('PASS') testFail = createRedText('FAIL') testWarn = createYellowText('WARN') else: testPass = 'PASS' testFail = 'FAIL' testWarn = 'WARN' if Act.Get() == Act.CHECK: if not checkScopesSet: for scope in iter(GM.Globals[GM.SVCACCT_SCOPES].values()): checkScopesSet.update(scope) else: if not checkScopesSet: scopesList = API.getSvcAcctScopesList(GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY], True) selectedScopes = getScopesFromUser(scopesList, False, GM.Globals[GM.SVCACCT_SCOPES]) if selectedScopes is None: return False i = 0 for scope in scopesList: if selectedScopes[i] == '*': saScopes.setdefault(scope['api'], []) saScopes[scope['api']].append(scope['scope']) checkScopesSet.add(scope['scope']) elif selectedScopes[i] == 'R': saScopes.setdefault(scope['api'], []) if 'roscope' not in scope: saScopes[scope['api']].append(f'{scope["scope"]}.readonly') checkScopesSet.add(f'{scope["scope"]}.readonly') else: saScopes[scope['api']].append(scope['roscope']) checkScopesSet.add(scope['roscope']) i += 1 if API.DRIVEACTIVITY in saScopes and API.DRIVE3 in saScopes: saScopes[API.DRIVEACTIVITY].append(API.DRIVE_SCOPE) if API.DRIVE3 in saScopes: saScopes[API.DRIVE2] = saScopes[API.DRIVE3] GM.Globals[GM.OAUTH2SERVICE_JSON_DATA][API.OAUTH2SA_SCOPES] = saScopes writeFile(GC.Values[GC.OAUTH2SERVICE_JSON], json.dumps(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], ensure_ascii=False, sort_keys=True, indent=2), continueOnError=False) checkScopes = sorted(checkScopesSet) jcount = len(checkScopes) printMessage(Msg.SYSTEM_TIME_STATUS) offsetSeconds, offsetFormatted = 
getLocalGoogleTimeOffset() if offsetSeconds <= MAX_LOCAL_GOOGLE_TIME_OFFSET: timeStatus = testPass else: timeStatus = testFail Ind.Increment() printPassFail(Msg.YOUR_SYSTEM_TIME_DIFFERS_FROM_GOOGLE.format(GOOGLE_TIMECHECK_LOCATION, offsetFormatted), timeStatus) Ind.Decrement() oa2 = buildGAPIObject(API.OAUTH2) printMessage(Msg.SERVICE_ACCOUNT_PRIVATE_KEY_AUTHENTICATION) # We are explicitly not doing DwD here, just confirming service account can auth auth_error = '' try: request = transportCreateRequest() credentials.refresh(request) sa_token_info = callGAPI(oa2, 'tokeninfo', access_token=credentials.token) if sa_token_info: saTokenStatus = testPass else: saTokenStatus = testFail except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: handleServerError(e) except google.auth.exceptions.RefreshError as e: saTokenStatus = testFail if isinstance(e.args, tuple): e = e.args[0] auth_error = ' - '+str(e) Ind.Increment() printPassFail(f'Authentication{auth_error}', saTokenStatus) Ind.Decrement() if saTokenStatus == testFail: invalidOauth2serviceJsonExit(f'Authentication{auth_error}') _getSvcAcctData() # needed to read in GM.OAUTH2SERVICE_JSON_DATA if GM.Globals[GM.SVCACCT_SCOPES_DEFINED] and API.IAM not in GM.Globals[GM.SVCACCT_SCOPES]: GM.Globals[GM.SVCACCT_SCOPES][API.IAM] = [API.CLOUD_PLATFORM_SCOPE] key_type = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA].get('key_type', 'default') if key_type == 'default': printMessage(Msg.SERVICE_ACCOUNT_CHECK_PRIVATE_KEY_AGE) _, iam = buildGAPIServiceObject(API.IAM, None) currentPrivateKeyId, projectId, _, clientId = _getSvcAcctKeyProjectClientFields() name = f'projects/{projectId}/serviceAccounts/{clientId}/keys/{currentPrivateKeyId}' Ind.Increment() try: key = callGAPI(iam.projects().serviceAccounts().keys(), 'get', throwReasons=[GAPI.BAD_REQUEST, GAPI.INVALID, GAPI.NOT_FOUND, GAPI.PERMISSION_DENIED], name=name, fields='validAfterTime') key_created, _ = iso8601.parse_date(key['validAfterTime']) key_age = todaysTime()-key_created printPassFail(Msg.SERVICE_ACCOUNT_PRIVATE_KEY_AGE.format(key_age.days), testWarn if key_age.days > 30 else testPass) except GAPI.permissionDenied: printMessage(Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) printPassFail(Msg.SERVICE_ACCOUNT_PRIVATE_KEY_AGE.format('UNKNOWN'), testWarn) except (GAPI.badRequest, GAPI.invalid, GAPI.notFound) as e: entityActionFailedWarning([Ent.PROJECT, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['project_id'], Ent.SVCACCT, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_email']], str(e)) printPassFail(Msg.SERVICE_ACCOUNT_PRIVATE_KEY_AGE.format('UNKNOWN'), testWarn) else: printPassFail(Msg.SERVICE_ACCOUNT_SKIPPING_KEY_AGE_CHECK.format(key_type), testPass) Ind.Decrement() i, count, users = getEntityArgument(users) for user in users: i += 1 allScopesPass = True user = convertUIDtoEmailAddress(user) printKeyValueListWithCount([Msg.DOMAIN_WIDE_DELEGATION_AUTHENTICATION, '', Ent.Singular(Ent.USER), user, Ent.Choose(Ent.SCOPE, jcount), jcount], i, count) Ind.Increment() j = 0 for scope in checkScopes: j += 1 # try with and without email scope for scopes in [[scope, API.USERINFO_EMAIL_SCOPE], [scope]]: try: credentials = getSvcAcctCredentials(scopes, user) credentials.refresh(request) break except (httplib2.HttpLib2Error, google.auth.exceptions.TransportError, RuntimeError) as e: handleServerError(e) except google.auth.exceptions.RefreshError: continue if credentials.token: token_info = callGAPI(oa2, 'tokeninfo', access_token=credentials.token) if scope in token_info.get('scope', '').split(' 
') and user == token_info.get('email', user).lower(): scopeStatus = testPass else: scopeStatus = testFail allScopesPass = False else: scopeStatus = testFail allScopesPass = False printPassFail(scope, f'{scopeStatus}{currentCount(j, jcount)}') Ind.Decrement() service_account = GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_id'] if allScopesPass: if Act.Get() == Act.CHECK: printLine(Msg.SCOPE_AUTHORIZATION_PASSED.format(service_account)) else: authorizeScopes(Msg.SCOPE_AUTHORIZATION_UPDATE_PASSED) else: # Tack on email scope for more accurate checking checkScopes.append(API.USERINFO_EMAIL_SCOPE) setSysExitRC(SCOPES_NOT_AUTHORIZED_RC) authorizeScopes(Msg.SCOPE_AUTHORIZATION_FAILED) printBlankLine() # gam check svcacct (scope|scopes )* # gam update svcacct (scope|scopes )* def doCheckUpdateSvcAcct(): _, entityList = getEntityToModify(defaultEntityType=Cmd.ENTITY_USER) checkServiceAccount(entityList) def _getSAKeys(iam, projectId, clientEmail, name, keyTypes): try: keys = callGAPIitems(iam.projects().serviceAccounts().keys(), 'list', 'keys', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=name, fields='*', keyTypes=keyTypes) return (True, keys) except GAPI.permissionDenied: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) except GAPI.badRequest as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return (False, None) SVCACCT_KEY_TIME_OBJECTS = {'validAfterTime', 'validBeforeTime'} def _showSAKeys(keys, count, currentPrivateKeyId): Ind.Increment() i = 0 for key in keys: i += 1 keyName = key.pop('name').rsplit('/', 1)[-1] printKeyValueListWithCount(['name', keyName], i, count) Ind.Increment() for k, v in sorted(iter(key.items())): if k not in SVCACCT_KEY_TIME_OBJECTS: printKeyValueList([k, v]) else: printKeyValueList([k, formatLocalTime(v)]) if keyName == currentPrivateKeyId: printKeyValueList(['usedToAuthenticateThisRequest', True]) Ind.Decrement() Ind.Decrement() SVCACCT_DISPLAY_FIELDS = ['displayName', 'description', 'oauth2ClientId', 'uniqueId', 'disabled'] SVCACCT_KEY_TYPE_CHOICE_MAP = { 'all': None, 'system': 'SYSTEM_MANAGED', 'systemmanaged': 'SYSTEM_MANAGED', 'user': 'USER_MANAGED', 'usermanaged': 'USER_MANAGED' } # gam print svcaccts [[admin] ] [all|] # [showsakeys all|system|user] # [todrive *] [formatjson [quotechar ]] # gam show svcaccts [] [all|] # [showsakeys all|system|user] def doPrintShowSvcAccts(): _, httpObj, login_hint, projects = _getLoginHintProjects(printShowCmd=True, readOnly=False) csvPF = CSVPrintFile(['User', 'projectId']) if Act.csvFormat() else None FJQC = FormatJSONQuoteChar(csvPF) iam = getAPIService(API.IAM, httpObj) keyTypes = None showSAKeys = False while Cmd.ArgumentsRemaining(): myarg = getArgument() if csvPF and myarg == 'todrive': csvPF.GetTodriveParameters() elif myarg == 'showsakeys': keyTypes = getChoice(SVCACCT_KEY_TYPE_CHOICE_MAP, mapChoice=True) showSAKeys = True else: FJQC.GetFormatJSONQuoteChar(myarg, True) count = len(projects) if not csvPF: entityPerformActionSubItemModifierNumItems([Ent.USER, login_hint], Ent.SVCACCT, Act.MODIFIER_FOR, count, Ent.PROJECT) else: csvPF.AddTitles(['projectId']+SVCACCT_DISPLAY_FIELDS) csvPF.SetSortAllTitles() i = 0 for project in projects: i += 1 if not _checkProjectFound(project, i, count): continue projectId = project['projectId'] if csvPF: printGettingAllEntityItemsForWhom(Ent.SVCACCT, projectId, i, count) if project['state'] != 'ACTIVE': entityActionNotPerformedWarning([Ent.PROJECT, 
projectId], Msg.DELETED, i, count) continue try: svcAccts = callGAPIpages(iam.projects().serviceAccounts(), 'list', 'accounts', throwReasons=[GAPI.NOT_FOUND, GAPI.PERMISSION_DENIED], name=f'projects/{projectId}') jcount = len(svcAccts) if not csvPF: entityPerformActionNumItems([Ent.PROJECT, projectId], jcount, Ent.SVCACCT, i, count) Ind.Increment() j = 0 for svcAcct in svcAccts: j += 1 printKeyValueListWithCount(['email', svcAcct['email']], j, jcount) Ind.Increment() for field in SVCACCT_DISPLAY_FIELDS: if field in svcAcct: printKeyValueList([field, svcAcct[field]]) if showSAKeys: name = f"projects/{projectId}/serviceAccounts/{svcAcct['oauth2ClientId']}" status, keys = _getSAKeys(iam, projectId, svcAcct['email'], name, keyTypes) if status: kcount = len(keys) if kcount > 0: printKeyValueList([Ent.Choose(Ent.SVCACCT_KEY, kcount), kcount]) _showSAKeys(keys, kcount, '') Ind.Decrement() Ind.Decrement() else: for svcAcct in svcAccts: if showSAKeys: name = f"projects/{projectId}/serviceAccounts/{svcAcct['oauth2ClientId']}" status, keys = _getSAKeys(iam, projectId, svcAcct['email'], name, keyTypes) if status: svcAcct['keys'] = keys row = flattenJSON(svcAcct, flattened={'User': login_hint}, timeObjects=SVCACCT_KEY_TIME_OBJECTS) if not FJQC.formatJSON: csvPF.WriteRowTitles(row) elif csvPF.CheckRowTitles(row): csvPF.WriteRowNoFilter({'User': login_hint, 'projectId': projectId, 'JSON': json.dumps(cleanJSON(svcAcct, timeObjects=SVCACCT_KEY_TIME_OBJECTS), ensure_ascii=False, sort_keys=True)}) except (GAPI.notFound, GAPI.permissionDenied) as e: entityActionFailedWarning([Ent.PROJECT, projectId], str(e), i, count) if csvPF: csvPF.writeCSVfile('Service Accounts') def _generatePrivateKeyAndPublicCert(projectId, clientEmail, name, key_size, b64enc_pub=True, validityHours=0): if projectId: printEntityMessage([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.GENERATING_NEW_PRIVATE_KEY) else: writeStdout(Msg.GENERATING_NEW_PRIVATE_KEY+'\n') private_key = rsa.generate_private_key(public_exponent=65537, key_size=key_size, backend=default_backend()) private_pem = private_key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption()).decode() if projectId: printEntityMessage([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.EXTRACTING_PUBLIC_CERTIFICATE) else: writeStdout(Msg.EXTRACTING_PUBLIC_CERTIFICATE+'\n') public_key = private_key.public_key() builder = x509.CertificateBuilder() # suppress cryptography warnings on service account email length with warnings.catch_warnings(): warnings.filterwarnings('ignore', message='.*Attribute\'s length.*') builder = builder.subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, name, _validate=False)])) builder = builder.issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, name, _validate=False)])) # Gooogle seems to enforce the not before date strictly. Set the not before # date to be UTC two minutes ago which should cover any clock skew. 
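# Illustrative sketch only (not called anywhere in GAM): a minimal self-signed
# certificate built the same way as the statements just below, to show the
# validity window. It relies on the cryptography imports at the top of this
# file; the helper name, the example common name and the one hour expiry are
# assumptions made for the example, not GAM behavior. Backdating
# not_valid_before by two minutes absorbs clock skew between this machine and
# Google when the certificate is validated.
def _exampleBackdatedSelfSignedCert(commonName='example-sa@project.iam.gserviceaccount.com'):
  exampleKey = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
  exampleNow = datetime.datetime.utcnow()
  exampleBuilder = x509.CertificateBuilder()
  exampleBuilder = exampleBuilder.subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, commonName)]))
  exampleBuilder = exampleBuilder.issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, commonName)]))
  # two minutes in the past so the certificate is already valid despite clock skew
  exampleBuilder = exampleBuilder.not_valid_before(exampleNow-datetime.timedelta(minutes=2))
  # short expiry chosen for the example; the real code uses validityHours or 9999-12-31
  exampleBuilder = exampleBuilder.not_valid_after(exampleNow+datetime.timedelta(hours=1))
  exampleBuilder = exampleBuilder.serial_number(x509.random_serial_number())
  exampleBuilder = exampleBuilder.public_key(exampleKey.public_key())
  exampleCert = exampleBuilder.sign(private_key=exampleKey, algorithm=hashes.SHA256(), backend=default_backend())
  return exampleCert.public_bytes(serialization.Encoding.PEM).decode()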
now = datetime.datetime.utcnow() builder = builder.not_valid_before(now - datetime.timedelta(minutes=2)) # Google defaults to 12/31/9999 date for end time if there's no # policy to restrict key age if validityHours: expires = now + datetime.timedelta(hours=validityHours) - datetime.timedelta(minutes=2) builder = builder.not_valid_after(expires) else: builder = builder.not_valid_after(datetime.datetime(9999, 12, 31, 23, 59)) builder = builder.serial_number(x509.random_serial_number()) builder = builder.public_key(public_key) builder = builder.add_extension(x509.BasicConstraints(ca=False, path_length=None), critical=True) builder = builder.add_extension(x509.KeyUsage(key_cert_sign=False, crl_sign=False, digital_signature=True, content_commitment=False, key_encipherment=False, data_encipherment=False, key_agreement=False, encipher_only=False, decipher_only=False), critical=True) builder = builder.add_extension(x509.ExtendedKeyUsage([x509.oid.ExtendedKeyUsageOID.SERVER_AUTH]), critical=True) certificate = builder.sign(private_key=private_key, algorithm=hashes.SHA256(), backend=default_backend()) public_cert_pem = certificate.public_bytes(serialization.Encoding.PEM).decode() if projectId: printEntityMessage([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.DONE_GENERATING_PRIVATE_KEY_AND_PUBLIC_CERTIFICATE) else: writeStdout(Msg.DONE_GENERATING_PRIVATE_KEY_AND_PUBLIC_CERTIFICATE+'\n') if not b64enc_pub: return (private_pem, public_cert_pem) publicKeyData = base64.b64encode(public_cert_pem.encode()) if isinstance(publicKeyData, bytes): publicKeyData = publicKeyData.decode() return (private_pem, publicKeyData) def _formatOAuth2ServiceData(service_data): quotedEmail = quote(service_data.get('client_email', '')) service_data['auth_provider_x509_cert_url'] = API.GOOGLE_AUTH_PROVIDER_X509_CERT_URL service_data['auth_uri'] = API.GOOGLE_OAUTH2_ENDPOINT service_data['client_x509_cert_url'] = f'https://www.googleapis.com/robot/v1/metadata/x509/{quotedEmail}' service_data['token_uri'] = API.GOOGLE_OAUTH2_TOKEN_ENDPOINT service_data['type'] = 'service_account' GM.Globals[GM.OAUTH2SERVICE_JSON_DATA] = service_data.copy() return json.dumps(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], indent=2, sort_keys=True) def doProcessSvcAcctKeys(mode=None, iam=None, projectId=None, clientEmail=None, clientId=None): def getSAKeyParms(body, new_data): nonlocal local_key_size, validityHours while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'algorithm': body['keyAlgorithm'] = getChoice(["key_alg_rsa_1024", "key_alg_rsa_2048"]).upper() local_key_size = 0 elif myarg == 'localkeysize': local_key_size = int(getChoice(['1024', '2048', '4096'])) elif myarg == 'validityhours': validityHours = getInteger() elif myarg == 'yubikey': new_data['key_type'] = 'yubikey' elif myarg == 'yubikeyslot': new_data['yubikey_slot'] = getString(Cmd.OB_STRING).upper() elif myarg == 'yubikeypin': new_data['yubikey_pin'] = readStdin('Enter your YubiKey PIN: ') elif myarg == 'yubikeyserialnumber': new_data['yubikey_serial_number'] = getInteger() else: unknownArgumentExit() local_key_size = 2048 validityHours = 0 body = {} if mode is None: mode = getChoice(['retainnone', 'retainexisting', 'replacecurrent']) if iam is None or mode == 'upload': if iam is None: _, iam = buildGAPIServiceObject(API.IAM, None) _getSvcAcctData() currentPrivateKeyId, projectId, clientEmail, clientId = _getSvcAcctKeyProjectClientFields() # dict() ensures we have a real copy, not pointer new_data = dict(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]) # assume default key 
type unless we are told otherwise new_data['key_type'] = 'default' getSAKeyParms(body, new_data) else: new_data = { 'client_email': clientEmail, 'project_id': projectId, 'client_id': clientId, 'key_type': 'default' } getSAKeyParms(body, new_data) name = f'projects/{projectId}/serviceAccounts/{clientId}' if mode != 'retainexisting': try: keys = callGAPIitems(iam.projects().serviceAccounts().keys(), 'list', 'keys', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=name, keyTypes='USER_MANAGED') except GAPI.permissionDenied: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) return False except GAPI.badRequest as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return False if new_data.get('key_type') == 'yubikey': # Use yubikey private key new_data['yubikey_key_type'] = f'RSA{local_key_size}' new_data.pop('private_key', None) yk = yubikey.YubiKey(new_data) if 'yubikey_serial_number' not in new_data: new_data['yubikey_serial_number'] = yk.get_serial_number() yk = yubikey.YubiKey(new_data) if 'yubikey_slot' not in new_data: new_data['yubikey_slot'] = 'AUTHENTICATION' publicKeyData = yk.get_certificate() elif local_key_size: # Generate private key locally, store in file new_data['private_key'], publicKeyData = _generatePrivateKeyAndPublicCert(projectId, clientEmail, name, local_key_size, validityHours=validityHours) new_data['key_type'] = 'default' for key in list(new_data): if key.startswith('yubikey_'): new_data.pop(key, None) if local_key_size: Act.Set(Act.UPLOAD) maxRetries = 10 printEntityMessage([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.UPLOADING_NEW_PUBLIC_CERTIFICATE_TO_GOOGLE) for retry in range(1, maxRetries+1): try: result = callGAPI(iam.projects().serviceAccounts().keys(), 'upload', throwReasons=[GAPI.NOT_FOUND, GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED, GAPI.FAILED_PRECONDITION], name=name, body={'publicKeyData': publicKeyData}) newPrivateKeyId = result['name'].rsplit('/', 1)[-1] break except GAPI.notFound as e: if retry == maxRetries: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return False _waitForSvcAcctCompletion(retry) except GAPI.permissionDenied: if retry == maxRetries: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) return False _waitForSvcAcctCompletion(retry) except GAPI.badRequest as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return False except GAPI.failedPrecondition as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) if 'iam.disableServiceAccountKeyUpload' not in str(e) and 'iam.managed.disableServiceAccountKeyUpload' not in str(e): return False if retry == maxRetries or mode != 'upload': sys.stdout.write(Msg.ENABLE_SERVICE_ACCOUNT_PRIVATE_KEY_UPLOAD.format(projectId)) new_data['private_key'] = '' newPrivateKeyId = '' break _waitForSvcAcctCompletion(retry) new_data['private_key_id'] = newPrivateKeyId oauth2service_data = _formatOAuth2ServiceData(new_data) else: Act.Set(Act.CREATE) maxRetries = 10 for retry in range(1, maxRetries+1): try: result = callGAPI(iam.projects().serviceAccounts().keys(), 'create', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=name, body=body) newPrivateKeyId = result['name'].rsplit('/', 1)[-1] break except GAPI.permissionDenied: if retry == maxRetries: entityActionFailedWarning([Ent.PROJECT, 
projectId, Ent.SVCACCT, clientEmail], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) return False _waitForSvcAcctCompletion(retry) except GAPI.badRequest as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return False oauth2service_data = base64.b64decode(result['privateKeyData']).decode(UTF8) if newPrivateKeyId != '': entityActionPerformed([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail, Ent.SVCACCT_KEY, newPrivateKeyId]) if GM.Globals[GM.SVCACCT_SCOPES_DEFINED]: try: GM.Globals[GM.OAUTH2SERVICE_JSON_DATA] = json.loads(oauth2service_data) except (IndexError, KeyError, SyntaxError, TypeError, ValueError) as e: invalidOauth2serviceJsonExit(str(e)) GM.Globals[GM.OAUTH2SERVICE_JSON_DATA][API.OAUTH2SA_SCOPES] = GM.Globals[GM.SVCACCT_SCOPES] oauth2service_data = json.dumps(GM.Globals[GM.OAUTH2SERVICE_JSON_DATA], ensure_ascii=False, sort_keys=True, indent=2) writeFile(GC.Values[GC.OAUTH2SERVICE_JSON], oauth2service_data, continueOnError=False) Act.Set(Act.UPDATE) entityActionPerformed([Ent.OAUTH2SERVICE_JSON_FILE, GC.Values[GC.OAUTH2SERVICE_JSON], Ent.SVCACCT_KEY, newPrivateKeyId]) if mode in {'retainexisting', 'upload'}: return newPrivateKeyId != '' Act.Set(Act.REVOKE) count = len(keys) if mode == 'retainnone' else 1 entityPerformActionNumItems([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], count, Ent.SVCACCT_KEY) Ind.Increment() i = 0 for key in keys: keyName = key['name'].rsplit('/', 1)[-1] if mode == 'retainnone' or keyName == currentPrivateKeyId and keyName != newPrivateKeyId: i += 1 maxRetries = 5 for retry in range(1, maxRetries+1): try: callGAPI(iam.projects().serviceAccounts().keys(), 'delete', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=key['name']) entityActionPerformed([Ent.SVCACCT_KEY, keyName], i, count) break except GAPI.permissionDenied: if retry == maxRetries: entityActionFailedWarning([Ent.SVCACCT_KEY, keyName], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) break _waitForSvcAcctCompletion(retry) except GAPI.badRequest as e: entityActionFailedWarning([Ent.SVCACCT_KEY, keyName], str(e), i, count) break if mode != 'retainnone': break Ind.Decrement() return True # gam create sakey|sakeys # gam rotate sakey|sakeys retain_existing # (algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber ) def doCreateSvcAcctKeys(): doProcessSvcAcctKeys(mode='retainexisting') # gam update sakey|sakeys # gam rotate sakey|sakeys replace_current # (algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber ) def doUpdateSvcAcctKeys(): doProcessSvcAcctKeys(mode='replacecurrent') # gam replace sakey|sakeys # gam rotate sakey|sakeys retain_none # (algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber ) def doReplaceSvcAcctKeys(): doProcessSvcAcctKeys(mode='retainnone') # gam upload sakey|sakeys [admin ] # (algorithm KEY_ALG_RSA_1024|KEY_ALG_RSA_2048)| # (localkeysize 1024|2048|4096 [validityhours ])| # (yubikey yubikey_pin yubikey_slot AUTHENTICATION yubikey_serialnumber ) def doUploadSvcAcctKeys(): login_hint = getEmailAddress(noUid=True) if checkArgumentPresent(['admin']) else None httpObj, _ = getCRMService(login_hint) iam = getAPIService(API.IAM, httpObj) if doProcessSvcAcctKeys(mode='upload', iam=iam): sa_email = 
GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['client_email'] _grantRotateRights(iam, GM.Globals[GM.OAUTH2SERVICE_JSON_DATA]['project_id'], sa_email) sys.stdout.write(Msg.YOUR_GAM_PROJECT_IS_CREATED_AND_READY_TO_USE) # gam delete sakeys def doDeleteSvcAcctKeys(): _, iam = buildGAPIServiceObject(API.IAM, None) doit = False keyList = [] while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'doit': doit = True else: Cmd.Backup() keyList.extend(getString(Cmd.OB_SERVICE_ACCOUNT_KEY_LIST, minLen=0).strip().replace(',', ' ').split()) currentPrivateKeyId, projectId, clientEmail, clientId = _getSvcAcctKeyProjectClientFields() name = f'projects/{projectId}/serviceAccounts/{clientId}' try: keys = callGAPIitems(iam.projects().serviceAccounts().keys(), 'list', 'keys', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=name, keyTypes='USER_MANAGED') except GAPI.permissionDenied: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) return except GAPI.badRequest as e: entityActionFailedWarning([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], str(e)) return Act.Set(Act.REVOKE) count = len(keyList) entityPerformActionNumItems([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], count, Ent.SVCACCT_KEY) Ind.Increment() i = 0 for dkeyName in keyList: i += 1 for key in keys: keyName = key['name'].rsplit('/', 1)[-1] if keyName == dkeyName: if keyName == currentPrivateKeyId and not doit: entityActionNotPerformedWarning([Ent.SVCACCT_KEY, keyName], Msg.USE_DOIT_ARGUMENT_TO_PERFORM_ACTION+Msg.ON_CURRENT_PRIVATE_KEY, i, count) break try: callGAPI(iam.projects().serviceAccounts().keys(), 'delete', throwReasons=[GAPI.BAD_REQUEST, GAPI.PERMISSION_DENIED], name=key['name']) entityActionPerformed([Ent.SVCACCT_KEY, keyName], i, count) except GAPI.permissionDenied: entityActionFailedWarning([Ent.SVCACCT_KEY, keyName], Msg.UPDATE_PROJECT_TO_VIEW_MANAGE_SAKEYS) except GAPI.badRequest as e: entityActionFailedWarning([Ent.SVCACCT_KEY, keyName], str(e), i, count) break else: entityActionNotPerformedWarning([Ent.SVCACCT_KEY, dkeyName], Msg.NOT_FOUND, i, count) Ind.Decrement() # gam show sakeys [all|system|user] def doShowSvcAcctKeys(): _, iam = buildGAPIServiceObject(API.IAM, None) keyTypes = None while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg in SVCACCT_KEY_TYPE_CHOICE_MAP: keyTypes = SVCACCT_KEY_TYPE_CHOICE_MAP[myarg] else: unknownArgumentExit() currentPrivateKeyId, projectId, clientEmail, clientId = _getSvcAcctKeyProjectClientFields() name = f'projects/{projectId}/serviceAccounts/{clientId}' status, keys = _getSAKeys(iam, projectId, clientEmail, name, keyTypes) if not status: return count = len(keys) entityPerformActionNumItems([Ent.PROJECT, projectId, Ent.SVCACCT, clientEmail], count, Ent.SVCACCT_KEY) if count > 0: _showSAKeys(keys, count, currentPrivateKeyId) # gam create gcpserviceaccount|signjwtserviceaccount def doCreateGCPServiceAccount(): checkForExtraneousArguments() _checkForExistingProjectFiles([GC.Values[GC.OAUTH2SERVICE_JSON]]) sa_info = {'key_type': 'signjwt', 'token_uri': API.GOOGLE_OAUTH2_TOKEN_ENDPOINT, 'type': 'service_account'} request = getTLSv1_2Request() try: credentials, sa_info['project_id'] = google.auth.default(scopes=[API.IAM_SCOPE], request=request) except (google.auth.exceptions.DefaultCredentialsError, google.auth.exceptions.RefreshError) as e: systemErrorExit(API_ACCESS_DENIED_RC, str(e)) credentials.refresh(request) sa_info['client_email'] = credentials.service_account_email oa2 = 
buildGAPIObjectNoAuthentication(API.OAUTH2) try: token_info = callGAPI(oa2, 'tokeninfo', throwReasons=[GAPI.INVALID], access_token=credentials.token) except GAPI.invalid as e: systemErrorExit(API_ACCESS_DENIED_RC, str(e)) sa_info['client_id'] = token_info['issued_to'] sa_output = json.dumps(sa_info, ensure_ascii=False, sort_keys=True, indent=2) writeStdout(f'Writing SignJWT service account data:\n\n{sa_output}\n') writeFile(GC.Values[GC.OAUTH2SERVICE_JSON], sa_output, continueOnError=False) # Audit command utilities def getAuditParameters(emailAddressRequired=True, requestIdRequired=True, destUserRequired=False): auditObject = getEmailAuditObject() emailAddress = getEmailAddress(noUid=True, optional=not emailAddressRequired) parameters = {} if emailAddress: parameters['auditUser'] = emailAddress parameters['auditUserName'], auditObject.domain = splitEmailAddress(emailAddress) if requestIdRequired: parameters['requestId'] = getString(Cmd.OB_REQUEST_ID) if destUserRequired: destEmailAddress = getEmailAddress(noUid=True) parameters['auditDestUser'] = destEmailAddress parameters['auditDestUserName'], destDomain = splitEmailAddress(destEmailAddress) if auditObject.domain != destDomain: Cmd.Backup() invalidArgumentExit(f'{parameters["auditDestUserName"]}@{auditObject.domain}') return (auditObject, parameters) # Audit monitor command utilities def _showMailboxMonitorRequestStatus(request, i=0, count=0): printKeyValueListWithCount(['Destination', normalizeEmailAddressOrUID(request['destUserName'])], i, count) Ind.Increment() printKeyValueList(['Begin', request.get('beginDate', 'immediately')]) printKeyValueList(['End', request['endDate']]) printKeyValueList(['Monitor Incoming', request['outgoingEmailMonitorLevel']]) printKeyValueList(['Monitor Outgoing', request['incomingEmailMonitorLevel']]) printKeyValueList(['Monitor Chats', request.get('chatMonitorLevel', 'NONE')]) printKeyValueList(['Monitor Drafts', request.get('draftMonitorLevel', 'NONE')]) Ind.Decrement() # gam audit monitor create [begin ] [end ] [incoming_headers] [outgoing_headers] [nochats] [nodrafts] [chat_headers] [draft_headers] def doCreateMonitor(): auditObject, parameters = getAuditParameters(emailAddressRequired=True, requestIdRequired=False, destUserRequired=True) #end_date defaults to 30 days in the future... 
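# Illustrative sketch only (not part of GAM): how the default monitor end date
# computed on the next line lands 30 days after "now". The format string below
# is an assumed stand-in for YYYYMMDD_HHMM_FORMAT, which is defined elsewhere in
# this file; a begin date left as None means the monitor starts immediately.
def _exampleDefaultMonitorEndDate(exampleNow=None):
  exampleFmt = '%Y-%m-%d %H:%M'  # assumption: mirrors YYYYMMDD_HHMM_FORMAT
  if exampleNow is None:
    exampleNow = datetime.datetime.now()
  return (exampleNow+datetime.timedelta(days=30)).strftime(exampleFmt)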
end_date = (GM.Globals[GM.DATETIME_NOW]+datetime.timedelta(days=30)).strftime(YYYYMMDD_HHMM_FORMAT) begin_date = None incoming_headers_only = outgoing_headers_only = drafts_headers_only = chats_headers_only = False drafts = chats = True while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'begin': begin_date = getYYYYMMDD_HHMM() elif myarg == 'end': end_date = getYYYYMMDD_HHMM() elif myarg == 'incomingheaders': incoming_headers_only = True elif myarg == 'outgoingheaders': outgoing_headers_only = True elif myarg == 'nochats': chats = False elif myarg == 'nodrafts': drafts = False elif myarg == 'chatheaders': chats_headers_only = True elif myarg == 'draftheaders': drafts_headers_only = True else: unknownArgumentExit() try: request = callGData(auditObject, 'createEmailMonitor', throwErrors=[GDATA.INVALID_VALUE, GDATA.INVALID_INPUT, GDATA.DOES_NOT_EXIST, GDATA.INVALID_DOMAIN], source_user=parameters['auditUserName'], destination_user=parameters['auditDestUserName'], end_date=end_date, begin_date=begin_date, incoming_headers_only=incoming_headers_only, outgoing_headers_only=outgoing_headers_only, drafts=drafts, drafts_headers_only=drafts_headers_only, chats=chats, chats_headers_only=chats_headers_only) entityActionPerformed([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, None]) Ind.Increment() _showMailboxMonitorRequestStatus(request) Ind.Decrement() except (GDATA.invalidValue, GDATA.invalidInput) as e: entityActionFailedWarning([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, None], str(e)) except (GDATA.doesNotExist, GDATA.invalidDomain) as e: if str(e).find(parameters['auditUser']) != -1: entityUnknownWarning(Ent.USER, parameters['auditUser']) else: entityActionFailedWarning([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, None], str(e)) # gam audit monitor delete def doDeleteMonitor(): auditObject, parameters = getAuditParameters(emailAddressRequired=True, requestIdRequired=False, destUserRequired=True) checkForExtraneousArguments() try: callGData(auditObject, 'deleteEmailMonitor', throwErrors=[GDATA.INVALID_INPUT, GDATA.DOES_NOT_EXIST, GDATA.INVALID_DOMAIN], source_user=parameters['auditUserName'], destination_user=parameters['auditDestUserName']) entityActionPerformed([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, parameters['auditDestUser']]) except GDATA.invalidInput as e: entityActionFailedWarning([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, None], str(e)) except (GDATA.doesNotExist, GDATA.invalidDomain) as e: if str(e).find(parameters['auditUser']) != -1: entityUnknownWarning(Ent.USER, parameters['auditUser']) else: entityActionFailedWarning([Ent.USER, parameters['auditUser'], Ent.AUDIT_MONITOR_REQUEST, None], str(e)) # gam audit monitor list def doShowMonitors(): auditObject, parameters = getAuditParameters(emailAddressRequired=True, requestIdRequired=False, destUserRequired=False) checkForExtraneousArguments() try: results = callGData(auditObject, 'getEmailMonitors', throwErrors=[GDATA.DOES_NOT_EXIST, GDATA.INVALID_DOMAIN], user=parameters['auditUserName']) jcount = len(results) if (results) else 0 entityPerformActionNumItems([Ent.USER, parameters['auditUser']], jcount, Ent.AUDIT_MONITOR_REQUEST) if jcount == 0: setSysExitRC(NO_ENTITIES_FOUND_RC) return Ind.Increment() j = 0 for request in results: j += 1 _showMailboxMonitorRequestStatus(request, j, jcount) Ind.Decrement() except (GDATA.doesNotExist, GDATA.invalidDomain): entityUnknownWarning(Ent.USER, parameters['auditUser']) # gam whatis 
[noinfo] [noinvitablecheck] def doWhatIs(): def _showPrimaryType(entityType, email): printEntity([entityType, email]) def _showAliasType(entityType, email, primaryEntityType, primaryEmail): printEntity([entityType, email, primaryEntityType, primaryEmail]) cd = buildGAPIObject(API.DIRECTORY) email = getEmailAddress() showInfo = invitableCheck = True while Cmd.ArgumentsRemaining(): myarg = getArgument() if myarg == 'noinfo': showInfo = False elif myarg == 'noinvitablecheck': invitableCheck = False else: unknownArgumentExit() try: result = callGAPI(cd.users(), 'get', throwReasons=GAPI.USER_GET_THROW_REASONS, userKey=email, fields='id,primaryEmail') if (result['primaryEmail'].lower() == email) or (result['id'] == email): if showInfo: infoUsers(entityList=[email]) else: _showPrimaryType(Ent.USER, email) setSysExitRC(ENTITY_IS_A_USER_RC) else: if showInfo: infoAliases(entityList=[email]) else: _showAliasType(Ent.USER_ALIAS, email, Ent.USER, result['primaryEmail']) setSysExitRC(ENTITY_IS_A_USER_ALIAS_RC) return except (GAPI.userNotFound, GAPI.badRequest): pass except (GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.forbidden, GAPI.backendError, GAPI.systemError): entityUnknownWarning(Ent.EMAIL, email) setSysExitRC(ENTITY_IS_UKNOWN_RC) return try: result = callGAPI(cd.groups(), 'get', throwReasons=GAPI.GROUP_GET_THROW_REASONS, groupKey=email, fields='id,email') if (result['email'].lower() == email) or (result['id'] == email): if showInfo: infoGroups([email]) else: _showPrimaryType(Ent.GROUP, email) setSysExitRC(ENTITY_IS_A_GROUP_RC) else: if showInfo: infoAliases(entityList=[email]) else: _showAliasType(Ent.GROUP_ALIAS, email, Ent.GROUP, result['email']) setSysExitRC(ENTITY_IS_A_GROUP_ALIAS_RC) return except (GAPI.groupNotFound, GAPI.forbidden): pass except (GAPI.domainNotFound, GAPI.domainCannotUseApis, GAPI.badRequest): entityUnknownWarning(Ent.EMAIL, email) setSysExitRC(ENTITY_IS_UKNOWN_RC) return if not invitableCheck or not getSvcAcctCredentials(API.CLOUDIDENTITY_USERINVITATIONS, _getAdminEmail(), softErrors=True): isInvitableUser = False else: isInvitableUser, ci = _getIsInvitableUser(None, email) if isInvitableUser: if showInfo: name, user, ci = _getCIUserInvitationsEntity(ci, email) infoCIUserInvitations(name, user, ci, None) else: _showPrimaryType(Ent.USER_INVITATION, email) setSysExitRC(ENTITY_IS_AN_UNMANAGED_ACCOUNT_RC) else: entityUnknownWarning(Ent.EMAIL, email) setSysExitRC(ENTITY_IS_UKNOWN_RC) def _adjustTryDate(errMsg, numDateChanges, limitDateChanges, prevTryDate): match_date = re.match('Data for dates later than (.*) is not yet available. 
Please check back later', errMsg) if match_date: tryDate = match_date.group(1) else: match_date = re.match('Start date can not be later than (.*)', errMsg) if match_date: tryDate = match_date.group(1) else: match_date = re.match('End date greater than LastReportedDate.', errMsg) if match_date: tryDateTime = datetime.datetime.strptime(prevTryDate, YYYYMMDD_FORMAT)-datetime.timedelta(days=1) tryDate = tryDateTime.strftime(YYYYMMDD_FORMAT) if (not match_date) or (numDateChanges > limitDateChanges >= 0): printWarningMessage(DATA_NOT_AVALIABLE_RC, errMsg) return None return tryDate def _checkDataRequiredServices(result, tryDate, dataRequiredServices, parameterServices=None, checkUserEmail=False): # -1: Data not available: # 0: Backup to earlier date # 1: Data available oneDay = datetime.timedelta(days=1) dataWarnings = result.get('warnings', []) usageReports = result.get('usageReports', []) # move to day before if we don't have at least one usageReport with parameters if not usageReports or not usageReports[0].get('parameters', []): tryDateTime = datetime.datetime.strptime(tryDate, YYYYMMDD_FORMAT)-oneDay return (0, tryDateTime.strftime(YYYYMMDD_FORMAT), None) for warning in dataWarnings: if warning['code'] == 'PARTIAL_DATA_AVAILABLE': for app in warning['data']: if app['key'] == 'application' and app['value'] != 'docs' and app['value'] in dataRequiredServices: tryDateTime = datetime.datetime.strptime(tryDate, YYYYMMDD_FORMAT)-oneDay return (0, tryDateTime.strftime(YYYYMMDD_FORMAT), None) elif warning['code'] == 'DATA_NOT_AVAILABLE': for app in warning['data']: if app['key'] == 'application' and app['value'] != 'docs' and app['value'] in dataRequiredServices: return (-1, tryDate, None) if parameterServices: requiredServices = parameterServices.copy() for item in usageReports[0].get('parameters', []): if 'name' not in item: continue service, _ = item['name'].split(':', 1) if service in requiredServices: requiredServices.remove(service) if not requiredServices: break else: tryDateTime = datetime.datetime.strptime(tryDate, YYYYMMDD_FORMAT)-oneDay return (0, tryDateTime.strftime(YYYYMMDD_FORMAT), None) if checkUserEmail: if 'entity' not in usageReports[0] or 'userEmail' not in usageReports[0]['entity']: tryDateTime = datetime.datetime.strptime(tryDate, YYYYMMDD_FORMAT)-oneDay return (0, tryDateTime.strftime(YYYYMMDD_FORMAT), None) return (1, tryDate, usageReports) CUSTOMER_REPORT_SERVICES = { 'accounts', 'app_maker', 'apps_scripts', 'calendar', 'chat', 'classroom', 'cros', 'device_management', 'docs', 'drive', 'gmail', 'gplus', 'meet', 'sites', } USER_REPORT_SERVICES = { 'accounts', 'chat', 'classroom', 'docs', 'drive', 'gmail', 'gplus', } CUSTOMER_USER_CHOICES = {'customer', 'user'} # gam report usageparameters customer|user [todrive *] def doReportUsageParameters(): report = getChoice(CUSTOMER_USER_CHOICES) csvPF = CSVPrintFile(['parameter'], 'sortall') getTodriveOnly(csvPF) rep = buildGAPIObject(API.REPORTS) if report == 'customer': service = rep.customerUsageReports() dataRequiredServices = CUSTOMER_REPORT_SERVICES kwargs = {} else: # 'user' service = rep.userUsageReport() dataRequiredServices = USER_REPORT_SERVICES kwargs = {'userKey': _getAdminEmail()} customerId = GC.Values[GC.CUSTOMER_ID] if customerId == GC.MY_CUSTOMER: customerId = None tryDate = todaysDate().strftime(YYYYMMDD_FORMAT) allParameters = set() while True: try: result = callGAPI(service, 'get', throwReasons=[GAPI.INVALID, GAPI.BAD_REQUEST], date=tryDate, customerId=customerId, fields='warnings,usageReports(parameters(name))', 
**kwargs) fullData, tryDate, usageReports = _checkDataRequiredServices(result, tryDate, dataRequiredServices) if fullData < 0: printWarningMessage(DATA_NOT_AVALIABLE_RC, Msg.NO_USAGE_PARAMETERS_DATA_AVAILABLE) return if usageReports: for parameter in usageReports[0]['parameters']: name = parameter.get('name') if name: allParameters.add(name) if fullData == 1: break except GAPI.badRequest: printErrorMessage(BAD_REQUEST_RC, Msg.BAD_REQUEST) return except GAPI.invalid as e: tryDate = _adjustTryDate(str(e), 0, -1, tryDate) if not tryDate: break for parameter in sorted(allParameters): csvPF.WriteRow({'parameter': parameter}) csvPF.writeCSVfile(f'{report.capitalize()} Report Usage Parameters') def getUserOrgUnits(cd, orgUnit, orgUnitId): try: if orgUnit == orgUnitId: orgUnit = callGAPI(cd.orgunits(), 'get', throwReasons=GAPI.ORGUNIT_GET_THROW_REASONS, customerId=GC.Values[GC.CUSTOMER_ID], orgUnitPath=orgUnit, fields='orgUnitPath')['orgUnitPath'] printGettingAllEntityItemsForWhom(Ent.USER, orgUnit, qualifier=Msg.IN_THE.format(Ent.Singular(Ent.ORGANIZATIONAL_UNIT)), entityType=Ent.ORGANIZATIONAL_UNIT) result = callGAPIpages(cd.users(), 'list', 'users', pageMessage=getPageMessageForWhom(), throwReasons=[GAPI.INVALID_ORGUNIT, GAPI.ORGUNIT_NOT_FOUND, GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.RESOURCE_NOT_FOUND, GAPI.FORBIDDEN], customer=GC.Values[GC.CUSTOMER_ID], query=orgUnitPathQuery(orgUnit, None), orderBy='email', fields='nextPageToken,users(primaryEmail,orgUnitPath)', maxResults=GC.Values[GC.USER_MAX_RESULTS]) userOrgUnits = {} for user in result: userOrgUnits[user['primaryEmail']] = user['orgUnitPath'] return userOrgUnits except (GAPI.badRequest, GAPI.invalidInput, GAPI.invalidOrgunit, GAPI.orgunitNotFound, GAPI.backendError, GAPI.invalidCustomerId, GAPI.loginRequired, GAPI.resourceNotFound, GAPI.forbidden): checkEntityDNEorAccessErrorExit(cd, Ent.ORGANIZATIONAL_UNIT, orgUnit) # Convert report mb item to gb def convertReportMBtoGB(name, item): if item is not None: item['intValue'] = f"{int(item['intValue'])/1024:.2f}" return name.replace('_in_mb', '_in_gb') REPORTS_PARAMETERS_SIMPLE_TYPES = ['intValue', 'boolValue', 'datetimeValue', 'stringValue'] # gam report usage user [todrive *] # [(user all|)|(orgunit|org|ou [showorgunit])|(select )] # [([start|startdate ] [end|enddate ])|(range )| # thismonth|(previousmonths )] # [fields|parameters )] # [convertmbtogb] # gam report usage customer [todrive *] # [([start|startdate ] [end|enddate ])|(range )| # thismonth|(previousmonths )] # [fields|parameters )] # [convertmbtogb] def doReportUsage(): def usageEntitySelectors(): selectorChoices = Cmd.USER_ENTITY_SELECTORS+Cmd.USER_CSVDATA_ENTITY_SELECTORS if GC.Values[GC.USER_SERVICE_ACCOUNT_ACCESS_ONLY]: selectorChoices += Cmd.SERVICE_ACCOUNT_ONLY_ENTITY_SELECTORS[:]+[Cmd.ENTITY_USER, Cmd.ENTITY_USERS] else: selectorChoices += Cmd.BASE_ENTITY_SELECTORS[:]+Cmd.USER_ENTITIES[:] return selectorChoices def validateYYYYMMDD(argstr): if argstr in TODAY_NOW or argstr[0] in PLUS_MINUS: if argstr == 'NOW': argstr = 'TODAY' deltaDate = getDelta(argstr, DELTA_DATE_PATTERN) if deltaDate is None: Cmd.Backup() invalidArgumentExit(DELTA_DATE_FORMAT_REQUIRED) return deltaDate try: argDate = datetime.datetime.strptime(argstr, YYYYMMDD_FORMAT) return datetime.datetime(argDate.year, argDate.month, argDate.day, tzinfo=GC.Values[GC.TIMEZONE]) except ValueError: Cmd.Backup() invalidArgumentExit(YYYYMMDD_FORMAT_REQUIRED) report = getChoice(CUSTOMER_USER_CHOICES) rep = buildGAPIObject(API.REPORTS) titles = ['date'] if report == 
'customer': fullDataServices = CUSTOMER_REPORT_SERVICES userReports = False service = rep.customerUsageReports() kwargs = [{}] else: # 'user' fullDataServices = USER_REPORT_SERVICES userReports = True service = rep.userUsageReport() kwargs = [{'userKey': 'all'}] titles.append('user') csvPF = CSVPrintFile() customerId = GC.Values[GC.CUSTOMER_ID] if customerId == GC.MY_CUSTOMER: customerId = None parameters = set() convertMbToGb = select = showOrgUnit = False userKey = 'all' cd = orgUnit = orgUnitId = None userOrgUnits = {} startEndTime = StartEndTime('startdate', 'enddate', 'date') skipDayNumbers = [] skipDates = set() oneDay = datetime.timedelta(days=1) while Cmd.ArgumentsRemaining(): myarg = getArgument() if csvPF and myarg == 'todrive': csvPF.GetTodriveParameters() elif myarg in {'start', 'startdate', 'end', 'enddate', 'range', 'thismonth', 'previousmonths'}: startEndTime.Get(myarg) elif userReports and myarg in {'ou', 'org', 'orgunit'}: if cd is None: cd = buildGAPIObject(API.DIRECTORY) orgUnit, orgUnitId = getOrgUnitId(cd) select = False elif userReports and myarg == 'showorgunit': showOrgUnit = True elif myarg in {'fields', 'parameters'}: for field in getString(Cmd.OB_STRING).replace(',', ' ').split(): if ':' in field: repsvc, _ = field.split(':', 1) if repsvc in fullDataServices: parameters.add(field) else: invalidChoiceExit(repsvc, fullDataServices, True) else: Cmd.Backup() invalidArgumentExit('service:parameter') elif myarg == 'skipdates': for skip in getString(Cmd.OB_STRING).upper().split(','): if skip.find(':') == -1: skipDates.add(validateYYYYMMDD(skip)) else: skipStart, skipEnd = skip.split(':', 1) skipStartDate = validateYYYYMMDD(skipStart) skipEndDate = validateYYYYMMDD(skipEnd) if skipEndDate < skipStartDate: Cmd.Backup() usageErrorExit(Msg.INVALID_DATE_TIME_RANGE.format(myarg, skipEnd, myarg, skipStart)) while skipStartDate <= skipEndDate: skipDates.add(skipStartDate) skipStartDate += oneDay elif myarg == 'skipdaysofweek': skipdaynames = getString(Cmd.OB_STRING).split(',') dow = [d.lower() for d in calendarlib.day_abbr] skipDayNumbers = [dow.index(d) for d in skipdaynames if d in dow] elif userReports and myarg == 'user': userKey = getString(Cmd.OB_EMAIL_ADDRESS) orgUnit = orgUnitId = None select = False elif userReports and (myarg == 'select' or myarg in usageEntitySelectors()): if myarg != 'select': Cmd.Backup() _, users = getEntityToModify(defaultEntityType=Cmd.ENTITY_USERS) orgUnit = orgUnitId = None select = True elif myarg == 'convertmbtogb': convertMbToGb = True else: unknownArgumentExit() if startEndTime.endDateTime is None: startEndTime.endDateTime = todaysDate() if startEndTime.startDateTime is None: startEndTime.startDateTime = startEndTime.endDateTime+datetime.timedelta(days=-30) startDateTime = startEndTime.startDateTime startDate = startDateTime.strftime(YYYYMMDD_FORMAT) endDateTime = startEndTime.endDateTime endDate = endDateTime.strftime(YYYYMMDD_FORMAT) startUseDate = endUseDate = None if not orgUnitId: showOrgUnit = False if userReports: if select: Ent.SetGetting(Ent.REPORT) kwargs = [{'userKey': normalizeEmailAddressOrUID(user)} for user in users] elif userKey == 'all': if orgUnitId: kwargs[0]['orgUnitID'] = orgUnitId userOrgUnits = getUserOrgUnits(cd, orgUnit, orgUnitId) forWhom = f'users in orgUnit {orgUnit}' else: forWhom = 'all users' printGettingEntityItemForWhom(Ent.REPORT, forWhom) else: Ent.SetGetting(Ent.REPORT) kwargs = [{'userKey': normalizeEmailAddressOrUID(userKey)}] printGettingEntityItemForWhom(Ent.REPORT, kwargs[0]['userKey']) if 
showOrgUnit: titles.append('orgUnitPath') else: pageMessage = None csvPF.SetTitles(titles) csvPF.SetSortAllTitles() parameters = ','.join(parameters) if parameters else None while startDateTime <= endDateTime: if startDateTime.weekday() in skipDayNumbers or startDateTime in skipDates: startDateTime += oneDay continue useDate = startDateTime.strftime(YYYYMMDD_FORMAT) startDateTime += oneDay try: for kwarg in kwargs: if userReports: if not select and userKey == 'all': pageMessage = getPageMessageForWhom(forWhom, showDate=useDate) else: pageMessage = getPageMessageForWhom(kwarg['userKey'], showDate=useDate) try: usage = callGAPIpages(service, 'get', 'usageReports', pageMessage=pageMessage, throwReasons=[GAPI.INVALID, GAPI.INVALID_INPUT, GAPI.BAD_REQUEST, GAPI.FORBIDDEN], retryReasons=GAPI.SERVICE_NOT_AVAILABLE_RETRY_REASONS, customerId=customerId, date=useDate, parameters=parameters, **kwarg) except GAPI.badRequest: continue for entity in usage: row = {'date': useDate} if userReports: if 'userEmail' in entity['entity']: row['user'] = entity['entity']['userEmail'] if showOrgUnit: row['orgUnitPath'] = userOrgUnits.get(row['user'], UNKNOWN) else: row['user'] = UNKNOWN for item in entity.get('parameters', []): if 'name' not in item: continue name = item['name'] if name == 'cros:device_version_distribution': versions = {} for version in item['msgValue']: versions[version['version_number']] = version['num_devices'] for k, v in sorted(iter(versions.items()), reverse=True): title = f'cros:num_devices_chrome_{k}' row[title] = v else: for ptype in REPORTS_PARAMETERS_SIMPLE_TYPES: if ptype in item: if ptype != 'datetimeValue': if convertMbToGb and name.endswith('_in_mb'): name = convertReportMBtoGB(name, item) row[name] = item[ptype] else: row[name] = formatLocalTime(item[ptype]) break else: row[name] = '' if not startUseDate: startUseDate = useDate endUseDate = useDate csvPF.WriteRowTitles(row) except GAPI.invalid as e: stderrWarningMsg(str(e)) break except GAPI.invalidInput as e: systemErrorExit(GOOGLE_API_ERROR_RC, str(e)) except GAPI.forbidden as e: accessErrorExit(None, str(e)) if startUseDate: reportName = f'{report.capitalize()} Usage Report - {startUseDate}:{endUseDate}' else: reportName = f'{report.capitalize()} Usage Report - {startDate}:{endDate} - No Data' csvPF.writeCSVfile(reportName) NL_SPACES_PATTERN = re.compile(r'\n +') DISABLED_REASON_TIME_PATTERN = re.compile(r'.*(\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2})') REPORT_CHOICE_MAP = { 'access': 'access_transparency', 'accesstransparency': 'access_transparency', 'admin': 'admin', 'calendar': 'calendar', 'calendars': 'calendar', 'chat': 'chat', 'chrome': 'chrome', 'contextawareaccess': 'context_aware_access', 'customer': 'customer', 'customers': 'customer', 'datastudio': 'data_studio', 'devices': 'mobile', 'doc': 'drive', 'docs': 'drive', 'domain': 'customer', 'drive': 'drive', 'enterprisegroups': 'groups_enterprise', 'gcp': 'gcp', 'gemini': 'gemini_for_workspace', 'geminiforworkspace': 'gemini_for_workspace', 'gplus': 'gplus', 'google+': 'gplus', 'group': 'groups', 'groups': 'groups', 'groupsenterprise': 'groups_enterprise', 'hangoutsmeet': 'meet', 'jamboard': 'jamboard', 'keep': 'keep', 'login': 'login', 'logins': 'login', 'lookerstudio': 'data_studio', 'meet': 'meet', 'mobile': 'mobile', 'oauthtoken': 'token', 'rules': 'rules', 'saml': 'saml', 'token': 'token', 'tokens': 'token', 'usage': 'usage', 'usageparameters': 'usageparameters', 'user': 'user', 'users': 'user', 'useraccounts': 'user_accounts', 'vault': 'vault', } 
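# Illustrative sketch only (hypothetical helper, not part of GAM): resolving a
# user-typed report name to its canonical Reports API application through
# REPORT_CHOICE_MAP above. The normalization shown (lowercase, underscores and
# dashes removed) is an assumption about how the command line argument arrives.
def _exampleResolveReportChoice(exampleArg):
  exampleKey = exampleArg.strip().lower().replace('_', '').replace('-', '')
  return REPORT_CHOICE_MAP.get(exampleKey)
# For example, 'OAuth_Token' normalizes to 'oauthtoken' and resolves to 'token';
# an unrecognized name resolves to None.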
REPORT_ACTIVITIES_UPPERCASE_EVENTS = {
  'access_transparency', 'admin', 'chrome', 'context_aware_access',
  'data_studio', 'gcp', 'jamboard', 'mobile'
  }
REPORT_ACTIVITIES_TIME_OBJECTS = {'time'}
# gam report [todrive *]
#	[(user all|)|(orgunit|org|ou [showorgunit])|(select )]
#	[([start