Commit 1d8d6161 authored by Rob Carleski

Abandon using root logger.

parent e8119e72
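
For context on the change: these scripts previously logged through the module-level logging.error / logging.warning helpers, which write to the root logger, and each main() configured the root logger directly. This commit gives every class its own self.logger = logging.getLogger(__name__) and points main() at that same named logger. Below is a minimal sketch of the pattern, assuming a single-file script like the ones touched here; the class body, the account value, the StreamHandler, and the format string are illustrative stand-ins (the real scripts attach a syslog handler and pass the account name through extra={'entity': ...}).

import logging


class SharedAccount:
    """Illustrative stand-in for the classes touched by this commit."""

    def __init__(self, account):
        self.account = account
        # Named (module-level) logger instead of the root logger.
        self.logger = logging.getLogger(__name__)

    def create_box(self):
        # 'entity' rides along on the log record via extra= and is
        # rendered by the formatter configured in main().
        self.logger.error('Box auth file missing',
                          extra={'entity': self.account})


def main():
    # Configure the named logger rather than logging.getLogger() (the root).
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    # The real scripts attach a syslog handler here; a StreamHandler keeps
    # this sketch runnable anywhere.
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('%(name)s: entity=%(entity)s %(message)s')
    )
    logger.addHandler(handler)

    # Hypothetical account name, purely for illustration.
    SharedAccount('shared-dept').create_box()


if __name__ == '__main__':
    main()

This works because each script here defines its classes and main() in the same file (as the hunks suggest), so logging.getLogger(__name__) resolves to the same logger object in both places and handlers attached in main() still receive the class-level calls; a multi-module layout would instead need the handler on a shared parent logger. As part of the same sweep, several calls in the second file that passed extras= are corrected to extra=, since the logging methods do not accept an extras keyword.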
......@@ -21,6 +21,7 @@ class SharedAccount:
self.__dict__.update(args)
self.config = config
self.create_extra_attr()
self.logger = logging.getLogger(__name__)
def create_box(self):
try:
......@@ -29,15 +30,15 @@ class SharedAccount:
)
boxClient = boxsdk.Client(auth)
except IOError as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
except boxsdk.exception.BoxAPIException as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
try:
if len(self.full_name) > 50:
logging.warning(
self.logger.warning(
'Full name attribute for user exceeds limit. Truncating.',
extra={'entity': self.account}
)
......@@ -99,7 +100,7 @@ Accounts section of the M+Box support site.
CollaborationRole.CO_OWNER
)
except boxsdk.exception.BoxAPIException as e:
logging.warning(
self.logger.warning(
'Unable to add {} as Box collaborator'.format(
_
),
......@@ -107,7 +108,7 @@ Accounts section of the M+Box support site.
)
break
except boxsdk.exception.BoxAPIException as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
def upload_box_info_file(self):
try:
......@@ -132,9 +133,9 @@ http://its.umich.edu/help/'''.format(self.full_name, self.email)
self.shared_box_folder.upload(readme)
os.remove(readme)
except IOError as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
except boxsdk.exception.BoxAPIException as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
def create_google(self):
try:
......@@ -153,7 +154,7 @@ http://its.umich.edu/help/'''.format(self.full_name, self.email)
'Department accounts'
])
except CalledProcessError as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
self.google_text = '''
Google-Specific Information:
......@@ -174,20 +175,20 @@ Google-Specific Information:
if controllerCn not in mcommClient.group_data['ownerDn']:
if self.take_group_ownership(self.account):
mcommClient.fetch_group(self.account)
logging.debug(
self.logger.debug(
'Waiting 10 seconds for replication.',
extra={'entity': self.account}
)
sleep(1)
else:
logging.error(
self.logger.error(
'Failed to obtain group ownership.',
extra={'entity': self.account}
)
exit(2)
except Exception:
mcommClient.reserve_group(self.account)
logging.debug(
self.logger.debug(
'Waiting 10 seconds for replication.',
extra={'entity': self.account}
)
......@@ -200,7 +201,7 @@ Google-Specific Information:
mcommClient.add_group_members(email)
mcommClient.remove_group_owners('collab-api-client')
except Exception as e:
logging.warning(e, extra={'entity': self.account})
self.logger.warning(e, extra={'entity': self.account})
def take_group_ownership(self, group):
input('Add api controller as group owner, then press enter.')
......@@ -224,7 +225,7 @@ Google-Specific Information:
self.password
)
except kadmin.KAdminError as e:
logging.error(e, extra={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
def upload_and_share_password(self):
......@@ -267,7 +268,7 @@ Google-Specific Information:
'reader'
])
except CalledProcessError as e:
logging.warning(e.output, extra={'entity': self.account})
self.logger.warning(e.output, extra={'entity': self.account})
continue
os.remove(passFilePath)
......@@ -370,7 +371,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......
......@@ -23,10 +23,10 @@ class SharedAccount():
self.box_exists = False
self.google_exists = False
self.group_data = {}
self.logger = logging.getLogger(__name__)
def check_box(self):
try:
logging.error('checking!')
auth = boxsdk.JWTAuth.from_settings_file(
self.config['box']['auth_file']
)
......@@ -39,9 +39,9 @@ class SharedAccount():
break
return False
except IOError as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
except boxsdk.exception.BoxAPIException as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
def check_google(self):
try:
......@@ -64,10 +64,10 @@ class SharedAccount():
group.fetch_group(self.account)
self.group_data = group.group_data
except yaml.parser.ParserError as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
except Exception as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
def set_kerberos_password(self):
......@@ -84,10 +84,10 @@ class SharedAccount():
princ = kadm.get_princ(principal)
princ.change_password(self.password)
except IOError as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
except kadmin.KAdminError as e:
logging.error(e, extras={'entity': self.account})
self.logger.error(e, extra={'entity': self.account})
exit(2)
def set_google_password(self):
......@@ -102,7 +102,7 @@ class SharedAccount():
], stderr=STDOUT
)
except CalledProcessError as e:
logging.error(e.output, extras={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
def upload_and_share_password(self):
try:
......@@ -130,7 +130,7 @@ class SharedAccount():
upload_output.decode('UTF-8')
).groups()[0]
except CalledProcessError as e:
logging.error(e.output, extras={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
# Gather a list of account owners from MCommunity
owners = []
......@@ -153,7 +153,7 @@ class SharedAccount():
'reader'
])
except CalledProcessError:
logging.warning(
self.logger.warning(
'Unable to share password with {}'.format(owner),
extra={'entity': self.account}
)
......@@ -225,7 +225,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......@@ -248,13 +248,13 @@ def main():
args.password,
config
)
sa.check_mcommunity(args.account, config['mcommunity'])
sa.check_box(args.account, config['box'])
sa.check_google(args.account, config['google'])
sa.check_mcommunity()
sa.check_box()
sa.check_google()
# Exit out if a shared account doesn't exist.
if not sa.google_exists and not sa.box_exists:
logging.error(
logger.error(
'Entity does not exist as a shared account',
extra={'entity': args.account}
)
......
......@@ -14,6 +14,7 @@ class DiscussionGroup:
def __init__(self, args, config):
self.__dict__.update(args)
self.config = config
self.logger = logging.getLogger(__name__)
# Create a sane email address automatically if one isn't specified.
if not hasattr(self, 'email'):
......@@ -37,7 +38,7 @@ class DiscussionGroup:
self.description
])
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.email})
self.logger.error(e.output, extra={'entity': self.email})
def add_group_owners(self):
if hasattr(self, 'owners'):
......@@ -54,7 +55,7 @@ class DiscussionGroup:
owner
])
except CalledProcessError as e:
logging.error(
self.logger.error(
e.output,
extra={'entity': self.email}
)
......@@ -72,12 +73,12 @@ class DiscussionGroup:
self.owners_file
])
except CalledProcessError as e:
logging.error(
self.logger.error(
e.output,
extra={'entity': self.email}
)
else:
logging.error(
self.logger.error(
'No owners assigned to group',
extra={'entity': self.email}
)
......@@ -97,7 +98,7 @@ class DiscussionGroup:
member
])
except CalledProcessError as e:
logging.warning(
self.logger.warning(
e.output,
extra={'entity': self.email}
)
......@@ -115,12 +116,12 @@ class DiscussionGroup:
self.members_file
])
except CalledProcessError as e:
logging.warning(
self.logger.warning(
e.output,
extra={'entity': self.email}
)
except IOError as e:
logging.error(
self.logger.error(
e,
extra={'entity': self.email}
)
......@@ -209,7 +210,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......
......@@ -22,7 +22,7 @@ class Archiver():
def __init__(self, args, config):
self.__dict__.update(args)
self.config = config
self.logger = logging.getLogger(__name__)
self.today = datetime.now().strftime('%Y-%m-%d')
self.data_dir = "{}/log/{}".format(
......@@ -49,9 +49,9 @@ class Archiver():
]).decode('utf-8')
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error('Persistent errors fetching resource data.')
self.logger.error('Persistent errors fetching resource data.')
exit(2)
# Write user data to logfile.
......@@ -62,7 +62,7 @@ class Archiver():
with open(self.data_dir + '/user.csv', 'r') as csvfile:
reader = csv.DictReader(csvfile)
for line in reader:
logging.info(
self.logger.info(
'type=user,ou={}'.format(line['orgUnitPath']),
extra={'entity': line['primaryEmail']}
)
......@@ -82,9 +82,9 @@ class Archiver():
]).decode('utf-8')
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error('Persistent errors fetching group data.')
self.logger.error('Persistent errors fetching group data.')
exit(2)
# Write group data to logfile.
......@@ -95,7 +95,7 @@ class Archiver():
with open(self.data_dir + '/group.csv', 'r') as csvfile:
reader = csv.DictReader(csvfile)
for line in reader:
logging.info(
self.logger.info(
'type=group,members={},owners={}'.format(
line['Members'].replace('\n', ','),
line['Owners'].replace('\n', ',')
......@@ -116,9 +116,9 @@ class Archiver():
]).decode('utf-8')
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error('Persistent errors fetching resource data.')
self.logger.error('Persistent errors fetching resource data.')
exit(2)
# Write resource data to logfile.
......@@ -128,7 +128,7 @@ class Archiver():
with open(self.data_dir + '/resource.csv', 'r') as csvfile:
reader = csv.DictReader(csvfile)
for line in reader:
logging.info(
self.logger.info(
'type=resource,'
'email={},'
'id={}'.format(
......@@ -158,9 +158,9 @@ class Archiver():
]).decode('utf-8')
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error('Persistent errors fetching oauth data.')
self.logger.error('Persistent errors fetching oauth data.')
exit(2)
_ignore = []
......@@ -183,7 +183,7 @@ class Archiver():
_scopes = filter(None, _scopes)
_scopes = ','.join(_scopes)
logging.info(
self.logger.info(
'type=oauth,'
'time={},'
'ip_address={},'
......@@ -215,9 +215,9 @@ class Archiver():
]).decode('utf-8')
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error('Persistent errors fetching login data.')
self.logger.error('Persistent errors fetching login data.')
exit(2)
# Write resource data to logfile.
......@@ -229,7 +229,7 @@ class Archiver():
reader = csv.DictReader(csvfile)
for line in reader:
if line['actor.email']:
logging.info(
self.logger.info(
'type=login,'
'type={},'
'suspicious={}'.format(
......@@ -239,7 +239,7 @@ class Archiver():
extra={'entity': line['actor.email']}
)
elif line['actor.callerType']:
logging.info(
self.logger.info(
'type=google_account_audit,'
'audit_method={},'
'exposure_type={}'.format(
......@@ -275,9 +275,9 @@ class Archiver():
])
break
except CalledProcessError as e:
logging.warning(e.output)
self.logger.warning(e.output)
else:
logging.error(
self.logger.error(
'Persistent failures uploading {} log.'.format(log_type)
)
......@@ -345,13 +345,13 @@ class Archiver():
regex = r'\/drive\/folders\/([a-zA-Z0-9\-\_]+)'
self.log_dir_id = re.findall(regex, log_dir_check)[0]
except CalledProcessError as e:
logging.error(e.output)
self.logger.error(e.output)
def cleanup(self):
try:
rmtree(self.data_dir)
except IOError as e:
logging.error(e)
self.logger.error(e)
def main():
......@@ -413,7 +413,7 @@ Used with login reporting to account for inherant delay.''',
)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......
......@@ -13,6 +13,7 @@ class Account():
def __init__(self, args, config):
self.__dict__.update(args)
self.config = config
self.logger = logging.getLogger(__name__)
self.localpart = self.account.split('@')[0]
# Create a formatted list, starting at 001.
......@@ -26,7 +27,7 @@ class Account():
self.account
])
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
exit(2)
def create_labels_and_filters(self):
......@@ -62,7 +63,7 @@ class Account():
label
])
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
def main():
......@@ -109,7 +110,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......
......@@ -16,6 +16,7 @@ class Resource:
def __init__(self, args, config):
self.__dict__.update(args)
self.config = config
self.logger = logging.getLogger(__name__)
self.resource_id = ''
_allowed = string.ascii_letters + string.digits
for char in self.name:
......@@ -37,7 +38,7 @@ class Resource:
])
sleep(1)
except CalledProcessError:
logging.error(
self.logger.error(
'Failed to create resource.',
extra={'entity': self.name}
)
......@@ -52,7 +53,7 @@ class Resource:
])
self.email = re.findall(b'Email: (.+)', resource_info)[0]
except CalledProcessError:
logging.error(
self.logger.error(
'Failed to fetch resource data after creation.',
extra={'entity': self.name}
)
......@@ -70,7 +71,7 @@ class Resource:
owner
])
except CalledProcessError:
logging.warning(
self.logger.warning(
'Failed to add {} as calendar owner'.format(owner),
extra={'entity': self.name}
)
......@@ -95,7 +96,7 @@ class Resource:
'default'
])
except CalledProcessError as e:
logging.warning(
self.logger.warning(
'Unable to remove domain-wide access to resource: {}'.format(
e.output
),
......@@ -187,7 +188,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Create a syslog handler, set format, and associate.
......
......@@ -19,6 +19,7 @@ class Sifter:
def __init__(self, args, config):
self.__dict__.update(args)
self.config = config
self.logger = logging.getLogger(__name__)
try:
cmd([
......@@ -27,7 +28,7 @@ class Sifter:
self.account
])
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
exit(2)
if '@' not in self.account:
......@@ -56,7 +57,7 @@ class Sifter:
self.parent = [self.root_folder]
def sift(self, folder_id, in_private=False):
logging.info(
self.logger.info(
'message=Processing folder {}.'.format(folder_id),
extra={'entity': self.account}
)
......@@ -79,7 +80,7 @@ class Sifter:
)], stderr=devnull
).decode('utf-8').split('\n')[:-1]
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
return None
if len(folderFiles) > 0:
children = csv.DictReader(
......@@ -140,7 +141,7 @@ class Sifter:
'parent': self.root_folder
}
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
def find_shared(self):
try:
......@@ -171,7 +172,7 @@ class Sifter:
'link': item['alternateLink']
}
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
def scrub_duplicates(self):
for id in self.transfer_files.keys():
......@@ -231,13 +232,13 @@ class Sifter:
upload_user
])
except CalledProcessError as e:
logging.warning(e.output, extra={'entity': self.account})
self.logger.warning(e.output, extra={'entity': self.account})
upload_user = self.config['google']['admin_account']
# Find available files, then upload them.
available_files = os.listdir(self.log_path)
if len(available_files) == 0:
logging.warning(
self.logger.warning(
'No files available for transfer',
extra={'entity': self.account}
)
......@@ -258,7 +259,7 @@ class Sifter:
], stderr=devnull
)
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
for log_file in available_files:
try:
......@@ -276,7 +277,7 @@ class Sifter:
], stderr=devnull
)
except CalledProcessError as e:
logging.error(e.output, extra={'entity': self.account})
self.logger.error(e.output, extra={'entity': self.account})
rmtree(self.log_path)
......@@ -344,7 +345,7 @@ def main():
config = yaml.load(stream, Loader=yaml.BaseLoader)
# Get the root logger and set the debug level
logger = logging.getLogger()
logger = logging.getLogger(__name__)