#!/usr/bin/env python3
import argparse
import csv
import logging
import logging.handlers
import re
import sys
import yaml

from datetime import datetime
from datetime import timedelta
from multiprocessing import Process
from subprocess import check_output as cmd
from subprocess import CalledProcessError
from os.path import exists
from os import makedirs
from os import devnull
from os import stat
from shutil import rmtree

# Open the null device once; its handle is passed as stderr to silence gam output.
devnull = open(devnull, 'w')


class Archiver:
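    """Archive GSuite domain data: pull user, group, resource, OAuth,
    and login data with GAM, mirror each record to syslog, and upload
    the raw CSVs to the admin account's Drive space.
    """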

    def __init__(self, args, config):
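        """Store args and config, create today's local data directory,
        raise the CSV field limit, and ensure the Drive report
        directory exists."""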
        self.__dict__.update(args)
        self.config = config
        self.logger = logging.getLogger(__name__)
        self.today = datetime.now().strftime('%Y-%m-%d')

        self.data_dir = "{}/log/{}".format(
            self.config['general']['data_dir'],
            self.today
        )
        if not exists(self.data_dir):
            makedirs(self.data_dir)

        # CSV's max field size needs to be raised
        # because some groups are enormous.
        csv.field_size_limit(sys.maxsize)

        self.create_google_report_directory()

    def log_users(self):
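        """Fetch all domain users via GAM, log each user's OU to
        syslog, and upload the raw CSV to Drive."""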
        for i in range(5):
            try:
                userData = cmd([
                    self.config['google']['gam_command'],
                    'print',
                    'users',
                    'ou'
                ]).decode('utf-8')
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent errors fetching user data.',
                extra={'entity': 'google-archive-data'}
            )
            sys.exit(2)

        # Write user data to logfile.
        with open(self.data_dir + '/user.csv', 'w+') as stream:
            stream.write(userData)

        # Log data to syslog.
        with open(self.data_dir + '/user.csv', 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            for line in reader:
                self.logger.info(
                    'type=user,ou={}'.format(line['orgUnitPath']),
                    extra={'entity': line['primaryEmail']}
                )
        # Upload file to admin account drive space
        self.upload_to_drive('user')

    def log_groups(self):
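        """Fetch all domain groups via GAM, log each group's members
        and owners to syslog, and upload the raw CSV to Drive."""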
        for i in range(5):
            try:
                groupData = cmd([
                    self.config['google']['gam_command'],
                    'print',
                    'groups',
                    'members',
                    'owners'
                ]).decode('utf-8')
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent errors fetching group data.',
                extra={'entity': 'google-archive-data'}
            )
            sys.exit(2)

        # Write group data to logfile.
        with open(self.data_dir + '/group.csv', 'w+') as stream:
            stream.write(groupData)

        # Log data to syslog.
        with open(self.data_dir + '/group.csv', 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            for line in reader:
                self.logger.info(
                    'type=group,members={},owners={}'.format(
                        line['Members'].replace('\n', ','),
                        line['Owners'].replace('\n', ',')
                    ),
                    extra={'entity': line['Email']}
                )

        # Upload file to admin account drive space
        self.upload_to_drive('group')

    def log_resources(self):
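        """Fetch all calendar resources via GAM, log each resource's
        email and ID to syslog, and upload the raw CSV to Drive."""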
        for i in range(5):
            try:
                resourceData = cmd([
                    self.config['google']['gam_command'],
                    'print',
                    'resources'
                ]).decode('utf-8')
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent errors fetching resource data.',
                extra={'entity': 'google-archive-data'}
            )
            sys.exit(2)

        # Write resource data to logfile.
        with open(self.data_dir + '/resource.csv', 'w') as stream:
            stream.write(resourceData)

        # Log data to syslog.
        with open(self.data_dir + '/resource.csv', 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            for line in reader:
                self.logger.info(
                    'type=resource,'
                    'email={},'
                    'id={}'.format(
                        line['resourceEmail'],
                        line['resourceId']
                    ),
                    extra={'entity': line['resourceName']}
                )

        # Upload file to admin account drive space
        self.upload_to_drive('resource')

    def log_oauth(self):
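        """Fetch yesterday's OAuth token authorization events via GAM,
        log each grant's client, app, and scopes to syslog, and upload
        the raw CSV to Drive."""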
        for i in range(5):
            try:
                oauthData = cmd([
                    self.config['google']['gam_command'],
                    'report',
                    'token',
                    'start',
                    datetime.strftime(
                        datetime.today() - timedelta(1),
                        '%Y-%m-%dT00:00:00Z'
                        ),
                    'end',
                    datetime.strftime(
                        datetime.today(),
                        '%Y-%m-%dT00:00:00Z'
                        ),
                    'event',
                    'authorize'
                ]).decode('utf-8')
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent errors fetching OAuth data.',
                extra={'entity': 'google-archive-data'}
            )
            sys.exit(2)

        # The config is a plain dict, so use key membership (hasattr
        # never matches dict keys) to find the optional ignore list.
        _ignore = []
        if 'archive_data' in self.config:
            if 'oauth_ignore_clients' in self.config['archive_data']:
                _ignore = self.config['archive_data']['oauth_ignore_clients']

        with open(self.data_dir + '/oauth.csv', 'w') as stream:
            stream.write(oauthData)

        with open(self.data_dir + '/oauth.csv', 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            for line in reader:
                if str(line['parameters.0.value']) in _ignore:
                    continue
                _scopes = []
                for key in line.keys():
                    if 'multiValue' in key:
                        _scopes.append(line[key])

                _scopes = filter(None, _scopes)
                _scopes = ','.join(_scopes)
                self.logger.info(
                    'type=oauth,'
                    'time={},'
                    'ip_address={},'
                    'app_client_id={},'
                    'app_name={},'
                    'scopes={}'.format(
                        line['id.time'],
                        line['ipAddress'],
                        line['parameters.0.value'],
                        line['parameters.1.value'],
                        _scopes
                    ),
                    extra={'entity': line['actor.email']}
                )

        # Upload file to admin account drive space
        self.upload_to_drive('oauth')

    def log_logins(self):
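        """Fetch login events for the lookback hour via GAM, log login
        and account-audit events to syslog, and write the raw CSV to
        the local data directory."""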
        lookback_start = datetime.now() - timedelta(hours=(self.lookback + 1))
        lookback_end = datetime.now() - timedelta(hours=self.lookback)
        # Hour label for the logfile; take it from the lookback window
        # so it wraps correctly across midnight.
        hour = lookback_end.hour

        for i in range(5):
            try:
                loginData = cmd([
                    self.config['google']['gam_command'],
                    'report',
                    'login',
                    'start',
Rob Carleski's avatar
Rob Carleski committed
250
                    datetime.strftime(lookback_start, '%Y-%m-%dT%H:00:00Z'),
                    'end',
                    datetime.strftime(lookback_end, '%Y-%m-%dT%H:00:00Z')
                ]).decode('utf-8')
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent errors fetching login data.',
                extra={'entity': 'google-archive-data'}
            )
            sys.exit(2)

        # Write login data to logfile.
        logfile = '/logins-{}.csv'.format(hour)
        with open(self.data_dir + logfile, 'w') as stream:
            stream.write(loginData)

        # Log data to syslog.
        with open(self.data_dir + logfile, 'r') as csvfile:
            reader = csv.DictReader(csvfile)
            for line in reader:
                if line['actor.email']:
                    self.logger.info(
                        'type=login,'
                        'type={},'
                        'suspicious={}'.format(
                            line['name'],
                            line['parameters.2.boolValue']
                        ),
                        extra={'entity': line['actor.email']}
                    )
                elif line['actor.callerType']:
                    self.logger.info(
                        'type=google_account_audit,'
                        'audit_method={},'
                        'exposure_type={}'.format(
                            line['name'],
                            line['parameters.0.name'],
                        ),
                        extra={'entity': line['parameters.0.value']}
                    )

    def upload_to_drive(self, log_type):
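        """Upload the named log's CSV from the local data directory to
        today's report folder in the admin account's Drive, exiting if
        the file is empty."""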
        logfile = '{}/{}.csv'.format(self.data_dir, log_type)
        if stat(logfile).st_size == 0:
            sys.exit(2)
        for i in range(5):
            try:
                cmd([
                    self.config['google']['gam_command'],
                    'user',
                    self.config['google']['admin_account'],
                    'add',
                    'drivefile',
                    'localfile',
                    logfile,
                    'parentid',
                    self.log_dir_id
                ])
                break
            except CalledProcessError as e:
                self.logger.warning(
                    e.output,
                    extra={'entity': 'google-archive-data'}
                )
        else:
            self.logger.error(
                'Persistent failures uploading {} log.'.format(log_type),
                extra={'entity': 'google-archive-data'}
            )

    def create_google_report_directory(self):
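        """Ensure the top-level Drive reports folder and today's
        subfolder exist, capturing today's folder ID for uploads."""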
        try:
            # Ensure top-level reports directory exists.
            top_level_check = cmd([
                self.config['google']['gam_command'],
                'user',
                self.config['google']['admin_account'],
                'show',
                'filelist',
                'query',
                "title='{}'".format(self.drive_folder)
            ], stderr=devnull).decode('UTF-8')
            if len(top_level_check.split('\n')) <= 2:
                results = cmd([
                    self.config['google']['gam_command'],
                    'user',
                    self.config['google']['admin_account'],
                    'add',
                    'drivefile',
                    'drivefilename',
                    self.drive_folder,
                    'mimetype',
                    'gfolder'
                ], stderr=devnull).decode('UTF-8')
                regex = r'\((.*)\)'
                parent_id = re.findall(regex, results)[0]
            else:
                regex = r'\/drive\/folders\/([a-zA-Z0-9\-\_]+)'
                parent_id = re.findall(regex, top_level_check)[0]

            # Ensure today's report directory exists.
            log_dir_check = cmd([
                self.config['google']['gam_command'],
                'user',
                self.config['google']['admin_account'],
                'show',
                'filelist',
                'query',
                "title='{}' and '{}' in parents".format(
                    self.today,
                    parent_id
                )
            ]).decode('UTF-8')
            if len(log_dir_check.split('\n')) <= 2:
                results = cmd([
                    self.config['google']['gam_command'],
                    'user',
                    self.config['google']['admin_account'],
                    'add',
                    'drivefile',
                    'drivefilename',
                    self.today,
                    'mimetype',
                    'gfolder',
                    'parentname',
                    self.drive_folder
                ]).decode('UTF-8')
                regex = r'\((.*)\)'
                self.log_dir_id = re.findall(regex, results)[0]
            else:
                regex = r'\/drive\/folders\/([a-zA-Z0-9\-\_]+)'
                self.log_dir_id = re.findall(regex, log_dir_check)[0]
        except CalledProcessError as e:
            self.logger.error(
                e.output,
                extra={'entity': 'google-archive-data'}
            )
            # Without a report directory ID there is nowhere to upload, so bail.
            sys.exit(2)

    def cleanup(self):
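        """Remove today's local data directory."""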
        try:
            rmtree(self.data_dir)
        except IOError as e:
            self.logger.error(
                e,
                extra={'entity': 'google-archive-data'}
            )


def main():
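    """Parse arguments, configure syslog and console logging, run the
    requested daily or hourly archive jobs, and clean up."""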

    helptext = '''examples:
    google-archive-data -H
    google-archive-data --daily
    '''

    # Parse command line arguments
    parser = argparse.ArgumentParser(
        description='Logs various datapoints for a GSuite domain.',
        epilog=helptext,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    timeframe = parser.add_mutually_exclusive_group(required=True)
    timeframe.add_argument(
        '--hourly',
        '-H',
        dest='hourly',
        action='store_true'
    )
    timeframe.add_argument(
        '--daily',
        '-D',
        dest='daily',
        action='store_true'
    )
    parser.add_argument(
        '-l',
        '--lookback',
        help='''
The number of hours to look back for data.
Used with login reporting to account for the inherent reporting delay.''',
        default=4,
        type=int
    )
    parser.add_argument(
        '-d',
        '--drive_folder',
        help='The log directory name to use in Google Drive',
        default='Google Domain reports'
    )
    parser.add_argument(
        '-c',
        '--config',
        help="The location of the CAK config.",
        default='/etc/collab-admin-kit.yml'
    )
    args = parser.parse_args()

    # Open the CAK Config
    with open(args.config) as stream:
        config = yaml.load(stream, Loader=yaml.BaseLoader)

    # Override the default syslog format for this script.
    config['general']['log_format'] = ("level=%(levelname)s"
                                       ",%(message)s,"
                                       "entity=%(entity)s"
                                       )

    # Get the module logger and set the debug level.
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    # Create a syslog handler, set format, and associate.
    sh = logging.handlers.SysLogHandler(
        address='/dev/log',
        facility=config['general']['log_facility']
    )
    formatter = logging.Formatter(config['general']['log_format'])
    sh.setFormatter(formatter)
    logger.addHandler(sh)

    # Create a console handler, set format, and associate.
    ch = logging.StreamHandler()
    formatter = logging.Formatter(config['general']['console_format'])
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    if args.daily:
        archiver = Archiver(vars(args), config)
        userP = Process(target=archiver.log_users)
        groupP = Process(target=archiver.log_groups)
        resourceP = Process(target=archiver.log_resources)
        oauthP = Process(target=archiver.log_oauth)

        userP.start()
        groupP.start()
        resourceP.start()
        oauthP.start()

        userP.join()
        groupP.join()
        resourceP.join()
        oauthP.join()

    if args.hourly:
        archiver = Archiver(vars(args), config)
        logP = Process(target=archiver.log_logins)

        logP.start()

        logP.join()

    archiver.cleanup()


if __name__ == '__main__':
    main()