_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q35900
RequiredTagsAuditor.check_required_tags_compliance
train
def check_required_tags_compliance(self, resource):
    """Evaluate a single resource against the required-tags policy.

    Args:
        resource: A single resource

    Returns:
        `(list, list)`: Missing/invalid tag keys (if any) and accompanying notes
    """
    missing_tags = []
    notes = []
    resource_tags = {tag.key.lower(): tag.value for tag in resource.tags}

    # Resolve the audit scope for this resource type, falling back to the
    # wildcard schedule entry when no type-specific entry exists
    schedule_key = resource.resource_type if resource.resource_type in self.alert_schedule else '*'
    target_accounts = self.alert_schedule[schedule_key]['scope']

    # Skip resources whose account is outside the configured audit scope
    if resource.account.account_name not in target_accounts and '*' not in target_accounts:
        return missing_tags, notes

    # Skip resources explicitly opted out via the ignore tag
    if self.audit_ignore_tag.lower() in resource_tags:
        return missing_tags, notes

    required_tags = list(self.required_tags)

    # GDPR-regulated accounts must also carry the GDPR tag
    if self.gdpr_enabled and resource.account.account_name in self.gdpr_accounts:
        required_tags.append(self.gdpr_tag)

    # NOTE: grace-period exemption retained from the original but disabled:
    # if (datetime.utcnow() - resource.resource_creation_date).total_seconds() // 3600 < self.grace_period:
    #     return missing_tags, notes

    # Flag tags that are absent entirely, or present with an invalid value
    for key in (tag.lower() for tag in required_tags):
        if key not in resource_tags:
            missing_tags.append(key)
        elif not self.validate_tag(key, resource_tags[key]):
            missing_tags.append(key)
            notes.append('{} tag is not valid'.format(key))

    return missing_tags, notes
python
{ "resource": "" }
q35901
RequiredTagsAuditor.notify
train
def notify(self, notices):
    """Render and send a required-tags notice to every recipient.

    Args:
        notices (:obj:`dict` of `str`: `list`): Mapping of recipient to the
            notification data to render for them

    Returns:
        `None`
    """
    html_template = get_template('required_tags_notice.html')
    text_template = get_template('required_tags_notice.txt')

    for recipient, payload in list(notices.items()):
        send_notification(
            subsystem=self.ns,
            recipients=[recipient],
            subject=self.email_subject,
            body_html=html_template.render(data=payload),
            body_text=text_template.render(data=payload)
        )
python
{ "resource": "" }
q35902
Enforcement.get_one
train
def get_one(cls, enforcement_id):
    """Return the query selecting a single enforcement action by id.

    NOTE(review): this returns the (unexecuted) query object, not a row —
    callers appear expected to resolve it themselves.
    """
    return db.Enforcements.filter(enforcement_id == Enforcements.enforcement_id)
python
{ "resource": "" }
q35903
Enforcement.get_all
train
def get_all(cls, account_id=None, location=None):
    """Return a query over all Enforcements, optionally narrowed.

    Args:
        `account_id` : Unique Account Identifier
        `location` : Region associated with the Resource

    Returns:
        Query yielding enforcement objects
    """
    query = db.Enforcements.filter()

    if account_id:
        query = query.filter(account_id == Enforcements.account_id)

    if location:
        # NOTE(review): joins Resource on a constant predicate rather than a
        # column-to-column condition — confirm this is the intended filter
        query = query.join(Resource, Resource.location == location)

    return query
python
{ "resource": "" }
q35904
Enforcement.create
train
def create(cls, account_id, resource_id, action, timestamp, metrics):
    """Persist a new enforcement action record.

    Args:
        account_id: Account the action was taken against
        resource_id: Resource the action was applied to
        action: The action that was performed
        timestamp: When the action was performed
        metrics: Extra metric data captured for the action
    """
    record = Enforcements()
    record.account_id = account_id
    record.resource_id = resource_id
    record.action = action
    record.timestamp = timestamp
    record.metrics = metrics

    try:
        db.session.add(record)
    except SQLAlchemyError as e:
        logging.error('Could not add enforcement entry to database. {}'.format(e))
python
{ "resource": "" }
q35905
IssueType.get
train
def get(cls, issue_type):
    """Look up (or lazily create) the IssueType for `issue_type`.

    Accepts a type name, a numeric id, or an existing IssueType instance.
    When no matching row exists, a new one is created and returned.

    Args:
        issue_type (str,int,IssueType): Issue type name, id or instance

    Returns:
        :obj:`IssueType`
    """
    if isinstance(issue_type, cls):
        return issue_type

    model = getattr(db, cls.__name__)
    if isinstance(issue_type, str):
        obj = model.find_one(cls.issue_type == issue_type)
    elif isinstance(issue_type, int):
        obj = model.find_one(cls.issue_type_id == issue_type)
    else:
        obj = None

    if not obj:
        # Unknown type: persist a new row and return it fully refreshed
        obj = cls()
        obj.issue_type = issue_type
        db.session.add(obj)
        db.session.commit()
        db.session.refresh(obj)

    return obj
python
{ "resource": "" }
q35906
Issue.get
train
def get(issue_id, issue_type_id):
    """Fetch a single issue by its identifier and type.

    Args:
        issue_id (str): Unique Issue identifier
        issue_type_id (str): Type of issue to get

    Returns:
        :obj:`Issue`: the matching Issue, or None when not found
    """
    criteria = (
        Issue.issue_id == issue_id,
        Issue.issue_type_id == issue_type_id,
    )
    return db.Issue.find_one(*criteria)
python
{ "resource": "" }
q35907
EBSAuditor.run
train
def run(self, *args, **kwargs):
    """Main execution point for the auditor.

    Gathers per-account issues and fans them out to each account contact.

    Returns:
        `None`
    """
    self.log.debug('Starting EBSAuditor')
    notices = defaultdict(list)

    for account, issues in self.update_data().items():
        # Build the contact keys once per account; NotificationContact keys
        # aggregate by value across issues
        contacts = [
            NotificationContact(type=entry['type'], value=entry['value'])
            for entry in account.contacts
        ]
        for issue in issues:
            for contact in contacts:
                notices[contact].append(issue)

    self.notify(notices)
python
{ "resource": "" }
q35908
EBSAuditor.get_unattached_volumes
train
def get_unattached_volumes(self):
    """Collect all unattached volumes that are not explicitly ignored.

    Returns:
        :obj:`dict` of `str`: `EBSVolume`, keyed by generated issue id
    """
    unattached = {}
    ignored = set(dbconfig.get('ignore_tags', self.ns))

    for volume in EBSVolume.get_all().values():
        # Only volumes with no attachments are candidates
        if len(volume.attachments) != 0:
            continue

        # Respect opt-out tags on the volume
        if any(tag.key in ignored for tag in volume.tags):
            continue

        unattached[get_resource_id('evai', volume.id)] = volume

    return unattached
python
{ "resource": "" }
q35909
EBSAuditor.process_new_issues
train
def process_new_issues(self, volumes, existing_issues):
    """Find new or updated issues from the current set of problem volumes.

    Args:
        volumes (:obj:`dict` of `str`: `EBSVolume`): Current volumes with issues
        existing_issues (:obj:`dict` of `str`: `EBSVolumeAuditIssue`): Known issues

    Returns:
        :obj:`dict` of account: `list` of `EBSVolumeAuditIssue`
    """
    new_issues = {}
    detected_state = EBSIssueState.DETECTED.value

    for issue_id, volume in volumes.items():
        issue = existing_issues.get(issue_id)
        if issue is not None:
            # Known issue: refresh its state, keeping notes / notice time
            changes = {
                'state': detected_state,
                'notes': issue.notes,
                'last_notice': issue.last_notice
            }
            if issue.update(changes):
                new_issues.setdefault(issue.volume.account, []).append(issue)
                self.log.debug('Updated EBSVolumeAuditIssue {}'.format(
                    issue_id
                ))
        else:
            # Brand new issue: create and record it
            properties = {
                'volume_id': volume.id,
                'account_id': volume.account_id,
                'location': volume.location,
                'state': detected_state,
                'last_change': datetime.now(),
                'last_notice': None,
                'notes': []
            }
            issue = EBSVolumeAuditIssue.create(issue_id, properties=properties)
            new_issues.setdefault(issue.volume.account, []).append(issue)

    return new_issues
python
{ "resource": "" }
q35910
EBSAuditor.process_fixed_issues
train
def process_fixed_issues(self, volumes, existing_issues):
    """Return issues whose volume no longer appears in the problem set.

    Args:
        volumes (`dict`): Issue id -> :obj:`Volume` currently in violation
        existing_issues (`dict`): Issue id -> :obj:`EBSVolumeAuditIssue`

    Returns:
        :obj:`list` of :obj:`EBSVolumeAuditIssue`
    """
    return [
        issue
        for issue_id, issue in list(existing_issues.items())
        if issue_id not in volumes
    ]
python
{ "resource": "" }
q35911
EBSAuditor.notify
train
def notify(self, notices):
    """Send the unattached-volume notices to each recipient.

    Args:
        notices (:obj:`dict` of `str`: `dict`): Recipient to issue list mapping

    Returns:
        `None`
    """
    html_tmpl = get_template('unattached_ebs_volume.html')
    text_tmpl = get_template('unattached_ebs_volume.txt')

    for recipient, issues in list(notices.items()):
        # Skip recipients that ended up with no issues to report
        if not issues:
            continue

        send_notification(
            subsystem=self.name,
            recipients=[recipient],
            subject=self.subject,
            body_html=html_tmpl.render(issues=issues),
            body_text=text_tmpl.render(issues=issues)
        )
python
{ "resource": "" }
q35912
get_local_aws_session
train
def get_local_aws_session():
    """Return a boto3 session for the local instance (not a remote account).

    Uses explicit API keys from app configuration when both are present,
    otherwise falls back to the default boto3 credential chain.

    Returns:
        :obj:`boto3:boto3.session.Session`
    """
    access_key = app_config.aws_api.access_key
    secret_key = app_config.aws_api.secret_key

    # No explicit keys configured: let boto3 resolve credentials itself
    if not all((access_key, secret_key)):
        return boto3.session.Session()

    session_args = [access_key, secret_key]
    if app_config.aws_api.session_token:
        session_args.append(app_config.aws_api.session_token)

    return boto3.session.Session(*session_args)
python
{ "resource": "" }
q35913
get_aws_session
train
def get_aws_session(account):
    """Function to return a boto3 Session based on the account passed in the first argument.

    The local credentials must either be usable directly (IAM role, env vars,
    or explicit keys) or able to assume the configured instance role first;
    either way the remote account role is then assumed via STS.

    Args:
        account (:obj:`Account`): Account to create the session object for

    Returns:
        :obj:`boto3:boto3.session.Session`

    Raises:
        InquisitorError: If `account` is not an AWSAccount instance
    """
    # Local imports — presumably to avoid import cycles at module load; verify
    from cloud_inquisitor.config import dbconfig
    from cloud_inquisitor.plugins.types.accounts import AWSAccount

    if not isinstance(account, AWSAccount):
        raise InquisitorError('Non AWSAccount passed to get_aws_session, got {}'.format(account.__class__.__name__))

    # If no keys are on supplied for the account, use sts.assume_role instead
    session = get_local_aws_session()
    if session.get_credentials().method in ['iam-role', 'env', 'explicit']:
        # Credentials are directly usable — build the STS client from them
        sts = session.client('sts')
    else:
        # If we are not running on an EC2 instance, assume the instance role
        # first, then assume the remote role
        temp_sts = session.client('sts')

        audit_sts_role = temp_sts.assume_role(
            RoleArn=app_config.aws_api.instance_role_arn,
            RoleSessionName='inquisitor'
        )
        sts = boto3.session.Session(
            audit_sts_role['Credentials']['AccessKeyId'],
            audit_sts_role['Credentials']['SecretAccessKey'],
            audit_sts_role['Credentials']['SessionToken']
        ).client('sts')

    # Assume the role in the target account (role name is configurable,
    # defaulting to 'cinq_role')
    role = sts.assume_role(
        RoleArn='arn:aws:iam::{}:role/{}'.format(
            account.account_number,
            dbconfig.get('role_name', default='cinq_role')
        ),
        RoleSessionName='inquisitor'
    )

    sess = boto3.session.Session(
        role['Credentials']['AccessKeyId'],
        role['Credentials']['SecretAccessKey'],
        role['Credentials']['SessionToken']
    )
    return sess
python
{ "resource": "" }
q35914
get_aws_regions
train
def get_aws_regions(*, force=False):
    """Load a list of AWS regions from the AWS static data.

    The result is cached in the module-level `__regions` list; pass
    `force=True` to refresh the cache.

    Args:
        force (`bool`): Force fetch list of regions even if we already have a cached version

    Returns:
        :obj:`list` of `str`
    """
    from cloud_inquisitor.config import dbconfig
    global __regions

    # Serve the cached list unless a refresh was requested
    if __regions and not force:
        return __regions

    logger.debug('Loading list of AWS regions from static data')
    data = requests.get('https://ip-ranges.amazonaws.com/ip-ranges.json').json()
    rgx = re.compile(dbconfig.get('ignored_aws_regions_regexp', default='(^cn-|GLOBAL|-gov)'), re.I)
    __regions = sorted(list({prefix['region'] for prefix in data['prefixes'] if not rgx.search(prefix['region'])}))
    return __regions
python
{ "resource": "" }
q35915
AccountList.get
train
def get(self):
    """List all accounts visible to the current user"""
    _, accounts = BaseAccount.search()
    is_admin = ROLE_ADMIN in session['user'].roles

    # Non-admins only see the accounts they were explicitly granted
    if not is_admin:
        accounts = [acct for acct in accounts if acct.account_id in session['accounts']]

    if not accounts:
        return self.make_response({
            'message': 'Unable to find any accounts',
            'accounts': None
        }, HTTP.NOT_FOUND)

    return self.make_response({
        'message': None,
        'accounts': [acct.to_json(is_admin=is_admin) for acct in accounts]
    })
python
{ "resource": "" }
q35916
AccountDetail.get
train
def get(self, accountId):
    """Fetch a single account"""
    account = BaseAccount.get(accountId)
    if not account:
        return self.make_response({
            'message': 'Unable to find account',
            'account': None
        }, HTTP.NOT_FOUND)

    return self.make_response({
        'message': None,
        'account': account.to_json(is_admin=True)
    })
python
{ "resource": "" }
q35917
AccountDetail.put
train
def put(self, accountId):
    """Update an account

    Validates the request payload (type, contacts, name, required class
    properties), then applies the changes and writes an audit log entry.

    Raises:
        InquisitorError: invalid account type, missing required property,
            or attempt to change an account's type
        Exception: missing account name or contacts
    """
    self.reqparse.add_argument('accountName', type=str, required=True)
    self.reqparse.add_argument('accountType', type=str, required=True)
    self.reqparse.add_argument('contacts', type=dict, required=True, action='append')
    self.reqparse.add_argument('enabled', type=int, required=True, choices=(0, 1))
    self.reqparse.add_argument('requiredRoles', type=str, action='append', default=())
    self.reqparse.add_argument('properties', type=dict, required=True)
    args = self.reqparse.parse_args()

    # Resolve the plugin class implementing this account type
    account_class = get_plugin_by_name(PLUGIN_NAMESPACES['accounts'], args['accountType'])
    if not account_class:
        raise InquisitorError('Invalid account type: {}'.format(args['accountType']))

    validate_contacts(args['contacts'])

    if not args['accountName'].strip():
        raise Exception('You must provide an account name')

    if not args['contacts']:
        raise Exception('You must provide at least one contact')

    # Properties arrive camelCased from the API; class_properties keys are
    # snake_case
    class_properties = {from_camelcase(key): value for key, value in args['properties'].items()}
    for prop in account_class.class_properties:
        if prop['key'] not in class_properties:
            # NOTE(review): formats the whole property dict into the message,
            # not just its key — confirm whether prop['key'] was intended
            raise InquisitorError('Missing required property {}'.format(prop))

    account = account_class.get(accountId)
    if account.account_type != args['accountType']:
        raise InquisitorError('You cannot change the type of an account')

    account.account_name = args['accountName']
    account.contacts = args['contacts']
    account.enabled = args['enabled']
    account.required_roles = args['requiredRoles']
    account.update(**args['properties'])
    account.save()

    auditlog(event='account.update', actor=session['user'].username, data=args)

    return self.make_response({'message': 'Object updated', 'account': account.to_json(is_admin=True)})
python
{ "resource": "" }
q35918
AccountDetail.delete
train
def delete(self, accountId):
    """Delete an account"""
    account = BaseAccount.get(accountId)
    if not account:
        raise Exception('No such account found')

    account.delete()
    auditlog(event='account.delete', actor=session['user'].username, data={'accountId': accountId})
    return self.make_response('Account deleted')
python
{ "resource": "" }
q35919
AWSAccountCollector.__get_distribution_tags
train
def __get_distribution_tags(self, client, arn):
    """Return the tags of a CloudFront distribution as a dict.

    Args:
        client (botocore.client.CloudFront): Boto3 CloudFront client object
        arn (str): ARN of the distribution to get tags for

    Returns:
        `dict`
    """
    items = client.list_tags_for_resource(Resource=arn)['Tags']['Items']
    return {entry['Key']: entry['Value'] for entry in items}
python
{ "resource": "" }
q35920
AWSAccountCollector.__fetch_route53_zones
train
def __fetch_route53_zones(self):
    """Return a dict of all DNS zones hosted in Route53, keyed by resource id

    Returns:
        :obj:`dict` of `str`: `dict`
    """
    done = False
    marker = None
    zones = {}
    route53 = self.session.client('route53')

    try:
        # Page through list_hosted_zones using the truncation marker
        while not done:
            if marker:
                response = route53.list_hosted_zones(Marker=marker)
            else:
                response = route53.list_hosted_zones()

            if response['IsTruncated']:
                marker = response['NextMarker']
            else:
                done = True

            for zone_data in response['HostedZones']:
                zones[get_resource_id('r53z', zone_data['Id'])] = {
                    'name': zone_data['Name'].rstrip('.'),
                    'source': 'AWS/{}'.format(self.account),
                    # Comment is optional in the zone config
                    'comment': zone_data['Config']['Comment'] if 'Comment' in zone_data['Config'] else None,
                    'zone_id': zone_data['Id'],
                    'private_zone': zone_data['Config']['PrivateZone'],
                    'tags': self.__fetch_route53_zone_tags(zone_data['Id'])
                }

        return zones
    finally:
        # Explicitly release the client reference when done
        del route53
python
{ "resource": "" }
q35921
AWSAccountCollector.__fetch_route53_zone_records
train
def __fetch_route53_zone_records(self, zone_id):
    """Return all resource records for a specific Route53 zone

    Args:
        zone_id (`str`): Name / ID of the hosted zone

    Returns:
        `list` of `dict` record entries
    """
    route53 = self.session.client('route53')

    done = False
    nextName = nextType = None
    records = {}

    try:
        # Page through list_resource_record_sets using the record-name/type
        # continuation markers
        while not done:
            if nextName and nextType:
                response = route53.list_resource_record_sets(
                    HostedZoneId=zone_id,
                    StartRecordName=nextName,
                    StartRecordType=nextType
                )
            else:
                response = route53.list_resource_record_sets(HostedZoneId=zone_id)

            if response['IsTruncated']:
                nextName = response['NextRecordName']
                nextType = response['NextRecordType']
            else:
                done = True

            if 'ResourceRecordSets' in response:
                for record in response['ResourceRecordSets']:
                    # Cannot make this a list, due to a race-condition in the AWS api that might return the same
                    # record more than once, so we use a dict instead to ensure that if we get duplicate records
                    # we simply just overwrite the one already there with the same info.
                    record_id = self._get_resource_hash(zone_id, record)
                    if 'AliasTarget' in record:
                        # Alias records have no TTL/ResourceRecords; normalize
                        # them to a synthetic 'ALIAS' type with TTL 0
                        value = record['AliasTarget']['DNSName']
                        records[record_id] = {
                            'id': record_id,
                            'name': record['Name'].rstrip('.'),
                            'type': 'ALIAS',
                            'ttl': 0,
                            'value': [value]
                        }
                    else:
                        value = [y['Value'] for y in record['ResourceRecords']]
                        records[record_id] = {
                            'id': record_id,
                            'name': record['Name'].rstrip('.'),
                            'type': record['Type'],
                            'ttl': record['TTL'],
                            'value': value
                        }

        return list(records.values())
    finally:
        del route53
python
{ "resource": "" }
q35922
AWSAccountCollector.__fetch_route53_zone_tags
train
def __fetch_route53_zone_tags(self, zone_id):
    """Return the tags of a hosted zone as a plain dict.

    Args:
        zone_id (`str`): ID of the hosted zone

    Returns:
        :obj:`dict` of `str`: `str`
    """
    route53 = self.session.client('route53')

    try:
        # Route53 expects the bare zone id, not the '/hostedzone/…' path
        response = route53.list_tags_for_resource(
            ResourceType='hostedzone',
            ResourceId=zone_id.split('/')[-1]
        )
        return {entry['Key']: entry['Value'] for entry in response['ResourceTagSet']['Tags']}
    finally:
        del route53
python
{ "resource": "" }
q35923
AWSAccountCollector._get_resource_hash
train
def _get_resource_hash(zone_name, record):
    """Return a stable unique resource id derived from a DNS record.

    The id folds in every field that can distinguish two records with the
    same name/type (weight, region, geolocation, failover, …).

    Args:
        zone_name (`str`): The name of the DNS Zone the record belongs to
        record (`dict`): A record dict to generate the hash from

    Returns:
        `str`
    """
    # defaultdict(int) makes absent optional fields hash consistently as 0
    record_data = defaultdict(int, record)
    # Flatten the GeoLocation sub-dict into a deterministic string
    # (isinstance, not type comparison, per standard Python practice)
    if isinstance(record_data['GeoLocation'], dict):
        record_data['GeoLocation'] = ":".join(["{}={}".format(k, v) for k, v in record_data['GeoLocation'].items()])

    args = [
        zone_name,
        record_data['Name'],
        record_data['Type'],
        record_data['Weight'],
        record_data['Region'],
        record_data['GeoLocation'],
        record_data['Failover'],
        record_data['HealthCheckId'],
        record_data['TrafficPolicyInstanceId']
    ]

    return get_resource_id('r53r', args)
python
{ "resource": "" }
q35924
AWSAccountCollector._get_bucket_statistics
train
def _get_bucket_statistics(self, bucket_name, bucket_region, storage_type, statistic, days):
    """ Returns datapoints from cloudwatch for bucket statistics.

    Args:
        bucket_name `(str)`: The name of the bucket
        bucket_region `(str)`: Region the bucket (and CloudWatch metric) lives in
        storage_type `(str)`: S3 StorageType dimension to query
        statistic `(str)`: The statistic you want to fetch from
        days `(int)`: Sample period for the statistic

    Returns:
        The first datapoint's average, the string 'NO_DATA' when no
        datapoints exist, or None implicitly when the CloudWatch call fails
        (the error is logged)
    """
    cw = self.session.client('cloudwatch', region_name=bucket_region)
    # gather cw stats
    try:
        obj_stats = cw.get_metric_statistics(
            Namespace='AWS/S3',
            MetricName=statistic,
            Dimensions=[
                {
                    'Name': 'StorageType',
                    'Value': storage_type
                },
                {
                    'Name': 'BucketName',
                    'Value': bucket_name
                }
            ],
            # One-day period over a `days`-long window ending now
            Period=86400,
            StartTime=datetime.utcnow() - timedelta(days=days),
            EndTime=datetime.utcnow(),
            Statistics=[
                'Average'
            ]
        )
        stat_value = obj_stats['Datapoints'][0]['Average'] if obj_stats['Datapoints'] else 'NO_DATA'
        return stat_value
    except Exception as e:
        # Best-effort: log and fall through (implicit None return)
        self.log.error(
            'Could not get bucket statistic for account {} / bucket {} / {}'.format(self.account.account_name,
                                                                                    bucket_name, e))
    finally:
        del cw
python
{ "resource": "" }
q35925
ResourceType.get
train
def get(cls, resource_type):
    """Look up (or lazily create) the ResourceType for `resource_type`.

    Accepts a type name, a numeric id, or an existing ResourceType instance.
    When no matching row exists, a new one is created and returned.

    Args:
        resource_type (str): Resource type name

    Returns:
        :obj:`ResourceType`
    """
    if isinstance(resource_type, cls):
        return resource_type

    model = getattr(db, cls.__name__)
    if isinstance(resource_type, str):
        obj = model.find_one(cls.resource_type == resource_type)
    elif isinstance(resource_type, int):
        obj = model.find_one(cls.resource_type_id == resource_type)
    else:
        obj = None

    if not obj:
        # Unknown type: persist a new row and return it fully refreshed
        obj = cls()
        obj.resource_type = resource_type
        db.session.add(obj)
        db.session.commit()
        db.session.refresh(obj)

    return obj
python
{ "resource": "" }
q35926
Account.get
train
def get(account_id, account_type_id=None):
    """Return account by ID and type

    A string `account_id` is treated as an account name; any other value is
    treated as the numeric account id.

    Args:
        account_id (`int`, `str`): Unique Account identifier
        account_type_id (str): Type of account to get

    Returns:
        :obj:`Account`: Returns an Account object if found, else None
    """
    # isinstance, not `type(x) == str`, is the idiomatic type check and also
    # accepts str subclasses
    if isinstance(account_id, str):
        args = {'account_name': account_id}
    else:
        args = {'account_id': account_id}

    if account_type_id:
        args['account_type_id'] = account_type_id

    return db.Account.find_one(**args)
python
{ "resource": "" }
q35927
Account.user_has_access
train
def user_has_access(self, user):
    """Check if a user has access to view information for the account

    Args:
        user (:obj:`User`): User object to check

    Returns:
        True if user has access to the account, else false
    """
    # Admins always have access
    if ROLE_ADMIN in user.roles:
        return True

    # Non-admin users should only see active accounts
    if not self.enabled:
        return False

    # An account without required roles is visible to everyone
    if not self.required_roles:
        return True

    return any(role in user.roles for role in self.required_roles)
python
{ "resource": "" }
q35928
BaseScheduler.load_plugins
train
def load_plugins(self):
    """Refresh the list of available collectors and auditors

    Populates `self.collectors` (grouped by collector type) and
    `self.auditors` from the registered entry points, skipping any plugin
    whose `enabled()` returns False. Raises when nothing at all loads.

    Returns:
        `None`
    """
    # Collectors are grouped by their declared type
    for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.collectors']['plugins']:
        cls = entry_point.load()
        if cls.enabled():
            self.log.debug('Collector loaded: {} in module {}'.format(cls.__name__, cls.__module__))
            self.collectors.setdefault(cls.type, []).append(Worker(
                cls.name,
                cls.interval,
                {
                    'name': entry_point.name,
                    'module_name': entry_point.module_name,
                    'attrs': entry_point.attrs
                }
            ))
        else:
            self.log.debug('Collector disabled: {} in module {}'.format(cls.__name__, cls.__module__))

    # Auditors are a flat list
    for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.auditors']['plugins']:
        cls = entry_point.load()
        if cls.enabled():
            self.log.debug('Auditor loaded: {} in module {}'.format(cls.__name__, cls.__module__))
            self.auditors.append(Worker(
                cls.name,
                cls.interval,
                {
                    'name': entry_point.name,
                    'module_name': entry_point.module_name,
                    'attrs': entry_point.attrs
                }
            ))
        else:
            self.log.debug('Auditor disabled: {} in module {}'.format(cls.__name__, cls.__module__))

    collector_count = sum(len(x) for x in self.collectors.values())
    auditor_count = len(self.auditors)
    # A scheduler with nothing to run is a misconfiguration — fail loudly
    if collector_count + auditor_count == 0:
        raise Exception('No auditors or collectors loaded, aborting scheduler')

    self.log.info('Scheduler loaded {} collectors and {} auditors'.format(collector_count, auditor_count))
python
{ "resource": "" }
q35929
BaseSchedulerCommand.load_scheduler_plugins
train
def load_scheduler_plugins(self):
    """Refresh the list of available schedulers

    Populates `self.scheduler_plugins` (class name -> class) from the
    registered entry points; only loads once (no-op when already populated).

    Returns:
        `None` (the loaded classes are stored on `self.scheduler_plugins`)
    """
    if not self.scheduler_plugins:
        for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.schedulers']['plugins']:
            cls = entry_point.load()
            self.scheduler_plugins[cls.__name__] = cls

            # Only one scheduler (self.active_scheduler) is active; the rest
            # are registered but reported as disabled
            if cls.__name__ == self.active_scheduler:
                self.log.debug('Scheduler loaded: {} in module {}'.format(cls.__name__, cls.__module__))
            else:
                self.log.debug('Scheduler disabled: {} in module {}'.format(cls.__name__, cls.__module__))
python
{ "resource": "" }
q35930
Scheduler.run
train
def run(self, **kwargs):
    """Execute the scheduler command.

    With `--list`, print the registered scheduler modules and exit;
    otherwise instantiate the active scheduler and run it.

    Returns:
        `None`
    """
    if not super().run(**kwargs):
        return

    if kwargs['list']:
        self.log.info('--- List of Scheduler Modules ---')
        for name in list(self.scheduler_plugins.keys()):
            if name == self.active_scheduler:
                self.log.info('{} (active)'.format(name))
            else:
                self.log.info(name)
        self.log.info('--- End list of Scheduler Modules ---')
        return

    self.scheduler_plugins[self.active_scheduler]().execute_scheduler()
python
{ "resource": "" }
q35931
Worker.run
train
def run(self, **kwargs):
    """Execute the worker command.

    In daemon mode, spawns the configured number of polling threads; in
    non-daemon mode, runs the worker exactly once.

    Returns:
        `None`
    """
    super().run(**kwargs)
    scheduler = self.scheduler_plugins[self.active_scheduler]()

    if kwargs['no_daemon']:
        self.log.info('Starting {} worker for a single non-daemon execution'.format(
            scheduler.name
        ))
        scheduler.execute_worker()
    else:
        self.log.info('Starting {} worker with {} threads checking for new messages every {} seconds'.format(
            scheduler.name, kwargs['threads'], kwargs['delay']
        ))
        for _ in range(kwargs['threads']):
            worker = threading.Thread(
                target=self.execute_worker_thread,
                args=(scheduler.execute_worker, kwargs['delay'])
            )
            worker.start()
python
{ "resource": "" }
q35932
TemplateList.post
train
def post(self):
    """Create a new template"""
    self.reqparse.add_argument('templateName', type=str, required=True)
    self.reqparse.add_argument('template', type=str, required=True)
    args = self.reqparse.parse_args()

    # Creation must not clobber an existing template of the same name
    if db.Template.find_one(template_name=args['templateName']):
        return self.make_response('Template already exists, update the existing template instead', HTTP.CONFLICT)

    new_template = Template()
    new_template.template_name = args['templateName']
    new_template.template = args['template']

    db.session.add(new_template)
    db.session.commit()
    auditlog(event='template.create', actor=session['user'].username, data=args)

    return self.make_response('Template {} has been created'.format(new_template.template_name), HTTP.CREATED)
python
{ "resource": "" }
q35933
TemplateList.put
train
def put(self):
    """Re-import all templates, overwriting any local changes made

    Returns:
        API response: success message, or SERVER_ERROR on failure
    """
    # `except Exception` rather than a bare `except:` so that system-exiting
    # exceptions (KeyboardInterrupt, SystemExit) are not swallowed
    try:
        _import_templates(force=True)
        return self.make_response('Imported templates')
    except Exception:
        self.log.exception('Failed importing templates')
        return self.make_response('Failed importing templates', HTTP.SERVER_ERROR)
python
{ "resource": "" }
q35934
TemplateGet.get
train
def get(self, template_name):
    """Get a specific template"""
    template = db.Template.find_one(template_name=template_name)
    if template:
        return self.make_response({'template': template})

    return self.make_response('No such template found', HTTP.NOT_FOUND)
python
{ "resource": "" }
q35935
TemplateGet.put
train
def put(self, template_name):
    """Update a template"""
    self.reqparse.add_argument('template', type=str, required=True)
    args = self.reqparse.parse_args()

    template = db.Template.find_one(template_name=template_name)
    if not template:
        return self.make_response('No such template found', HTTP.NOT_FOUND)

    # Record the diff before overwriting, for the audit log
    template_changes = diff(template.template, args['template'])
    template.template = args['template']
    template.is_modified = True

    db.session.add(template)
    db.session.commit()
    auditlog(
        event='template.update',
        actor=session['user'].username,
        data={
            'template_name': template_name,
            'template_changes': template_changes
        }
    )

    return self.make_response('Template {} has been updated'.format(template_name))
python
{ "resource": "" }
q35936
TemplateGet.delete
train
def delete(self, template_name):
    """Delete a template"""
    template = db.Template.find_one(template_name=template_name)
    if not template:
        return self.make_response('No such template found', HTTP.NOT_FOUND)

    db.session.delete(template)
    db.session.commit()
    auditlog(event='template.delete', actor=session['user'].username, data={'template_name': template_name})

    return self.make_response({
        'message': 'Template has been deleted',
        'templateName': template_name
    })
python
{ "resource": "" }
q35937
process_action
train
def process_action(resource, action, action_issuer='unknown'):
    """Process an audit action for a resource, if possible

    Looks up the handler for (resource_type, action) in `action_mapper`,
    builds the right boto3 client, runs the handler, records an Enforcement
    row on success, and always writes an audit log entry.

    Args:
        resource (:obj:`Resource`): A resource object to perform the action on
        action (`str`): Type of action to perform (`kill` or `stop`)
        action_issuer (`str`): The issuer of the action

    Returns:
        `ActionStatus`
    """
    from cinq_collector_aws import AWSRegionCollector

    func_action = action_mapper[resource.resource_type][action]
    extra_info = {}
    action_status = ActionStatus.UNKNOWN

    if func_action:
        if action_mapper[resource.resource_type]['service_name'] == 'lambda':
            # Lambda actions run through the dedicated collector account/region
            client = get_aws_session(
                AWSAccount.get(dbconfig.get('rds_collector_account', AWSRegionCollector.ns, ''))
            ).client(
                'lambda',
                dbconfig.get('rds_collector_region', AWSRegionCollector.ns, '')
            )
        else:
            # NOTE(review): constructs AWSAccount(resource.account) directly,
            # unlike the AWSAccount.get(...) lookup above — confirm intended
            client = get_aws_session(AWSAccount(resource.account)).client(
                action_mapper[resource.resource_type]['service_name'],
                region_name=resource.location
            )
        try:
            logger.info(f'Trying to {action} resource {resource.id} for account {resource.account.account_name} / region {resource.location}')
            # Handlers return (ActionStatus, extra_info dict)
            action_status, extra_info = func_action(client, resource)
            Enforcement.create(resource.account.account_id, resource.id, action, datetime.now(), extra_info)
        except Exception as ex:
            action_status = ActionStatus.FAILED
            logger.exception('Failed to apply action {} to {}: {}'.format(action, resource.id, ex))
        finally:
            # The audit log entry is written whether the action succeeded or not
            auditlog(
                event='{}.{}.{}.{}'.format(action_issuer, resource.resource_type, action, action_status),
                actor=action_issuer,
                data={
                    'resource_id': resource.id,
                    'account_name': resource.account.account_name,
                    'location': resource.location,
                    'info': extra_info
                }
            )
            return action_status
    else:
        logger.error('Failed to apply action {} to {}: Not supported'.format(action, resource.id))
        return ActionStatus.FAILED
python
{ "resource": "" }
q35938
stop_ec2_instance
train
def stop_ec2_instance(client, resource):
    """Stop an EC2 Instance

    This function will attempt to stop a running instance.

    Args:
        client (:obj:`boto3.session.Session.client`): A boto3 client object
        resource (:obj:`Resource`): The resource object to stop

    Returns:
        `(ActionStatus, dict)`: status plus extra info about the instance
    """
    instance = EC2Instance.get(resource.id)
    # Nothing to do for instances that are already stopped or gone
    if instance.state in ('stopped', 'terminated'):
        return ActionStatus.IGNORED, {}

    client.stop_instances(InstanceIds=[resource.id])
    return ActionStatus.SUCCEED, {'instance_type': resource.instance_type, 'public_ip': resource.public_ip}
python
{ "resource": "" }
q35939
terminate_ec2_instance
train
def terminate_ec2_instance(client, resource): """Terminate an EC2 Instance This function will terminate an EC2 Instance. Args: client (:obj:`boto3.session.Session.client`): A boto3 client object resource (:obj:`Resource`): The resource object to terminate Returns: `ActionStatus` """ # TODO: Implement disabling of TerminationProtection instance = EC2Instance.get(resource.id) if instance.state == 'terminated': return ActionStatus.IGNORED, {} client.terminate_instances(InstanceIds=[resource.id]) return ActionStatus.SUCCEED, {'instance_type': resource.instance_type, 'public_ip': resource.public_ip}
python
{ "resource": "" }
q35940
stop_s3_bucket
train
def stop_s3_bucket(client, resource):
    """ Stop an S3 bucket from being used

    This function will try to
    1. Add lifecycle policy to make sure objects inside it will expire
    2. Block certain access to the bucket

    Args:
        client (:obj:`boto3.session.Session.client`): A boto3 S3 client object
        resource (:obj:`Resource`): The bucket resource to disable

    Returns:
        `tuple` of (`ActionStatus`, `dict`): status plus the bucket's metrics
    """
    # Deny policy blocking future object reads/writes on the bucket
    bucket_policy = {
        'Version': '2012-10-17',
        'Id': 'PutObjPolicy',
        'Statement': [
            {
                'Sid': 'cinqDenyObjectUploads',
                'Effect': 'Deny',
                'Principal': '*',
                'Action': ['s3:PutObject', 's3:GetObject'],
                'Resource': 'arn:aws:s3:::{}/*'.format(resource.id)
            }
        ]
    }
    # Lifecycle rule expiring current and non-current object versions; the
    # expiration date is midnight UTC plus the configured number of days
    s3_removal_lifecycle_policy = {
        'Rules': [
            {'Status': 'Enabled',
             'NoncurrentVersionExpiration': {u'NoncurrentDays': 1},
             'Filter': {u'Prefix': ''},
             'Expiration': {
                 u'Date': datetime.utcnow().replace(
                     hour=0, minute=0, second=0, microsecond=0
                 ) + timedelta(days=dbconfig.get('lifecycle_expiration_days', NS_AUDITOR_REQUIRED_TAGS, 3))
             },
             'AbortIncompleteMultipartUpload': {u'DaysAfterInitiation': 3},
             'ID': 'cloudInquisitor'}
        ]
    }
    policy_exists = s3_removal_policy_exists(client, resource)
    lifecycle_policy_exists = s3_removal_lifecycle_policy_exists(client, resource)
    # Nothing to do when both policies are already in place
    if policy_exists and lifecycle_policy_exists:
        return ActionStatus.IGNORED, {}
    if not policy_exists:
        client.put_bucket_policy(Bucket=resource.id, Policy=json.dumps(bucket_policy))
        logger.info('Added policy to prevent putObject in s3 bucket {} in {}'.format(
            resource.id, resource.account.account_name
        ))
    if not lifecycle_policy_exists:
        # Grab S3 Metrics before lifecycle policies start removing objects
        client.put_bucket_lifecycle_configuration(
            Bucket=resource.id,
            LifecycleConfiguration=s3_removal_lifecycle_policy
        )
        logger.info('Added policy to delete bucket contents in s3 bucket {} in {}'.format(
            resource.id, resource.account.account_name
        ))
    return ActionStatus.SUCCEED, resource.metrics()
python
{ "resource": "" }
q35941
delete_s3_bucket
train
def delete_s3_bucket(client, resource):
    """Delete an S3 bucket, if bucket deletion is enabled in configuration.

    Args:
        client (:obj:`boto3.session.Session.client`): A boto3 client object
        resource (:obj:`Resource`): The resource object to terminate

    Returns:
        `tuple` of (`ActionStatus`, `dict`)
    """
    # Bug fix: previously this returned None (implicitly) when deletion was
    # disabled, but callers unpack the result as (status, info) -- every
    # action handler must return a 2-tuple.
    if not dbconfig.get('enable_delete_s3_buckets', NS_AUDITOR_REQUIRED_TAGS, False):
        return ActionStatus.IGNORED, {}

    client.delete_bucket(Bucket=resource.id)
    return ActionStatus.SUCCEED, resource.metrics()
python
{ "resource": "" }
q35942
BaseResource.get
train
def get(cls, resource_id):
    """Fetch a wrapped resource object by its unique identifier.

    Args:
        resource_id (str): Unique resource ID to load from database

    Returns:
        Wrapped resource object if found, else None
    """
    record = Resource.get(resource_id)
    if not record:
        return None
    return cls(record)
python
{ "resource": "" }
q35943
BaseResource.create
train
def create(cls, resource_id, *, account_id, properties=None, tags=None, location=None, auto_add=True, auto_commit=False):
    """Creates a new Resource object with the properties and tags provided

    Args:
        resource_id (str): Unique identifier for the resource object
        account_id (int): Account ID which owns the resource
        properties (dict): Dictionary of properties for the resource object.
        tags (dict): Key / value dictionary of tags. Values must be `str` types
        location (str): Location of the resource, if applicable
        auto_add (bool): Automatically add the new resource to the DB session. Default: True
        auto_commit (bool): Automatically commit the change to the database. Default: False

    Raises:
        ResourceException: If a resource with the same ID already exists
        ValueError: If a tag value is not a string

    Returns:
        The newly created, wrapped resource object
    """
    if cls.get(resource_id):
        raise ResourceException('Resource {} already exists'.format(resource_id))

    res = Resource()
    res.resource_id = resource_id
    res.account_id = account_id
    res.location = location
    res.resource_type_id = ResourceType.get(cls.resource_type).resource_type_id

    if properties:
        for name, value in properties.items():
            prop = ResourceProperty()
            prop.resource_id = res.resource_id
            prop.name = name
            # Datetimes are stored in ISO-8601 form so they stay JSON
            # serializable (isinstance instead of exact-type comparison)
            prop.value = value.isoformat() if isinstance(value, datetime) else value
            res.properties.append(prop)
            db.session.add(prop)

    if tags:
        for key, value in tags.items():
            if not isinstance(value, str):
                raise ValueError('Invalid object type for tag value: {}'.format(key))

            tag = Tag()
            tag.resource_id = resource_id
            tag.key = key
            tag.value = value
            res.tags.append(tag)
            db.session.add(tag)

    if auto_add:
        db.session.add(res)

        if auto_commit:
            db.session.commit()

        return cls.get(res.resource_id)
    else:
        return cls(res)
python
{ "resource": "" }
q35944
BaseResource.get_all
train
def get_all(cls, account=None, location=None, include_disabled=False):
    """Returns all resources of this type for a given account and location.

    Attributes:
        account (:obj:`Account`): Account owning the resources
        location (`str`): Location of the resources to return (region)
        include_disabled (`bool`): Include resources from disabled accounts (default: False)

    Returns:
        `dict` mapping resource_id to wrapped resource object
    """
    qry = db.Resource.filter(
        Resource.resource_type_id == ResourceType.get(cls.resource_type).resource_type_id
    )

    if account:
        qry = qry.filter(Resource.account_id == account.account_id)

    if not include_disabled:
        # Join against Account so disabled accounts can be filtered out
        qry = qry.join(Account, Resource.account_id == Account.account_id).filter(Account.enabled == 1)

    if location:
        qry = qry.filter(Resource.location == location)

    return {res.resource_id: cls(res) for res in qry.all()}
python
{ "resource": "" }
q35945
BaseResource.search
train
def search(cls, *, limit=100, page=1, accounts=None, locations=None, resources=None,
           properties=None, include_disabled=False, return_query=False):
    """Search for resources based on the provided filters. If `return_query` a sub-class of
    `sqlalchemy.orm.Query` is returned instead of the resource list.

    Args:
        limit (`int`): Number of results to return. Default: 100
        page (`int`): Pagination offset for results. Default: 1
        accounts (`list` of `int`): A list of account id's to limit the returned resources to
        locations (`list` of `str`): A list of locations as strings to limit the search for
        resources (`list` of `str`): A list of resource_ids
        properties (`dict`): A `dict` containing property name and value pairs. Values can be either a str or a
        list of strings, in which case a boolean OR search is performed on the values
        include_disabled (`bool`): Include resources from disabled accounts. Default: False
        return_query (`bool`): Returns the query object prior to adding the limit and offset functions. Allows for
        sub-classes to amend the search feature with extra conditions. The calling function must handle pagination
        on its own

    Returns:
        `list` of `Resource`, `sqlalchemy.orm.Query`
    """
    qry = db.Resource.order_by(Resource.resource_id).filter(
        Resource.resource_type_id == ResourceType.get(cls.resource_type).resource_type_id
    )

    if not include_disabled:
        qry = qry.join(Account, Resource.account_id == Account.account_id).filter(Account.enabled == 1)

    # Restrict results to the accounts visible in the current user session
    if session:
        qry = qry.filter(Resource.account_id.in_(session['accounts']))

    if accounts:
        qry = qry.filter(Resource.account_id.in_([Account.get(acct).account_id for acct in accounts]))

    if locations:
        qry = qry.filter(Resource.location.in_(locations))

    if resources:
        qry = qry.filter(Resource.resource_id.in_(resources))

    if properties:
        # Each property filter joins a fresh alias of ResourceProperty so
        # multiple property conditions can be ANDed together
        for prop_name, value in properties.items():
            alias = aliased(ResourceProperty)
            qry = qry.join(alias, Resource.resource_id == alias.resource_id)
            if type(value) == list:
                # A list value means boolean OR across the listed values
                where_clause = []
                for item in value:
                    where_clause.append(alias.value == item)

                qry = qry.filter(
                    and_(
                        alias.name == prop_name,
                        or_(*where_clause)
                    ).self_group()
                )
            else:
                qry = qry.filter(
                    and_(
                        alias.name == prop_name,
                        alias.value == value
                    ).self_group()
                )

    if return_query:
        return qry

    total = qry.count()
    qry = qry.limit(limit)
    qry = qry.offset((page - 1) * limit if page > 1 else 0)

    return total, [cls(x) for x in qry.all()]
python
{ "resource": "" }
q35946
BaseResource.get_owner_emails
train
def get_owner_emails(self, partial_owner_match=True):
    """Return a list of email addresses associated with the instance, based on tags

    Scans the resource's tags for a key named `owner` (case-insensitive) and
    extracts email addresses from its value.

    Args:
        partial_owner_match (bool): If True (default), collect every email
            address found anywhere in the tag value; otherwise the value must
            match the pattern from its start

    Returns:
        List of :obj:`NotificationContact` if any addresses were found, else None
    """
    for tag in self.tags:
        if tag.key.lower() == 'owner':
            rgx = re.compile(RGX_EMAIL_VALIDATION_PATTERN, re.I)
            if partial_owner_match:
                match = rgx.findall(tag.value)
                if match:
                    return [NotificationContact('email', email) for email in match]
            else:
                # NOTE(review): relies on the pattern's capture groups to
                # yield the address -- confirm the pattern defines groups
                match = rgx.match(tag.value)
                if match:
                    return [NotificationContact('email', email) for email in match.groups()]
    return None
python
{ "resource": "" }
q35947
BaseResource.get_property
train
def get_property(self, name):
    """Look up a named property on the wrapped resource.

    Args:
        name (str): Name of the property to return

    Returns:
        `ResourceProperty`

    Raises:
        AttributeError: If no property with that name exists
    """
    found = next((p for p in self.resource.properties if p.name == name), None)
    if found is None:
        raise AttributeError(name)
    return found
python
{ "resource": "" }
q35948
BaseResource.set_property
train
def set_property(self, name, value, update_session=True):
    """Create or set the value of a property. Returns `True` if the property was created or updated, or `False`
    if there were no changes to the value of the property.

    Args:
        name (str): Name of the property to create or update
        value (any): Value of the property. This can be any type of JSON serializable data
        update_session (bool): Automatically add the change to the SQLAlchemy session. Default: True

    Returns:
        `bool`
    """
    # Datetimes are stored in ISO-8601 form so they stay JSON serializable
    # (isinstance instead of exact-type compare; dead `else: value = value`
    # branch removed)
    if isinstance(value, datetime):
        value = value.isoformat()

    try:
        prop = self.get_property(name)
        if prop.value == value:
            return False

        prop.value = value
    except AttributeError:
        # Property does not exist yet -- create it
        prop = ResourceProperty()
        prop.resource_id = self.id
        prop.name = name
        prop.value = value

    if update_session:
        db.session.add(prop)

    return True
python
{ "resource": "" }
q35949
BaseResource.get_tag
train
def get_tag(self, key, *, case_sensitive=True):
    """Return the tag matching `key`, if present.

    Args:
        key (str): Name/key of the tag to locate
        case_sensitive (bool): Should tag keys be treated case-sensitive (default: true)

    Returns:
        `Tag`, `None`
    """
    if case_sensitive:
        for tag in self.resource.tags:
            if tag.key == key:
                return tag
    else:
        wanted = key.lower()
        for tag in self.resource.tags:
            if tag.key.lower() == wanted:
                return tag

    return None
python
{ "resource": "" }
q35950
BaseResource.set_tag
train
def set_tag(self, key, value, update_session=True):
    """Create or update the tag `key` with `value`.

    Args:
        key (str): Key of the tag
        value (str): Value of the tag
        update_session (bool): Automatically add the change to the SQLAlchemy session. Default: True

    Returns:
        `bool`: True if the tag was created or changed, False if unchanged
    """
    by_key = {t.key: t for t in self.tags}
    tag = by_key.get(key)

    if tag is not None:
        if tag.value == value:
            return False
        tag.value = value
    else:
        tag = Tag()
        tag.resource_id = self.id
        tag.key = key
        tag.value = value
        self.tags.append(tag)

    if update_session:
        db.session.add(tag)

    return True
python
{ "resource": "" }
q35951
BaseResource.delete_tag
train
def delete_tag(self, key, update_session=True):
    """Remove a tag from a resource based on its key.

    Args:
        key (str): Key of the tag to delete
        update_session (bool): Automatically add the change to the SQLAlchemy session. Default: True

    Returns:
        `bool`: True if the tag was removed, False if it did not exist
    """
    by_key = {t.key: t for t in self.tags}
    doomed = by_key.get(key)
    if doomed is None:
        return False

    if update_session:
        db.session.delete(doomed)

    self.tags.remove(doomed)
    return True
python
{ "resource": "" }
q35952
BaseResource.save
train
def save(self, *, auto_commit=False):
    """Save the resource to the database

    Args:
        auto_commit (bool): Automatically commit the transaction. Default: `False`

    Returns:
        `None`
    """
    try:
        db.session.add(self.resource)
        if auto_commit:
            db.session.commit()
    except SQLAlchemyError as ex:
        # Failures are logged, not raised; roll back so the session stays usable
        self.log.exception('Failed updating resource: {}'.format(ex))
        db.session.rollback()
python
{ "resource": "" }
q35953
BaseResource.delete
train
def delete(self, *, auto_commit=False):
    """Removes a resource from the database

    Args:
        auto_commit (bool): Automatically commit the transaction. Default: `False`

    Returns:
        `None`
    """
    try:
        db.session.delete(self.resource)
        if auto_commit:
            db.session.commit()
    except SQLAlchemyError:
        # Failures are logged, not raised; roll back so the session stays usable
        self.log.exception('Failed deleting resource: {}'.format(self.id))
        db.session.rollback()
python
{ "resource": "" }
q35954
BaseResource.to_json
train
def to_json(self):
    """Serialize the resource, including all its properties and tags, to a `dict`.

    Returns:
        `dict`
    """
    props = {}
    for prop in self.resource.properties:
        props[to_camelcase(prop.name)] = prop.value

    tag_list = [{'key': tag.key, 'value': tag.value} for tag in self.resource.tags]

    return {
        'resourceType': self.resource.resource_type_id,
        'resourceId': self.id,
        'accountId': self.resource.account_id,
        'account': self.account,
        'location': self.resource.location,
        'properties': props,
        'tags': tag_list
    }
python
{ "resource": "" }
q35955
EC2Instance.volumes
train
def volumes(self):
    """Returns a list of the volumes attached to the instance

    Matches EBS volume resources whose `attachments` JSON property contains
    this instance's ID.

    Returns:
        `list` of `EBSVolume`
    """
    return [
        EBSVolume(res) for res in db.Resource.join(
            ResourceProperty, Resource.resource_id == ResourceProperty.resource_id
        ).filter(
            Resource.resource_type_id == ResourceType.get('aws_ebs_volume').resource_type_id,
            ResourceProperty.name == 'attachments',
            # JSON_CONTAINS needs a valid JSON document, hence JSON_QUOTE
            func.JSON_CONTAINS(ResourceProperty.value, func.JSON_QUOTE(self.id))
        ).all()
    ]
python
{ "resource": "" }
q35956
EC2Instance.get_name_or_instance_id
train
def get_name_or_instance_id(self, with_id=False):
    """Return the instance's Name tag value if set, otherwise its instance ID.

    Args:
        with_id (bool): Include the instance ID even if the name is found (default: False)

    Returns:
        Name and/or instance ID of the instance object
    """
    name_tag = self.get_tag('Name', case_sensitive=False)
    has_name = bool(name_tag) and name_tag.value.strip() != ''

    if not has_name:
        return self.id

    if with_id:
        return '{0} ({1})'.format(name_tag.value, self.id)
    return name_tag.value
python
{ "resource": "" }
q35957
EC2Instance.search_by_age
train
def search_by_age(cls, *, limit=100, page=1, accounts=None, locations=None, age=720,
                  properties=None, include_disabled=False):
    """Search for resources based on the provided filters

    Args:
        limit (`int`): Number of results to return. Default: 100
        page (`int`): Pagination offset for results. Default: 1
        accounts (`list` of `int`): A list of account id's to limit the returned resources to
        locations (`list` of `str`): A list of locations as strings to limit the search for
        age (`int`): Age of instances older than `age` days to return
        properties (`dict`): A `dict` containing property name and value pairs. Values can be either a str or a
        list of strings, in which case a boolean OR search is performed on the values
        include_disabled (`bool`): Include resources from disabled accounts. Default: False

    Returns:
        `tuple` of (`int`, `list` of `Resource`): total match count plus the requested page
    """
    # Reuse the generic search to build the base query, then add the age filter
    qry = cls.search(
        limit=limit,
        page=page,
        accounts=accounts,
        locations=locations,
        properties=properties,
        include_disabled=include_disabled,
        return_query=True
    )
    age_alias = aliased(ResourceProperty)
    qry = (
        qry.join(age_alias, Resource.resource_id == age_alias.resource_id)
        .filter(
            age_alias.name == 'launch_date',
            # launch_date is stored as a quoted JSON string; unquote and cast
            # to DATETIME before comparing
            cast(func.JSON_UNQUOTE(age_alias.value), DATETIME) < datetime.now() - timedelta(days=age)
        )
    )
    total = qry.count()
    qry = qry.limit(limit)
    qry = qry.offset((page - 1) * limit if page > 1 else 0)

    return total, [cls(x) for x in qry.all()]
python
{ "resource": "" }
q35958
EC2Instance.to_json
train
def to_json(self, with_volumes=True):
    """Augment the base `to_json` output with information about attached volumes.

    Args:
        with_volumes (bool): Include volume information (default: True)

    Returns:
        `dict`
    """
    data = super().to_json()
    if not with_volumes:
        return data

    data['volumes'] = [
        {'volumeId': vol.id, 'volumeType': vol.volume_type, 'size': vol.size}
        for vol in self.volumes
    ]
    return data
python
{ "resource": "" }
q35959
DNSZone.delete_record
train
def delete_record(self, record):
    """Detach a DNSRecord from this zone and delete it.

    Args:
        record (:obj:`DNSRecord`): :obj:`DNSRecord` to remove

    Returns:
        `None`
    """
    # Remove the child relationship first, then delete the record itself
    self.children.remove(record.resource)
    record.delete()
python
{ "resource": "" }
q35960
ConfigList.get
train
def get(self):
    """List all configuration namespaces along with their config items."""
    ordered = db.ConfigNamespace.order_by(
        ConfigNamespace.sort_order, ConfigNamespace.name
    )
    payload = {
        'message': None,
        'namespaces': ordered.all()
    }
    return self.make_response(payload, HTTP.OK)
python
{ "resource": "" }
q35961
ConfigList.post
train
def post(self):
    """Create a new config item"""
    self.reqparse.add_argument('namespacePrefix', type=str, required=True)
    self.reqparse.add_argument('description', type=str, required=True)
    self.reqparse.add_argument('key', type=str, required=True)
    self.reqparse.add_argument('value', required=True)
    self.reqparse.add_argument('type', type=str, required=True)
    args = self.reqparse.parse_args()

    # Reject items targeting unknown namespaces or duplicate keys
    if not self.dbconfig.namespace_exists(args['namespacePrefix']):
        return self.make_response('The namespace doesnt exist', HTTP.NOT_FOUND)

    if self.dbconfig.key_exists(args['namespacePrefix'], args['key']):
        return self.make_response('This config item already exists', HTTP.CONFLICT)

    self.dbconfig.set(args['namespacePrefix'], args['key'], _to_dbc_class(args), description=args['description'])
    auditlog(event='configItem.create', actor=session['user'].username, data=args)

    return self.make_response('Config item added', HTTP.CREATED)
python
{ "resource": "" }
q35962
ConfigGet.get
train
def get(self, namespace, key):
    """Fetch a single configuration item by namespace and key."""
    item = self.dbconfig.get(key, namespace, as_object=True)
    payload = {
        'message': None,
        'config': item
    }
    return self.make_response(payload)
python
{ "resource": "" }
q35963
ConfigGet.put
train
def put(self, namespace, key):
    """Update a single configuration item"""
    args = request.json

    if not self.dbconfig.key_exists(namespace, key):
        return self.make_response('No such config entry: {}/{}'.format(namespace, key), HTTP.BAD_REQUEST)

    # For choice items, the number of enabled entries must fall within the
    # configured min/max bounds
    if (args['type'] == 'choice' and
            not args['value']['min_items'] <= len(args['value']['enabled']) <= args['value']['max_items']):
        return self.make_response(
            'You should select {} {}item{}'.format(
                args['value']['min_items'],
                '' if args['value']['min_items'] == args['value']['max_items'] else 'to {} '.format(
                    args['value']['max_items']
                ),
                's' if args['value']['max_items'] > 1 else ''
            ),
            HTTP.BAD_REQUEST
        )

    # Every enabled choice must be one of the available options
    if args['type'] == 'choice' and not set(args['value']['enabled']).issubset(args['value']['available']):
        return self.make_response('Invalid item', HTTP.BAD_REQUEST)

    item = db.ConfigItem.find_one(
        ConfigItem.namespace_prefix == namespace,
        ConfigItem.key == key
    )
    if item.value != args['value']:
        item.value = args['value']

    if item.type != args['type']:
        item.type = args['type']

    if item.description != args['description']:
        item.description = args['description']

    # Persist through dbconfig so the in-memory cache is refreshed as well
    self.dbconfig.set(namespace, key, _to_dbc_class(args))
    auditlog(event='configItem.update', actor=session['user'].username, data=args)

    return self.make_response('Config entry updated')
python
{ "resource": "" }
q35964
ConfigGet.delete
train
def delete(self, namespace, key):
    """Delete a specific configuration item."""
    if not self.dbconfig.key_exists(namespace, key):
        message = 'No such config entry exists: {}/{}'.format(namespace, key)
        return self.make_response(message, HTTP.BAD_REQUEST)

    self.dbconfig.delete(namespace, key)
    auditlog(
        event='configItem.delete',
        actor=session['user'].username,
        data={'namespace': namespace, 'key': key}
    )
    return self.make_response('Config entry deleted')
python
{ "resource": "" }
q35965
NamespaceGet.get
train
def get(self, namespacePrefix):
    """Fetch a single configuration namespace by its prefix."""
    ns = db.ConfigNamespace.find_one(ConfigNamespace.namespace_prefix == namespacePrefix)
    if ns:
        return self.make_response({
            'message': None,
            'namespace': ns
        })
    return self.make_response('No such namespace: {}'.format(namespacePrefix), HTTP.NOT_FOUND)
python
{ "resource": "" }
q35966
NamespaceGet.put
train
def put(self, namespacePrefix):
    """Update a specific configuration namespace"""
    self.reqparse.add_argument('name', type=str, required=True)
    self.reqparse.add_argument('sortOrder', type=int, required=True)
    args = self.reqparse.parse_args()

    ns = db.ConfigNamespace.find_one(ConfigNamespace.namespace_prefix == namespacePrefix)
    if not ns:
        return self.make_response('No such namespace: {}'.format(namespacePrefix), HTTP.NOT_FOUND)

    ns.name = args['name']
    ns.sort_order = args['sortOrder']
    db.session.add(ns)
    db.session.commit()
    # Refresh the cached configuration so the change is visible immediately
    self.dbconfig.reload_data()
    auditlog(event='configNamespace.update', actor=session['user'].username, data=args)

    return self.make_response('Namespace updated')
python
{ "resource": "" }
q35967
NamespaceGet.delete
train
def delete(self, namespacePrefix):
    """Delete a specific configuration namespace"""
    ns = db.ConfigNamespace.find_one(ConfigNamespace.namespace_prefix == namespacePrefix)
    if not ns:
        return self.make_response('No such namespace: {}'.format(namespacePrefix), HTTP.NOT_FOUND)

    db.session.delete(ns)
    db.session.commit()
    # Refresh the cached configuration so the removal is visible immediately
    self.dbconfig.reload_data()
    auditlog(
        event='configNamespace.delete',
        actor=session['user'].username,
        data={'namespacePrefix': namespacePrefix}
    )
    return self.make_response('Namespace deleted')
python
{ "resource": "" }
q35968
Namespaces.post
train
def post(self):
    """Create a new configuration namespace"""
    self.reqparse.add_argument('namespacePrefix', type=str, required=True)
    self.reqparse.add_argument('name', type=str, required=True)
    self.reqparse.add_argument('sortOrder', type=int, required=True)
    args = self.reqparse.parse_args()

    if self.dbconfig.namespace_exists(args['namespacePrefix']):
        return self.make_response('Namespace {} already exists'.format(args['namespacePrefix']), HTTP.CONFLICT)

    ns = ConfigNamespace()
    ns.namespace_prefix = args['namespacePrefix']
    ns.name = args['name']
    ns.sort_order = args['sortOrder']

    db.session.add(ns)
    db.session.commit()
    # Refresh the cached configuration so the new namespace is visible immediately
    self.dbconfig.reload_data()
    auditlog(event='configNamespace.create', actor=session['user'].username, data=args)

    return self.make_response('Namespace created', HTTP.CREATED)
python
{ "resource": "" }
q35969
_get_syslog_format
train
def _get_syslog_format(event_type):
    """Build a python logging format string for syslog messages of `event_type`.

    Renders the `syslog_format.json` template with the event type and the
    configured instance name, then normalizes the JSON.

    Args:
        event_type (str): Event type name

    Returns:
        `str`
    """
    template = get_template('syslog_format.json')
    rendered = template.render(
        event_type=event_type,
        host=dbconfig.get('instance_name', default='local')
    )

    # Round-trip through the json module to strip extraneous whitespace
    return json.dumps(json.loads(rendered))
python
{ "resource": "" }
q35970
setup_logging
train
def setup_logging():
    """Utility function to setup the logging systems based on the `logging.json` configuration file"""
    config = json.load(open(os.path.join(config_path, 'logging.json')))

    # If syslogging is disabled, set the pipeline handler to NullHandler
    if dbconfig.get('enable_syslog_forwarding', NS_LOG, False):
        try:
            config['formatters']['syslog'] = {
                'format': _get_syslog_format('cloud-inquisitor-logs')
            }
            config['handlers']['syslog'] = {
                'class': 'cloud_inquisitor.log.SyslogPipelineHandler',
                'formatter': 'syslog',
                'filters': ['standard']
            }
            config['loggers']['cloud_inquisitor']['handlers'].append('syslog')

            # Configure the audit log handler
            audit_handler = SyslogPipelineHandler()
            audit_handler.setFormatter(logging.Formatter(_get_syslog_format('cloud-inquisitor-audit')))
            audit_handler.setLevel(logging.DEBUG)
            _AUDIT_LOGGER.addHandler(audit_handler)
            # Keep audit events out of ancestor loggers' handlers
            _AUDIT_LOGGER.propagate = False
        except Exception as ex:
            # Syslog setup failure must not prevent basic logging from working
            print('An error occured while configuring the syslogger: {}'.format(ex))

    logging.config.dictConfig(config)
python
{ "resource": "" }
q35971
DBLogger.emit
train
def emit(self, record):
    """Persist a record into the database

    Args:
        record (`logging.Record`): The logging.Record object to store

    Returns:
        `None`
    """
    # Skip records less than min_level (getLevelName maps name -> number)
    if record.levelno < logging.getLevelName(self.min_level):
        return

    evt = LogEvent()
    evt.level = record.levelname
    evt.levelno = record.levelno
    evt.timestamp = datetime.fromtimestamp(record.created)
    # Bug fix: record.message only exists after a Formatter has run;
    # getMessage() interpolates the %-style arguments itself
    evt.message = record.getMessage()
    evt.filename = record.filename
    evt.lineno = record.lineno
    evt.module = record.module
    evt.funcname = record.funcName
    evt.pathname = record.pathname
    evt.process_id = record.process

    # Only log stacktraces if the level is ERROR or higher
    if record.levelno >= logging.ERROR:
        evt.stacktrace = traceback.format_exc()

    try:
        db.session.add(evt)
        db.session.commit()
    except Exception:
        db.session.rollback()
python
{ "resource": "" }
q35972
ConfigItem.get
train
def get(cls, ns, key):
    """Fetch an item by namespace and key

    Args:
        ns (str): Namespace prefix
        key (str): Item key

    Returns:
        :obj:`Configitem`: Returns config item object if found, else `None`
    """
    # Look up the db accessor matching this class' name, so subclasses query
    # their own table
    accessor = getattr(db, cls.__name__)
    return accessor.find_one(
        ConfigItem.namespace_prefix == ns,
        ConfigItem.key == key
    )
python
{ "resource": "" }
q35973
User.add_role
train
def add_role(user, roles):
    """Map roles for user in database

    Args:
        user (User): User to add roles to
        roles ([Role]): List of roles to add

    Returns:
        None
    """
    # Idiom fix: plain loop instead of a throwaway list comprehension used
    # purely for its side effects
    for role in roles:
        user_role = UserRole()
        user_role.user_id = user.user_id
        user_role.role_id = role.role_id
        db.session.add(user_role)
        # NOTE: committed per role, matching the previous behavior
        db.session.commit()
python
{ "resource": "" }
q35974
DBConfig.reload_data
train
def reload_data(self): """Reloads the configuration from the database Returns: `None` """ # We must force a rollback here to ensure that we are working on a fresh session, without any cache db.session.rollback() self.__data = {} try: for ns in db.ConfigNamespace.all(): self.__data[ns.namespace_prefix] = {x.key: x.value for x in ns.config_items} except SQLAlchemyError as ex: if str(ex).find('1146') != -1: pass
python
{ "resource": "" }
q35975
DBConfig.key_exists
train
def key_exists(self, namespace, key):
    """Checks a namespace for the existence of a specific key

    Args:
        namespace (str): Namespace to check in
        key (str): Name of the key to check for

    Returns:
        `True` if key exists in the namespace, else `False`
    """
    items = self.__data.get(namespace)
    return items is not None and key in items
python
{ "resource": "" }
q35976
DBConfig.delete
train
def delete(self, namespace, key):
    """Remove a configuration item from the database

    Args:
        namespace (`str`): Namespace of the config item
        key (`str`): Key to delete

    Raises:
        KeyError: If the namespace/key combination does not exist

    Returns:
        `None`
    """
    if self.key_exists(namespace, key):
        obj = db.ConfigItem.find_one(
            ConfigItem.namespace_prefix == namespace,
            ConfigItem.key == key
        )
        # Remove from the in-memory cache as well as the database
        del self.__data[namespace][key]
        db.session.delete(obj)
        db.session.commit()
    else:
        raise KeyError('{}/{}'.format(namespace, key))
python
{ "resource": "" }
q35977
RoleList.post
train
def post(self):
    """Create a new role"""
    self.reqparse.add_argument('name', type=str, required=True)
    self.reqparse.add_argument('color', type=str, required=True)
    args = self.reqparse.parse_args()

    role = Role()
    role.name = args['name']
    role.color = args['color']

    db.session.add(role)
    db.session.commit()
    auditlog(event='role.create', actor=session['user'].username, data=args)

    message = 'Role {} has been created'.format(role.role_id)
    return self.make_response(message, HTTP.CREATED)
python
{ "resource": "" }
q35978
RoleGet.get
train
def get(self, roleId):
    """Fetch information for a single role."""
    role = db.Role.find_one(Role.role_id == roleId)
    if role:
        return self.make_response({'role': role})
    return self.make_response('No such role found', HTTP.NOT_FOUND)
python
{ "resource": "" }
q35979
RoleGet.delete
train
def delete(self, roleId):
    """Delete a user role"""
    role = db.Role.find_one(Role.role_id == roleId)
    if not role:
        return self.make_response('No such role found', HTTP.NOT_FOUND)

    # The built-in roles are required by the system and must not be removed
    if role.name in ('User', 'Admin'):
        return self.make_response('Cannot delete the built-in roles', HTTP.BAD_REQUEST)

    db.session.delete(role)
    db.session.commit()
    auditlog(event='role.delete', actor=session['user'].username, data={'roleId': roleId})

    return self.make_response({
        'message': 'Role has been deleted',
        'roleId': roleId
    })
python
{ "resource": "" }
q35980
EmailNotifier.__send_ses_email
train
def __send_ses_email(self, recipients, subject, body_html, body_text):
    """Send an email using SES

    Args:
        recipients (`list` of `str`): List of recipient email addresses
        subject (str): Subject of the email
        body_html (str): HTML body of the email
        body_text (str): Text body of the email

    Returns:
        `None`
    """
    source_arn = dbconfig.get('source_arn', NS_EMAIL)
    return_arn = dbconfig.get('return_path_arn', NS_EMAIL)

    session = get_local_aws_session()
    ses = session.client('ses', region_name=dbconfig.get('ses_region', NS_EMAIL, 'us-west-2'))

    # Only include the body parts that were actually provided
    body = {}
    if body_html:
        body['Html'] = {
            'Data': body_html
        }
    if body_text:
        body['Text'] = {
            'Data': body_text
        }

    ses_options = {
        'Source': self.sender,
        'Destination': {
            'ToAddresses': recipients
        },
        'Message': {
            'Subject': {
                'Data': subject
            },
            'Body': body
        }
    }

    # Set SES options if needed
    if source_arn and return_arn:
        ses_options.update({
            'SourceArn': source_arn,
            'ReturnPathArn': return_arn
        })

    ses.send_email(**ses_options)
python
{ "resource": "" }
q35981
EmailNotifier.__send_smtp_email
train
def __send_smtp_email(self, recipients, subject, html_body, text_body):
    """Send an email using SMTP

    Args:
        recipients (`list` of `str`): List of recipient email addresses
        subject (str): Subject of the email
        html_body (str): HTML body of the email
        text_body (str): Text body of the email

    Returns:
        `None`
    """
    smtp = smtplib.SMTP(
        dbconfig.get('smtp_server', NS_EMAIL, 'localhost'),
        dbconfig.get('smtp_port', NS_EMAIL, 25)
    )
    source_arn = dbconfig.get('source_arn', NS_EMAIL)
    return_arn = dbconfig.get('return_path_arn', NS_EMAIL)
    from_arn = dbconfig.get('from_arn', NS_EMAIL)
    msg = MIMEMultipart('alternative')

    # Set SES options if needed (headers consumed when relaying through SES)
    if source_arn and from_arn and return_arn:
        msg['X-SES-SOURCE-ARN'] = source_arn
        msg['X-SES-FROM-ARN'] = from_arn
        msg['X-SES-RETURN-PATH-ARN'] = return_arn

    msg['Subject'] = subject
    msg['To'] = ','.join(recipients)
    msg['From'] = self.sender

    # Check body types to avoid exceptions
    if html_body:
        html_part = MIMEText(html_body, 'html')
        msg.attach(html_part)
    if text_body:
        text_part = MIMEText(text_body, 'plain')
        msg.attach(text_part)

    # TLS if needed
    if dbconfig.get('smtp_tls', NS_EMAIL, False):
        smtp.starttls()

    # Login if needed
    username = dbconfig.get('smtp_username', NS_EMAIL)
    password = dbconfig.get('smtp_password', NS_EMAIL)
    if username and password:
        smtp.login(username, password)

    smtp.sendmail(self.sender, recipients, msg.as_string())
    # NOTE(review): the connection is not closed if an earlier call raises --
    # consider a try/finally in a future change
    smtp.quit()
python
{ "resource": "" }
q35982
InquisitorJSONEncoder.default
train
def default(self, obj):
    """Default object encoder function

    Serializes datetimes to ISO-8601 strings, Enum members to their value,
    and any object exposing `to_json` to its output (tagged with `__type`
    for Model subclasses); everything else defers to JSONEncoder.

    Args:
        obj (:obj:`Any`): Object to be serialized

    Returns:
        JSON serializable representation of `obj`
    """
    if isinstance(obj, datetime):
        return obj.isoformat()

    # Bug fix: the previous check `issubclass(obj.__class__, Enum.__class__)`
    # compared against the Enum *metaclass* and never matched enum members;
    # isinstance is the correct membership test
    if isinstance(obj, Enum):
        return obj.value

    to_json = getattr(obj, 'to_json', None)
    if to_json:
        out = obj.to_json()
        if issubclass(obj.__class__, Model):
            out.update({'__type': obj.__class__.__name__})
        return out

    return JSONEncoder.default(self, obj)
python
{ "resource": "" }
q35983
is_truthy
train
def is_truthy(value, default=False):
    """Evaluate a value for truthiness

    >>> is_truthy('Yes')
    True
    >>> is_truthy('False')
    False
    >>> is_truthy(1)
    True

    Args:
        value (Any): Value to evaluate
        default (bool): Optional default value, if the input does not match the true or false values

    Returns:
        True if a truthy value is passed, else False
    """
    if value is None:
        return False

    if isinstance(value, bool):
        return value

    if isinstance(value, int):
        # NOTE: negative integers evaluate as falsy here, unlike Python's bool()
        return value > 0

    normalized = value.lower().strip()
    if normalized in ('', '0', 'false', 'n', 'none', 'no'):
        return False
    if normalized in ('1', 'true', 'y', 'yes', 'ok'):
        return True

    if default:
        return default
    raise ValueError('Invalid argument given to truthy: {0}'.format(value))
python
{ "resource": "" }
q35984
validate_email
train
def validate_email(email, partial_match=False):
    """Perform email address validation

    >>> validate_email('akjaer@riotgames.com')
    True
    >>> validate_email('Asbjorn Kjaer <akjaer@riotgames.com')
    False
    >>> validate_email('Asbjorn Kjaer <akjaer@riotgames.com', partial_match=True)
    True

    Args:
        email (str): Email address to match
        partial_match (bool): If False (default), the entire string must be a
            valid email address. If true, any valid email address in the
            string will trigger a valid response

    Returns:
        True if the value contains an email address, else False
    """
    pattern = re.compile(RGX_EMAIL_VALIDATION_PATTERN, re.I)
    # search() scans anywhere in the string, match() anchors at the start
    matcher = pattern.search if partial_match else pattern.match
    return matcher(email) is not None
python
{ "resource": "" }
q35985
get_template
train
def get_template(template):
    """Return a Jinja2 template by filename

    Looks the template up in the database by name, then compiles it in a
    fresh autoescaping environment with the project's custom filters.

    Args:
        template (str): Name of the template to return

    Returns:
        A Jinja2 Template object

    Raises:
        InquisitorError: If no template with that name exists
    """
    from cloud_inquisitor.database import db

    record = db.Template.find_one(template_name=template)
    if not record:
        raise InquisitorError('No such template found: {}'.format(template))

    env = Environment(loader=BaseLoader, autoescape=True)
    env.filters['json_loads'] = json.loads
    env.filters['slack_quote_join'] = lambda items: ', '.join('`{}`'.format(item) for item in items)

    return env.from_string(record.template)
python
{ "resource": "" }
q35986
to_utc_date
train
def to_utc_date(date):
    """Convert a datetime object from local to UTC format

    Fixed: the previous implementation used ``strftime('%s')``, which is a
    non-portable glibc extension (undefined on Windows and other libcs);
    ``datetime.timestamp()`` performs the same naive-local-time to epoch
    conversion portably.

    >>> import datetime
    >>> d = datetime.datetime(2017, 8, 15, 18, 24, 31)
    >>> to_utc_date(d)
    datetime.datetime(2017, 8, 16, 1, 24, 31)

    Args:
        date (`datetime`): Input datetime object (naive, interpreted as local time)

    Returns:
        `datetime`, `None`
    """
    return datetime.utcfromtimestamp(date.timestamp()).replace(tzinfo=None) if date else None
python
{ "resource": "" }
q35987
generate_password
train
def generate_password(length=32):
    """Generate a cryptographically secure random string to use for passwords

    Fixed: the old code instantiated a new ``random.SystemRandom`` object for
    every character; the stdlib ``secrets`` module provides the same CSPRNG
    through a cleaner, purpose-built API.

    Args:
        length (int): Length of password, defaults to 32 characters

    Returns:
        Randomly generated string
    """
    import secrets
    alphabet = string.ascii_letters + '!@#$+.,'
    return ''.join(secrets.choice(alphabet) for _ in range(length))
python
{ "resource": "" }
q35988
get_jwt_key_data
train
def get_jwt_key_data():
    """Returns the data for the JWT private key used for encrypting the user login token as a string object

    The key content is cached in the module-level ``__jwt_data`` global after
    the first read, so the key file is only opened once per process.

    Returns:
        `str`
    """
    global __jwt_data
    if __jwt_data:
        return __jwt_data

    from cloud_inquisitor import config_path
    from cloud_inquisitor.config import dbconfig

    # Resolve a relative key path against the application config directory
    jwt_key_file = dbconfig.get('jwt_key_file_path', default='ssl/private.key')
    if not os.path.isabs(jwt_key_file):
        jwt_key_file = os.path.join(config_path, jwt_key_file)

    # Fixed: dropped a redundant single-argument os.path.join() wrapper and
    # use a context manager so the handle is closed even if read() raises
    with open(jwt_key_file, 'r') as f:
        __jwt_data = f.read()

    return __jwt_data
python
{ "resource": "" }
q35989
has_access
train
def has_access(user, required_roles, match_all=True):
    """Check if the user meets the role requirements.

    Administrators always pass. A string argument is treated as a single
    required role; for a list of roles, `match_all` selects between requiring
    every role (AND) or any single role (OR).

    Args:
        user (:obj:`User`): User object
        required_roles (`list` of `str`): List of roles that the user must have applied
        match_all (`bool`): If true, all the required_roles must be applied to the user,
            else any one match will return `True`

    Returns:
        `bool`
    """
    user_roles = user.roles

    # Admins have access to everything
    if ROLE_ADMIN in user_roles:
        return True

    # A single role name is a plain membership test
    if isinstance(required_roles, str):
        return required_roles in user_roles

    predicate = all if match_all else any
    return predicate(role in user_roles for role in required_roles)
python
{ "resource": "" }
q35990
merge_lists
train
def merge_lists(*args):
    """Merge an arbitrary number of lists into a single list and dedupe it

    Contacts are deduplicated on their ``value`` attribute; when two contacts
    share a value, the one from the later list wins.

    Args:
        *args: Two or more lists

    Returns:
        A deduped merged list of all the provided lists as a single list
    """
    deduped = {}
    for contact_list in args:
        # Skip None / empty arguments entirely
        if not contact_list:
            continue
        for contact in contact_list:
            deduped[contact.value] = contact

    return list(deduped.values())
python
{ "resource": "" }
q35991
get_resource_id
train
def get_resource_id(prefix, *data):
    """Returns a unique ID based on the SHA256 hash of the provided data. The
    input data is flattened and sorted to ensure identical hashes are generated
    regardless of the order of the input. Values must be of types `str`, `int`
    or `float`, any other input type will raise a `ValueError`

    >>> get_resource_id('ec2', 'lots', 'of', 'data')
    'ec2-1d21940125214123'
    >>> get_resource_id('ecs', 'foo', ['more', 'data', 'here', 2, 3])
    'ecs-e536b036ea6fd463'
    >>> get_resource_id('ecs', ['more'], 'data', 'here', [[2], 3], 'foo')
    'ecs-e536b036ea6fd463'

    Args:
        prefix (`str`): Key prefix
        *data (`str`, `int`, `float`, `list`, `tuple`): Data used to generate a unique ID

    Returns:
        `str`
    """
    flattened = flatten(data)

    # Reject anything that is not a plain scalar after flattening
    for item in flattened:
        if type(item) not in (str, int, float):
            raise ValueError('Supported data types: int, float, list, tuple, str. Got: {}'.format(type(item)))

    # Sort the stringified parts so argument order never changes the hash
    digest = get_hash('-'.join(sorted(map(str, flattened))))
    return '{}-{}'.format(prefix, digest[-16:])
python
{ "resource": "" }
q35992
parse_date
train
def parse_date(date_string, ignoretz=True):
    """Parse a string as a date. If the string fails to parse, `None` will be returned instead

    >>> parse_date('2017-08-15T18:24:31')
    datetime.datetime(2017, 8, 15, 18, 24, 31)

    Args:
        date_string (`str`): Date in string format to parse
        ignoretz (`bool`): If set ``True``, ignore time zones and return a naive
            :class:`datetime` object.

    Returns:
        `datetime`, `None`
    """
    try:
        return parser.parse(date_string, ignoretz=ignoretz)
    except (TypeError, ValueError, OverflowError):
        # Fixed: dateutil raises ValueError for unparseable strings and
        # OverflowError for out-of-range values; the old code caught only
        # TypeError, so bad input leaked a ValueError to callers despite the
        # documented "return None" contract
        return None
python
{ "resource": "" }
q35993
get_user_data_configuration
train
def get_user_data_configuration():
    """Retrieve and update the application configuration with information from the user-data

    Fetches an encrypted payload from ``app_config.user_data_url``, decrypts it
    with KMS, decompresses and JSON-decodes it, and copies the database URI
    into the live application configuration.

    Returns:
        `None`

    Raises:
        `RuntimeError`: If the user-data URL does not respond with HTTP 200
    """
    from cloud_inquisitor import get_local_aws_session, app_config

    kms_region = app_config.kms_region
    session = get_local_aws_session()

    # With instance-role credentials we can call KMS directly; otherwise
    # assume the configured instance role via STS first and build a new
    # session from the temporary credentials
    if session.get_credentials().method == 'iam-role':
        kms = session.client('kms', region_name=kms_region)
    else:
        sts = session.client('sts')
        audit_role = sts.assume_role(RoleArn=app_config.aws_api.instance_role_arn, RoleSessionName='cloud_inquisitor')
        kms = boto3.session.Session(
            audit_role['Credentials']['AccessKeyId'],
            audit_role['Credentials']['SecretAccessKey'],
            audit_role['Credentials']['SessionToken'],
        ).client('kms', region_name=kms_region)

    user_data_url = app_config.user_data_url
    res = requests.get(user_data_url)

    if res.status_code == 200:
        # Payload is base64-encoded KMS ciphertext wrapping zlib-compressed JSON
        data = kms.decrypt(CiphertextBlob=b64decode(res.content))
        kms_config = json.loads(zlib.decompress(data['Plaintext']).decode('utf-8'))

        # NOTE(review): only db_uri is consumed from the decrypted config;
        # any other keys in the payload are ignored here
        app_config.database_uri = kms_config['db_uri']
    else:
        raise RuntimeError('Failed loading user-data, cannot continue: {}: {}'.format(res.status_code, res.content))
python
{ "resource": "" }
q35994
flatten
train
def flatten(data):
    """Returns a flattened version of a list.

    Falsy input (empty list/tuple, None) is returned unchanged. Only elements
    that are exactly of type `list` or `tuple` are expanded.

    Courtesy of https://stackoverflow.com/a/12472564

    Args:
        data (`tuple` or `list`): Input data

    Returns:
        `list`
    """
    if not data:
        return data

    head, tail = data[0], data[1:]
    if type(head) in (list, tuple):
        # Nested sequence: flatten it and splice before the flattened tail
        return list(flatten(head)) + list(flatten(tail))
    return [head] + list(flatten(tail))
python
{ "resource": "" }
q35995
diff
train
def diff(a, b):
    """Return the difference between two strings

    Will return a human-readable difference between two strings. See
    https://docs.python.org/3/library/difflib.html#difflib.Differ for more
    information about the output format

    Args:
        a (str): Original string
        b (str): New string

    Returns:
        `str`
    """
    original_lines = a.splitlines(keepends=True)
    updated_lines = b.splitlines(keepends=True)
    return ''.join(Differ().compare(original_lines, updated_lines))
python
{ "resource": "" }
q35996
build
train
def build(bucket_name, version, force, verbose):
    """Build and upload a new tarball

    Runs the frontend build, archives the ``dist`` folder and uploads the
    result to S3 under a ``dev`` or ``release`` prefix based on the version.

    Args:
        bucket_name (str): Name of the bucket to upload to
        version (str): Override build version. Defaults to using SCM based versioning (git tags)
        force (bool): Overwrite existing files in S3, if present
        verbose (bool): Verbose output
    """
    if verbose:
        log.setLevel('DEBUG')

    if not version:
        version = setuptools_scm.get_version()

    # Anything with "dev" in the version string goes to the dev channel
    release = "dev" if "dev" in version else "release"
    tarball = TARBALL_FORMAT.format(version)
    tarball_path = os.path.join(tempfile.gettempdir(), tarball)
    s3_key = os.path.join(release, tarball)

    try:
        run('npm i')
        run('./node_modules/.bin/gulp build.prod')
    except ExecutionError:
        log.exception('Failed executing command')
        return

    log.debug('Creating archive')
    # Fixed: use a context manager so the archive is closed (and flushed)
    # even if adding a file raises; the old code leaked the handle on error
    with tarfile.open(tarball_path, "w:gz") as tar:
        for root, dirnames, filenames in os.walk('dist'):
            for f in filenames:
                tar.add(os.path.join(root, f), recursive=False, filter=strip_path)

    log.debug('Uploading {} to s3://{}/{}'.format(tarball, bucket_name, s3_key))
    try:
        bucket = get_bucket_resource(bucket_name)
        if s3_file_exists(bucket, s3_key) and not force:
            log.error('File already exists in S3, use --force to overwrite')
            return

        bucket.upload_file(tarball_path, os.path.join(release, tarball))
    except ClientError:
        log.exception('AWS API failure')
python
{ "resource": "" }
q35997
BaseIssue.get
train
def get(cls, issue_id):
    """Load the issue of this class's type identified by `issue_id`

    Args:
        issue_id (str): Unique issue identifier to load from the database

    Returns:
        Issue object of this class if found, else None
    """
    issue_type_id = IssueType.get(cls.issue_type).issue_type_id
    record = Issue.get(issue_id, issue_type_id)
    if not record:
        return None
    return cls(record)
python
{ "resource": "" }
q35998
BaseIssue.create
train
def create(cls, issue_id, *, properties=None, auto_commit=False):
    """Create and persist a new issue with the provided properties

    Args:
        issue_id (str): Unique identifier for the issue object
        properties (dict): Optional dictionary of property name/value pairs for
            the issue; `datetime` values are stored in ISO format
        auto_commit (bool): Commit the database session immediately when set

    Returns:
        The freshly created issue, reloaded from the database

    Raises:
        IssueException: If an issue with `issue_id` already exists
    """
    if cls.get(issue_id):
        raise IssueException('Issue {} already exists'.format(issue_id))

    issue = Issue()
    issue.issue_id = issue_id
    issue.issue_type_id = IssueType.get(cls.issue_type).issue_type_id

    for prop_name, prop_value in (properties or {}).items():
        prop = IssueProperty()
        prop.issue_id = issue.issue_id
        prop.name = prop_name
        # Datetimes are persisted as ISO-8601 strings
        prop.value = prop_value.isoformat() if type(prop_value) == datetime else prop_value
        issue.properties.append(prop)
        db.session.add(prop)

    db.session.add(issue)
    if auto_commit:
        db.session.commit()

    return cls.get(issue.issue_id)
python
{ "resource": "" }
q35999
BaseIssue.get_all
train
def get_all(cls):
    """Return every issue of this class's type

    Returns:
        `dict` mapping issue id to issue object
    """
    type_id = IssueType.get(cls.issue_type).issue_type_id
    records = db.Issue.find(Issue.issue_type_id == type_id)
    return {record.issue_id: cls(record) for record in records}
python
{ "resource": "" }