idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
50,000
def fold_columns_to_rows ( df , levels_from = 2 ) : df = df . copy ( ) df . reset_index ( inplace = True , drop = True ) df = df . T a = [ list ( set ( df . index . get_level_values ( i ) ) ) for i in range ( 0 , levels_from ) ] combinations = list ( itertools . product ( * a ) ) names = df . index . names [ : levels_f...
Take a levels from the columns and fold down into the row index . This destroys the existing index ; existing rows will appear as columns under the new column index
50,001
def args(self, args):
    """Set additional arguments to be passed to the fitness function.

    Args:
        args: arguments forwarded to the fitness function on each evaluation
    """
    self._logger.log('debug', 'Args set to {}'.format(args))
    self._args = args
Set additional arguments to be passed to the fitness function
50,002
def minimize(self, minimize):
    """Configure the ABC to minimize (True) or not minimize (False) the
    fitness function's return value or derived score.

    Args:
        minimize (bool): whether the ABC should minimize
    """
    self._logger.log('debug', 'Minimize set to {}'.format(minimize))
    self._minimize = minimize
Configures the ABC to minimize fitness function return value or derived score
50,003
def num_employers ( self , num_employers ) : if num_employers < 2 : self . _logger . log ( 'warn' , 'Two employers are needed: setting to two' ) num_employers = 2 self . _num_employers = num_employers self . _logger . log ( 'debug' , 'Number of employers set to {}' . format ( num_employers ) ) self . _limit = num_emplo...
Sets the number of employer bees ; at least two are required
50,004
def processes(self, processes):
    """Set the number of concurrent processes the ABC will utilize for
    fitness function evaluation.

    Closes any existing worker pool, then creates a new pool only when more
    than one process is requested; otherwise evaluation runs in a single
    process (no pool).

    Args:
        processes (int): number of concurrent processes; <= 1 means a
            single process is used

    Bug fixed: the original decided pool creation from the OLD value of
    ``self._processes`` (so raising the count from 1 left no pool, and
    lowering it from >1 to 1 still created a Pool), and it never stored
    the new count on ``self._processes``.
    """
    # Tear down whichever pool currently exists, regardless of the
    # previously configured count.
    if getattr(self, '_pool', None) is not None:
        self._pool.close()
        self._pool.join()
    if processes > 1:
        self._pool = multiprocessing.Pool(processes)
    else:
        self._pool = None
    self._processes = processes
    self._logger.log('debug', 'Number of processes set to {}'.format(processes))
Set the number of concurrent processes the ABC will utilize for fitness function evaluation; if <= 1, a single process is used
50,005
def infer_process_count(self):
    """Infer the number of CPU cores on the current system and set the
    number of concurrent processes accordingly.

    Falls back to 4 processes when the core count cannot be determined.
    """
    try:
        self.processes = multiprocessing.cpu_count()
    except NotImplementedError:
        # Fixed log message: the original read 'Could infer CPU count',
        # dropping the 'not'.
        self._logger.log(
            'error',
            'Could not infer CPU count, setting number of processes back to 4'
        )
        self.processes = 4
Infers the number of CPU cores in the current system and sets the number of concurrent processes accordingly
50,006
def create_employers ( self ) : self . __verify_ready ( True ) employers = [ ] for i in range ( self . _num_employers ) : employer = EmployerBee ( self . __gen_random_values ( ) ) if self . _processes <= 1 : employer . error = self . _fitness_fxn ( employer . values , ** self . _args ) employer . score = employer . get...
Generate employer bees . This should be called directly after the ABC is initialized .
50,007
def run_iteration(self):
    """Run a single iteration of the ABC.

    Order: employer phase -> probability calculation -> onlooker phase ->
    position check.
    """
    phases = (
        self._employer_phase,
        self._calc_probability,
        self._onlooker_phase,
        self._check_positions,
    )
    for phase in phases:
        phase()
Runs a single iteration of the ABC; employer phase -> probability calculation -> onlooker phase -> check positions
50,008
def _calc_probability ( self ) : self . _logger . log ( 'debug' , 'Calculating bee probabilities' ) self . __verify_ready ( ) self . _total_score = 0 for employer in self . _employers : self . _total_score += employer . score if self . __update ( employer . score , employer . values , employer . error ) : self . _logge...
Determines the probability that each bee will be chosen during the onlooker phase ; also determines if a new best - performing bee is found
50,009
def _merge_bee ( self , bee ) : random_dimension = randint ( 0 , len ( self . _value_ranges ) - 1 ) second_bee = randint ( 0 , self . _num_employers - 1 ) while ( bee . id == self . _employers [ second_bee ] . id ) : second_bee = randint ( 0 , self . _num_employers - 1 ) new_bee = deepcopy ( bee ) new_bee . values [ ra...
Shifts a random value for a supplied bee in accordance with another random bee's value
50,010
def _move_bee ( self , bee , new_values ) : score = np . nan_to_num ( new_values [ 0 ] ) if bee . score > score : bee . failed_trials += 1 else : bee . values = new_values [ 1 ] bee . score = score bee . error = new_values [ 2 ] bee . failed_trials = 0 self . _logger . log ( 'debug' , 'Bee assigned to new merged positi...
Moves a bee to a new position if the new fitness score is better than the bee's current fitness score
50,011
def __update ( self , score , values , error ) : if self . _minimize : if self . _best_score is None or score > self . _best_score : self . _best_score = score self . _best_values = values . copy ( ) self . _best_error = error self . _logger . log ( 'debug' , 'New best food source memorized: {}' . format ( self . _best...
Update the best score and values if the given score is better than the current best score
50,012
def __gen_random_values ( self ) : values = [ ] if self . _value_ranges is None : self . _logger . log ( 'crit' , 'Must set the type/range of possible values' ) raise RuntimeError ( "Must set the type/range of possible values" ) else : for t in self . _value_ranges : if t [ 0 ] == 'int' : values . append ( randint ( t ...
Generate random values based on supplied value ranges
50,013
def __verify_ready ( self , creating = False ) : if len ( self . _value_ranges ) == 0 : self . _logger . log ( 'crit' , 'Attribute value_ranges must have at least one value' ) raise RuntimeWarning ( 'Attribute value_ranges must have at least one value' ) if len ( self . _employers ) == 0 and creating is False : self . ...
Some cleanup ensures that everything is set up properly to avoid random errors during execution
50,014
def import_settings ( self , filename ) : if not os . path . isfile ( filename ) : self . _logger . log ( 'error' , 'File: {} not found, continuing with default settings' . format ( filename ) ) else : with open ( filename , 'r' ) as jsonFile : data = json . load ( jsonFile ) self . _value_ranges = data [ 'valueRanges'...
Import settings from a JSON file
50,015
def save_settings ( self , filename ) : data = dict ( ) data [ 'valueRanges' ] = self . _value_ranges data [ 'best_values' ] = [ str ( value ) for value in self . _best_values ] data [ 'minimize' ] = self . _minimize data [ 'num_employers' ] = self . _num_employers data [ 'best_score' ] = str ( self . _best_score ) dat...
Save settings to a JSON file
50,016
def get_score(self, error=None):
    """Calculate the bee's fitness score from a fitness-function value.

    Args:
        error: optional raw fitness value; when supplied it is stored on
            the bee before scoring

    Returns:
        float: ``1 / (error + 1)`` for non-negative error (maps [0, inf)
        into (0, 1]), otherwise ``1 + abs(error)``
    """
    if error is not None:
        self.error = error
    current = self.error
    return 1 / (current + 1) if current >= 0 else 1 + abs(current)
Calculate a bee's fitness score given a value returned by the fitness function
50,017
def create_elb_dns ( self , regionspecific = False ) : if regionspecific : dns_elb = self . generated . dns ( ) [ 'elb_region' ] else : dns_elb = self . generated . dns ( ) [ 'elb' ] dns_elb_aws = find_elb ( name = self . app_name , env = self . env , region = self . region ) zone_ids = get_dns_zone_ids ( env = self . ...
Create dns entries in route53 .
50,018
def create_failover_dns ( self , primary_region = 'us-east-1' ) : dns_record = self . generated . dns ( ) [ 'global' ] zone_ids = get_dns_zone_ids ( env = self . env , facing = self . elb_subnet ) elb_dns_aws = find_elb ( name = self . app_name , env = self . env , region = self . region ) elb_dns_zone_id = find_elb_dn...
Create dns entries in route53 for multiregion failover setups .
50,019
def format_listeners ( elb_settings = None , env = 'dev' , region = 'us-east-1' ) : LOG . debug ( 'ELB settings:\n%s' , elb_settings ) credential = get_env_credential ( env = env ) account = credential [ 'accountId' ] listeners = [ ] if 'ports' in elb_settings : for listener in elb_settings [ 'ports' ] : cert_name = fo...
Format ELB Listeners into standard list .
50,020
def format_cert_name ( env = '' , account = '' , region = '' , certificate = None ) : cert_name = None if certificate : if certificate . startswith ( 'arn' ) : LOG . info ( "Full ARN provided...skipping lookup." ) cert_name = certificate else : generated_cert_name = generate_custom_cert_name ( env , region , account , ...
Format the SSL certificate name into ARN for ELB .
50,021
def generate_custom_cert_name ( env = '' , region = '' , account = '' , certificate = None ) : cert_name = None template_kwargs = { 'account' : account , 'name' : certificate } try : rendered_template = get_template ( template_file = 'infrastructure/iam/tlscert_naming.json.j2' , ** template_kwargs ) tlscert_dict = json...
Generate a custom TLS Cert name based on a template .
50,022
def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) log = logging . getLogger ( __name__ ) parser = argparse . ArgumentParser ( ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_properties ( parser ) args = parser . parse_args ( ) logging . getLogger ( __package__ . split ( "." ) [ 0 ] ) . s...
Send Slack notification to a configured channel .
50,023
def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) parser = argparse . ArgumentParser ( description = main . __doc__ ) add_debug ( parser ) add_app ( parser ) args = parser . parse_args ( ) if args . debug == logging . DEBUG : logging . getLogger ( __package__ . split ( '.' ) [ 0 ] ) . setLevel ( args . d...
Attempt to fully destroy AWS Resources for a Spinnaker Application .
50,024
def check_provider_healthcheck ( settings , default_provider = 'Discovery' ) : ProviderHealthCheck = collections . namedtuple ( 'ProviderHealthCheck' , [ 'providers' , 'has_healthcheck' ] ) eureka_enabled = settings [ 'app' ] [ 'eureka_enabled' ] providers = settings [ 'asg' ] [ 'provider_healthcheck' ] LOG . debug ( '...
Set Provider Health Check when specified .
50,025
def get_template_name ( env , pipeline_type ) : pipeline_base = 'pipeline/pipeline' template_name_format = '{pipeline_base}' if env . startswith ( 'prod' ) : template_name_format = template_name_format + '_{env}' else : template_name_format = template_name_format + '_stages' if pipeline_type != 'ec2' : template_name_fo...
Generates the correct template name based on pipeline type
50,026
def ec2_pipeline_setup ( generated = None , project = '' , settings = None , env = '' , pipeline_type = '' , region = '' , region_subnets = None , ) : data = copy . deepcopy ( settings ) user_data = generate_encoded_user_data ( env = env , region = region , generated = generated , group_name = project , pipeline_type =...
Handles ec2 pipeline data setup
50,027
def create_pipeline ( self ) : pipelines = self . settings [ 'pipeline' ] [ 'pipeline_files' ] self . log . info ( 'Uploading manual Pipelines: %s' , pipelines ) lookup = FileLookup ( git_short = self . generated . gitlab ( ) [ 'main' ] , runway_dir = self . runway_dir ) for json_file in pipelines : json_dict = lookup ...
Use JSON files to create Pipelines .
50,028
def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) log = logging . getLogger ( __name__ ) parser = argparse . ArgumentParser ( ) add_debug ( parser ) add_app ( parser ) add_properties ( parser ) parser . add_argument ( '-b' , '--base' , help = 'Base AMI name to use, e.g. fedora, tomcat' ) parser . add_arg...
Creates a pipeline in Spinnaker
50,029
def convert_ini ( config_dict ) : config_lines = [ ] for env , configs in sorted ( config_dict . items ( ) ) : for resource , app_properties in sorted ( configs . items ( ) ) : try : for app_property , value in sorted ( app_properties . items ( ) ) : variable = '{env}_{resource}_{app_property}' . format ( env = env , r...
Convert _config_dict_ into a list of INI formatted strings .
50,030
def write_variables ( app_configs = None , out_file = '' , git_short = '' ) : generated = gogoutils . Generator ( * gogoutils . Parser ( git_short ) . parse_url ( ) , formats = APP_FORMATS ) json_configs = { } for env , configs in app_configs . items ( ) : if env != 'pipeline' : instance_profile = generated . iam ( ) [...
Append _application . json_ configs to _out_file_ . exports and . json .
50,031
def get_sns_subscriptions ( app_name , env , region ) : session = boto3 . Session ( profile_name = env , region_name = region ) sns_client = session . client ( 'sns' ) lambda_alias_arn = get_lambda_alias_arn ( app = app_name , account = env , region = region ) lambda_subscriptions = [ ] subscriptions = sns_client . lis...
List SNS lambda subscriptions .
50,032
def destroy_cloudwatch_log_event ( app = '' , env = 'dev' , region = '' ) : session = boto3 . Session ( profile_name = env , region_name = region ) cloudwatch_client = session . client ( 'logs' ) cloudwatch_client . delete_subscription_filter ( logGroupName = '/aws/lambda/awslimitchecker' , filterName = app ) return Tr...
Destroy Cloudwatch log event .
50,033
def get_accounts ( self , provider = 'aws' ) : url = '{gate}/credentials' . format ( gate = API_URL ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert response . ok , 'Failed to get accounts: {0}' . format ( response . text ) all_accounts = response . json ( ) self . log . de...
Get Accounts added to Spinnaker .
50,034
def create_app ( self ) : self . appinfo [ 'accounts' ] = self . get_accounts ( ) self . log . debug ( 'Pipeline Config\n%s' , pformat ( self . pipeline_config ) ) self . log . debug ( 'App info:\n%s' , pformat ( self . appinfo ) ) jsondata = self . retrieve_template ( ) wait_for_task ( jsondata ) self . log . info ( "...
Send a POST to spinnaker to create a new application with class variables .
50,035
def retrieve_template ( self ) : links = self . retrieve_instance_links ( ) self . log . debug ( 'Links is \n%s' , pformat ( links ) ) self . pipeline_config [ 'instance_links' ] . update ( links ) jsondata = get_template ( template_file = 'infrastructure/app_data.json.j2' , appinfo = self . appinfo , pipeline_config =...
Sets the instance links with pipeline_configs and then renders template files
50,036
def retrieve_instance_links(self):
    """Collect instance links not already present in the pipeline config.

    Returns:
        dict: entries from the module-level ``LINKS`` mapping whose values
        are not yet in ``pipeline_config['instance_links']``
    """
    self.log.debug("LINKS IS %s", LINKS)
    existing_values = self.pipeline_config['instance_links'].values()
    return {label: url for label, url in LINKS.items() if url not in existing_values}
Appends to existing instance links
50,037
def get_cloudwatch_event_rule ( app_name , account , region ) : session = boto3 . Session ( profile_name = account , region_name = region ) cloudwatch_client = session . client ( 'events' ) lambda_alias_arn = get_lambda_alias_arn ( app = app_name , account = account , region = region ) rule_names = cloudwatch_client . ...
Get CloudWatch Event rule names .
50,038
def setup_pathing ( self ) : self . s3_version_uri = self . _path_formatter ( self . version ) self . s3_latest_uri = self . _path_formatter ( "LATEST" ) self . s3_canary_uri = self . _path_formatter ( "CANARY" ) self . s3_alpha_uri = self . _path_formatter ( "ALPHA" ) self . s3_mirror_uri = self . _path_formatter ( "M...
Format pathing for S3 deployments .
50,039
def _path_formatter ( self , suffix ) : if suffix . lower ( ) == "mirror" : path_items = [ self . bucket , self . s3path ] else : path_items = [ self . bucket , self . s3path , suffix ] path = '/' . join ( path_items ) s3_format = "s3://{}" formatted_path = path . replace ( '//' , '/' ) full_path = s3_format . format (...
Format the s3 path properly .
50,040
def upload_artifacts ( self ) : deploy_strategy = self . properties [ "deploy_strategy" ] mirror = False if deploy_strategy == "mirror" : mirror = True self . _upload_artifacts_to_path ( mirror = mirror ) if deploy_strategy == "highlander" : self . _sync_to_uri ( self . s3_latest_uri ) elif deploy_strategy == "canary" ...
Upload artifacts to S3 and copy to correct path depending on strategy .
50,041
def promote_artifacts(self, promote_stage='latest'):
    """Promote the artifact version to its destination stage.

    Args:
        promote_stage (str): stage to promote to; 'alpha' syncs to the
            CANARY uri, anything else (including 'canary') to LATEST
    """
    stage = promote_stage.lower()
    # 'canary' and the default case both sync to the LATEST uri, so only
    # the 'alpha' stage needs special-casing.
    if stage == 'alpha':
        self._sync_to_uri(self.s3_canary_uri)
    else:
        self._sync_to_uri(self.s3_latest_uri)
Promote artifact version to dest .
50,042
def _get_upload_cmd(self, mirror=False):
    """Generate the S3 CLI upload command.

    Args:
        mirror (bool): when True, target the mirror URI instead of the
            versioned URI

    Returns:
        str: the full ``aws s3 sync`` command line
    """
    destination = self.s3_mirror_uri if mirror else self.s3_version_uri
    return 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(
        self.artifact_path, destination, self.env)
Generate the S3 CLI upload command
50,043
def _upload_artifacts_to_path ( self , mirror = False ) : if not os . listdir ( self . artifact_path ) or not self . artifact_path : raise S3ArtifactNotFound uploaded = False if self . s3props . get ( "content_metadata" ) : LOG . info ( "Uploading in multiple parts to set metadata" ) uploaded = self . content_metadata_...
Recursively upload directory contents to S3 .
50,044
def content_metadata_uploads ( self , mirror = False ) : excludes_str = '' includes_cmds = [ ] cmd_base = self . _get_upload_cmd ( mirror = mirror ) for content in self . s3props . get ( 'content_metadata' ) : full_path = os . path . join ( self . artifact_path , content [ 'path' ] ) if not os . listdir ( full_path ) :...
Finds all specified encoded directories and uploads in multiple parts setting metadata for objects .
50,045
def _sync_to_uri ( self , uri ) : cmd_cp = 'aws s3 cp {} {} --recursive --profile {}' . format ( self . s3_version_uri , uri , self . env ) cmd_sync = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}' . format ( self . s3_version_uri , uri , self . env ) cp_result = subprocess . run ( cmd_cp , check = True ,...
Copy and sync versioned directory to uri in S3 .
50,046
def get_vpc_id ( account , region ) : url = '{0}/networks/aws' . format ( API_URL ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) if not response . ok : raise SpinnakerVPCNotFound ( response . text ) vpcs = response . json ( ) for vpc in vpcs : LOG . debug ( 'VPC: %(name)s, %(acc...
Get VPC ID configured for account in region .
50,047
def get_subnets ( target = 'ec2' , purpose = 'internal' , env = '' , region = '' , ) : account_az_dict = defaultdict ( defaultdict ) subnet_id_dict = defaultdict ( defaultdict ) subnet_url = '{0}/subnets/aws' . format ( API_URL ) subnet_response = requests . get ( subnet_url , verify = GATE_CA_BUNDLE , cert = GATE_CLIE...
Get all availability zones for a given target .
50,048
def create_lambda_events ( self ) : remove_all_lambda_permissions ( app_name = self . app_name , env = self . env , region = self . region ) triggers = self . properties [ 'lambda_triggers' ] for trigger in triggers : if trigger [ 'type' ] == 'sns' : create_sns_event ( app_name = self . app_name , env = self . env , re...
Create all defined lambda events for a lambda application .
50,049
def get_all_pipelines ( app = '' ) : url = '{host}/applications/{app}/pipelineConfigs' . format ( host = API_URL , app = app ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert response . ok , 'Could not retrieve Pipelines for {0}.' . format ( app ) pipelines = response . json...
Get a list of all the Pipelines in _app_ .
50,050
def get_pipeline_id ( app = '' , name = '' ) : return_id = None pipelines = get_all_pipelines ( app = app ) for pipeline in pipelines : LOG . debug ( 'ID of %(name)s: %(id)s' , pipeline ) if pipeline [ 'name' ] == name : return_id = pipeline [ 'id' ] LOG . info ( 'Pipeline %s found, ID: %s' , name , return_id ) break r...
Get the ID for Pipeline _name_ .
50,051
def normalize_pipeline_name(name=''):
    """Translate unsafe characters in a Pipeline name to underscores.

    Args:
        name (str): Pipeline name to sanitize.

    Returns:
        str: name with each of ``\\ / ? % #`` replaced by ``_``.
    """
    unsafe = set('\\/?%#')
    return ''.join('_' if char in unsafe else char for char in name)
Translate unsafe characters to underscores .
50,052
def get_all_apps ( ) : LOG . info ( 'Retreiving list of all Spinnaker applications' ) url = '{}/applications' . format ( API_URL ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert response . ok , 'Could not retrieve application list' pipelines = response . json ( ) LOG . debu...
Get a list of all applications in Spinnaker .
50,053
def get_details ( app = 'groupproject' , env = 'dev' , region = 'us-east-1' ) : url = '{host}/applications/{app}' . format ( host = API_URL , app = app ) request = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) if not request . ok : raise SpinnakerAppNotFound ( '"{0}" not found.' . format ( ...
Extract details for Application .
50,054
def create_pipeline ( self ) : clean_pipelines ( app = self . app_name , settings = self . settings ) pipeline_envs = self . environments self . log . debug ( 'Envs from pipeline.json: %s' , pipeline_envs ) regions_envs = collections . defaultdict ( list ) for env in pipeline_envs : for region in self . settings [ env ...
Main wrapper for pipeline creation . 1 . Runs clean_pipelines to clean up existing ones 2 . determines which environments the pipeline needs 3 . Renders all of the pipeline blocks as defined in configs 4 . Runs post_pipeline to create pipeline
50,055
def _check_lambda(self):
    """Check whether the lambda function for this app already exists.

    Returns:
        bool: True when ``get_function`` succeeds, False when AWS raises a
        ClientError (function not found or not accessible).
    """
    try:
        self.lambda_client.get_function(FunctionName=self.app_name)
    except boto3.exceptions.botocore.exceptions.ClientError:
        return False
    return True
Check if lambda function exists .
50,056
def _check_lambda_alias ( self ) : aliases = self . lambda_client . list_aliases ( FunctionName = self . app_name ) matched_alias = False for alias in aliases [ 'Aliases' ] : if alias [ 'Name' ] == self . env : LOG . info ( 'Found alias %s for function %s' , self . env , self . app_name ) matched_alias = True break els...
Check if lambda alias exists .
50,057
def _vpc_config ( self ) : if self . vpc_enabled : subnets = get_subnets ( env = self . env , region = self . region , purpose = 'internal' ) [ 'subnet_ids' ] [ self . region ] security_groups = self . _get_sg_ids ( ) vpc_config = { 'SubnetIds' : subnets , 'SecurityGroupIds' : security_groups } else : vpc_config = { 'S...
Get VPC config .
50,058
def _get_sg_ids ( self ) : try : lambda_extras = self . settings [ 'security_groups' ] [ 'lambda_extras' ] except KeyError : lambda_extras = [ ] security_groups = [ self . app_name ] + lambda_extras sg_ids = [ ] for security_group in security_groups : sg_id = get_security_group_id ( name = security_group , env = self ....
Get IDs for all defined security groups .
50,059
def update_function_configuration ( self , vpc_config ) : LOG . info ( 'Updating configuration for lambda function: %s' , self . app_name ) try : self . lambda_client . update_function_configuration ( Environment = self . lambda_environment , FunctionName = self . app_name , Runtime = self . runtime , Role = self . rol...
Update existing Lambda function configuration .
50,060
def create_function ( self , vpc_config ) : zip_file = 'lambda-holder.zip' with zipfile . ZipFile ( zip_file , mode = 'w' ) as zipped : zipped . writestr ( 'index.py' , 'print "Hello world"' ) contents = '' with open ( 'lambda-holder.zip' , 'rb' ) as openfile : contents = openfile . read ( ) LOG . info ( 'Creating lamb...
Create the lambda function and configure its lambda parameters .
50,061
def create_lambda_function(self):
    """Create or update the Lambda function, then its alias."""
    vpc_config = self._vpc_config()
    # Pick update-vs-create once for the function, then for the alias.
    function_action = (self.update_function_configuration
                       if self._check_lambda() else self.create_function)
    function_action(vpc_config)
    alias_action = self.update_alias if self._check_lambda_alias() else self.create_alias
    alias_action()
Create or update Lambda function .
50,062
def destroy_sg ( app = '' , env = '' , region = '' , ** _ ) : vpc = get_vpc_id ( account = env , region = region ) url = '{api}/securityGroups/{env}/{region}/{app}' . format ( api = API_URL , env = env , region = region , app = app ) payload = { 'vpcId' : vpc } security_group = requests . get ( url , params = payload ,...
Destroy Security Group .
50,063
def destroy_s3 ( app = '' , env = 'dev' , ** _ ) : session = boto3 . Session ( profile_name = env ) client = session . resource ( 's3' ) generated = get_details ( app = app , env = env ) archaius = generated . archaius ( ) bucket = client . Bucket ( archaius [ 'bucket' ] ) for item in bucket . objects . filter ( Prefix...
Destroy S3 Resources for _app_ in _env_ .
50,064
def main ( ) : parser = argparse . ArgumentParser ( ) add_debug ( parser ) add_app ( parser ) parser . add_argument ( '--email' , help = 'Email address to associate with application' , default = 'PS-DevOpsTooling@example.com' ) parser . add_argument ( '--project' , help = 'Git project to associate with application' , d...
Entry point for creating a Spinnaker application .
50,065
def destroy_s3_event ( app , env , region ) : generated = get_details ( app = app , env = env ) bucket = generated . s3_app_bucket ( ) session = boto3 . Session ( profile_name = env , region_name = region ) s3_client = session . client ( 's3' ) config = { } s3_client . put_bucket_notification_configuration ( Bucket = b...
Destroy S3 event .
50,066
def destroy_iam ( app = '' , env = 'dev' , ** _ ) : session = boto3 . Session ( profile_name = env ) client = session . client ( 'iam' ) generated = get_details ( env = env , app = app ) generated_iam = generated . iam ( ) app_details = collections . namedtuple ( 'AppDetails' , generated_iam . keys ( ) ) details = app_...
Destroy IAM Resources .
50,067
def get_role_arn ( role_name , env , region ) : session = boto3 . Session ( profile_name = env , region_name = region ) iam_client = session . client ( 'iam' ) LOG . debug ( 'Searching for %s.' , role_name ) role = iam_client . get_role ( RoleName = role_name ) role_arn = role [ 'Role' ] [ 'Arn' ] LOG . debug ( "Found ...
Get role ARN given role name .
50,068
def render_policy_template ( account_number = '' , app = 'coreforrest' , env = 'dev' , group = 'forrest' , items = None , pipeline_settings = None , region = 'us-east-1' , service = '' ) : statements = [ ] rendered_service_policy = get_template ( 'infrastructure/iam/{0}.json.j2' . format ( service ) , account_number = ...
Render IAM Policy template .
50,069
def construct_policy ( app = 'coreforrest' , env = 'dev' , group = 'forrest' , region = 'us-east-1' , pipeline_settings = None ) : LOG . info ( 'Create custom IAM Policy for %s.' , app ) services = pipeline_settings . get ( 'services' , { } ) LOG . debug ( 'Found requested services: %s' , services ) services = auto_ser...
Assemble IAM Policy for _app_ .
50,070
def validate_gate():
    """Check the Gate connection by fetching environment credentials."""
    try:
        credential = get_env_credential()
    except TypeError:
        LOG.fatal('Gate connection not valid: API_URL = %s', API_URL)
    else:
        LOG.debug('Found credentials: %s', credential)
        LOG.info('Gate working.')
Check Gate connection .
50,071
def create_s3_event ( app_name , env , region , bucket , triggers ) : session = boto3 . Session ( profile_name = env , region_name = region ) s3_client = session . client ( 's3' ) lambda_alias_arn = get_lambda_alias_arn ( app_name , env , region ) LOG . debug ( "Lambda ARN for lambda function %s is %s." , app_name , la...
Create S3 lambda events from triggers
50,072
def generate_packer_filename(provider, region, builder):
    """Generate a filename to be used by packer.

    Args:
        provider: cloud provider name (e.g. 'aws')
        region: provider region
        builder: packer builder name

    Returns:
        str: ``provider_region_builder.json``
    """
    name_parts = (provider, region, builder)
    return '{0}_{1}_{2}.json'.format(*name_parts)
Generate a filename to be used by packer .
50,073
def get_template ( template_file = '' , ** kwargs ) : template = get_template_object ( template_file ) LOG . info ( 'Rendering template %s' , template . filename ) for key , value in kwargs . items ( ) : LOG . debug ( '%s => %s' , key , value ) rendered_json = template . render ( ** kwargs ) LOG . debug ( 'Rendered JSO...
Get the Jinja2 template and renders with dict _kwargs_ .
50,074
def renumerate_stages ( pipeline ) : stages = pipeline [ 'stages' ] main_index = 0 branch_index = 0 previous_refid = '' for stage in stages : current_refid = stage [ 'refId' ] . lower ( ) if current_refid == 'master' : if main_index == 0 : stage [ 'requisiteStageRefIds' ] = [ ] else : stage [ 'requisiteStageRefIds' ] =...
Renumber Pipeline Stage reference IDs to account for dependencies .
50,075
def post_task ( task_data , task_uri = '/tasks' ) : url = '{}/{}' . format ( API_URL , task_uri . lstrip ( '/' ) ) if isinstance ( task_data , str ) : task_json = task_data else : task_json = json . dumps ( task_data ) resp = requests . post ( url , data = task_json , headers = HEADERS , verify = GATE_CA_BUNDLE , cert ...
Create Spinnaker Task .
50,076
def _check_task ( taskid ) : try : taskurl = taskid . get ( 'ref' , '0000' ) except AttributeError : taskurl = taskid taskid = taskurl . split ( '/tasks/' ) [ - 1 ] LOG . info ( 'Checking taskid %s' , taskid ) url = '{}/tasks/{}' . format ( API_URL , taskid ) task_response = requests . get ( url , headers = HEADERS , v...
Check Spinnaker Task status .
50,077
def check_task ( taskid , timeout = DEFAULT_TASK_TIMEOUT , wait = 2 ) : max_attempts = int ( timeout / wait ) try : return retry_call ( partial ( _check_task , taskid ) , max_attempts = max_attempts , wait = wait , exceptions = ( AssertionError , ValueError ) , ) except ValueError : raise SpinnakerTaskInconclusiveError...
Wrap check_task .
50,078
def wait_for_task ( task_data , task_uri = '/tasks' ) : taskid = post_task ( task_data , task_uri ) if isinstance ( task_data , str ) : json_data = json . loads ( task_data ) else : json_data = task_data job = json_data [ 'job' ] [ 0 ] env = job . get ( 'credentials' ) task_type = job . get ( 'type' ) timeout = TASK_TI...
Run task and check the result .
50,079
def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) parser = argparse . ArgumentParser ( description = main . __doc__ ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_properties ( parser ) add_region ( parser ) add_artifact_path ( parser ) add_artifact_version ( parser ) args = parser . par...
Create application . properties for a given application .
50,080
def init_properties ( env = 'dev' , app = 'unnecessary' , ** _ ) : aws_env = boto3 . session . Session ( profile_name = env ) s3client = aws_env . resource ( 's3' ) generated = get_details ( app = app , env = env ) archaius = generated . archaius ( ) archaius_file = ( '{path}/application.properties' ) . format ( path =...
Make sure _application . properties_ file exists in S3 .
50,081
def create_cloudwatch_event ( app_name , env , region , rules ) : session = boto3 . Session ( profile_name = env , region_name = region ) cloudwatch_client = session . client ( 'events' ) rule_name = rules . get ( 'rule_name' ) schedule = rules . get ( 'schedule' ) rule_description = rules . get ( 'rule_description' ) ...
Create cloudwatch event for lambda from rules .
50,082
def find_api_id(self):
    """Given an API name, find the API ID.

    Returns:
        str: the ID of the existing API matching the configured
        ``api_name``, or the ID of a freshly created API when no match
        exists.
    """
    target = self.trigger_settings['api_name']
    for api in self.client.get_rest_apis()['items']:
        if api['name'] == target:
            self.log.info("Found API for: %s", target)
            return api['id']
    # No existing API matched; create one.
    return self.create_api()
Given an API name , find the API ID .
50,083
def find_resource_ids ( self ) : all_resources = self . client . get_resources ( restApiId = self . api_id ) parent_id = None resource_id = None for resource in all_resources [ 'items' ] : if resource [ 'path' ] == "/" : parent_id = resource [ 'id' ] if resource [ 'path' ] == self . trigger_settings [ 'resource' ] : re...
Given a resource path and API ID , find the resource ID .
50,084
def add_lambda_integration ( self ) : lambda_uri = self . generate_uris ( ) [ 'lambda_uri' ] self . client . put_integration ( restApiId = self . api_id , resourceId = self . resource_id , httpMethod = self . trigger_settings [ 'method' ] , integrationHttpMethod = 'POST' , uri = lambda_uri , type = 'AWS' ) self . add_i...
Attach lambda found to API .
50,085
def add_integration_response(self):
    """Add an integration response to the API for the lambda integration."""
    response_kwargs = {
        'restApiId': self.api_id,
        'resourceId': self.resource_id,
        'httpMethod': self.trigger_settings['method'],
        'statusCode': '200',
        'responseTemplates': {'application/json': ''},
    }
    self.client.put_integration_response(**response_kwargs)
Add an integration response to the API for the lambda integration .
50,086
def add_permission ( self ) : statement_id = '{}_api_{}' . format ( self . app_name , self . trigger_settings [ 'api_name' ] ) principal = 'apigateway.amazonaws.com' lambda_alias_arn = get_lambda_alias_arn ( self . app_name , self . env , self . region ) lambda_unqualified_arn = get_lambda_arn ( self . app_name , self ...
Add permission to Lambda for the API Trigger .
50,087
def create_api_deployment ( self ) : try : self . client . create_deployment ( restApiId = self . api_id , stageName = self . env ) self . log . info ( 'Created a deployment resource.' ) except botocore . exceptions . ClientError as error : error_code = error . response [ 'Error' ] [ 'Code' ] if error_code == 'TooManyR...
Create API deployment of ENV name .
50,088
def create_api_key ( self ) : apikeys = self . client . get_api_keys ( ) for key in apikeys [ 'items' ] : if key [ 'name' ] == self . app_name : self . log . info ( "Key %s already exists" , self . app_name ) break else : self . client . create_api_key ( name = self . app_name , enabled = True , stageKeys = [ { 'restAp...
Create API Key for API access .
50,089
def _format_base_path ( self , api_name ) : name = self . app_name if self . app_name != api_name : name = '{0}-{1}' . format ( self . app_name , api_name ) return name
Format the base path name .
50,090
def update_api_mappings ( self ) : response_provider = None response_action = None domain = self . generated . apigateway ( ) [ 'domain' ] try : response_provider = self . client . create_base_path_mapping ( domainName = domain , basePath = self . _format_base_path ( self . trigger_settings [ 'api_name' ] ) , restApiId...
Create a CNAME for the API deployment .
50,091
def generate_uris ( self ) : lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}" . format ( self . region , self . account_id , self . api_id , self . trigger_settings [ 'method' ] , self . trigger_settings [ 'resource' ] ) lambda_uri = ( "arn:aws:apigateway:{0}:lambda:path/{1}/functions/" "arn:aws:lambda:{0}:{2}:...
Generate several lambda uris .
50,092
def create_api(self):
    """Create the REST API.

    Uses ``trigger_settings['api_name']`` as the API name, falling back
    to the application name when no explicit name is configured.

    Returns:
        str: Identifier of the newly created REST API.
    """
    api_name = self.trigger_settings.get('api_name', self.app_name)
    response = self.client.create_rest_api(name=api_name)
    self.log.info("Successfully created API")
    return response['id']
Create the REST API .
50,093
def create_resource ( self , parent_id = "" ) : resource_name = self . trigger_settings . get ( 'resource' , '' ) resource_name = resource_name . replace ( '/' , '' ) if not self . resource_id : created_resource = self . client . create_resource ( restApiId = self . api_id , parentId = parent_id , pathPart = resource_n...
Create the specified resource .
50,094
def attach_method ( self , resource_id ) : try : _response = self . client . put_method ( restApiId = self . api_id , resourceId = resource_id , httpMethod = self . trigger_settings [ 'method' ] , authorizationType = "NONE" , apiKeyRequired = False , ) self . log . debug ( 'Response for resource (%s) push authorization...
Attach the defined method .
50,095
def setup_lambda_api(self):
    """A wrapper for all the steps needed to set up the API integration.

    Runs each setup step in sequence. The order appears deliberate —
    the resource is created before a method is attached to it, and the
    integration/permission are in place before the deployment, API key,
    and base-path mapping are created — confirm before reordering.
    """
    self.create_resource(self.parent_id)
    self.attach_method(self.resource_id)
    self.add_lambda_integration()
    self.add_permission()
    self.create_api_deployment()
    self.create_api_key()
    self.update_api_mappings()
A wrapper for all the steps needed to setup the integration .
50,096
def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) log = logging . getLogger ( __name__ ) parser = argparse . ArgumentParser ( ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_region ( parser ) add_properties ( parser ) parser . add_argument ( "--elb-subnet" , help = "Subnetnet type, e.g. ...
Run newer stuff .
50,097
def _validate_cidr ( self , rule ) : try : network = ipaddress . IPv4Network ( rule [ 'app' ] ) except ( ipaddress . NetmaskValueError , ValueError ) as error : raise SpinnakerSecurityGroupCreationFailed ( error ) self . log . debug ( 'Validating CIDR: %s' , network . exploded ) return True
Validate the cidr block in a rule .
50,098
def _process_rules ( self , rules ) : cidr = [ ] non_cidr = [ ] for rule in rules : if '.' in rule [ 'app' ] : self . log . debug ( 'Custom CIDR rule: %s' , rule ) self . _validate_cidr ( rule ) cidr . append ( rule ) else : self . log . debug ( 'SG reference rule: %s' , rule ) non_cidr . append ( rule ) self . log . d...
Process rules into cidr and non - cidr lists .
50,099
def add_tags ( self ) : session = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) resource = session . resource ( 'ec2' ) group_id = get_security_group_id ( self . app_name , self . env , self . region ) security_group = resource . SecurityGroup ( group_id ) try : tag = security_gr...
Add tags to security group .