idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
50,100 | def add_cidr_rules ( self , rules ) : session = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) client = session . client ( 'ec2' ) group_id = get_security_group_id ( self . app_name , self . env , self . region ) for rule in rules : data = { 'DryRun' : False , 'GroupId' : group_id... | Add cidr rules to security group via boto . |
50,101 | def update_default_rules ( self ) : app_ingress = self . properties [ 'security_group' ] [ 'ingress' ] ingress = conservative_merger . merge ( DEFAULT_SECURITYGROUP_RULES , app_ingress ) resolved_ingress = self . resolve_self_references ( ingress ) self . log . info ( 'Updated default rules:\n%s' , ingress ) return res... | Concatenate application and global security group rules . |
50,102 | def _create_security_group ( self , ingress ) : template_kwargs = { 'app' : self . app_name , 'env' : self . env , 'region' : self . region , 'vpc' : get_vpc_id ( self . env , self . region ) , 'description' : self . properties [ 'security_group' ] [ 'description' ] , 'ingress' : ingress , } secgroup_json = get_templat... | Send a POST to spinnaker to create a new security group . |
50,103 | def create_security_group ( self ) : ingress_rules = [ ] try : security_id = get_security_group_id ( name = self . app_name , env = self . env , region = self . region ) except ( SpinnakerSecurityGroupError , AssertionError ) : self . _create_security_group ( ingress_rules ) else : self . log . debug ( 'Security Group ... | Send a POST to spinnaker to create or update a security group . |
50,104 | def create_ingress_rule ( self , app , rule ) : if isinstance ( rule , dict ) : start_port = rule . get ( 'start_port' ) end_port = rule . get ( 'end_port' ) protocol = rule . get ( 'protocol' , 'tcp' ) requested_cross_account = rule . get ( 'env' , self . env ) if self . env == requested_cross_account : cross_account_... | Create a normalized ingress rule . |
50,105 | def get_lambda_arn ( app , account , region ) : session = boto3 . Session ( profile_name = account , region_name = region ) lambda_client = session . client ( 'lambda' ) lambda_arn = None paginator = lambda_client . get_paginator ( 'list_functions' ) for lambda_functions in paginator . paginate ( ) : for lambda_functio... | Get lambda ARN . |
50,106 | def get_lambda_alias_arn ( app , account , region ) : session = boto3 . Session ( profile_name = account , region_name = region ) lambda_client = session . client ( 'lambda' ) lambda_aliases = lambda_client . list_aliases ( FunctionName = app ) matched_alias = None for alias in lambda_aliases [ 'Aliases' ] : if alias [... | Get lambda alias ARN . Assumes that account name is equal to alias name . |
50,107 | def add_lambda_permissions ( function = '' , statement_id = '' , action = 'lambda:InvokeFunction' , principal = '' , source_arn = '' , env = '' , region = 'us-east-1' ) : session = boto3 . Session ( profile_name = env , region_name = region ) lambda_client = session . client ( 'lambda' ) response_action = None prefixed... | Add permission to Lambda for the event trigger . |
50,108 | def resource_action ( client , action = '' , log_format = 'item: %(key)s' , ** kwargs ) : result = None try : result = getattr ( client , action ) ( ** kwargs ) LOG . info ( log_format , kwargs ) except botocore . exceptions . ClientError as error : error_code = error . response [ 'Error' ] [ 'Code' ] if error_code == ... | Call _action_ using boto3 _client_ with _kwargs_ . |
50,109 | def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) parser = argparse . ArgumentParser ( description = 'Example with non-optional arguments' ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_region ( parser ) add_properties ( parser ) args = parser . parse_args ( ) logging . getLogger ( __pa... | Entry point for ELB creation |
50,110 | def make_elb_json ( self ) : env = self . env region = self . region elb_settings = self . properties [ 'elb' ] LOG . debug ( 'Block ELB Settings:\n%s' , pformat ( elb_settings ) ) health_settings = elb_settings [ 'health' ] elb_subnet_purpose = elb_settings . get ( 'subnet_purpose' , 'internal' ) region_subnets = get_... | Render the JSON template with arguments . |
50,111 | def create_elb ( self ) : json_data = self . make_elb_json ( ) LOG . debug ( 'Block ELB JSON Data:\n%s' , pformat ( json_data ) ) wait_for_task ( json_data ) self . add_listener_policy ( json_data ) self . add_backend_policy ( json_data ) self . configure_attributes ( json_data ) | Create or Update the ELB after rendering JSON data from configs . Asserts that the ELB task was successful . |
50,112 | def add_listener_policy ( self , json_data ) : env = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) elbclient = env . client ( 'elb' ) stickiness = { } elb_settings = self . properties [ 'elb' ] if elb_settings . get ( 'ports' ) : ports = elb_settings [ 'ports' ] for listener in p... | Attaches listener policies to an ELB |
50,113 | def add_backend_policy ( self , json_data ) : env = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) elbclient = env . client ( 'elb' ) for job in json . loads ( json_data ) [ 'job' ] : for listener in job [ 'listeners' ] : instance_port = listener [ 'internalPort' ] backend_policy_... | Attaches backend server policies to an ELB |
50,114 | def add_stickiness ( self ) : stickiness_dict = { } env = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) elbclient = env . client ( 'elb' ) elb_settings = self . properties [ 'elb' ] for listener in elb_settings . get ( 'ports' ) : if listener . get ( "stickiness" ) : sticky_type ... | Adds stickiness policy to created ELB |
50,115 | def configure_attributes ( self , json_data ) : env = boto3 . session . Session ( profile_name = self . env , region_name = self . region ) elbclient = env . client ( 'elb' ) elb_settings = self . properties [ 'elb' ] LOG . debug ( 'Block ELB Settings Pre Configure Load Balancer Attributes:\n%s' , pformat ( elb_setting... | Configure load balancer attributes such as idle timeout connection draining etc |
50,116 | def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) parser = argparse . ArgumentParser ( description = main . __doc__ ) add_debug ( parser ) parser . add_argument ( '-o' , '--output' , required = True , help = 'Name of environment file to append to' ) parser . add_argument ( '-g' , '--git-short' , metavar ... | Append Application Configurations to a given file in multiple formats . |
50,117 | def add_infra ( subparsers ) : infra_parser = subparsers . add_parser ( 'infra' , help = runner . prepare_infrastructure . __doc__ ) infra_parser . set_defaults ( func = runner . prepare_infrastructure ) | Infrastructure subcommands . |
50,118 | def add_pipeline ( subparsers ) : pipeline_parser = subparsers . add_parser ( 'pipeline' , help = add_pipeline . __doc__ , formatter_class = argparse . ArgumentDefaultsHelpFormatter ) pipeline_parser . set_defaults ( func = pipeline_parser . print_help ) pipeline_subparsers = pipeline_parser . add_subparsers ( title = ... | Pipeline subcommands . |
50,119 | def add_rebuild ( subparsers ) : rebuild_parser = subparsers . add_parser ( 'rebuild' , help = runner . rebuild_pipelines . __doc__ , formatter_class = argparse . ArgumentDefaultsHelpFormatter ) rebuild_parser . set_defaults ( func = runner . rebuild_pipelines ) rebuild_parser . add_argument ( '-a' , '--all' , action =... | Rebuild Pipeline subcommands . |
50,120 | def add_autoscaling ( subparsers ) : autoscaling_parser = subparsers . add_parser ( 'autoscaling' , help = runner . create_scaling_policy . __doc__ , formatter_class = argparse . ArgumentDefaultsHelpFormatter ) autoscaling_parser . set_defaults ( func = runner . create_scaling_policy ) | Auto Scaling Group Policy subcommands . |
50,121 | def add_validate ( subparsers ) : validate_parser = subparsers . add_parser ( 'validate' , help = add_validate . __doc__ , formatter_class = argparse . ArgumentDefaultsHelpFormatter ) validate_parser . set_defaults ( func = validate_parser . print_help ) validate_subparsers = validate_parser . add_subparsers ( title = ... | Validate Spinnaker setup . |
50,122 | def get_existing_pipelines ( self ) : url = "{0}/applications/{1}/pipelineConfigs" . format ( API_URL , self . app_name ) resp = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert resp . ok , 'Failed to lookup pipelines for {0}: {1}' . format ( self . app_name , resp . text ) return resp ... | Get existing pipeline configs for specific application . |
50,123 | def compare_with_existing ( self , region = 'us-east-1' , onetime = False ) : pipelines = self . get_existing_pipelines ( ) pipeline_id = None found = False for pipeline in pipelines : correct_app_and_region = ( pipeline [ 'application' ] == self . app_name ) and ( region in pipeline [ 'name' ] ) if onetime : onetime_s... | Compare desired pipeline with existing pipelines . |
50,124 | def create_pipeline ( self ) : clean_pipelines ( app = self . app_name , settings = self . settings ) pipeline_envs = self . environments self . log . debug ( 'Envs from pipeline.json: %s' , pipeline_envs ) regions_envs = collections . defaultdict ( list ) for env in pipeline_envs : for region in self . settings [ env ... | Main wrapper for pipeline creation . 1 . Runs clean_pipelines to clean up existing ones 2 . determines which environments the pipeline needs 3 . gets all subnets for template rendering 4 . Renders all of the pipeline blocks as defined in configs 5 . Runs post_pipeline to create pipeline |
50,125 | def ami_lookup ( region = 'us-east-1' , name = 'tomcat8' ) : if AMI_JSON_URL : ami_dict = _get_ami_dict ( AMI_JSON_URL ) ami_id = ami_dict [ region ] [ name ] elif GITLAB_TOKEN : warn_user ( 'Use AMI_JSON_URL feature instead.' ) ami_contents = _get_ami_file ( region = region ) ami_dict = json . loads ( ami_contents ) a... | Look up AMI ID . |
50,126 | def _get_ami_file ( region = 'us-east-1' ) : LOG . info ( "Getting AMI from Gitlab" ) lookup = FileLookup ( git_short = 'devops/ansible' ) filename = 'scripts/{0}.json' . format ( region ) ami_contents = lookup . remote_file ( filename = filename , branch = 'master' ) LOG . debug ( 'AMI file contents in %s: %s' , filen... | Get file from Gitlab . |
50,127 | def _get_ami_dict ( json_url ) : LOG . info ( "Getting AMI from %s" , json_url ) response = requests . get ( json_url ) assert response . ok , "Error getting ami info from {}" . format ( json_url ) ami_dict = response . json ( ) LOG . debug ( 'AMI json contents: %s' , ami_dict ) return ami_dict | Get ami from a web url . |
50,128 | def get_gitlab_project ( self ) : self . server = gitlab . Gitlab ( GIT_URL , private_token = GITLAB_TOKEN , api_version = 4 ) project = self . server . projects . get ( self . git_short ) if not project : raise GitLabApiError ( 'Could not get Project "{0}" from GitLab API.' . format ( self . git_short ) ) self . proje... | Get numerical GitLab Project ID . |
50,129 | def local_file ( self , filename ) : LOG . info ( 'Retrieving "%s" from "%s".' , filename , self . runway_dir ) file_contents = '' file_path = os . path . join ( self . runway_dir , filename ) try : with open ( file_path , 'rt' ) as lookup_file : file_contents = lookup_file . read ( ) except FileNotFoundError : LOG . w... | Read the local file in _self . runway_dir_ . |
50,130 | def remote_file ( self , branch = 'master' , filename = '' ) : LOG . info ( 'Retrieving "%s" from "%s".' , filename , self . git_short ) file_contents = '' try : file_blob = self . project . files . get ( file_path = filename , ref = branch ) except gitlab . exceptions . GitlabGetError : file_blob = None LOG . debug ( ... | Read the remote file on Git Server . |
50,131 | def banner ( text , border = '=' , width = 80 ) : text_padding = '{0:^%d}' % ( width ) LOG . info ( border * width ) LOG . info ( text_padding . format ( text ) ) LOG . info ( border * width ) | Center _text_ in a banner _width_ wide with _border_ characters . |
50,132 | def get_sns_topic_arn ( topic_name , account , region ) : if topic_name . count ( ':' ) == 5 and topic_name . startswith ( 'arn:aws:sns:' ) : return topic_name session = boto3 . Session ( profile_name = account , region_name = region ) sns_client = session . client ( 'sns' ) topics = sns_client . list_topics ( ) [ 'Top... | Get SNS topic ARN . |
50,133 | def notify_slack_channel ( self ) : message = get_template ( template_file = 'slack/pipeline-prepare-ran.j2' , info = self . info ) if self . settings [ 'pipeline' ] [ 'notifications' ] [ 'slack' ] : post_slack_message ( message = message , channel = self . settings [ 'pipeline' ] [ 'notifications' ] [ 'slack' ] , user... | Post message to a defined Slack channel . |
50,134 | def get_properties ( properties_file = 'raw.properties.json' , env = None , region = None ) : with open ( properties_file , 'rt' ) as file_handle : properties = json . load ( file_handle ) env_properties = properties . get ( env , properties ) contents = env_properties . get ( region , env_properties ) LOG . debug ( 'F... | Get contents of _properties_file_ for the _env_ . |
50,135 | def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) parser = argparse . ArgumentParser ( description = main . __doc__ ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_region ( parser ) args = parser . parse_args ( ) logging . getLogger ( __package__ . split ( '.' ) [ 0 ] ) . setLevel ( args... | Destroy any ELB related Resources . |
50,136 | def get_security_group_id ( name = '' , env = '' , region = '' ) : vpc_id = get_vpc_id ( env , region ) LOG . info ( 'Find %s sg in %s [%s] in %s' , name , env , region , vpc_id ) url = '{0}/securityGroups/{1}/{2}/{3}?vpcId={4}' . format ( API_URL , env , region , name , vpc_id ) response = requests . get ( url , verif... | Get a security group ID . |
50,137 | def remove_duplicate_sg ( security_groups ) : for each_sg , duplicate_sg_name in SECURITYGROUP_REPLACEMENTS . items ( ) : if each_sg in security_groups and duplicate_sg_name in security_groups : LOG . info ( 'Duplicate SG found. Removing %s in favor of %s.' , duplicate_sg_name , each_sg ) security_groups . remove ( dup... | Removes duplicate Security Groups that share a same name alias |
50,138 | def generate_encoded_user_data ( env = 'dev' , region = 'us-east-1' , generated = None , group_name = '' , pipeline_type = '' , canary = False , ) : r if env in [ "prod" , "prodp" , "prods" ] : env_c , env_p , env_s = "prod" , "prodp" , "prods" else : env_c , env_p , env_s = env , env , env user_data = get_template ( t... | r Generate base64 encoded User Data . |
50,139 | def prepare_infrastructure ( ) : runner = ForemastRunner ( ) runner . write_configs ( ) runner . create_app ( ) archaius = runner . configs [ runner . env ] [ 'app' ] [ 'archaius_enabled' ] eureka = runner . configs [ runner . env ] [ 'app' ] [ 'eureka_enabled' ] deploy_type = runner . configs [ 'pipeline' ] [ 'type' ]... | Entry point for preparing the infrastructure in a specific env . |
50,140 | def prepare_app_pipeline ( ) : runner = ForemastRunner ( ) runner . write_configs ( ) runner . create_app ( ) runner . create_pipeline ( ) runner . cleanup ( ) | Entry point for application setup and initial pipeline in Spinnaker . |
50,141 | def prepare_onetime_pipeline ( ) : runner = ForemastRunner ( ) runner . write_configs ( ) runner . create_pipeline ( onetime = os . getenv ( 'ENV' ) ) runner . cleanup ( ) | Entry point for single use pipeline setup in the defined app . |
50,142 | def write_configs ( self ) : utils . banner ( "Generating Configs" ) if not self . runway_dir : app_configs = configs . process_git_configs ( git_short = self . git_short ) else : app_configs = configs . process_runway_configs ( runway_dir = self . runway_dir ) self . configs = configs . write_variables ( app_configs =... | Generate the configurations needed for pipes . |
50,143 | def create_app ( self ) : utils . banner ( "Creating Spinnaker App" ) spinnakerapp = app . SpinnakerApp ( app = self . app , email = self . email , project = self . group , repo = self . repo , pipeline_config = self . configs [ 'pipeline' ] ) spinnakerapp . create_app ( ) | Create the spinnaker application . |
50,144 | def create_iam ( self ) : utils . banner ( "Creating IAM" ) iam . create_iam_resources ( env = self . env , app = self . app ) | Create IAM resources . |
50,145 | def create_archaius ( self ) : utils . banner ( "Creating S3" ) s3 . init_properties ( env = self . env , app = self . app ) | Create S3 bucket for Archaius . |
50,146 | def create_s3app ( self ) : utils . banner ( "Creating S3 App Infrastructure" ) primary_region = self . configs [ 'pipeline' ] [ 'primary_region' ] s3obj = s3 . S3Apps ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path , primary_region = primary_region ) s3obj . create_bucket ... | Create S3 infra for s3 applications |
50,147 | def deploy_s3app ( self ) : utils . banner ( "Deploying S3 App" ) primary_region = self . configs [ 'pipeline' ] [ 'primary_region' ] s3obj = s3 . S3Deployment ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path , artifact_path = self . artifact_path , artifact_version = self .... | Deploys artifacts contents to S3 bucket |
50,148 | def promote_s3app ( self ) : utils . banner ( "Promoting S3 App" ) primary_region = self . configs [ 'pipeline' ] [ 'primary_region' ] s3obj = s3 . S3Deployment ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path , artifact_path = self . artifact_path , artifact_version = self ... | promotes S3 deployment to LATEST |
50,149 | def create_elb ( self ) : utils . banner ( "Creating ELB" ) elbobj = elb . SpinnakerELB ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path ) elbobj . create_elb ( ) | Create the ELB for the defined environment . |
50,150 | def create_dns ( self ) : utils . banner ( "Creating DNS" ) elb_subnet = self . configs [ self . env ] [ 'elb' ] [ 'subnet_purpose' ] regions = self . configs [ self . env ] [ 'regions' ] failover = self . configs [ self . env ] [ 'dns' ] [ 'failover_dns' ] primary_region = self . configs [ 'pipeline' ] [ 'primary_regi... | Create DNS for the defined app and environment . |
50,151 | def create_autoscaling_policy ( self ) : utils . banner ( "Creating Scaling Policy" ) policyobj = autoscaling_policy . AutoScalingPolicy ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path ) policyobj . create_policy ( ) | Create Scaling Policy for app in environment |
50,152 | def create_datapipeline ( self ) : utils . banner ( "Creating Data Pipeline" ) dpobj = datapipeline . AWSDataPipeline ( app = self . app , env = self . env , region = self . region , prop_path = self . json_path ) dpobj . create_datapipeline ( ) dpobj . set_pipeline_definition ( ) if self . configs [ self . env ] . get... | Creates data pipeline and adds definition |
50,153 | def slack_notify ( self ) : utils . banner ( "Sending slack notification" ) if self . env . startswith ( "prod" ) : notify = slacknotify . SlackNotification ( app = self . app , env = self . env , prop_path = self . json_path ) notify . post_message ( ) else : LOG . info ( "No slack message sent, not production environ... | Send out a slack notification . |
50,154 | def add_debug ( parser ) : parser . add_argument ( '-d' , '--debug' , action = 'store_const' , const = logging . DEBUG , default = logging . INFO , help = 'Set DEBUG output' ) | Add a debug flag to the _parser_ . |
50,155 | def add_env ( parser ) : parser . add_argument ( '-e' , '--env' , choices = ENVS , default = os . getenv ( 'ENV' , default = 'dev' ) , help = 'Deploy environment, overrides $ENV' ) | Add an env flag to the _parser_ . |
50,156 | def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) log = logging . getLogger ( __name__ ) parser = argparse . ArgumentParser ( ) add_debug ( parser ) add_app ( parser ) add_properties ( parser ) add_env ( parser ) add_region ( parser ) args = parser . parse_args ( ) logging . getLogger ( __package__ . spl... | CLI entrypoint for scaling policy creation |
50,157 | def main ( ) : logging . basicConfig ( format = LOGGING_FORMAT ) log = logging . getLogger ( __name__ ) parser = argparse . ArgumentParser ( description = main . __doc__ ) add_debug ( parser ) add_app ( parser ) add_env ( parser ) add_properties ( parser ) add_region ( parser ) args = parser . parse_args ( ) logging . ... | Create Lambda events . |
50,158 | def post_slack_message ( message = None , channel = None , username = None , icon_emoji = None ) : LOG . debug ( 'Slack Channel: %s\nSlack Message: %s' , channel , message ) slack = slacker . Slacker ( SLACK_TOKEN ) try : slack . chat . post_message ( channel = channel , text = message , username = username , icon_emoj... | Format the message and post to the appropriate slack channel . |
50,159 | def destroy_dns ( app = '' , env = 'dev' , ** _ ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) generated = get_details ( app = app , env = env ) record = generated . dns_elb ( ) zone_ids = get_dns_zone_ids ( env = env , facing = 'external' ) for zone_id in zone_ids : record_sets = client . l... | Destroy DNS records . |
50,160 | def destroy_record ( client = None , found_record = None , record = '' , zone_id = '' ) : LOG . debug ( 'Found DNS record: %s' , found_record ) if found_record [ 'Name' ] . strip ( '.' ) == record : dns_json = get_template ( template_file = 'destroy/destroy_dns.json.j2' , record = json . dumps ( found_record ) ) dns_di... | Destroy an individual DNS record . |
50,161 | def create_sns_event ( app_name , env , region , rules ) : session = boto3 . Session ( profile_name = env , region_name = region ) sns_client = session . client ( 'sns' ) topic_name = rules . get ( 'topic' ) lambda_alias_arn = get_lambda_alias_arn ( app = app_name , account = env , region = region ) topic_arn = get_sns... | Create SNS lambda event from rules . |
50,162 | def destroy_sns_event ( app_name , env , region ) : session = boto3 . Session ( profile_name = env , region_name = region ) sns_client = session . client ( 'sns' ) lambda_subscriptions = get_sns_subscriptions ( app_name = app_name , env = env , region = region ) for subscription_arn in lambda_subscriptions : sns_client... | Destroy all Lambda SNS subscriptions . |
50,163 | def destroy_elb ( app = '' , env = 'dev' , region = 'us-east-1' , ** _ ) : task_json = get_template ( template_file = 'destroy/destroy_elb.json.j2' , app = app , env = env , region = region , vpc = get_vpc_id ( account = env , region = region ) ) wait_for_task ( task_json ) return True | Destroy ELB Resources . |
50,164 | def delete_pipeline ( app = '' , pipeline_name = '' ) : safe_pipeline_name = normalize_pipeline_name ( name = pipeline_name ) LOG . warning ( 'Deleting Pipeline: %s' , safe_pipeline_name ) url = '{host}/pipelines/{app}/{pipeline}' . format ( host = API_URL , app = app , pipeline = safe_pipeline_name ) response = reques... | Delete _pipeline_name_ from _app_ . |
50,165 | def clean_pipelines ( app = '' , settings = None ) : pipelines = get_all_pipelines ( app = app ) envs = settings [ 'pipeline' ] [ 'env' ] LOG . debug ( 'Find Regions in: %s' , envs ) regions = set ( ) for env in envs : try : regions . update ( settings [ env ] [ 'regions' ] ) except KeyError : error_msg = 'Missing "{}/... | Delete Pipelines for regions not defined in application . json files . |
50,166 | def extract_formats ( config_handle ) : configurations = dict ( config_handle ) formats = dict ( configurations . get ( 'formats' , { } ) ) return formats | Get application formats . |
50,167 | def load_dynamic_config ( config_file = DEFAULT_DYNAMIC_CONFIG_FILE ) : dynamic_configurations = { } sys . path . insert ( 0 , path . dirname ( path . abspath ( config_file ) ) ) try : config_module = __import__ ( 'config' ) dynamic_configurations = config_module . CONFIG except ImportError : LOG . error ( 'ImportError... | Load and parse dynamic config |
50,168 | def _remove_empty_entries ( entries ) : valid_entries = [ ] for entry in set ( entries ) : if entry : valid_entries . append ( entry ) return sorted ( valid_entries ) | Remove empty entries in a list |
50,169 | def _convert_string_to_native ( value ) : result = None try : result = ast . literal_eval ( str ( value ) ) except ( SyntaxError , ValueError ) : result = value . split ( ',' ) return result | Convert a string to its native python type |
50,170 | def _generate_security_groups ( config_key ) : raw_default_groups = validate_key_values ( CONFIG , 'base' , config_key , default = '' ) default_groups = _convert_string_to_native ( raw_default_groups ) LOG . debug ( 'Default security group for %s is %s' , config_key , default_groups ) entries = { } for env in ENVS : en... | Read config file and generate security group dict by environment . |
50,171 | def create_datapipeline ( self ) : tags = [ { "key" : "app_group" , "value" : self . group } , { "key" : "app_name" , "value" : self . app_name } ] response = self . client . create_pipeline ( name = self . datapipeline_data . get ( 'name' , self . app_name ) , uniqueId = self . app_name , description = self . datapipe... | Creates the data pipeline if it does not already exist |
50,172 | def set_pipeline_definition ( self ) : if not self . pipeline_id : self . get_pipeline_id ( ) json_def = self . datapipeline_data [ 'json_definition' ] try : pipelineobjects = translator . definition_to_api_objects ( json_def ) parameterobjects = translator . definition_to_api_parameters ( json_def ) parametervalues = ... | Translates the json definition and puts it on created pipeline |
50,173 | def get_pipeline_id ( self ) : all_pipelines = [ ] paginiator = self . client . get_paginator ( 'list_pipelines' ) for page in paginiator . paginate ( ) : all_pipelines . extend ( page [ 'pipelineIdList' ] ) for pipeline in all_pipelines : if pipeline [ 'name' ] == self . datapipeline_data . get ( 'name' , self . app_n... | Finds the pipeline ID for configured pipeline |
50,174 | def activate_pipeline ( self ) : self . client . activate_pipeline ( pipelineId = self . pipeline_id ) LOG . info ( "Activated Pipeline %s" , self . pipeline_id ) | Activates a deployed pipeline useful for OnDemand pipelines |
50,175 | def get_dns_zone_ids ( env = 'dev' , facing = 'internal' ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) zones = client . list_hosted_zones_by_name ( DNSName = '.' . join ( [ env , DOMAIN ] ) ) zone_ids = [ ] for zone in zones [ 'HostedZones' ] : LOG . debug ( 'Found Hosted Zone: %s' , zone )... | Get Route 53 Hosted Zone IDs for _env_ . |
50,176 | def update_dns_zone_record ( env , zone_id , ** kwargs ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) response = { } hosted_zone_info = client . get_hosted_zone ( Id = zone_id ) zone_name = hosted_zone_info [ 'HostedZone' ] [ 'Name' ] . rstrip ( '.' ) dns_name = kwargs . get ( 'dns_name' ) i... | Create a Route53 CNAME record in _env_ zone . |
50,177 | def find_existing_record ( env , zone_id , dns_name , check_key = None , check_value = None ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) pager = client . get_paginator ( 'list_resource_record_sets' ) existingrecord = None for rset in pager . paginate ( HostedZoneId = zone_id ) : for record... | Check if a specific DNS record exists . |
50,178 | def delete_existing_cname ( env , zone_id , dns_name ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) startrecord = None newrecord_name = dns_name startrecord = find_existing_record ( env , zone_id , newrecord_name , check_key = 'Type' , check_value = 'CNAME' ) if startrecord : LOG . info ( "D... | Delete an existing CNAME record . |
50,179 | def update_failover_dns_record ( env , zone_id , ** kwargs ) : client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) response = { } hosted_zone_info = client . get_hosted_zone ( Id = zone_id ) zone_name = hosted_zone_info [ 'HostedZone' ] [ 'Name' ] . rstrip ( '.' ) dns_name = kwargs . get ( 'dns_name'... | Create a Failover Route53 alias record in _env_ zone . |
50,180 | def create_cloudwatch_log_event ( app_name , env , region , rules ) : session = boto3 . Session ( profile_name = env , region_name = region ) cloudwatch_client = session . client ( 'logs' ) log_group = rules . get ( 'log_group' ) filter_name = rules . get ( 'filter_name' ) filter_pattern = rules . get ( 'filter_pattern... | Create cloudwatch log event for lambda from rules . |
50,181 | def prepare_policy_template ( self , scaling_type , period_sec , server_group ) : template_kwargs = { 'app' : self . app , 'env' : self . env , 'region' : self . region , 'server_group' : server_group , 'period_sec' : period_sec , 'scaling_policy' : self . settings [ 'asg' ] [ 'scaling_policy' ] , } if scaling_type == ... | Renders scaling policy templates based on configs and variables . After rendering POSTs the json to Spinnaker for creation . |
50,182 | def create_policy ( self ) : if not self . settings [ 'asg' ] [ 'scaling_policy' ] : self . log . info ( "No scaling policy found, skipping..." ) return server_group = self . get_server_group ( ) scaling_policies = self . get_all_existing ( server_group ) for policy in scaling_policies : for subpolicy in policy : self ... | Wrapper function . Gets the server group , sets sane defaults , deletes existing policies , and then runs self . prepare_policy_template for scaling up and scaling down policies . This function acts as the main driver for the scaling policy creation process |
50,183 | def get_server_group ( self ) : api_url = "{0}/applications/{1}" . format ( API_URL , self . app ) response = requests . get ( api_url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) for server_group in response . json ( ) [ 'clusters' ] [ self . env ] : return server_group [ 'serverGroups' ] [ - 1 ] | Finds the most recently deployed server group for the application . This is the server group that the scaling policy will be applied to . |
50,184 | def delete_existing_policy ( self , scaling_policy , server_group ) : self . log . info ( "Deleting policy %s on %s" , scaling_policy [ 'policyName' ] , server_group ) delete_dict = { "application" : self . app , "description" : "Delete scaling policy" , "job" : [ { "policyName" : scaling_policy [ 'policyName' ] , "ser... | Given a scaling_policy and server_group deletes the existing scaling_policy . Scaling policies need to be deleted instead of upserted for consistency . |
50,185 | def get_all_existing ( self , server_group ) : self . log . info ( "Checking for existing scaling policy" ) url = "{0}/applications/{1}/clusters/{2}/{1}/serverGroups" . format ( API_URL , self . app , self . env ) response = requests . get ( url , verify = GATE_CA_BUNDLE , cert = GATE_CLIENT_CERT ) assert response . ok... | Finds all existing scaling policies for an application |
50,186 | def destroy_cloudwatch_event ( app = '' , env = 'dev' , region = '' ) : session = boto3 . Session ( profile_name = env , region_name = region ) cloudwatch_client = session . client ( 'events' ) event_rules = get_cloudwatch_event_rule ( app_name = app , account = env , region = region ) for rule in event_rules : cloudwa... | Destroy Cloudwatch event subscription . |
50,187 | def create_bucket ( self ) : bucket_exists = self . _bucket_exists ( ) if self . s3props . get ( 'shared_bucket_target' ) : if bucket_exists : LOG . info ( 'App uses shared bucket - %s ' , self . bucket ) else : LOG . error ( "Shared bucket %s does not exist" , self . bucket ) raise S3SharedBucketNotFound else : if sel... | Create or update bucket based on app name . |
def _bucket_exists(self):
    """Check if the bucket exists.

    Probes ``self.bucket`` with a ``get_bucket_location`` call.

    Returns:
        bool: True when the bucket location lookup succeeds, False on
        any S3 client error.
    """
    try:
        self.s3client.get_bucket_location(Bucket=self.bucket)
    except ClientError as error:
        # A missing bucket (and e.g. access denied) surfaces here as a
        # ClientError; log it and report the bucket as absent.
        LOG.error(error)
        return False
    return True
50,189 | def _put_bucket_policy ( self ) : if self . s3props [ 'bucket_policy' ] : policy_str = json . dumps ( self . s3props [ 'bucket_policy' ] ) _response = self . s3client . put_bucket_policy ( Bucket = self . bucket , Policy = policy_str ) else : _response = self . s3client . delete_bucket_policy ( Bucket = self . bucket )... | Attach a bucket policy to app bucket . |
50,190 | def _put_bucket_website ( self ) : if self . s3props [ 'website' ] [ 'enabled' ] : website_config = { 'ErrorDocument' : { 'Key' : self . s3props [ 'website' ] [ 'error_document' ] } , 'IndexDocument' : { 'Suffix' : self . s3props [ 'website' ] [ 'index_suffix' ] } } _response = self . s3client . put_bucket_website ( Bu... | Configure static website on S3 bucket . |
50,191 | def _set_bucket_dns ( self ) : dotformat_regions = [ "eu-west-2" , "eu-central-1" , "ap-northeast-2" , "ap-south-1" , "ca-central-1" , "us-east-2" ] if self . region in dotformat_regions : s3_endpoint = "{0}.s3-website.{1}.amazonaws.com" . format ( self . bucket , self . region ) else : s3_endpoint = "{0}.s3-website-{1... | Create CNAME for S3 endpoint . |
50,192 | def _put_bucket_cors ( self ) : if self . s3props [ 'cors' ] [ 'enabled' ] and self . s3props [ 'website' ] [ 'enabled' ] : cors_config = { } cors_rules = [ ] for each_rule in self . s3props [ 'cors' ] [ 'cors_rules' ] : cors_rules . append ( { 'AllowedHeaders' : each_rule [ 'cors_headers' ] , 'AllowedMethods' : each_r... | Adds bucket cors configuration . |
50,193 | def _put_bucket_encryption ( self ) : if self . s3props [ 'encryption' ] [ 'enabled' ] : encryption_config = { 'Rules' : [ { } ] } encryption_config = { 'Rules' : self . s3props [ 'encryption' ] [ 'encryption_rules' ] } LOG . debug ( encryption_config ) _response = self . s3client . put_bucket_encryption ( Bucket = sel... | Adds bucket encryption configuration . |
50,194 | def _put_bucket_lifecycle ( self ) : status = 'deleted' if self . s3props [ 'lifecycle' ] [ 'enabled' ] : lifecycle_config = { 'Rules' : self . s3props [ 'lifecycle' ] [ 'lifecycle_rules' ] } LOG . debug ( 'Lifecycle Config: %s' , lifecycle_config ) _response = self . s3client . put_bucket_lifecycle_configuration ( Buc... | Adds bucket lifecycle configuration . |
50,195 | def _put_bucket_logging ( self ) : logging_config = { } if self . s3props [ 'logging' ] [ 'enabled' ] : logging_config = { 'LoggingEnabled' : { 'TargetBucket' : self . s3props [ 'logging' ] [ 'logging_bucket' ] , 'TargetGrants' : self . s3props [ 'logging' ] [ 'logging_grants' ] , 'TargetPrefix' : self . s3props [ 'log... | Adds bucket logging policy to bucket for s3 access requests |
50,196 | def _put_bucket_tagging ( self ) : all_tags = self . s3props [ 'tagging' ] [ 'tags' ] all_tags . update ( { 'app_group' : self . group , 'app_name' : self . app_name } ) tag_set = generate_s3_tags . generated_tag_data ( all_tags ) tagging_config = { 'TagSet' : tag_set } self . s3client . put_bucket_tagging ( Bucket = s... | Add bucket tags to bucket . |
50,197 | def _put_bucket_versioning ( self ) : status = 'Suspended' if self . s3props [ 'versioning' ] [ 'enabled' ] : status = 'Enabled' versioning_config = { 'MFADelete' : self . s3props [ 'versioning' ] [ 'mfa_delete' ] , 'Status' : status } _response = self . s3client . put_bucket_versioning ( Bucket = self . bucket , Versi... | Adds bucket versioning policy to bucket |
50,198 | def process_git_configs ( git_short = '' ) : LOG . info ( 'Processing application.json files from GitLab "%s".' , git_short ) file_lookup = FileLookup ( git_short = git_short ) app_configs = process_configs ( file_lookup , RUNWAY_BASE_PATH + '/application-master-{env}.json' , RUNWAY_BASE_PATH + '/pipeline.json' ) commi... | Retrieve _application . json_ files from GitLab . |
def process_runway_configs(runway_dir=''):
    """Read the _application.json_ files from a local directory.

    Args:
        runway_dir (str): Path to the directory holding the
            ``application-master-{env}.json`` and ``pipeline.json`` files.

    Returns:
        The merged application configuration produced by
        :func:`process_configs`.
    """
    LOG.info('Processing application.json files from local directory "%s".', runway_dir)
    lookup = FileLookup(runway_dir=runway_dir)
    # Local lookups use bare filenames (no RUNWAY_BASE_PATH prefix),
    # unlike the GitLab variant.
    return process_configs(lookup, 'application-master-{env}.json', 'pipeline.json')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.