idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
49,100
def _build_js ( inputs , outputs , name , implementation , support_code ) : input_fields = json . dumps ( [ f [ 0 ] for f in inputs ] ) output_fields = [ { 'name' : f [ 0 ] , 'type' : f [ 1 ] } for f in outputs ] output_fields = json . dumps ( output_fields , sort_keys = True ) if support_code is None : support_code = '' return ( '{code}\n{name}={implementation};\nbigquery.defineFunction(\'{name}\', {inputs}, ' '{outputs}, {name});' ) . format ( code = support_code , name = name , implementation = implementation , inputs = str ( input_fields ) , outputs = str ( output_fields ) )
Creates a BigQuery SQL UDF javascript object .
49,101
def sampling_query(sql, fields=None, count=5, sampling=None):
    """Returns a sampling query for the SQL object.

    When no sampling strategy is supplied, the default strategy for
    `count` rows over `fields` is used.
    """
    chosen = Sampling.default(count=count, fields=fields) if sampling is None else sampling
    return chosen(sql)
Returns a sampling query for the SQL object .
49,102
def _remove_nonascii(self, df):
    """Make a deep copy of *df* and strip non-ASCII characters from it.

    Only object-dtype columns are touched, and only string cells within
    them; other values pass through unchanged.
    """
    cleaned = df.copy(deep=True)
    non_ascii = re.compile(r'[^\x00-\x7f]')
    for column in cleaned.columns:
        if cleaned[column].dtype == np.dtype('O'):
            cleaned[column] = df[column].apply(
                lambda cell: non_ascii.sub(r'', cell)
                if isinstance(cell, six.string_types) else cell)
    return cleaned
Make copy and remove non - ascii characters from it .
49,103
# Plots an overview of a {name: DataFrame} mapping using the Facets Overview
# visualization; returns an IPython HTML object embedding the rendered widget.
# Raises ValueError unless `data` is a dict whose values are all DataFrames.
# NOTE(review): `HTML_TEMPLATE = html = HTML_TEMPLATE.format(...)` reads
# HTML_TEMPLATE before it is ever assigned — the template string literal
# appears to have been lost when this file was flattened; restore the
# original template before this function can run.
def plot ( self , data ) : import IPython if not isinstance ( data , dict ) or not all ( isinstance ( v , pd . DataFrame ) for v in data . values ( ) ) : raise ValueError ( 'Expect a dictionary where the values are all dataframes.' ) gfsg = GenericFeatureStatisticsGenerator ( ) data = [ { 'name' : k , 'table' : self . _remove_nonascii ( v ) } for k , v in six . iteritems ( data ) ] data_proto = gfsg . ProtoFromDataFrames ( data ) protostr = base64 . b64encode ( data_proto . SerializeToString ( ) ) . decode ( "utf-8" ) html_id = 'f' + datalab . utils . commands . Html . next_id ( ) HTML_TEMPLATE = html = HTML_TEMPLATE . format ( html_id = html_id , protostr = protostr ) return IPython . core . display . HTML ( html )
Plots an overview in a list of dataframes
49,104
# Plots a detail (Facets Dive) view of a single DataFrame; refuses more than
# 10000 rows unless `render_large_data` is set. Returns an IPython HTML object.
# NOTE(review): as in the overview plot above this line, the statement
# `HTML_TEMPLATE = html = HTML_TEMPLATE.format(...)` reads HTML_TEMPLATE
# before assignment — the template literal was lost in flattening and must
# be restored before use.
def plot ( self , data , height = 1000 , render_large_data = False ) : import IPython if not isinstance ( data , pd . DataFrame ) : raise ValueError ( 'Expect a DataFrame.' ) if ( len ( data ) > 10000 and not render_large_data ) : raise ValueError ( 'Facets dive may not work well with more than 10000 rows. ' + 'Reduce data or set "render_large_data" to True.' ) jsonstr = data . to_json ( orient = 'records' ) html_id = 'f' + datalab . utils . commands . Html . next_id ( ) HTML_TEMPLATE = html = HTML_TEMPLATE . format ( html_id = html_id , jsonstr = jsonstr , height = height ) return IPython . core . display . HTML ( html )
Plots a detail view of data .
49,105
def DtypeToType(self, dtype):
    """Converts a numpy dtype to the FeatureNameStatistics.Type proto enum.

    Floats map to FLOAT; integers, booleans and date/time deltas map to
    INT; everything else (objects, strings) maps to STRING.
    """
    if dtype.char in np.typecodes['AllFloat']:
        return self.fs_proto.FLOAT
    # np.bool (an alias of the builtin bool) was removed in NumPy 1.24;
    # np.bool_ matches the same dtypes and works on all NumPy versions.
    elif (dtype.char in np.typecodes['AllInteger'] or
          dtype == np.bool_ or
          np.issubdtype(dtype, np.datetime64) or
          np.issubdtype(dtype, np.timedelta64)):
        return self.fs_proto.INT
    else:
        return self.fs_proto.STRING
Converts a Numpy dtype to the FeatureNameStatistics . Type proto enum .
49,106
def NdarrayToEntry(self, x):
    """Converts an ndarray to the Entry format.

    Returns a dict with the kept values, per-row non-missing counts, the
    number of missing values, and the proto type enum for the dtype.
    """
    # Count non-NaN entries per row; non-numeric rows fall back to their
    # size, and scalars count as one.
    row_counts = []
    for row in x:
        try:
            non_nan = np.count_nonzero(~np.isnan(row))
            if non_nan != 0:
                row_counts.append(non_nan)
        except TypeError:
            try:
                row_counts.append(row.size)
            except AttributeError:
                row_counts.append(1)
    data_type = self.DtypeToType(x.dtype)
    converter = self.DtypeToNumberConverter(x.dtype)
    flattened = x.ravel()
    orig_size = len(flattened)
    # Drop None entries before any conversion.
    flattened = flattened[flattened != np.array(None)]
    if converter:
        flattened = converter(flattened)
    if data_type == self.fs_proto.STRING:
        kept = []
        for value in flattened:
            try:
                if str(value) != 'nan':
                    kept.append(value)
            except UnicodeEncodeError:
                if value.encode('utf-8') != 'nan':
                    kept.append(value)
        flattened = kept
    else:
        flattened = flattened[~np.isnan(flattened)].tolist()
    missing = orig_size - len(flattened)
    return {'vals': flattened,
            'counts': row_counts,
            'missing': missing,
            'type': data_type}
Converts an ndarray to the Entry format .
49,107
def serving_from_csv_input(train_config, args, keep_target):
    """Read the input features from a placeholder csv string tensor."""
    examples = tf.placeholder(dtype=tf.string,
                              shape=(None,),
                              name='csv_input_string')
    features = parse_example_tensor(examples=examples,
                                    train_config=train_config,
                                    keep_target=keep_target)
    # The target column is only present when requested.
    target = features.pop(train_config['target_column']) if keep_target else None
    features, target = preprocess_input(
        features=features,
        target=target,
        train_config=train_config,
        preprocess_output_dir=args.preprocess_output_dir,
        model_type=args.model_type)
    return input_fn_utils.InputFnOps(features, target, {'csv_line': examples})
Read the input features from a placeholder csv string tensor .
49,108
def parse_example_tensor(examples, train_config, keep_target):
    """Read the csv files.

    Decodes a batch of csv lines into a {column_name: tensor} dict; the
    target column is excluded unless keep_target is set.
    """
    if keep_target:
        csv_header = train_config['csv_header']
    else:
        target = train_config['target_column']
        csv_header = [name for name in train_config['csv_header'] if name != target]
    # One single-element default list per decoded column.
    record_defaults = [[train_config['csv_defaults'][name]] for name in csv_header]
    decoded = tf.decode_csv(examples, record_defaults, name='csv_to_tensors')
    # Give each tensor an explicit batch-of-1 column dimension.
    expanded = [tf.expand_dims(t, axis=1) for t in decoded]
    return dict(zip(csv_header, expanded))
Read the csv files .
49,109
def get_estimator(output_dir, train_config, args):
    """Returns a tf.learn estimator for the configured model type.

    Validates that the target column's type matches the model family and
    that layer sizes are given exactly for DNN models, then builds one of
    the four supported tf.contrib.learn estimators.
    """
    target_name = train_config['target_column']
    if (is_classification_model(args.model_type) and
            target_name not in train_config['categorical_columns']):
        raise ValueError('When using a classification model, the target must be a '
                         'categorical variable.')
    if (is_regression_model(args.model_type) and
            target_name not in train_config['numerical_columns']):
        raise ValueError('When using a regression model, the target must be a '
                         'numerical variable.')
    if is_dnn_model(args.model_type) and not args.layer_sizes:
        raise ValueError('--layer-size* must be used with DNN models')
    if is_linear_model(args.model_type) and args.layer_sizes:
        raise ValueError('--layer-size* cannot be used with linear models')

    feature_columns = _tflearn_features(train_config, args)
    config = tf.contrib.learn.RunConfig(
        save_checkpoints_secs=args.save_checkpoints_secs)
    train_dir = os.path.join(output_dir, 'train')

    model_type = args.model_type
    if model_type == 'dnn_regression':
        estimator = tf.contrib.learn.DNNRegressor(
            feature_columns=feature_columns,
            hidden_units=args.layer_sizes,
            config=config,
            model_dir=train_dir,
            optimizer=tf.train.AdamOptimizer(args.learning_rate,
                                             epsilon=args.epsilon))
    elif model_type == 'linear_regression':
        estimator = tf.contrib.learn.LinearRegressor(
            feature_columns=feature_columns,
            config=config,
            model_dir=train_dir,
            optimizer=tf.train.AdamOptimizer(args.learning_rate,
                                             epsilon=args.epsilon))
    elif model_type == 'dnn_classification':
        estimator = tf.contrib.learn.DNNClassifier(
            feature_columns=feature_columns,
            hidden_units=args.layer_sizes,
            n_classes=train_config['vocab_stats'][target_name]['n_classes'],
            config=config,
            model_dir=train_dir,
            optimizer=tf.train.AdamOptimizer(args.learning_rate,
                                             epsilon=args.epsilon))
    elif model_type == 'linear_classification':
        estimator = tf.contrib.learn.LinearClassifier(
            feature_columns=feature_columns,
            n_classes=train_config['vocab_stats'][target_name]['n_classes'],
            config=config,
            model_dir=train_dir,
            optimizer=tf.train.AdamOptimizer(args.learning_rate,
                                             epsilon=args.epsilon))
    else:
        raise ValueError('bad --model-type value')
    return estimator
Returns a tf learn estimator .
49,110
def preprocess_input(features, target, train_config, preprocess_output_dir, model_type):
    """Perform some transformations after reading in the input tensors.

    Scales numerical columns per the preprocessing analysis file, maps the
    categorical target to vocabulary indices, and maps categorical feature
    columns to indices where the model's transform requires it.
    """
    target_name = train_config['target_column']
    key_name = train_config['key_column']

    with tf.name_scope('numerical_feature_preprocess'):
        if train_config['numerical_columns']:
            numerical_analysis_file = os.path.join(preprocess_output_dir,
                                                   NUMERICAL_ANALYSIS)
            if not file_io.file_exists(numerical_analysis_file):
                raise ValueError('File %s not found in %s' %
                                 (NUMERICAL_ANALYSIS, preprocess_output_dir))
            numerical_analysis = json.loads(python_portable_string(
                file_io.read_file_to_string(numerical_analysis_file)))
            for name in train_config['numerical_columns']:
                if name in (target_name, key_name):
                    continue
                transform_config = train_config['transforms'].get(name, {})
                transform_name = transform_config.get('transform', None)
                if transform_name == 'scale':
                    value = float(transform_config.get('value', 1.0))
                    features[name] = _scale_tensor(
                        features[name],
                        range_min=numerical_analysis[name]['min'],
                        range_max=numerical_analysis[name]['max'],
                        scale_min=-value,
                        scale_max=value)
                elif transform_name == 'identity' or transform_name is None:
                    pass
                else:
                    raise ValueError(('For numerical variables, only scale '
                                      'and identity are supported: '
                                      'Error for %s') % name)

    if target is not None:
        with tf.name_scope('target_feature_preprocess'):
            if target_name in train_config['categorical_columns']:
                labels = train_config['vocab_stats'][target_name]['labels']
                table = tf.contrib.lookup.string_to_index_table_from_tensor(labels)
                target = table.lookup(target)

    with tf.name_scope('categorical_feature_preprocess'):
        for name in train_config['categorical_columns']:
            if name in (key_name, target_name):
                continue
            transform_config = train_config['transforms'].get(name, {})
            transform_name = transform_config.get('transform', None)
            # DNN models always map to vocabulary indices; linear models
            # skip the mapping for embedding (hash-bucket) columns.
            if is_dnn_model(model_type):
                if (transform_name in ('embedding', 'one_hot') or
                        transform_name is None):
                    map_vocab = True
                else:
                    raise ValueError('Unknown transform %s' % transform_name)
            elif is_linear_model(model_type):
                if transform_name == 'one_hot' or transform_name is None:
                    map_vocab = True
                elif transform_name == 'embedding':
                    map_vocab = False
                else:
                    raise ValueError('Unknown transform %s' % transform_name)
            if map_vocab:
                labels = train_config['vocab_stats'][name]['labels']
                table = tf.contrib.lookup.string_to_index_table_from_tensor(labels)
                features[name] = table.lookup(features[name])

    return features, target
Perform some transformations after reading in the input tensors .
49,111
def _scale_tensor(tensor, range_min, range_max, scale_min, scale_max):
    """Linearly scale a tensor from [range_min, range_max] to [scale_min, scale_max].

    A degenerate input range (min == max) returns the tensor unchanged.
    """
    if range_min == range_max:
        return tensor
    as_float = tf.to_float(tensor)
    out_span = tf.constant(float(scale_max - scale_min))
    in_span = tf.constant(float(range_max - range_min))
    scaled = tf.divide(tf.subtract(as_float, range_min) * out_span, in_span)
    return scaled + tf.constant(float(scale_min))
Scale a tensor to scale_min to scale_max .
49,112
def _tflearn_features(train_config, args):
    """Builds the tf.learn feature list.

    Numerical columns become real-valued columns; categorical columns
    become embedding/one-hot (DNN) or integerized/hash-bucket (linear)
    columns depending on the configured transform.
    """
    feature_columns = []
    target_name = train_config['target_column']
    key_name = train_config['key_column']

    for name in train_config['numerical_columns']:
        if name not in (target_name, key_name):
            feature_columns.append(
                tf.contrib.layers.real_valued_column(name, dimension=1))

    for name in train_config['categorical_columns']:
        if name in (target_name, key_name):
            continue
        transform_config = train_config['transforms'].get(name, {})
        transform_name = transform_config.get('transform', None)
        if is_dnn_model(args.model_type):
            if transform_name == 'embedding':
                sparse = tf.contrib.layers.sparse_column_with_integerized_feature(
                    name,
                    bucket_size=train_config['vocab_stats'][name]['n_classes'])
                learn_feature = tf.contrib.layers.embedding_column(
                    sparse, dimension=transform_config['embedding_dim'])
            elif transform_name == 'one_hot' or transform_name is None:
                sparse = tf.contrib.layers.sparse_column_with_integerized_feature(
                    name,
                    bucket_size=train_config['vocab_stats'][name]['n_classes'])
                learn_feature = tf.contrib.layers.one_hot_column(sparse)
            else:
                raise ValueError(("Unknown transform name. Only 'embedding' "
                                  "and 'one_hot' transforms are supported. Got %s")
                                 % transform_name)
        elif is_linear_model(args.model_type):
            if transform_name == 'one_hot' or transform_name is None:
                learn_feature = tf.contrib.layers.sparse_column_with_integerized_feature(
                    name,
                    bucket_size=train_config['vocab_stats'][name]['n_classes'])
            elif transform_name == 'embedding':
                learn_feature = tf.contrib.layers.sparse_column_with_hash_bucket(
                    name, hash_bucket_size=transform_config['embedding_dim'])
            else:
                raise ValueError(("Unknown transform name. Only 'embedding' "
                                  "and 'one_hot' transforms are supported. Got %s")
                                 % transform_name)
        feature_columns.append(learn_feature)

    return feature_columns
Builds the tf . learn feature list .
49,113
def get_vocabulary(preprocess_output_dir, name):
    """Loads the vocabulary file for a column as a list of strings.

    Raises ValueError when the analysis file for the column is missing;
    empty lines are dropped.
    """
    vocab_file = os.path.join(preprocess_output_dir, CATEGORICAL_ANALYSIS % name)
    if not file_io.file_exists(vocab_file):
        raise ValueError('File %s not found in %s' %
                         (CATEGORICAL_ANALYSIS % name, preprocess_output_dir))
    raw = python_portable_string(file_io.read_file_to_string(vocab_file))
    return [line for line in raw.split('\n') if line]
Loads the vocabulary file as a list of strings .
49,114
def validate_metadata(train_config):
    """Perform some checks that the training config is correct.

    Verifies the csv header and defaults agree in length, and that every
    column is accounted for as numerical, categorical, key, or target.
    Raises ValueError on any inconsistency.
    """
    header = train_config['csv_header']
    if len(header) != len(train_config['csv_defaults']):
        raise ValueError('Unequal number of columns in input features file and '
                         'schema file.')
    expected = sorted(header + [train_config['target_column']])
    actual = sorted(train_config['categorical_columns'] +
                    train_config['numerical_columns'] +
                    [train_config['key_column']] +
                    [train_config['target_column']])
    if actual != expected:
        raise ValueError('Each csv header must be a numerical/categorical type, a '
                         ' key, or a target.')
Perform some checks that the training config is correct.
49,115
def get_default_id(credentials=None):
    """Get the default project id.

    Falls back to the single accessible project when no default is set;
    returns None when neither is available.
    """
    project_id = _utils.get_project_id()
    if project_id is not None:
        return project_id
    projects, _ = Projects(credentials)._retrieve_projects(None, 2)
    if len(projects) == 1:
        project_id = projects[0].id
    return project_id
Get default project id .
49,116
# Prepare the Flask application for Flask-Split: seeds config defaults,
# exposes ab_test/finished to templates, and registers a `percentage`
# template filter.
# NOTE(review): the bare name `r` passed as the SPLIT_ROBOT_REGEX default is
# not defined anywhere visible — the raw-string regex literal appears to have
# been lost when this file was flattened; restore it before this code can run.
def init_app ( state ) : app = state . app app . config . setdefault ( 'SPLIT_ALLOW_MULTIPLE_EXPERIMENTS' , False ) app . config . setdefault ( 'SPLIT_DB_FAILOVER' , False ) app . config . setdefault ( 'SPLIT_IGNORE_IP_ADDRESSES' , [ ] ) app . config . setdefault ( 'SPLIT_ROBOT_REGEX' , r ) app . jinja_env . globals . update ( { 'ab_test' : ab_test , 'finished' : finished } ) @ app . template_filter ( ) def percentage ( number ) : number *= 100 if abs ( number ) < 10 : return "%.1f%%" % round ( number , 1 ) else : return "%d%%" % round ( number )
Prepare the Flask application for Flask - Split .
49,117
def finished(experiment_name, reset=True):
    """Track a conversion for the visitor's alternative in an experiment.

    Increments the completion counter at most once per visitor; with
    `reset` the visitor's assignment is cleared so they can re-enter.
    Redis connection errors are swallowed when SPLIT_DB_FAILOVER is set.
    """
    if _exclude_visitor():
        return
    redis = _get_redis_connection()
    try:
        experiment = Experiment.find(redis, experiment_name)
        if not experiment:
            return
        alternative_name = _get_session().get(experiment.key)
        if alternative_name:
            split_finished = set(session.get('split_finished', []))
            # Only count the first completion for this visitor.
            if experiment.key not in split_finished:
                alternative = Alternative(redis, alternative_name, experiment_name)
                alternative.increment_completion()
            if reset:
                _get_session().pop(experiment.key, None)
                try:
                    split_finished.remove(experiment.key)
                except KeyError:
                    pass
            else:
                split_finished.add(experiment.key)
            session['split_finished'] = list(split_finished)
    except ConnectionError:
        if not current_app.config['SPLIT_DB_FAILOVER']:
            raise
Track a conversion .
49,118
def _is_robot():
    """Return a truthy match if the current visitor's user agent looks like
    a robot or spider, or None otherwise."""
    pattern = current_app.config['SPLIT_ROBOT_REGEX']
    agent = request.headers.get('User-Agent', '')
    return re.search(pattern, agent, flags=re.VERBOSE)
Return True if the current visitor is a robot or spider or False otherwise .
49,119
def start_time(self):
    """The start time of this experiment, or None when never recorded."""
    raw = self.redis.hget('experiment_start_times', self.name)
    if raw:
        return datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S')
    return None
The start time of this experiment .
49,120
def reset(self):
    """Delete all data for this experiment: clear every alternative,
    drop the recorded winner, and bump the experiment version."""
    for alternative in self.alternatives:
        alternative.reset()
    self.reset_winner()
    self.increment_version()
Delete all data for this experiment .
49,121
def delete(self):
    """Delete this experiment and all its data, including every
    alternative, the winner, and the experiment's Redis entries."""
    for alternative in self.alternatives:
        alternative.delete()
    self.reset_winner()
    self.redis.srem('experiments', self.name)
    self.redis.delete(self.name)
    self.increment_version()
Delete this experiment and all its data .
49,122
def _get_redis_connection():
    """Return a Redis connection based on the Flask application's configuration."""
    redis_url = current_app.config.get('REDIS_URL', 'redis://localhost:6379')
    return redis.from_url(redis_url, decode_responses=True)
Return a Redis connection based on the Flask application's configuration.
49,123
def set_experiment_winner(experiment):
    """Mark an alternative as the winner of the experiment, then redirect
    back to the dashboard index."""
    conn = _get_redis_connection()
    found = Experiment.find(conn, experiment)
    if found:
        alternative_name = request.form.get('alternative')
        alternative = Alternative(conn, alternative_name, found.name)
        # Only accept names that actually belong to the experiment.
        if alternative.name in found.alternative_names:
            found.winner = alternative.name
    return redirect(url_for('.index'))
Mark an alternative as the winner of the experiment .
49,124
def reset_experiment(experiment):
    """Delete all data for an experiment, then redirect to the dashboard index."""
    conn = _get_redis_connection()
    found = Experiment.find(conn, experiment)
    if found:
        found.reset()
    return redirect(url_for('.index'))
Delete all data for an experiment .
49,125
def delete_experiment(experiment):
    """Delete an experiment and all its data, then redirect to the dashboard index."""
    conn = _get_redis_connection()
    found = Experiment.find(conn, experiment)
    if found:
        found.delete()
    return redirect(url_for('.index'))
Delete an experiment and all its data .
49,126
# Adds the 'ipaddress' attribute to the given node dict — if not already
# present — by running ohai on the remote host over fabric's sudo.
# Returns True when the attribute was added, False otherwise.
# Aborts when ohai's output cannot be parsed as JSON.
def _get_ipaddress ( node ) : if "ipaddress" not in node : with settings ( hide ( 'stdout' ) , warn_only = True ) : output = sudo ( 'ohai -l warn ipaddress' ) if output . succeeded : try : node [ 'ipaddress' ] = json . loads ( output ) [ 0 ] except ValueError : abort ( "Could not parse ohai's output for ipaddress" ":\n {0}" . format ( output ) ) return True return False
Adds the ipaddress attribute to the given node object if not already present and it is correctly given by ohai Returns True if ipaddress is added False otherwise
49,127
def sync_node(node):
    """Builds, synchronizes and configures a node.

    Injects the ipaddress into the node's config file if not already
    present. Dummy nodes are skipped (returns False); returns True after
    a successful configure. Remote temp files are cleaned up even when
    configuration fails.
    """
    is_dummy = node.get('dummy') or 'dummy' in node.get('tags', [])
    if is_dummy:
        lib.print_header("Skipping dummy: {0}".format(env.host))
        return False
    current_node = lib.get_node(node['name'])
    solo.configure(current_node)
    ipaddress = _get_ipaddress(node)
    filepath = save_config(node, ipaddress)
    try:
        _synchronize_node(filepath, node)
        _configure_node()
    finally:
        _node_cleanup()
    return True
Builds synchronizes and configures a node . It also injects the ipaddress to the node s config file if not already existent .
49,128
def build_dct(dic, keys, value):
    """Builds a dictionary with arbitrary depth out of a key list.

    Consumes `keys` (the list is mutated) and writes `value` at the leaf;
    the strings "true"/"false" are converted to booleans, all other
    values are deep-copied.
    """
    key = keys.pop(0)
    if keys:
        # More keys remain: descend, creating nested dicts as needed.
        build_dct(dic.setdefault(key, {}), keys, value)
    elif value == "false":
        dic[key] = False
    elif value == "true":
        dic[key] = True
    else:
        dic[key] = deepcopy(value)
Builds a dictionary with arbitrary depth out of a key list
49,129
def update_dct(dic1, dic2):
    """Merges dic2 into dic1 recursively; dic2 has preference over dic1.

    Dict values are merged key-by-key; any other value overwrites.
    """
    for key, val in dic2.items():
        if isinstance(val, dict):
            update_dct(dic1.setdefault(key, {}), val)
        else:
            dic1[key] = val
Merges two dictionaries recursively dic2 will have preference over dic1
49,130
def _add_merged_attributes(node, all_recipes, all_roles):
    """Merges attributes from cookbooks, node and roles into the node dict.

    Precedence (lowest to highest): cookbook defaults, role default
    attributes, environment defaults, node's own attributes, role
    overrides, environment overrides. Aborts when a node recipe cannot
    be found among the known recipes.
    """
    attributes = {}
    # Cookbook attribute defaults come first.
    for recipe in node['recipes']:
        found = False
        for recipe_def in all_recipes:
            if recipe == recipe_def['name']:
                found = True
                for attr in recipe_def['attributes']:
                    if recipe_def['attributes'][attr].get('type') == "hash":
                        value = {}
                    else:
                        value = recipe_def['attributes'][attr].get('default')
                    # Attribute paths like "a/b/c" become nested dicts.
                    build_dct(attributes, attr.split("/"), value)
        if not found:
            error = "Could not find recipe '{0}' while ".format(recipe)
            error += "building node data bag for '{0}'".format(node['name'])
            abort(error)

    # Role defaults, then environment defaults.
    for role in node['roles']:
        for role_def in all_roles:
            if role == role_def['name']:
                update_dct(attributes, role_def.get('default_attributes', {}))
    environment = lib.get_environment(node['chef_environment'])
    update_dct(attributes, environment.get('default_attributes', {}))

    # The node's own attributes override defaults.
    non_attribute_fields = ['id', 'name', 'role', 'roles', 'recipes',
                            'run_list', 'ipaddress']
    node_attributes = dict((key, node[key]) for key in node
                           if key not in non_attribute_fields)
    update_dct(attributes, node_attributes)

    # Role overrides, then environment overrides, win over everything.
    for role in node['roles']:
        for role_def in all_roles:
            if role == role_def['name']:
                update_dct(attributes, role_def.get('override_attributes', {}))
    update_dct(attributes, environment.get('override_attributes', {}))

    node.update(attributes)
Merges attributes from cookbooks node and roles
49,131
def build_node_data_bag():
    """Builds one node data bag item per file found in the nodes directory.

    Regenerates data_bags/node from scratch: expands roles and recipes
    (including those pulled in via roles), merges attributes, and writes
    one JSON file per node keyed by its dotted name with dots replaced.
    """
    nodes = lib.get_nodes()
    node_data_bag_path = os.path.join('data_bags', 'node')
    remove_local_node_data_bag()
    os.makedirs(node_data_bag_path)
    all_recipes = lib.get_recipes()
    all_roles = lib.get_roles()
    for node in nodes:
        node['id'] = node['name'].replace('.', '_')
        node['role'] = lib.get_roles_in_node(node)
        # 'roles' is the transitive closure of 'role'.
        node['roles'] = node['role'][:]
        for role in node['role']:
            node['roles'].extend(lib.get_roles_in_role(role))
        node['roles'] = list(set(node['roles']))
        node['recipes'] = lib.get_recipes_in_node(node)
        for role in node['roles']:
            node['recipes'].extend(lib.get_recipes_in_role(role))
        node['recipes'] = list(set(node['recipes']))
        _add_merged_attributes(node, all_recipes, all_roles)
        _add_automatic_attributes(node)
        with open(os.path.join('data_bags', 'node', node['id'] + '.json'), 'w') as f:
            f.write(json.dumps(node))
Builds one node data bag item per file found in the nodes directory
49,132
def remove_local_node_data_bag():
    """Removes the generated node data bag locally (no-op when absent)."""
    path = os.path.join('data_bags', 'node')
    if os.path.exists(path):
        shutil.rmtree(path)
Removes generated node data_bag locally
49,133
# Runs `berks vendor` into the configured berksfile cookbooks directory,
# skipping the run when the existing Berksfile.lock in that directory is
# newer than the Berksfile (a simple mtime freshness check). The target
# directory is wiped before re-vendoring. Subprocess output is printed when
# verbose or on a non-zero exit code.
# NOTE(review): this is Python 2 code — `print stdout , stderr` is a print
# statement; keep that in mind before porting.
def ensure_berksfile_cookbooks_are_installed ( ) : msg = "Vendoring cookbooks from Berksfile {0} to directory {1}..." print ( msg . format ( env . berksfile , env . berksfile_cookbooks_directory ) ) run_vendor = True cookbooks_dir = env . berksfile_cookbooks_directory berksfile_lock_path = cookbooks_dir + '/Berksfile.lock' berksfile_lock_exists = os . path . isfile ( berksfile_lock_path ) cookbooks_dir_exists = os . path . isdir ( cookbooks_dir ) if cookbooks_dir_exists and berksfile_lock_exists : berksfile_mtime = os . stat ( 'Berksfile' ) . st_mtime cookbooks_mtime = os . stat ( berksfile_lock_path ) . st_mtime run_vendor = berksfile_mtime > cookbooks_mtime if run_vendor : if cookbooks_dir_exists : shutil . rmtree ( env . berksfile_cookbooks_directory ) p = subprocess . Popen ( [ 'berks' , 'vendor' , env . berksfile_cookbooks_directory ] , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) stdout , stderr = p . communicate ( ) if env . verbose or p . returncode : print stdout , stderr
Run berks vendor to berksfile cookbooks directory
49,134
def _remove_remote_node_data_bag():
    """Removes the generated node data bag from the remote node."""
    remote_path = os.path.join(env.node_work_path, 'data_bags', 'node')
    if exists(remote_path):
        sudo("rm -rf {0}".format(remote_path))
Removes generated node data_bag from the remote node
49,135
def _remove_remote_data_bags():
    """Remove remote data bags so they won't leak any sensitive information."""
    remote_path = os.path.join(env.node_work_path, 'data_bags')
    if exists(remote_path):
        sudo("rm -rf {0}".format(remote_path))
Remove remote data bags so they won't leak any sensitive information.
49,136
def _configure_node():
    """Executes chef-solo to apply roles and recipes to a node.

    Rotates the previous log file, builds the chef-solo command line
    (optionally in why-run mode and teeing to the log), runs it via sudo,
    and reports success or failure based on the command output. Aborts
    when chef-solo is missing; exits non-zero on a failed run.
    """
    print("")
    msg = "Cooking..."
    if env.parallel:
        msg = "[{0}]: {1}".format(env.host_string, msg)
    print(msg)
    # Keep one previous log around.
    with settings(hide('stdout', 'warnings', 'running'), warn_only=True):
        sudo("mv {0} {0}.1".format(LOGFILE))
    cmd = "RUBYOPT=-Ku chef-solo"
    if whyrun:
        cmd += " --why-run"
    cmd += ' -l {0} -j /etc/chef/node.json'.format(env.loglevel)
    if ENABLE_LOGS:
        cmd += ' | tee {0}'.format(LOGFILE)
    if env.loglevel == "debug":
        print("Executing Chef Solo with the following command:\n"
              "{0}".format(cmd))
    with settings(hide('warnings', 'running'), warn_only=True):
        output = sudo(cmd)
    run_failed = (output.failed or
                  "FATAL: Stacktrace dumped" in output or
                  ("Chef Run complete" not in output and
                   "Report handlers complete" not in output))
    if run_failed:
        if 'chef-solo: command not found' in output:
            print(colors.red("\nFAILED: Chef Solo is not installed on this node"))
            print("Type 'fix node:{0} deploy_chef' to install it".format(env.host))
            abort("")
        else:
            print(colors.red("\nFAILED: chef-solo could not finish configuring the node\n"))
            import sys
            sys.exit(1)
    else:
        msg = "\n"
        if env.parallel:
            msg += "[{0}]: ".format(env.host_string)
        msg += "SUCCESS: Node correctly configured"
        print(colors.green(msg))
Executes chef-solo to apply roles and recipes to a node.
49,137
def _resolve_hostname(name):
    """Returns the resolved hostname using the ssh config.

    Only resolves when no node file exists for the given name and one
    does exist for the resolved hostname; otherwise the original name
    is returned unchanged.
    """
    if env.ssh_config is None:
        return name
    if not os.path.exists(os.path.join("nodes", name + ".json")):
        resolved = env.ssh_config.lookup(name)['hostname']
        if os.path.exists(os.path.join("nodes", resolved + ".json")):
            return resolved
    return name
Returns resolved hostname using the ssh config
49,138
def get_environment(name):
    """Returns a JSON environment file as a dictionary.

    The "_default" environment is synthesized from a template; all others
    are read from environments/<name>.json. Aborts on malformed JSON and
    raises FileNotFoundError when the file is missing.
    """
    if name == "_default":
        return env_from_template(name)
    filename = os.path.join("environments", name + ".json")
    try:
        with open(filename) as f:
            try:
                return json.loads(f.read())
            except ValueError as e:
                msg = 'LittleChef found the following error in'
                msg += ' "{0}":\n {1}'.format(filename, str(e))
                abort(msg)
    except IOError:
        raise FileNotFoundError('File {0} not found'.format(filename))
Returns a JSON environment file as a dictionary
49,139
def get_environments():
    """Gets all environments found in the environments directory,
    sorted by name."""
    environments = []
    for root, _, files in os.walk('environments'):
        for filename in files:
            if filename.endswith(".json"):
                # Path relative to the environments dir, without extension.
                relpath = os.path.join(root[len('environments'):],
                                       filename[:-len('.json')])
                environments.append(get_environment(relpath))
    return sorted(environments, key=lambda e: e['name'])
Gets all environments found in the environments directory
49,140
# Returns a JSON node file as a dictionary. With merged=True the generated
# data_bags/node item (dots in the name replaced by underscores) is read
# instead of nodes/<name>.json. A missing file yields a fresh node with an
# empty run_list; 'name' is always set and 'chef_environment' defaults to
# '_default'. Aborts on malformed JSON.
# NOTE(review): this is Python 2 code — `print "Creating..."` is a print
# statement.
def get_node ( name , merged = False ) : if merged : node_path = os . path . join ( "data_bags" , "node" , name . replace ( '.' , '_' ) + ".json" ) else : node_path = os . path . join ( "nodes" , name + ".json" ) if os . path . exists ( node_path ) : with open ( node_path , 'r' ) as f : try : node = json . loads ( f . read ( ) ) except ValueError as e : msg = 'LittleChef found the following error in' msg += ' "{0}":\n {1}' . format ( node_path , str ( e ) ) abort ( msg ) else : print "Creating new node file '{0}.json'" . format ( name ) node = { 'run_list' : [ ] } node [ 'name' ] = name if not node . get ( 'chef_environment' ) : node [ 'chef_environment' ] = '_default' return node
Returns a JSON node file as a dictionary
49,141
def get_nodes_with_role(role_name, environment=None):
    """Get all nodes which include a given role.

    A trailing "*" on role_name enables prefix search. Yields matching
    node dicts; roles are expanded recursively through nested roles.
    """
    prefix_search = role_name.endswith("*")
    if prefix_search:
        role_name = role_name.rstrip("*")
    for node in get_nodes(environment):
        node_roles = get_roles_in_node(node, recursive=True)
        if prefix_search:
            if any(role.startswith(role_name) for role in node_roles):
                yield node
        elif role_name in node_roles:
            yield node
Get all nodes which include a given role prefix - searches are also supported
49,142
def get_nodes_with_tag(tag, environment=None, include_guests=False):
    """Get all nodes which include a given tag.

    With include_guests, guests of tagged virtualization hosts are also
    yielded (looked up by fqdn among the known nodes). A tagged node is
    removed from the fqdn mapping so it is not re-yielded as a guest.
    """
    nodes = get_nodes(environment)
    by_name = dict((n['name'], n) for n in nodes)
    for node in nodes:
        if tag in node.get('tags', []):
            try:
                del by_name[node['fqdn']]
            except KeyError:
                pass
            yield node
            is_host = node.get('virtualization', {}).get('role') == 'host'
            if include_guests and is_host:
                for guest in node['virtualization'].get('guests', []):
                    try:
                        yield by_name[guest['fqdn']]
                    except KeyError:
                        pass
Get all nodes which include a given tag
49,143
def get_nodes_with_recipe(recipe_name, environment=None):
    """Get all nodes which include a given recipe.

    A trailing "*" on recipe_name enables prefix search. Recipes pulled
    in through roles (recursively) are included in the match.
    """
    prefix_search = recipe_name.endswith("*")
    if prefix_search:
        recipe_name = recipe_name.rstrip("*")
    for node in get_nodes(environment):
        node_recipes = get_recipes_in_node(node)
        for role in get_roles_in_node(node, recursive=True):
            node_recipes.extend(get_recipes_in_role(role))
        if prefix_search:
            if any(recipe.startswith(recipe_name) for recipe in node_recipes):
                yield node
        elif recipe_name in node_recipes:
            yield node
Get all nodes which include a given recipe prefix - searches are also supported
49,144
# Pretty-prints the given node: name, roles, recipes (with per-recipe
# attributes when detailed=True) and all remaining node attributes except
# run_list and name.
# NOTE(review): this is Python 2 code — several bare `print "..."` print
# statements are mixed with print() calls.
def print_node ( node , detailed = False ) : nodename = node [ 'name' ] print ( colors . yellow ( "\n" + nodename ) ) if detailed : for role in get_roles_in_node ( node ) : print_role ( _get_role ( role ) , detailed = False ) else : print ( ' Roles: {0}' . format ( ", " . join ( get_roles_in_node ( node ) ) ) ) if detailed : for recipe in get_recipes_in_node ( node ) : print " Recipe:" , recipe print " attributes: {0}" . format ( node . get ( recipe , "" ) ) else : print ( ' Recipes: {0}' . format ( ", " . join ( get_recipes_in_node ( node ) ) ) ) print " Node attributes:" for attribute in node . keys ( ) : if attribute == "run_list" or attribute == "name" : continue print " {0}: {1}" . format ( attribute , node [ attribute ] )
Pretty prints the given node
49,145
def print_nodes(nodes, detailed=False):
    """Prints all the given nodes followed by a total count."""
    found = 0
    for node in nodes:
        found += 1
        print_node(node, detailed=detailed)
    suffix = "s" if found != 1 else ""
    print("\nFound {0} node{1}".format(found, suffix))
Prints all the given nodes
49,146
def _generate_metadata(path, cookbook_path, name):
    """Regenerate ``metadata.json`` from ``metadata.rb`` via knife when
    the .rb file is newer (or the .json is missing).

    Disables itself for the rest of the session (``knife_installed``)
    if the knife binary cannot be executed.
    """
    global knife_installed
    if not knife_installed:
        return
    metadata_path_rb = os.path.join(path, 'metadata.rb')
    metadata_path_json = os.path.join(path, 'metadata.json')
    # Only act when metadata.json is stale or absent.
    if (os.path.exists(metadata_path_rb)
            and (not os.path.exists(metadata_path_json)
                 or os.stat(metadata_path_rb).st_mtime >
                 os.stat(metadata_path_json).st_mtime)):
        error_msg = "Warning: metadata.json for {0}".format(name)
        error_msg += " in {0} is older that metadata.rb".format(cookbook_path)
        error_msg += ", cookbook attributes could be out of date\n\n"
        try:
            proc = subprocess.Popen(
                ['knife', 'cookbook', 'metadata', '-o', cookbook_path, name],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            resp, error = proc.communicate()
            # knife ran but did not report success.
            if ('ERROR:' in resp or 'FATAL:' in resp
                    or 'Generating metadata for' not in resp):
                if ("No user specified, pass via -u or specifiy 'node_name'"
                        in error):
                    error_msg += "You need to have an up-to-date (>=0.10.x)"
                    error_msg += " version of knife installed locally in order"
                    error_msg += " to generate metadata.json.\nError "
                else:
                    error_msg += "Unkown error "
                error_msg += "while executing knife to generate "
                error_msg += "metadata.json for {0}".format(path)
                print(error_msg)
                print resp
                if env.loglevel == 'debug':
                    # Show only the first two lines of knife's output.
                    print "\n".join(resp.split("\n")[:2])
        except OSError:
            # knife binary not found: warn once and stop trying.
            knife_installed = False
            error_msg += "If you locally install Chef's knife tool, LittleChef"
            error_msg += " will regenerate metadata.json files automatically\n"
            print(error_msg)
        else:
            print("Generated metadata.json for {0}\n".format(path))
Checks whether metadata . rb has changed and regenerate metadata . json
49,147
def get_recipes_in_cookbook(name):
    """Return a list of recipe dicts for the named cookbook.

    Metadata is taken from the first cookbook path containing the
    cookbook; recipe files found on disk in any cookbook path are
    added afterwards.  Aborts when the cookbook is missing or has no
    metadata.json.
    """
    recipes = {}
    path = None
    cookbook_exists = False
    metadata_exists = False
    for cookbook_path in cookbook_paths:
        path = os.path.join(cookbook_path, name)
        path_exists = os.path.exists(path)
        cookbook_exists = cookbook_exists or path_exists
        if not path_exists:
            continue
        _generate_metadata(path, cookbook_path, name)
        try:
            with open(os.path.join(path, 'metadata.json'), 'r') as f:
                try:
                    cookbook = json.loads(f.read())
                except ValueError as e:
                    msg = "Little Chef found the following error in your"
                    msg += " {0} file:\n {1}".format(
                        os.path.join(path, 'metadata.json'), e)
                    abort(msg)
                metadata_exists = True
                # Defaults shared by every recipe of this cookbook.
                recipe_defaults = {
                    'description': '',
                    'version': cookbook.get('version'),
                    'dependencies': cookbook.get('dependencies', {}).keys(),
                    'attributes': cookbook.get('attributes', {})
                }
                for recipe in cookbook.get('recipes', []):
                    recipes[recipe] = dict(
                        recipe_defaults,
                        name=recipe,
                        description=cookbook['recipes'][recipe]
                    )
            # Only the first matching cookbook path provides metadata.
            break
        except IOError:
            pass
    if not cookbook_exists:
        abort('Unable to find cookbook "{0}"'.format(name))
    elif not metadata_exists:
        abort('Cookbook "{0}" has no metadata.json'.format(name))
    # Add recipe .rb files on disk that metadata did not mention.
    for cookbook_path in cookbook_paths:
        recipes_dir = os.path.join(cookbook_path, name, 'recipes')
        if not os.path.isdir(recipes_dir):
            continue
        for basename in os.listdir(recipes_dir):
            fname, ext = os.path.splitext(basename)
            if ext != '.rb':
                continue
            if fname != 'default':
                recipe = '%s::%s' % (name, fname)
            else:
                recipe = name
            if recipe not in recipes:
                recipes[recipe] = dict(recipe_defaults, name=recipe)
    if not recipes:
        recipes[name] = dict(
            recipe_defaults, name=name,
            description='This cookbook has no default recipe')
    return recipes.values()
Gets the name of all recipes present in a cookbook Returns a list of dictionaries
49,148
def get_recipes_in_node(node):
    """Return the recipe names present in the node's run_list."""
    return [entry.split('[')[1].split(']')[0]
            for entry in node.get('run_list', [])
            if entry.startswith("recipe")]
Gets the name of all recipes present in the run_list of a node
49,149
def get_recipes():
    """Return every recipe from all cookbook directories, sorted by name."""
    cookbook_dirs = set()
    for base in cookbook_paths:
        for entry in os.listdir(base):
            if entry.startswith('.'):
                continue
            if os.path.isdir(os.path.join(base, entry)):
                cookbook_dirs.add(entry)
    all_recipes = []
    for cookbook in cookbook_dirs:
        all_recipes.extend(get_recipes_in_cookbook(cookbook))
    return sorted(all_recipes, key=lambda r: r['name'])
Gets all recipes found in the cookbook directories
49,150
def print_recipe(recipe):
    """Pretty print one recipe dict: name, description, version,
    dependencies and attributes.  (Python 2 print statements.)"""
    print(colors.yellow("\n{0}".format(recipe['name'])))
    print " description: {0}".format(recipe['description'])
    print " version: {0}".format(recipe['version'])
    print " dependencies: {0}".format(", ".join(recipe['dependencies']))
    print " attributes: {0}".format(", ".join(recipe['attributes']))
Pretty prints the given recipe
49,151
def _get_role ( rolename ) : path = os . path . join ( 'roles' , rolename + '.json' ) if not os . path . exists ( path ) : abort ( "Couldn't read role file {0}" . format ( path ) ) with open ( path , 'r' ) as f : try : role = json . loads ( f . read ( ) ) except ValueError as e : msg = "Little Chef found the following error in your" msg += " {0}.json file:\n {1}" . format ( rolename , str ( e ) ) abort ( msg ) role [ 'fullname' ] = rolename return role
Reads and parses a file containing a role
49,152
def get_roles():
    """Return every role defined under the ``roles`` directory, sorted
    by fullname.

    Roles in subdirectories keep their relative path in the role name.
    The leading path separator is stripped from the walked subpath so
    that ``_get_role``'s ``os.path.join('roles', ...)`` stays relative:
    a component starting with ``os.sep`` would otherwise make join
    discard the ``roles`` prefix and look up an absolute path.
    """
    roles = []
    for root, subfolders, files in os.walk('roles'):
        for filename in files:
            if not filename.endswith(".json"):
                continue
            # Path of this role relative to the 'roles' directory.
            relative_dir = root[len('roles'):].lstrip(os.sep)
            path = os.path.join(relative_dir, filename[:-len('.json')])
            roles.append(_get_role(path))
    return sorted(roles, key=lambda x: x['fullname'])
Gets all roles found in the roles directory
49,153
def print_role(role, detailed=True):
    """Pretty print a role dict; ``detailed`` adds the description and
    run_list."""
    fullname = role.get('fullname')
    if detailed:
        print(colors.yellow(fullname))
        print(" description: {0}".format(role.get('description')))
    else:
        print(" Role: {0}".format(fullname))
    if 'default_attributes' in role:
        print(" default_attributes:")
        _pprint(role['default_attributes'])
    if 'override_attributes' in role:
        print(" override_attributes:")
        _pprint(role['override_attributes'])
    if detailed:
        print(" run_list: {0}".format(role.get('run_list')))
    print("")
Pretty prints the given role
49,154
def import_plugin(name):
    """Load ``plugins/<name>.py`` as a python module and return it.

    Aborts when the file is missing or fails to parse.
    """
    plugin_file = os.path.join("plugins", name + ".py")
    try:
        with open(plugin_file, 'rb') as handle:
            try:
                loaded = imp.load_module("p_" + name, handle, name + '.py',
                                         ('.py', 'rb', imp.PY_SOURCE))
            except SyntaxError as parse_err:
                error = "Found plugin '{0}', but it seems".format(name)
                error += " to have a syntax error: {0}".format(str(parse_err))
                abort(error)
    except IOError:
        abort("Sorry, could not find '{0}.py' in the plugin directory".format(name))
    return loaded
Imports plugin python module
49,155
def get_cookbook_path(cookbook_name):
    """Return the path of the first cookbook directory matching
    *cookbook_name*; raise IOError when none exists."""
    for base in cookbook_paths:
        candidate = os.path.join(base, cookbook_name)
        if os.path.exists(candidate):
            return candidate
    raise IOError('Can\'t find cookbook with name "{0}"'.format(cookbook_name))
Returns path to the cookbook for the given cookbook name
49,156
def global_confirm(question, default=True):
    """Ask one confirmation that applies to every host by temporarily
    forcing Fabric into serial execution (answers auto-yes when
    ``env.abort_on_prompts`` is set)."""
    if env.abort_on_prompts:
        return True
    saved_parallel, env.parallel = env.parallel, False
    answer = confirm(question, default)
    env.parallel = saved_parallel
    return answer
Shows a confirmation that applies to all hosts by temporarily disabling parallel execution in Fabric
49,157
def _pprint ( dic ) : for key , value in dic . items ( ) : print ( " {0}: {1}" . format ( key , value ) )
Prints a dictionary with one indentation level
49,158
def get_margin(length):
    """Return enough tab characters to align a second column after a
    label of *length* characters.

    One tab for labels longer than 23 characters, descending to four
    tabs for labels of 7 characters or fewer.  (The original also
    computed an unused ``chars`` counter in every branch; removed.)
    """
    if length > 23:
        return "\t"
    if length > 15:
        return "\t\t"
    if length > 7:
        return "\t\t\t"
    return "\t\t\t\t"
Add enough tabs to align in two columns
49,159
def configure(current_node=None):
    """Deploy chef-solo specific files to the current node.

    Creates the remote work/cache directory (owned by the deployment
    user), the log directory and /etc/chef, then renders and uploads
    solo.rb from the solo.rb.j2 template.
    """
    current_node = current_node or {}
    # Ensure the remote work/cache directory exists.
    cache_dir = "{0}/cache".format(env.node_work_path)
    try:
        cache_exists = exists(cache_dir)
    except EOFError as e:
        abort("Could not login to node, got: {0}".format(e))
    if not cache_exists:
        with settings(hide('running', 'stdout'), warn_only=True):
            output = sudo('mkdir -p {0}'.format(cache_dir))
            if output.failed:
                error = "Could not create {0} dir. ".format(env.node_work_path)
                error += "Do you have sudo rights?"
                abort(error)
    # Hand the work path over to the deployment user.
    with hide('running', 'stdout'):
        with settings(warn_only=True):
            output = sudo('chown -R {0} {1}'.format(env.user, env.node_work_path))
            if output.failed:
                error = "Could not modify {0} dir. ".format(env.node_work_path)
                error += "Do you have sudo rights?"
                abort(error)
    logging_path = os.path.dirname(LOGFILE)
    if not exists(logging_path):
        sudo('mkdir -p {0}'.format(logging_path))
    if not exists('/etc/chef'):
        sudo('mkdir -p /etc/chef')
    # Cookbook paths are listed in reverse order in solo.rb.
    # NOTE(review): presumably so later local paths take precedence in
    # chef-solo's lookup — confirm against chef-solo docs.
    reversed_cookbook_paths = cookbook_paths[:]
    reversed_cookbook_paths.reverse()
    cookbook_paths_list = '[{0}]'.format(', '.join(
        ['"{0}/{1}"'.format(env.node_work_path, x)
         for x in reversed_cookbook_paths]))
    # Template context for solo.rb.j2.
    data = {
        'node_work_path': env.node_work_path,
        'cookbook_paths_list': cookbook_paths_list,
        'environment': current_node.get('chef_environment', '_default'),
        'verbose': "true" if env.verbose else "false",
        'http_proxy': env.http_proxy,
        'https_proxy': env.https_proxy
    }
    with settings(hide('everything')):
        try:
            upload_template('solo.rb.j2', '/etc/chef/solo.rb',
                            context=data, use_sudo=True, backup=False,
                            template_dir=BASEDIR, use_jinja=True, mode=0400)
        except SystemExit:
            # upload_template aborts via SystemExit on failure.
            error = ("Failed to upload '/etc/chef/solo.rb'\nThis "
                     "can happen when the deployment user does not have a "
                     "home directory, which is needed as a temporary location")
            abort(error)
    with hide('stdout'):
        sudo('chown root:$(id -g -n root) {0}'.format('/etc/chef/solo.rb'))
Deploy chef - solo specific files
49,160
def execute(node):
    """littlechef plugin: use ohai to detect whether the node is a Xen
    host and, if so, record its guest VMs in the node file."""
    with hide('everything'):
        virt = json.loads(sudo('ohai virtualization'))
    if not len(virt) or virt[0][1] != "host":
        print("This node is not a Xen host, doing nothing")
        return
    node['virtualization'] = {
        'role': 'host',
        'system': 'xen',
        'vms': [],
    }
    # Parse `xm list` output, skipping the two header lines; stop at
    # the first row that does not have the expected six columns.
    with hide('everything'):
        vm_list = sudo("xm list")
    for vm in vm_list.split("\n")[2:]:
        data = vm.split()
        if len(data) != 6:
            break
        node['virtualization']['vms'].append({
            'fqdn': data[0], 'RAM': data[2], 'cpus': data[3]})
    print("Found {0} VMs for this Xen host".format(
        len(node['virtualization']['vms'])))
    # Persist the node and delete the temporary file save_config
    # returns.  NOTE(review): 'name' is removed so it is not written
    # into the node file — confirm intent.
    del node['name']
    os.remove(chef.save_config(node, True))
Uses ohai to get virtualization information which is then saved to then node file
49,161
def nodes_with_role(rolename):
    """Configure every node whose run list includes the given role."""
    matching = [each['name'] for each in
                lib.get_nodes_with_role(rolename, env.chef_environment)]
    if not matching:
        print("No nodes found with role '{0}'".format(rolename))
        sys.exit(0)
    return node(*matching)
Configures a list of nodes that have the given role in their run list
49,162
def nodes_with_recipe(recipename):
    """Configure every node whose run list includes the given recipe."""
    matching = [each['name'] for each in
                lib.get_nodes_with_recipe(recipename, env.chef_environment)]
    if not matching:
        print("No nodes found with recipe '{0}'".format(recipename))
        sys.exit(0)
    return node(*matching)
Configures a list of nodes that have the given recipe in their run list
49,163
def node(*nodes):
    """Select and configure a list of nodes; the special name 'all'
    selects every node in the current environment (after a global
    confirmation prompt)."""
    chef.build_node_data_bag()
    if not len(nodes) or nodes[0] == '':
        abort('No node was given')
    elif nodes[0] == 'all':
        for node in lib.get_nodes(env.chef_environment):
            env.hosts.append(node['name'])
        if not len(env.hosts):
            abort('No nodes found in /nodes/')
        message = "Are you sure you want to configure all nodes ({0})".format(
            len(env.hosts))
        if env.chef_environment:
            message += " in the {0} environment".format(env.chef_environment)
        message += "?"
        if not __testing__:
            if not lib.global_confirm(message):
                abort('Aborted by user')
    else:
        env.hosts = list(nodes)
    env.all_hosts = list(env.hosts)
    # NOTE(review): this condition appears to distinguish library use
    # (run the nodes here) from command-line use, where Fabric drives
    # the host loop itself — confirm against littlechef's entry points.
    if not (littlechef.__cooking__ and
            'node:' not in sys.argv[-1] and
            'nodes_with_role:' not in sys.argv[-1] and
            'nodes_with_recipe:' not in sys.argv[-1] and
            'nodes_with_tag:' not in sys.argv[-1]):
        with settings():
            execute(_node_runner)
        chef.remove_local_node_data_bag()
Selects and configures a list of nodes . all configures all nodes
49,164
def _node_runner():
    """Fabric task body used by node() so configuration can run in
    parallel; configures the node given by env.host_string."""
    env.host_string = lib.get_env_host_string()
    node = lib.get_node(env.host_string)
    _configure_fabric_for_platform(node.get("platform"))
    if __testing__:
        print "TEST: would now configure {0}".format(env.host_string)
    else:
        lib.print_header("Configuring {0}".format(env.host_string))
        # Install chef-solo first when autodeploy is on and chef is not
        # yet present on the node.
        if env.autodeploy_chef and not chef.chef_test():
            deploy_chef(ask="no")
        chef.sync_node(node)
This is only used by node so that we can execute in parallel
49,165
def deploy_chef(ask="yes", version="11"):
    """Install chef-solo on the current node via the omnibus installer.

    Asks for confirmation unless ``ask == "no"`` or littlechef runs
    non-interactively.  After installing, ohai output seeds the local
    node file with basic platform attributes.
    """
    env.host_string = lib.get_env_host_string()
    if ask == "no" or littlechef.noninteractive:
        # BUG FIX: the original format string had no placeholder, so
        # .format(version) printed a literal "version: ...".
        print("Deploying Chef using omnibus installer version: {0}".format(version))
    else:
        message = ('\nAre you sure you want to install Chef version:'
                   '{0} on node {1}?'.format(version, env.host_string))
        if not confirm(message):
            abort('Aborted by user')
    lib.print_header("Configuring Chef Solo on {0}".format(env.host_string))
    if not __testing__:
        solo.install(version)
        solo.configure()
        # Seed a minimal node file from ohai's view of the system.
        with settings(hide('stdout'), warn_only=True):
            output = sudo('ohai -l warn')
        if output.succeeded:
            try:
                ohai = json.loads(output)
            except ValueError:
                abort("Could not parse ohai's output"
                      ":\n {0}".format(output))
            node = {"run_list": []}
            for attribute in ["ipaddress", "platform", "platform_family",
                              "platform_version"]:
                if ohai.get(attribute):
                    node[attribute] = ohai[attribute]
            chef.save_config(node)
Install chef - solo on a node
49,166
def plugin(name):
    """Execute the named plugin (from the kitchen's plugins directory)
    against the current node."""
    env.host_string = lib.get_env_host_string()
    loaded = lib.import_plugin(name)
    lib.print_header("Executing plugin '{0}' on "
                     "{1}".format(name, env.host_string))
    node_data = lib.get_node(env.host_string)
    if node_data == {'run_list': []}:
        # Brand-new node: give it at least a name.
        node_data['name'] = env.host_string
    loaded.execute(node_data)
    print("Finished executing plugin")
Executes the selected plugin. Plugins are expected to be found in the kitchen's plugins directory
49,167
def list_envs():
    """Print every environment with its description, tab-aligned."""
    for environment in lib.get_environments():
        pad = lib.get_margin(len(environment['name']))
        description = environment.get('description', '(no description)')
        print("{0}{1}{2}".format(environment['name'], pad, description))
List all environments
49,168
def list_nodes_with_tag(tag):
    """Print every node that has been assigned the given tag."""
    matching = lib.get_nodes_with_tag(tag, env.chef_environment,
                                      littlechef.include_guests)
    lib.print_nodes(matching)
Show all nodes which have assigned a given tag
49,169
def list_recipes():
    """Print all available recipes with their descriptions, tab-aligned."""
    for each in lib.get_recipes():
        pad = lib.get_margin(len(each['name']))
        print("{0}{1}{2}".format(each['name'], pad, each['description']))
Show a list of all available recipes
49,170
def list_roles():
    """Print all available roles with their descriptions, tab-aligned."""
    for each in lib.get_roles():
        pad = lib.get_margin(len(each['fullname']))
        description = each.get('description', '(no description)')
        print("{0}{1}{2}".format(each['fullname'], pad, description))
Show a list of all available roles
49,171
def _check_appliances ( ) : filenames = os . listdir ( os . getcwd ( ) ) missing = [ ] for dirname in [ 'nodes' , 'environments' , 'roles' , 'cookbooks' , 'data_bags' ] : if ( dirname not in filenames ) or ( not os . path . isdir ( dirname ) ) : missing . append ( dirname ) return ( not bool ( missing ) ) , missing
Looks around and return True or False based on whether we are in a kitchen
49,172
def create_ticket_str(self, prefix=None):
    """Build a sufficiently opaque ticket string of the form
    ``<prefix>-<unix time>-<random>``; the model's TICKET_PREFIX is
    used when no prefix is supplied."""
    chosen_prefix = prefix or self.model.TICKET_PREFIX
    rand = get_random_string(length=self.model.TICKET_RAND_LEN)
    return "%s-%d-%s" % (chosen_prefix, int(time.time()), rand)
Generate a sufficiently opaque ticket string to ensure the ticket is not guessable . If a prefix is provided prepend it to the string .
49,173
def validate_ticket(self, ticket, service, renew=False, require_https=False):
    """Validate a ticket string against a service identifier.

    Returns the matching Ticket on success.  Raises InvalidRequest,
    InvalidTicket or InvalidService on failure.  ``renew`` requires a
    ticket issued from primary credentials; ``require_https`` rejects
    non-HTTPS services.
    """
    if not ticket:
        raise InvalidRequest("No ticket string provided")
    if not self.model.TICKET_RE.match(ticket):
        raise InvalidTicket("Ticket string %s is invalid" % ticket)
    try:
        t = self.get(ticket=ticket)
    except self.model.DoesNotExist:
        raise InvalidTicket("Ticket %s does not exist" % ticket)
    if t.is_consumed():
        raise InvalidTicket("%s %s has already been used" % (t.name, ticket))
    if t.is_expired():
        raise InvalidTicket("%s %s has expired" % (t.name, ticket))
    if not service:
        raise InvalidRequest("No service identifier provided")
    if require_https and not is_scheme_https(service):
        raise InvalidService("Service %s is not HTTPS" % service)
    if not service_allowed(service):
        raise InvalidService("Service %s is not a valid %s URL" % (service, t.name))
    # Not every ticket type records a service; skip the comparison for
    # those that do not have the attribute.
    try:
        if not match_service(t.service, service):
            raise InvalidService("%s %s for service %s is invalid for "
                                 "service %s" % (t.name, ticket, t.service, service))
    except AttributeError:
        pass
    # Likewise, only some ticket types track primary credentials.
    try:
        if renew and not t.is_primary():
            raise InvalidTicket("%s %s was not issued via primary "
                                "credentials" % (t.name, ticket))
    except AttributeError:
        pass
    logger.debug("Validated %s %s" % (t.name, ticket))
    return t
Given a ticket string and service identifier validate the corresponding Ticket . If validation succeeds return the Ticket . If validation fails raise an appropriate error .
49,174
def delete_invalid_tickets(self):
    """Delete consumed or expired tickets, skipping any that are still
    protected by a foreign-key reference from another ticket."""
    invalid = self.filter(
        Q(consumed__isnull=False) | Q(expires__lte=now())
    ).order_by('-expires')
    for stale in invalid:
        try:
            stale.delete()
        except models.ProtectedError:
            pass
Delete consumed or expired Ticket s that are not referenced by other Ticket s . Invalid tickets are no longer valid for authentication and can be safely deleted .
49,175
def consume_tickets(self, user):
    """Mark all of the user's still-valid tickets as consumed so they
    cannot authenticate again (run at logout)."""
    still_valid = self.filter(user=user, consumed__isnull=True,
                              expires__gt=now())
    for live_ticket in still_valid:
        live_ticket.consume()
Consume all valid Ticket s for a specified user . This is run when the user logs out to ensure all issued tickets are no longer valid for future authentication attempts .
49,176
def request_sign_out(self, user):
    """Send one single-logout request per service the user accessed
    during this session, reusing a single HTTP session object."""
    http_session = Session()
    for accessed in self.filter(user=user, consumed__gte=user.last_login):
        accessed.request_sign_out(session=http_session)
Send a single logout request to each service accessed by a specified user . This is called at logout when single logout is enabled .
49,177
def request_sign_out(self, session=requests):
    """POST a single sign-out request to this ticket's logout URL when
    the service is configured to allow logout."""
    if not logout_allowed(self.service):
        return
    payload = SingleSignOutRequest(context={'ticket': self})
    target = get_logout_url(self.service) or self.service
    session.post(target, data={'logoutRequest': payload.render_content()})
    logger.info("Single sign-out request sent to %s" % target)
Send a POST request to the ServiceTicket s logout URL to request sign - out .
49,178
def validate_callback(self, service, pgturl, pgtid, pgtiou):
    """Verify a proxy callback URL before issuing a proxy-granting
    ticket.

    Checks that the service may use proxy authentication, that the
    callback URL is HTTPS and authorized, and that the callback
    endpoint answers successfully (with SSL verification) when pgtId
    and pgtIou are delivered.  Raises UnauthorizedServiceProxy or
    InvalidProxyCallback on failure.
    """
    if not proxy_allowed(service):
        raise UnauthorizedServiceProxy("%s is not authorized to use proxy authentication" % service)
    if not is_scheme_https(pgturl):
        raise InvalidProxyCallback("Proxy callback %s is not HTTPS" % pgturl)
    if not proxy_callback_allowed(service, pgturl):
        raise InvalidProxyCallback("%s is not an authorized proxy callback URL" % pgturl)
    # First request: verify connectivity and the SSL certificate.
    # REQUESTS_CA_BUNDLE allows a custom CA bundle.
    verify = os.environ.get('REQUESTS_CA_BUNDLE', True)
    try:
        requests.get(pgturl, verify=verify, timeout=5)
    except requests.exceptions.SSLError:
        raise InvalidProxyCallback("SSL certificate validation failed for proxy callback %s" % pgturl)
    except requests.exceptions.RequestException as e:
        raise InvalidProxyCallback(e)
    # Second request: deliver the ticket IDs and require a non-error
    # HTTP status from the callback.
    pgturl = add_query_params(pgturl, {'pgtId': pgtid, 'pgtIou': pgtiou})
    try:
        response = requests.get(pgturl, verify=verify, timeout=5)
    except requests.exceptions.RequestException as e:
        raise InvalidProxyCallback(e)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        raise InvalidProxyCallback("Proxy callback %s returned %s" % (pgturl, e))
Verify the provided proxy callback URL .
49,179
def _get_backends():
    """Instantiate and return the configured service backends
    (defaulting to the SettingsBackend)."""
    paths = getattr(settings, 'MAMA_CAS_SERVICE_BACKENDS',
                    ['mama_cas.services.backends.SettingsBackend'])
    return [import_string(path)() for path in paths]
Retrieve the list of configured service backends .
49,180
def _is_allowed(attr, *args):
    """Return True when any configured backend's *attr* predicate
    accepts *args*; raise NotImplementedError for a backend that does
    not implement the check."""
    for backend in _get_backends():
        cls = backend.__class__
        try:
            if getattr(backend, attr)(*args):
                return True
        except AttributeError:
            raise NotImplementedError("%s.%s.%s() not implemented" % (
                cls.__module__, cls.__name__, attr))
    return False
Test if a given attribute is allowed according to the current set of configured service backends .
49,181
def _is_valid_service_url(url):
    """Check *url* against the deprecated MAMA_CAS_VALID_SERVICES regex
    list; an empty list allows everything."""
    valid_services = getattr(settings, 'MAMA_CAS_VALID_SERVICES', ())
    if not valid_services:
        return True
    warnings.warn(
        'The MAMA_CAS_VALID_SERVICES setting is deprecated. Services '
        'should be configured using MAMA_CAS_SERVICES.', DeprecationWarning)
    return any(re.compile(pattern).match(url) for pattern in valid_services)
Access services list from MAMA_CAS_VALID_SERVICES .
49,182
def get_backend_path(service):
    """Return the dotted path of the first backend that allows
    *service*, or None when no backend matches."""
    for backend in _get_backends():
        cls = backend.__class__
        try:
            if backend.service_allowed(service):
                return "%s.%s" % (cls.__module__, cls.__name__)
        except AttributeError:
            raise NotImplementedError(
                "%s.%s.service_allowed() not implemented" % (
                    cls.__module__, cls.__name__))
    return None
Return the dotted path of the matching backend .
49,183
def get_callbacks(service):
    """Collect attribute-callback paths for *service* from the
    deprecated setting plus every configured backend."""
    callbacks = list(getattr(settings, 'MAMA_CAS_ATTRIBUTE_CALLBACKS', []))
    if callbacks:
        warnings.warn(
            'The MAMA_CAS_ATTRIBUTE_CALLBACKS setting is deprecated. Service callbacks '
            'should be configured using MAMA_CAS_SERVICES.', DeprecationWarning)
    for backend in _get_backends():
        cls = backend.__class__
        try:
            callbacks.extend(backend.get_callbacks(service))
        except AttributeError:
            raise NotImplementedError(
                "%s.%s.get_callbacks() not implemented" % (
                    cls.__module__, cls.__name__))
    return callbacks
Get configured callbacks list for a given service identifier .
49,184
def get_logout_url(service):
    """Return the configured logout URL for *service* from the first
    backend (or None when there are no backends)."""
    for backend in _get_backends():
        cls = backend.__class__
        try:
            return backend.get_logout_url(service)
        except AttributeError:
            raise NotImplementedError(
                "%s.%s.get_logout_url() not implemented" % (
                    cls.__module__, cls.__name__))
    return None
Get the configured logout URL for a given service identifier if any .
49,185
def logout_allowed(service):
    """Decide whether *service* should receive a single-logout request,
    consulting the backends or the deprecated setting."""
    if hasattr(settings, 'MAMA_CAS_SERVICES'):
        return _is_allowed('logout_allowed', service)
    if hasattr(settings, 'MAMA_CAS_ENABLE_SINGLE_SIGN_OUT'):
        warnings.warn(
            'The MAMA_CAS_ENABLE_SINGLE_SIGN_OUT setting is deprecated. SLO '
            'should be configured using MAMA_CAS_SERVICES.', DeprecationWarning)
    return getattr(settings, 'MAMA_CAS_ENABLE_SINGLE_SIGN_OUT', False)
Check if a given service identifier should be sent a logout request .
49,186
def proxy_callback_allowed(service, pgturl):
    """Check whether *pgturl* is an acceptable proxy callback for the
    given service identifier."""
    if hasattr(settings, 'MAMA_CAS_SERVICES'):
        return _is_allowed('proxy_callback_allowed', service, pgturl)
    # Fall back to the deprecated valid-services check.
    return _is_valid_service_url(service)
Check if a given proxy callback is allowed for the given service identifier .
49,187
def clean(self):
    """Authenticate the submitted username and password.

    On success the User object is stored on ``self.user`` so the view
    can access it; on failure a ValidationError is raised.  Disabled
    accounts are rejected even with correct credentials.
    """
    username = self.cleaned_data.get('username')
    password = self.cleaned_data.get('password')
    if username and password:
        try:
            self.user = authenticate(request=self.request,
                                     username=username, password=password)
        except Exception:
            # A backend blew up: log the traceback but show a generic
            # message to the user.
            logger.exception("Error authenticating %s" % username)
            error_msg = _('Internal error while authenticating user')
            raise forms.ValidationError(error_msg)
        if self.user is None:
            logger.warning("Failed authentication for %s" % username)
            error_msg = _('The username or password is not correct')
            raise forms.ValidationError(error_msg)
        else:
            if not self.user.is_active:
                logger.warning("User account %s is disabled" % username)
                error_msg = _('This user account is disabled')
                raise forms.ValidationError(error_msg)
    return self.cleaned_data
Pass the provided username and password to the active authentication backends and verify the user account is not disabled . If authentication succeeds the User object is assigned to the form so it can be accessed in the view .
49,188
def ns(self, prefix, tag):
    """Return the namespace-qualified name for *tag* using the URI
    registered for *prefix*, for proper namespace handling on output."""
    uri = self.prefixes[prefix]
    return etree.QName(uri, tag)
Given a prefix and an XML tag output the qualified name for proper namespace handling on output .
49,189
def validate_service_ticket(service, ticket, pgturl=None, renew=False, require_https=False):
    """Validate a service ticket string.

    Returns ``(ServiceTicket, attributes, ProxyGrantingTicket or
    None)``; raises a validation error when the ticket is invalid.
    """
    logger.debug("Service validation request received for %s" % ticket)
    # Proxy tickets must use /proxyValidate instead.
    if ticket and ticket.startswith(ProxyTicket.TICKET_PREFIX):
        raise InvalidTicketSpec('Proxy tickets cannot be validated with /serviceValidate')
    st = ServiceTicket.objects.validate_ticket(
        ticket, service, renew=renew, require_https=require_https)
    attributes = get_attributes(st.user, st.service)
    pgt = None
    if pgturl is not None:
        logger.debug("Proxy-granting ticket request received for %s" % pgturl)
        pgt = ProxyGrantingTicket.objects.create_ticket(
            service, pgturl, user=st.user, granted_by_st=st)
    return st, attributes, pgt
Validate a service ticket string . Return a triplet containing a ServiceTicket and an optional ProxyGrantingTicket or a ValidationError if ticket validation failed .
49,190
def validate_proxy_ticket(service, ticket, pgturl=None):
    """Validate a proxy ticket string.

    Returns ``(ProxyTicket, attributes, ProxyGrantingTicket or None,
    proxies)`` where ``proxies`` lists the services through which
    authentication proceeded, most recent first.
    """
    logger.debug("Proxy validation request received for %s" % ticket)
    pt = ProxyTicket.objects.validate_ticket(ticket, service)
    attributes = get_attributes(pt.user, pt.service)
    # Walk the chain of granting tickets to collect the proxy path.
    proxies = [pt.service]
    prior_pt = pt.granted_by_pgt.granted_by_pt
    while prior_pt:
        proxies.append(prior_pt.service)
        prior_pt = prior_pt.granted_by_pgt.granted_by_pt
    if pgturl is not None:
        logger.debug("Proxy-granting ticket request received for %s" % pgturl)
        pgt = ProxyGrantingTicket.objects.create_ticket(service, pgturl,
                                                        user=pt.user,
                                                        granted_by_pt=pt)
    else:
        pgt = None
    return pt, attributes, pgt, proxies
Validate a proxy ticket string . Return a 4 - tuple containing a ProxyTicket an optional ProxyGrantingTicket and a list of proxies through which authentication proceeded or a ValidationError if ticket validation failed .
49,191
def validate_proxy_granting_ticket(pgt, target_service):
    """Exchange a proxy-granting ticket string for a new ProxyTicket
    bound to *target_service*; raises a validation error on failure."""
    logger.debug("Proxy ticket request received for %s using %s" % (target_service, pgt))
    granting = ProxyGrantingTicket.objects.validate_ticket(pgt, target_service)
    return ProxyTicket.objects.create_ticket(service=target_service,
                                             user=granting.user,
                                             granted_by_pgt=granting)
Validate a proxy granting ticket string . Return an ordered pair containing a ProxyTicket or a ValidationError if ticket validation failed .
49,192
def get_attributes(user, service):
    """Merge the results of every configured attribute callback for the
    user/service pair into a single dict (later callbacks win)."""
    merged = {}
    for dotted_path in get_callbacks(service):
        callback_fn = import_string(dotted_path)
        merged.update(callback_fn(user, service))
    return merged
Return a dictionary of user attributes from the set of configured callback functions .
49,193
def logout_user(request):
    """End the current user's single sign-on session: consume every
    issued ticket, send single-logout requests, then log out."""
    logger.debug("Logout request received for %s" % request.user)
    if is_authenticated(request.user):
        for manager in (ServiceTicket.objects, ProxyTicket.objects,
                        ProxyGrantingTicket.objects):
            manager.consume_tickets(request.user)
        ServiceTicket.objects.request_sign_out(request.user)
        logger.info("Single sign-on session ended for %s" % request.user)
        logout(request)
        messages.success(request, _('You have been successfully logged out'))
End a single sign - on session for the current user .
49,194
def user_name_attributes(user, service):
    """Return the user's name-related fields and methods as a dict."""
    return {
        'username': user.get_username(),
        'full_name': user.get_full_name(),
        'short_name': user.get_short_name(),
    }
Return all available user name related fields and methods .
49,195
def user_model_attributes(user, service):
    """Return every model field on the user object except the ones in
    the ignore list (id and password)."""
    skipped = ('id', 'password')
    return dict((field.name, getattr(user, field.name))
                for field in user._meta.fields
                if field.name not in skipped)
Return all fields on the user object that are not in the list of fields to ignore .
49,196
def add_query_params(url, params):
    """Return *url* with *params* merged into its query string.

    Existing parameters of the same name are overwritten; parameters
    with empty values are ignored.
    """
    def encode(value):
        return force_bytes(value, settings.DEFAULT_CHARSET)

    cleaned = dict([(encode(k), encode(v)) for k, v in params.items() if v])
    pieces = list(urlparse(url))
    merged = dict(parse_qsl(pieces[4]))
    merged.update(cleaned)
    pieces[4] = urlencode(merged)
    return urlunparse(pieces)
Inject additional query parameters into an existing URL . If parameters already exist with the same name they will be overwritten . Parameters with empty values are ignored . Return the modified URL as a string .
49,197
def match_service(service1, service2):
    """Return True when the two service URLs share scheme, netloc
    (hostname plus optional port) and path."""
    parsed1, parsed2 = urlparse(service1), urlparse(service2)
    try:
        return (parsed1.scheme == parsed2.scheme
                and parsed1.netloc == parsed2.netloc
                and parsed1.path == parsed2.path)
    except ValueError:
        return False
Compare two service URLs . Return True if the scheme hostname optional port and path match .
49,198
def redirect(to, *args, **kwargs):
    """Django-redirect-like shortcut with altered behavior.

    Resolves *to* as a view name when possible; otherwise bare names
    fall back to the login view and full URLs must be allowed services
    (else PermissionDenied).  An optional ``params`` dict is injected
    as query parameters on the final URL.
    """
    params = kwargs.pop('params', {})
    try:
        target = reverse(to, args=args, kwargs=kwargs)
    except NoReverseMatch:
        if '/' not in to and '.' not in to:
            # Looks like a (missing) view name: go to the login view.
            target = reverse('cas_login')
        elif not service_allowed(to):
            raise PermissionDenied()
        else:
            target = to
    if params:
        target = add_query_params(target, params)
    logger.debug("Redirecting to %s" % target)
    return HttpResponseRedirect(target)
Similar to the Django redirect shortcut but with altered functionality . If an optional params argument is provided the dictionary items will be injected as query parameters on the redirection URL .
49,199
def get_config(self, service, setting):
    """Look up *setting* for *service*; fall back to the backend's
    ``<setting>_DEFAULT`` attribute when the service or key is missing."""
    try:
        service_conf = self.get_service(service)
        value = service_conf[setting]
    except KeyError:
        value = getattr(self, setting + '_DEFAULT')
    return value
Access the configuration for a given service and setting . If the service is not found return a default value .