idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
8,100 | def baby_names ( max_length = 15 ) : names = [ ] lengths = [ ] targets = [ ] with open ( os . path . join ( os . path . dirname ( sys . modules [ __name__ ] . __file__ ) , 'baby_names.csv' ) , 'rb' ) as f : first = True for l in csv . reader ( f , delimiter = ',' ) : if first : first = False continue assert len ( l ) =... | Opens the baby_names csv file and produces numpy array . |
8,101 | def reshape_data ( tensor , per_example_length = 1 ) : dims = [ 1 , 0 ] for i in xrange ( 2 , tensor . get_shape ( ) . ndims ) : dims . append ( i ) return pt . wrap ( tf . transpose ( tensor , dims ) ) . reshape ( [ - 1 , per_example_length ] ) | Reshapes input so that it is appropriate for sequence_lstm .. |
8,102 | def batch_normalize_with_arguments ( x , arguments ) : x = prettytensor . wrap ( x ) if isinstance ( arguments , bool ) : if arguments : return x . batch_normalize ( ) else : return x kwargs = arguments . _asdict ( ) defaults = prettytensor . _defaults for arg in ( 'learned_moments_update_rate' , 'variance_epsilon' , '... | Applies batch normalization to x as specified in arguments . |
8,103 | def multilayer_fully_connected ( images , labels ) : images = pt . wrap ( images ) with pt . defaults_scope ( activation_fn = tf . nn . relu , l2loss = 0.00001 ) : return ( images . flatten ( ) . fully_connected ( 100 ) . fully_connected ( 100 ) . softmax_classifier ( 10 , labels ) ) | Creates a multi layer network of fully_connected layers . |
8,104 | def lenet5 ( images , labels ) : images = pt . wrap ( images ) with pt . defaults_scope ( activation_fn = tf . nn . relu , l2loss = 0.00001 ) : return ( images . conv2d ( 5 , 20 ) . max_pool ( 2 , 2 ) . conv2d ( 5 , 50 ) . max_pool ( 2 , 2 ) . flatten ( ) . fully_connected ( 500 ) . softmax_classifier ( 10 , labels ) ) | Creates a multi layer convolutional network . |
8,105 | def _full_map ( self ) : result = { } if self . _parent : result . update ( self . _parent ) result . update ( self . _map ) return result | Creates a full mapping of this and all parent key value pairs . |
8,106 | def var_and_name_scope ( names ) : if not names : yield None , None else : name , var_scope = names with tf . name_scope ( name ) as scope : old_vs = tf . get_variable_scope ( ) if var_scope is None : count = len ( name . split ( '/' ) ) scoped_name = '/' . join ( scope . split ( '/' ) [ - count - 1 : - 1 ] ) full_name... | Creates a variable scope and a name scope . |
8,107 | def get_current_name_scope ( ) : g = tf . get_default_graph ( ) if isinstance ( g . _name_stack , tuple ) : return g . _name_stack [ 0 ] + '/' else : return g . _name_stack + '/' | Gets the current name scope . |
8,108 | def make_template ( name , func , * args , ** kwargs ) : if args or kwargs : func = functools . partial ( func , * args , ** kwargs ) return Template ( name , func ) | Given an arbitrary function wrap it so that it does parameter sharing . |
8,109 | def skip_common_stack_elements ( stacktrace , base_case ) : for i , ( trace , base ) in enumerate ( zip ( stacktrace , base_case ) ) : if trace != base : return stacktrace [ i : ] return stacktrace [ - 1 : ] | Skips items that the target stacktrace shares with the base stacktrace . |
8,110 | def create_model ( text_in , timesteps , phase ) : with pt . defaults_scope ( activation_fn = tf . nn . relu , l2loss = 0.00001 ) : with tf . device ( '/cpu:0' ) : embedded = text_in . embedding_lookup ( CHARS , [ EMBEDDING_SIZE ] ) lstm = ( embedded . cleave_sequence ( timesteps ) . sequence_lstm ( LOWER ) . sequence_... | Creates a 2 layer LSTM model with dropout . |
8,111 | def sample ( input_placeholder , logits , seed = None , max_length = 1024 , temperature = 1.0 ) : assert temperature > 0 , 'Temperature must be greater than 0.' if not seed : seed = chr ( ord ( 'A' ) + random . randint ( 0 , 25 ) ) result = '' recurrent_runner = pt . train . RecurrentRunner ( ) recurrent_runner . reset... | Samples from the LSTM model . |
8,112 | def reshape ( input_layer , shape_spec ) : old_shape = input_layer . get_shape ( ) . as_list ( ) try : new_shape = _infer_unknown_dims ( old_shape , shape_spec ) except TypeError : return tf . reshape ( input_layer , shape_spec ) reshape_tensor = [ ] runner = [ ] for i , s in enumerate ( new_shape ) : if s is DIM_SAME ... | Reshapes this tensor to the given spec . |
8,113 | def flatten ( input_layer , preserve_batch = True ) : if preserve_batch : return reshape ( input_layer , [ DIM_SAME , - 1 ] ) else : return reshape ( input_layer , [ - 1 ] ) | Flattens this . |
8,114 | def stop_gradient ( input_layer ) : if input_layer . is_sequence ( ) : result = [ tf . stop_gradient ( t ) for t in input_layer . sequence ] return input_layer . with_sequence ( result ) else : return tf . stop_gradient ( input_layer ) | Cuts off the gradient at this point . |
8,115 | def dropout ( input_layer , keep_prob , phase = Phase . train , name = PROVIDED ) : if phase == Phase . train : return tf . nn . dropout ( input_layer , keep_prob , name = name ) else : return input_layer | Applies dropout if this is in the train phase . |
8,116 | def apply_with_summary ( input_layer , operation , * op_args , ** op_kwargs ) : return layers . apply_activation ( input_layer . bookkeeper , input_layer . tensor , operation , activation_args = op_args , activation_kwargs = op_kwargs ) | Applies the given operation to input_layer and create a summary . |
8,117 | def _rapply ( input_layer , operation , * op_args , ** op_kwargs ) : op_args = list ( op_args ) op_args . append ( input_layer . tensor ) return input_layer . with_tensor ( operation ( * op_args , ** op_kwargs ) ) | Applies the given operation to this after expanding op_args . |
8,118 | def apply_op ( input_layer , operation , * op_args , ** op_kwargs ) : return input_layer . with_tensor ( operation ( input_layer . tensor , * op_args , ** op_kwargs ) ) | Applies the given operation to this without adding any summaries . |
8,119 | def join ( input_layer , others , include_self = True , join_function = None ) : if include_self : list_of_tensors = [ input_layer ] list_of_tensors . extend ( others ) else : list_of_tensors = others return prettytensor . join_pretty_tensors ( list_of_tensors , input_layer , join_function ) | Joins the provided PrettyTensors with this using the join function . |
8,120 | def unzip ( input_layer , split_dim = 0 , num_splits = 2 ) : shape = input_layer . shape _check_split_dims ( num_splits , split_dim , shape ) splits = functions . unzip ( input_layer , split_dim , shape [ split_dim ] , num_splits ) return input_layer . with_sequence ( splits ) | Unzips this Tensor along the split_dim into num_splits Equal chunks . |
8,121 | def concat ( input_layer , concat_dim , other_tensors = None ) : if input_layer . is_sequence ( ) : all_tensors = input_layer . sequence all_tensors . extend ( other_tensors or [ ] ) else : all_tensors = [ input_layer ] if other_tensors is None : raise ValueError ( 'Other Tensors must be supplied.' ) all_tensors . exte... | Concatenates input PrettyTensor with other_tensors along the specified dim . |
8,122 | def split ( input_layer , split_dim = 0 , num_splits = 2 ) : shape = input_layer . shape _check_split_dims ( num_splits , split_dim , shape ) splits = tf . split ( value = input_layer , num_or_size_splits = num_splits , axis = split_dim ) return input_layer . with_sequence ( splits ) | Splits this Tensor along the split_dim into num_splits Equal chunks . |
8,123 | def _zip_with_scalars ( args ) : zipped = [ ] for arg in args : if isinstance ( arg , prettytensor . PrettyTensor ) : zipped . append ( arg if arg . is_sequence ( ) else itertools . repeat ( arg ) ) elif ( isinstance ( arg , collections . Sequence ) and not isinstance ( arg , tf . compat . bytes_or_text_types ) ) : zip... | Zips across args in order and replaces non - iterables with repeats . |
8,124 | def map_ ( input_layer , fn ) : if not input_layer . is_sequence ( ) : raise ValueError ( 'Can only map a sequence.' ) return [ fn ( x ) for x in input_layer ] | Maps the given function across this sequence . |
8,125 | def _map_or_apply ( input_layer , op , * args , ** kwargs ) : kwargs . pop ( 'name' ) right = kwargs . pop ( 'right_' , False ) if input_layer . is_sequence ( ) : if right : args += ( input_layer , ) else : args = ( ( input_layer , ) + args ) result = [ op ( * x , ** kwargs ) for x in _zip_with_scalars ( args ) ] if le... | Map op across the input if it is a sequence ; otherwise apply it . |
8,126 | def feed_numpy ( batch_size , * arrays ) : if not arrays : raise ValueError ( 'Arrays cannot be empty.' ) size = len ( arrays [ 0 ] ) for a in arrays : if size != len ( a ) : raise ValueError ( 'All arrays must be the same size.' ) count = int ( size / batch_size ) for i in xrange ( count ) : start = i * batch_size end... | Given a set of numpy arrays produce slices of batch_size . |
8,127 | def batch ( input_iter , batch_size = 32 ) : input_iter = iter ( input_iter ) next_ = list ( itertools . islice ( input_iter , batch_size ) ) while next_ : yield next_ next_ = list ( itertools . islice ( input_iter , batch_size ) ) | Batches data from an iterator that returns single items at a time . |
8,128 | def slice_constant ( data , batch_size = 32 , name = 'constant_data' , global_step = None ) : with tf . name_scope ( name ) : all_data = tf . convert_to_tensor ( data ) global_step = global_step or bookkeeper . global_step ( ) count = len ( data ) / batch_size extra = len ( data ) - count * batch_size if extra : offset... | Provide a slice based on the global_step . |
8,129 | def session ( self , master = '' , config = None ) : session_manager = SESSION_MANAGER_FACTORY ( ) with session_manager . prepare_session ( master , None , config = config , init_fn = lambda _ : None ) as sess : try : yield sess finally : self . stop_queues ( ) | Takes care of starting any local servers and stopping queues on exit . |
8,130 | def prepare_model ( self , sess , allow_initialize = True ) : if self . _follower : self . wait_for_initialization ( ) else : self . _init_model ( sess , allow_initialize ) if sess is not self . _sess : if self . threads : raise ValueError ( 'You must call stop_queues() before ' 'starting a new session with QueueRunner... | Initialize the model and if necessary launch the queue runners . |
8,131 | def load_from_checkpoint ( self , sess , latest_filename = None ) : self . _create_initializers ( ) if self . _save_path : ckpt = tf . train . get_checkpoint_state ( os . path . dirname ( self . _save_path ) , latest_filename ) if ckpt and ckpt . all_model_checkpoint_paths : self . _saver = tf . train . Saver ( saver_d... | Loads the model from the most recent checkpoint . |
8,132 | def run_model ( self , op_list , num_steps , feed_vars = ( ) , feed_data = None , print_every = 100 , allow_initialize = True ) : feed_data = feed_data or itertools . repeat ( ( ) ) ops = [ bookkeeper . global_step ( ) ] ops . extend ( op_list ) sess = tf . get_default_session ( ) self . prepare_model ( sess , allow_in... | Runs op_list for num_steps . |
8,133 | def train_model ( self , train_op , cost_to_log , num_steps , feed_vars = ( ) , feed_data = None , print_every = 100 ) : costs = [ train_op ] if ( isinstance ( cost_to_log , collections . Sequence ) and not isinstance ( cost_to_log , six . string_types ) ) : costs . extend ( cost_to_log ) else : costs . append ( cost_t... | Trains the given model . |
8,134 | def evaluate_model ( self , accuracy , num_steps , feed_vars = ( ) , feed_data = None , summary_tag = None , print_every = 0 ) : if not hasattr ( self , '_saver' ) : raise ValueError ( 'Before evaluating, you must initialize the model with ' 'load_from_checkpoint, prepare or saver.' ) self . _run_init_test_vars_op ( ) ... | Evaluates the given model . |
8,135 | def add_summaries ( self , step , * tags_and_values ) : values = [ ] to_print = [ ] for tag , value in tags_and_values : values . append ( tf . Summary . Value ( tag = tag , simple_value = float ( value ) ) ) to_print . append ( '%s=%g' % ( tag , value ) ) if self . _summary_writer : summary = tf . Summary ( value = va... | Adds summaries to the writer and prints a log statement . |
8,136 | def load_new_checkpoint_when_available ( self , sess , current_checkpoint , sleep_seconds = 10 ) : while True : next_checkpoint = self . load_from_checkpoint ( sess ) if not next_checkpoint or next_checkpoint == current_checkpoint : print ( 'Model not yet available, sleeping for %d seconds: ' 'path %s; found: %s' % ( s... | Waits for a new checkpoint to be available and then loads it . |
8,137 | def evaluate_repeatedly ( self , accuracy , num_steps , feed_vars = ( ) , feed_data = None , summary_tag = None , evaluation_times = - 1 ) : current_checkpoint = None try : for i in itertools . count ( 0 ) : with self . session ( ) as sess : current_checkpoint = self . load_new_checkpoint_when_available ( sess , curren... | Runs the evaluation in a loop for evaluation_times . |
8,138 | def create_model ( text_in , labels , timesteps , per_example_weights , phase = pt . Phase . train ) : with pt . defaults_scope ( phase = phase , l2loss = 0.00001 ) : with tf . device ( '/cpu:0' ) : embedded = text_in . embedding_lookup ( CHARS , [ EMBEDDING_SIZE ] ) lstm = ( embedded . cleave_sequence ( timesteps ) . ... | Creates a model for running baby names . |
8,139 | def for_default_graph ( * args , ** kwargs ) : graph = tf . get_default_graph ( ) collection = graph . get_collection ( _BOOKKEEPER ) if collection : if args or kwargs : raise ValueError ( 'Requesting construction of a BookKeeper that already ' 'exists: %s %s' % ( args , kwargs ) ) return collection [ 0 ] else : books ... | Creates a bookkeeper for the default graph . |
8,140 | def for_new_graph ( * args , ** kwargs ) : graph = tf . Graph ( ) with graph . as_default ( ) : return for_default_graph ( * args , ** kwargs ) | Creates a Bookkeeper for a new graph . |
8,141 | def regroup_if_changed ( group , op_list , name = None ) : has_deltas = isinstance ( op_list , sequence_with_deltas . SequenceWithDeltas ) if ( group is None or len ( group . control_inputs ) != len ( op_list ) or ( has_deltas and op_list . has_changed ( ) ) ) : if has_deltas : op_list . mark ( ) if op_list : return tf... | Creates a new group for op_list if it has changed . |
8,142 | def apply_optimizer ( optimizer , losses , regularize = True , include_marked = True , clip_gradients_by_norm = None , ** kwargs ) : books = for_default_graph ( ) g_step = kwargs . pop ( 'global_step' , books . global_step ) total_loss = books . create_composite_loss ( losses = losses , regularize = regularize , includ... | Apply an optimizer to the graph and returns a train_op . |
8,143 | def _add_global_counter ( self ) : assert self . _global_step is None with self . g . as_default ( ) , self . g . name_scope ( None ) : try : self . _global_step = self . g . get_tensor_by_name ( 'global_step:0' ) except KeyError : self . _global_step = tf . Variable ( 0 , name = 'global_step' , trainable = False ) | Adds a global counter called once for setup by |
8,144 | def add_scalar_summary ( self , x , tag = None ) : if not self . summary_collections : return with self . g . as_default ( ) : tag = tag or _tag_for ( x . name ) summary = ( tf . summary . scalar ( tag , x , collections = self . summary_collections ) ) return summary | Adds a scalar summary for x . |
8,145 | def add_histogram_summary ( self , x , tag = None ) : if not self . summary_collections : return with self . g . as_default ( ) : tag = tag or _tag_for ( x . name ) summary = tf . summary . histogram ( tag , x , collections = self . summary_collections ) return summary | Add a summary operation to visualize the histogram of x s values . |
8,146 | def exponential_moving_average ( self , var , avg_var = None , decay = 0.999 , ignore_nan = False ) : with self . _g . as_default ( ) : if decay < 0 or decay >= 1.0 : raise ValueError ( 'Decay is %5.2f, but has to be in [0, 1).' % decay ) if avg_var is None : avg_name = '%s_average' % _bare_var_name ( var ) with tf . c... | Calculates the exponential moving average . |
8,147 | def add_average_summary ( self , var , tag = None , decay = 0.999 , ignore_nan = True ) : if not self . summary_collections : return with self . g . as_default ( ) : if decay < 0.9 or decay >= 1.0 : raise ValueError ( 'Decay is %5.2f, but has to be in [0, 1).' % decay ) avg_var = self . exponential_moving_average ( var... | Add a summary with the moving average of var . |
8,148 | def add_loss ( self , loss , name = None , regularization = False , add_summaries = True ) : _ = name if regularization : self . _g . add_to_collection ( GraphKeys . REGULARIZATION_LOSSES , loss ) tf . add_to_collection ( GraphKeys . LOSSES , loss ) if add_summaries : self . add_scalar_summary ( loss , 'loss' ) self . ... | Append a loss to the total loss for the network . |
8,149 | def add_state ( self , state_name , initial_state , batch_size = None ) : state_shape = initial_state . get_shape ( ) . as_list ( ) full_shape = [ batch_size ] + state_shape if not batch_size : shape_proto = self . _as_shape_proto ( [ 0 ] + state_shape ) batch_size = 1 else : shape_proto = self . _as_shape_proto ( [ ba... | Adds a state to the state saver . |
8,150 | def to_dense_one_hot ( labels , class_count ) : if not isinstance ( class_count , tf . compat . integral_types ) : raise TypeError ( 'class_count must be an integer type.' ) if labels . dtype . base_dtype not in ( tf . int32 , tf . int64 ) : raise TypeError ( 'Labels must be an integer: %s' % labels . dtype ) if labels... | Converts a vector that specified one - hot per batch into a dense version . |
8,151 | def _convert_and_assert_per_example_weights_compatible ( input_ , per_example_weights , dtype ) : per_example_weights = tf . convert_to_tensor ( per_example_weights , name = 'per_example_weights' , dtype = dtype ) if input_ . get_shape ( ) . ndims : expected_length = input_ . get_shape ( ) . dims [ 0 ] message = ( 'per... | Converts per_example_weights to a tensor and validates the shape . |
8,152 | def apply_regression ( input_ , regression_fn , target , regression_args = ( ) , regression_kwargs = None , name = PROVIDED , loss_weight = None , per_example_weights = None ) : if regression_kwargs is None : regression_kwargs = { } if name is not None and 'name' not in regression_kwargs : regression_kwargs [ 'name' ] ... | Applies the given regression and adds the loss to the bookkeeper . |
8,153 | def binary_cross_entropy_with_logits ( input_ , target , name = PROVIDED , loss_weight = None , per_example_weights = None , per_output_weights = None ) : if target is None : raise ValueError ( 'target must be set' ) target = _convert_and_assert_tensors_compatible ( input_ , target ) with tf . name_scope ( 'stats' ) : ... | Calculates the binary cross entropy of the input_ vs inputs . |
8,154 | def softmax_classifier_with_sampled_loss ( inputs , num_classes , labels , num_sampled , num_true = None , sampled_values = None , remove_accidental_hits = True , loss_weight = None , per_example_weights = None , weights = None , bias = tf . zeros_initializer ( ) , parameter_modifier = parameters . identity , name = 's... | Applies softmax and if labels is not None then it adds a sampled loss . |
8,155 | def softmax_classifier ( input_ , num_classes , labels = None , loss_weight = None , per_example_weights = None , weights = None , bias = tf . zeros_initializer ( ) , parameter_modifier = parameters . identity , name = PROVIDED ) : full = input_ . fully_connected ( num_classes , activation_fn = None , name = name , wei... | Creates a fully - connected linear layer followed by a softmax . |
8,156 | def softmax ( input_ , labels = None , name = PROVIDED , loss_weight = None , per_example_weights = None ) : if labels is not None : full = input_ . as_layer ( ) return SoftmaxResult ( input_ . softmax_activation ( ) , full . cross_entropy ( labels , name = name , loss_weight = loss_weight , per_example_weights = per_e... | Applies softmax and if labels is not None then it also adds a loss . |
8,157 | def evaluate_precision_recall ( input_ , labels , threshold = 0.5 , per_example_weights = None , name = PROVIDED , phase = Phase . train ) : _ = name selected , sum_retrieved , sum_relevant = _compute_precision_recall ( input_ , labels , threshold , per_example_weights ) if phase != Phase . train : dtype = tf . float32... | Computes the precision and recall of the prediction vs the labels . |
8,158 | def _eval_metric ( input_ , topk , correct_predictions , examples , phase ) : my_parameters = { } if phase in ( Phase . test , Phase . infer ) : dtype = tf . float32 count = tf . Variable ( tf . constant ( 0 , dtype = dtype ) , name = 'count_%d' % topk , collections = [ bookkeeper . GraphKeys . TEST_VARIABLES ] , train... | Creates the standard tracking variables if in test and returns accuracy . |
8,159 | def _compute_precision_recall ( input_ , labels , threshold , per_example_weights ) : labels . get_shape ( ) . assert_is_compatible_with ( input_ . get_shape ( ) ) relevant = tf . to_float ( tf . greater ( labels , 0 ) ) retrieved = tf . to_float ( tf . greater ( input_ , threshold ) ) selected = relevant * retrieved i... | Returns the numerator of both the denominator of precision and recall . |
8,160 | def unroll_state_saver ( input_layer , name , state_shapes , template , lengths = None ) : state_saver = input_layer . bookkeeper . recurrent_state state_names = [ STATE_NAME % name + '_%d' % i for i in xrange ( len ( state_shapes ) ) ] if hasattr ( state_saver , 'add_state' ) : for state_name , state_shape in zip ( st... | Unrolls the given function with state taken from the state saver . |
8,161 | def cleave_sequence ( input_layer , unroll = None ) : if unroll is None : raise ValueError ( 'You must set unroll either here or in the defaults.' ) shape = input_layer . shape if shape [ 0 ] is not None and shape [ 0 ] % unroll != 0 : raise ValueError ( 'Must divide the split dimension evenly: %d mod %d != 0' % ( shap... | Cleaves a tensor into a sequence this is the inverse of squash . |
8,162 | def create_sequence_pretty_tensor ( sequence_input , shape = None , save_state = True ) : inputs = prettytensor . wrap_sequence ( sequence_input . inputs , tensor_shape = shape ) targets = prettytensor . wrap_sequence ( sequence_input . targets ) if save_state : bookkeeper . set_recurrent_state_saver ( sequence_input )... | Creates a PrettyTensor object for the given sequence . |
8,163 | def flatten ( self ) : ls = [ self . output ] ls . extend ( self . state ) return ls | Create a flattened version by putting output first and then states . |
8,164 | def run ( self , fetch_list , feed_dict = None , sess = None ) : if tf . get_default_graph ( ) != self . _graph : raise ValueError ( 'The current default graph is different from the graph' ' used at construction time of RecurrentRunner.' ) if feed_dict is None : all_feeds_dict = { } else : all_feeds_dict = dict ( feed_... | Runs the graph with the provided feeds and fetches . |
8,165 | def __get_vpc_info ( self , ifarr ) : if ( self . vpc_vbtbl == None ) : self . vpc_vbtbl = self . snmpobj . get_bulk ( OID_VPC_PEERLINK_IF ) if ( ( self . vpc_vbtbl == None ) | ( len ( self . vpc_vbtbl ) == 0 ) ) : return ( None , None ) domain = natlas_snmp . get_last_oid_token ( self . vpc_vbtbl [ 0 ] [ 0 ] [ 0 ] ) i... | If VPC is enabled Return the VPC domain and interface name of the VPC peerlink . |
8,166 | def get_macs ( self , ip , display_progress ) : if ( ip == '0.0.0.0' ) : return None ret_macs = [ ] snmpobj = natlas_snmp ( ip ) if ( snmpobj . get_cred ( self . config . snmp_creds ) == 0 ) : return None system_name = util . shorten_host_name ( snmpobj . get_val ( OID_SYSNAME ) , self . config . host_domains ) vlan_vb... | Return array of MAC addresses from single node at IP |
8,167 | def get_macs_for_vlan ( self , ip , vlan , display_progress = 0 , snmpobj = None , system_name = None , ifname_vbtbl = None ) : ret_macs = [ ] if ( snmpobj == None ) : snmpobj = natlas_snmp ( ip ) if ( snmpobj . get_cred ( self . config . snmp_creds ) == 0 ) : return None if ( ifname_vbtbl == None ) : ifname_vbtbl = sn... | Return array of MAC addresses for a single VLAN from a single node at an IP |
8,168 | def mac_hex_to_ascii ( mac_hex , inc_dots ) : v = mac_hex [ 2 : ] ret = '' for i in range ( 0 , len ( v ) , 4 ) : ret += v [ i : i + 4 ] if ( ( inc_dots ) & ( ( i + 4 ) < len ( v ) ) ) : ret += '.' return ret | Format a hex MAC string to ASCII |
8,169 | def get_switch_macs ( self , switch_ip = None , node = None , vlan = None , mac = None , port = None , verbose = 0 ) : if ( switch_ip == None ) : if ( node == None ) : raise Exception ( 'get_switch_macs() requires switch_ip or node parameter' ) return None switch_ip = node . get_ipaddr ( ) mac_obj = natlas_mac ( self .... | Get the CAM table from a switch . |
8,170 | def get_arp_table ( self , switch_ip , ip = None , mac = None , interf = None , arp_type = None ) : node = natlas_node ( switch_ip ) if ( node . try_snmp_creds ( self . config . snmp_creds ) == 0 ) : return [ ] arp = node . get_arp_table ( ) if ( arp == None ) : return [ ] if ( ( ip == None ) & ( mac == None ) & ( inte... | Get the ARP table from a switch . |
8,171 | def __query_node ( self , ip , host ) : host = util . shorten_host_name ( host , self . config . host_domains ) node , node_updated = self . __get_known_node ( ip , host ) if ( node == None ) : node = natlas_node ( ) node . name = host node . ip = [ ip ] state = NODE_NEW else : if ( node . snmpobj . success == 1 ) : re... | Query this node . Return node details and if we already knew about it or if this is a new node . Don t save the node to the known list just return info about it . |
8,172 | def __get_known_node ( self , ip , host ) : for ex in self . nodes : for exip in ex . ip : if ( exip == '0.0.0.0' ) : continue if ( exip == ip ) : return ( ex , 0 ) node = self . __get_known_node_by_host ( host ) if ( node != None ) : if ( ip not in node . ip ) : node . ip . append ( ip ) return ( node , 1 ) return ( n... | Look for known nodes by IP and HOST . If found by HOST add the IP if not already known . |
8,173 | def __get_known_node_by_host ( self , hostname ) : for n in self . nodes : if ( n . name == hostname ) : return n return None | Determine if the node is already known by hostname . If it is return it . |
8,174 | def create_file_and_add_volume ( self , runtime , volume , host_outdir_tgt , secret_store , tmpdir_prefix ) : if not host_outdir_tgt : tmp_dir , tmp_prefix = os . path . split ( tmpdir_prefix ) new_file = os . path . join ( tempfile . mkdtemp ( prefix = tmp_prefix , dir = tmp_dir ) , os . path . basename ( volume . res... | Create the file and add a mapping . |
8,175 | def add_volumes ( self , pathmapper , runtime , tmpdir_prefix , secret_store = None , any_path_okay = False ) : container_outdir = self . builder . outdir for key , vol in ( itm for itm in pathmapper . items ( ) if itm [ 1 ] . staged ) : host_outdir_tgt = None if vol . target . startswith ( container_outdir + "/" ) : h... | Append volume mappings to the runtime option list . |
8,176 | def docker_monitor ( self , cidfile , tmpdir_prefix , cleanup_cidfile , process ) : cid = None while cid is None : time . sleep ( 1 ) if process . returncode is not None : if cleanup_cidfile : os . remove ( cidfile ) return try : with open ( cidfile ) as cidhandle : cid = cidhandle . readline ( ) . strip ( ) except ( O... | Record memory usage of the running Docker container . |
8,177 | def adjustFiles ( rec , op ) : if isinstance ( rec , MutableMapping ) : if rec . get ( "class" ) == "File" : rec [ "path" ] = op ( rec [ "path" ] ) for d in rec : adjustFiles ( rec [ d ] , op ) if isinstance ( rec , MutableSequence ) : for d in rec : adjustFiles ( d , op ) | Apply a mapping function to each File path in the object rec . |
8,178 | def check_types ( srctype , sinktype , linkMerge , valueFrom ) : if valueFrom is not None : return "pass" if linkMerge is None : if can_assign_src_to_sink ( srctype , sinktype , strict = True ) : return "pass" if can_assign_src_to_sink ( srctype , sinktype , strict = False ) : return "warning" return "exception" if lin... | Check if the source and sink types are pass warning or exception . |
8,179 | def merge_flatten_type ( src ) : if isinstance ( src , MutableSequence ) : return [ merge_flatten_type ( t ) for t in src ] if isinstance ( src , MutableMapping ) and src . get ( "type" ) == "array" : return src return { "items" : src , "type" : "array" } | Return the merge flattened type of the source type |
8,180 | def can_assign_src_to_sink ( src , sink , strict = False ) : if src == "Any" or sink == "Any" : return True if isinstance ( src , MutableMapping ) and isinstance ( sink , MutableMapping ) : if sink . get ( "not_connected" ) and strict : return False if src [ "type" ] == "array" and sink [ "type" ] == "array" : return c... | Check for identical type specifications ignoring extra keys like inputBinding . |
8,181 | def _compare_records ( src , sink , strict = False ) : def _rec_fields ( rec ) : out = { } for field in rec [ "fields" ] : name = shortname ( field [ "name" ] ) out [ name ] = field [ "type" ] return out srcfields = _rec_fields ( src ) sinkfields = _rec_fields ( sink ) for key in six . iterkeys ( sinkfields ) : if ( no... | Compare two records ensuring they have compatible fields . |
8,182 | def check_all_types ( src_dict , sinks , sourceField ) : validation = { "warning" : [ ] , "exception" : [ ] } for sink in sinks : if sourceField in sink : valueFrom = sink . get ( "valueFrom" ) if isinstance ( sink [ sourceField ] , MutableSequence ) : srcs_of_sink = [ src_dict [ parm_id ] for parm_id in sink [ sourceF... | Given a list of sinks check if their types match with the types of their sources . |
8,183 | def output_callback ( self , out , process_status ) : self . final_status . append ( process_status ) self . final_output . append ( out ) | Collect the final status and outputs . |
8,184 | def _runner ( self , job , runtime_context ) : try : job . run ( runtime_context ) except WorkflowException as err : _logger . exception ( "Got workflow error" ) self . exceptions . append ( err ) except Exception as err : _logger . exception ( "Got workflow error" ) self . exceptions . append ( WorkflowException ( Tex... | Job running thread . |
8,185 | def run_job ( self , job , runtime_context ) : if job is not None : with self . pending_jobs_lock : self . pending_jobs . append ( job ) with self . pending_jobs_lock : n = 0 while ( n + 1 ) <= len ( self . pending_jobs ) : job = self . pending_jobs [ n ] if isinstance ( job , JobBase ) : if ( ( job . builder . resourc... | Execute a single Job in a separate thread . |
8,186 | def wait_for_next_completion ( self , runtime_context ) : if runtime_context . workflow_eval_lock is not None : runtime_context . workflow_eval_lock . wait ( ) if self . exceptions : raise self . exceptions [ 0 ] | Wait for jobs to finish . |
8,187 | def append_volume ( runtime , source , target , writable = False ) : runtime . append ( u"--volume={}:{}:{}" . format ( docker_windows_path_adjust ( source ) , target , "rw" if writable else "ro" ) ) | Add binding arguments to the runtime list . |
8,188 | def add_writable_file_volume ( self , runtime , volume , host_outdir_tgt , tmpdir_prefix ) : if self . inplace_update : self . append_volume ( runtime , volume . resolved , volume . target , writable = True ) else : if host_outdir_tgt : if not os . path . exists ( os . path . dirname ( host_outdir_tgt ) ) : os . makedi... | Append a writable file mapping to the runtime option list . |
8,189 | def add_writable_directory_volume ( self , runtime , volume , host_outdir_tgt , tmpdir_prefix ) : if volume . resolved . startswith ( "_:" ) : if not host_outdir_tgt : tmp_dir , tmp_prefix = os . path . split ( tmpdir_prefix ) new_dir = os . path . join ( tempfile . mkdtemp ( prefix = tmp_prefix , dir = tmp_dir ) , os ... | Append a writable directory mapping to the runtime option list . |
8,190 | def make ( self , cwl ) : load = load_tool . load_tool ( cwl , self . loading_context ) if isinstance ( load , int ) : raise Exception ( "Error loading tool" ) return Callable ( load , self ) | Instantiate a CWL object from a CWl document . |
8,191 | def realize_input_schema ( input_types , schema_defs ) : for index , entry in enumerate ( input_types ) : if isinstance ( entry , string_types ) : if '#' in entry : _ , input_type_name = entry . split ( '#' ) else : input_type_name = entry if input_type_name in schema_defs : entry = input_types [ index ] = schema_defs ... | Replace references to named typed with the actual types . |
8,192 | def generate_input_template ( tool ) : template = yaml . comments . CommentedMap ( ) for inp in realize_input_schema ( tool . tool [ "inputs" ] , tool . schemaDefs ) : name = shortname ( inp [ "id" ] ) value , comment = generate_example_input ( inp [ 'type' ] , inp . get ( 'default' , None ) ) template . insert ( 0 , n... | Generate an example input object for the given CWL process . |
8,193 | def make_relative ( base , obj ) : uri = obj . get ( "location" , obj . get ( "path" ) ) if ":" in uri . split ( "/" ) [ 0 ] and not uri . startswith ( "file://" ) : pass else : if uri . startswith ( "file://" ) : uri = uri_file_path ( uri ) obj [ "location" ] = os . path . relpath ( uri , base ) | Relativize the location URI of a File or Directory object . |
8,194 | def printdeps ( obj , document_loader , stdout , relative_deps , uri , basedir = None , nestdirs = True ) : deps = find_deps ( obj , document_loader , uri , basedir = basedir , nestdirs = nestdirs ) if relative_deps == "primary" : base = basedir if basedir else os . path . dirname ( uri_file_path ( str ( uri ) ) ) elif... | Print a JSON representation of the dependencies of the CWL document . |
8,195 | def find_deps ( obj , document_loader , uri , basedir = None , nestdirs = True ) : deps = { "class" : "File" , "location" : uri , "format" : CWL_IANA } def loadref ( base , uri ) : return document_loader . fetch ( document_loader . fetcher . urljoin ( base , uri ) ) sfs = scandeps ( basedir if basedir else uri , obj , ... | Find the dependencies of the CWL document . |
8,196 | def print_pack ( document_loader , processobj , uri , metadata ) : packed = pack ( document_loader , processobj , uri , metadata ) if len ( packed [ "$graph" ] ) > 1 : return json_dumps ( packed , indent = 4 ) return json_dumps ( packed [ "$graph" ] [ 0 ] , indent = 4 ) | Return a CWL serialization of the CWL document in JSON . |
8,197 | def find_default_container ( builder , default_container = None , use_biocontainers = None , ) : if not default_container and use_biocontainers : default_container = get_container_from_software_requirements ( use_biocontainers , builder ) return default_container | Default finder for default containers . |
8,198 | def run ( * args , ** kwargs ) : signal . signal ( signal . SIGTERM , _signal_handler ) try : sys . exit ( main ( * args , ** kwargs ) ) finally : _terminate_processes ( ) | Run cwltool . |
8,199 | def add ( self , value ) : if not isinstance ( value , string_types ) : raise Exception ( "Secret store only accepts strings" ) if value not in self . secrets : placeholder = "(secret-%s)" % Text ( uuid . uuid4 ( ) ) self . secrets [ placeholder ] = value return placeholder return value | Add the given value to the store . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.