idx: int64 (0-63k) · question: string (61-4.03k chars) · target: string (6-1.23k chars)
8,000
def itemgetter_handle(tokens):
    internal_assert(len(tokens) == 2, "invalid implicit itemgetter args", tokens)
    op, args = tokens
    if op == "[":
        return "_coconut.operator.itemgetter(" + args + ")"
    elif op == "$[":
        return "_coconut.functools.partial(_coconut_igetitem, index=" + args + ")"
    else:
        raise CoconutInternalException("invalid implicit itemgetter type", op)
Process implicit itemgetter partials.
8,001
def namelist_handle(tokens):
    if len(tokens) == 1:
        return tokens[0]
    elif len(tokens) == 2:
        return tokens[0] + "\n" + tokens[0] + " = " + tokens[1]
    else:
        raise CoconutInternalException("invalid in-line nonlocal / global tokens", tokens)
Process inline nonlocal and global statements.
8,002
def compose_item_handle(tokens):
    if len(tokens) < 1:
        raise CoconutInternalException("invalid function composition tokens", tokens)
    elif len(tokens) == 1:
        return tokens[0]
    else:
        return "_coconut_forward_compose(" + ", ".join(reversed(tokens)) + ")"
Process function composition.
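A quick sketch of the output for a hypothetical token list (assuming the handler is importable from the compiler module):

# Tokens for a three-function composition; the list is reversed so the
# rightmost function is applied first.
tokens = ["f", "g", "h"]
print(compose_item_handle(tokens))
# _coconut_forward_compose(h, g, f)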
8,003
def tco_return_handle(tokens):
    internal_assert(len(tokens) == 2, "invalid tail-call-optimizable return statement tokens", tokens)
    if tokens[1].startswith("()"):
        return "return _coconut_tail_call(" + tokens[0] + ")" + tokens[1][2:]
    else:
        return "return _coconut_tail_call(" + tokens[0] + ", " + tokens[1][1:]
Process tail-call-optimizable return statements.
8,004
def split_func_name_args_params_handle(tokens):
    internal_assert(len(tokens) == 2, "invalid function definition splitting tokens", tokens)
    func_name = tokens[0]
    func_args = []
    func_params = []
    for arg in tokens[1]:
        if len(arg) > 1 and arg[0] in ("*", "**"):
            func_args.append(arg[1])
        elif arg[0] != "*":
            func_args.append(arg[0])
        func_params.append("".join(arg))
    return [
        func_name,
        ", ".join(func_args),
        "(" + ", ".join(func_params) + ")",
    ]
Process splitting a function into name, params, and args.
8,005
def join_match_funcdef(tokens):
    if len(tokens) == 2:
        (func, insert_after_docstring), body = tokens
        docstring = None
    elif len(tokens) == 3:
        (func, insert_after_docstring), docstring, body = tokens
    else:
        raise CoconutInternalException("invalid docstring insertion tokens", tokens)
    insert_after_docstring, dedent = split_trailing_indent(insert_after_docstring)
    indent, body = split_leading_indent(body)
    indentation = collapse_indents(dedent + indent)
    return (
        func
        + (docstring if docstring is not None else "")
        + insert_after_docstring
        + indentation
        + body
    )
Join the pieces of a pattern-matching function together.
8,006
def where_stmt_handle(tokens):
    internal_assert(len(tokens) == 2, "invalid where statement tokens", tokens)
    base_stmt, assignment_stmts = tokens
    stmts = list(assignment_stmts) + [base_stmt]
    return "\n".join(stmts) + "\n"
Process a where statement.
8,007
def set_grammar_names():
    for varname, val in vars(Grammar).items():
        if isinstance(val, ParserElement):
            setattr(Grammar, varname, val.setName(varname))
Set names of grammar elements to their variable names.
8,008
def version(which="num"):
    if which in VERSIONS:
        return VERSIONS[which]
    else:
        raise CoconutException(
            "invalid version type " + ascii(which),
            extra="valid versions are " + ", ".join(VERSIONS),
        )
Get the Coconut version.
8,009
def parse(code="", mode="sys"):
    if CLI.comp is None:
        setup()
    if mode in PARSERS:
        return PARSERS[mode](CLI.comp)(code)
    else:
        raise CoconutException(
            "invalid parse mode " + ascii(mode),
            extra="valid modes are " + ", ".join(PARSERS),
        )
Compile Coconut code.
8,010
def auto_compilation(on=True):
    if on:
        if coconut_importer not in sys.meta_path:
            sys.meta_path.insert(0, coconut_importer)
    else:
        try:
            sys.meta_path.remove(coconut_importer)
        except ValueError:
            pass
Turn automatic compilation of Coconut files on or off.
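A minimal usage sketch; the module path and module name below are assumptions made for illustration:

from coconut.convenience import auto_compilation  # assumed location of this helper

auto_compilation(True)    # installs coconut_importer on sys.meta_path
import my_coconut_module  # hypothetical .coco module, compiled on import
auto_compilation(False)   # removes the importer again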
8,011
def find_module(self, fullname, path=None):
    basepaths = [""] + list(sys.path)
    if fullname.startswith("."):
        if path is None:
            return None
        fullname = fullname[1:]
        basepaths.insert(0, path)
    fullpath = os.path.join(*fullname.split("."))
    for head in basepaths:
        path = os.path.join(head, fullpath)
        filepath = path + self.ext
        dirpath = os.path.join(path, "__init__" + self.ext)
        if os.path.exists(filepath):
            self.run_compiler(filepath)
            return None
        if os.path.exists(dirpath):
            self.run_compiler(path)
            return None
    return None
Searches for a Coconut file of the given name and compiles it.
8,012
def report(self, morfs=None):
    units = None
    if hasattr(self, 'find_code_units'):
        self.find_code_units(morfs)
    else:
        units = self.find_file_reporters(morfs)
    if units is None:
        if hasattr(self, 'code_units'):
            units = self.code_units
        else:
            units = self.file_reporters
    for cu in units:
        try:
            analyzed = self.coverage._analyze(cu)
            self.parse_file(cu, analyzed)
        except NoSource:
            if not self.config.ignore_errors:
                log.warning('No source for %s', cu.filename)
        except NotPython:
            if cu.should_be_python() and not self.config.ignore_errors:
                log.warning('Source file is not python %s', cu.filename)
        except KeyError:
            cov3x = __version__[0] < 4
            cov40 = __version__[0] == 4 and __version__[1] < 1
            if cov3x or cov40:
                raise CoverallsException(
                    'Old (<4.1) versions of coverage.py do not work '
                    'consistently on new versions of Python. Please '
                    'upgrade your coverage.py.')
            raise
    return self.source_files
Generate part of the JSON report for Coveralls.
8,013
def get_arcs(analysis):
    if not analysis.has_arcs():
        return None
    branch_lines = analysis.branch_lines()
    branches = []
    for l1, l2 in analysis.arcs_executed():
        if l1 in branch_lines:
            branches.extend((l1, 0, abs(l2), 1))
    for l1, l2 in analysis.arcs_missing():
        if l1 in branch_lines:
            branches.extend((l1, 0, abs(l2), 0))
    return branches
Hit stats for each branch.
8,014
def parse_file(self, cu, analysis):
    if hasattr(analysis, 'parser'):
        filename = cu.file_locator.relative_filename(cu.filename)
        source_lines = analysis.parser.lines
        with cu.source_file() as source_file:
            source = source_file.read()
        try:
            if sys.version_info < (3, 0):
                encoding = source_encoding(source)
                if encoding != 'utf-8':
                    source = source.decode(encoding).encode('utf-8')
        except UnicodeDecodeError:
            log.warning('Source file %s can not be properly decoded, skipping. '
                        'Please check if encoding declaration is ok',
                        os.path.basename(cu.filename))
            return
    else:
        if hasattr(cu, 'relative_filename'):
            filename = cu.relative_filename()
        else:
            filename = analysis.coverage.file_locator.relative_filename(cu.filename)
        token_lines = analysis.file_reporter.source_token_lines()
        source_lines = list(enumerate(token_lines))
        source = analysis.file_reporter.source()
    coverage_lines = [self.get_hits(i, analysis)
                      for i in range(1, len(source_lines) + 1)]
    posix_filename = filename.replace(os.path.sep, '/')
    results = {
        'name': posix_filename,
        'source': source,
        'coverage': coverage_lines,
    }
    branches = self.get_arcs(analysis)
    if branches:
        results['branches'] = branches
    self.source_files.append(results)
Generate data for a single file.
8,015
def git_info():
    try:
        branch = (os.environ.get('APPVEYOR_REPO_BRANCH')
                  or os.environ.get('BUILDKITE_BRANCH')
                  or os.environ.get('CI_BRANCH')
                  or os.environ.get('CIRCLE_BRANCH')
                  or os.environ.get('GIT_BRANCH')
                  or os.environ.get('TRAVIS_BRANCH')
                  or os.environ.get('BRANCH_NAME')
                  or run_command('git', 'rev-parse', '--abbrev-ref', 'HEAD'))
        head = {
            'id': gitlog('%H'),
            'author_name': gitlog('%aN'),
            'author_email': gitlog('%ae'),
            'committer_name': gitlog('%cN'),
            'committer_email': gitlog('%ce'),
            'message': gitlog('%s'),
        }
        remotes = [{'name': line.split()[0], 'url': line.split()[1]}
                   for line in run_command('git', 'remote', '-v').splitlines()
                   if '(fetch)' in line]
    except (CoverallsException, EnvironmentError) as ex:
        # Fall back to environment variables when git itself is unavailable.
        branch = os.environ.get('GIT_BRANCH')
        head = {
            'id': os.environ.get('GIT_ID'),
            'author_name': os.environ.get('GIT_AUTHOR_NAME'),
            'author_email': os.environ.get('GIT_AUTHOR_EMAIL'),
            'committer_name': os.environ.get('GIT_COMMITTER_NAME'),
            'committer_email': os.environ.get('GIT_COMMITTER_EMAIL'),
            'message': os.environ.get('GIT_MESSAGE'),
        }
        remotes = [{'name': os.environ.get('GIT_REMOTE'),
                    'url': os.environ.get('GIT_URL')}]
        # `ex` is only defined here, so validate the fallback data inside the handler.
        if not all(head.values()):
            log.warning('Failed collecting git data. Are you running '
                        'coveralls inside a git repository? Is git installed?',
                        exc_info=ex)
            return {}
    return {'git': {'branch': branch, 'head': head, 'remotes': remotes}}
A hash of Git data that can be used to display more information to users.
8,016
def create_report(self):
    data = self.create_data()
    try:
        json_string = json.dumps(data)
    except UnicodeDecodeError as e:
        log.error('ERROR: While preparing JSON:', exc_info=e)
        self.debug_bad_encoding(data)
        raise
    log_string = re.sub(r'"repo_token": "(.+?)"',
                        '"repo_token": "[secure]"', json_string)
    log.debug(log_string)
    log.debug('==\nReporting %s files\n==\n', len(data['source_files']))
    for source_file in data['source_files']:
        log.debug('%s - %s/%s', source_file['name'],
                  sum(filter(None, source_file['coverage'])),
                  len(source_file['coverage']))
    return json_string
Generate a JSON-dumped report for the Coveralls API.
8,017
def save_report(self, file_path):
    try:
        report = self.create_report()
    except coverage.CoverageException as e:
        log.error('Failure to gather coverage:', exc_info=e)
    else:
        with open(file_path, 'w') as report_file:
            report_file.write(report)
Write coveralls report to file.
8,018
def create_data(self, extra=None):
    if self._data:
        return self._data
    self._data = {'source_files': self.get_coverage()}
    self._data.update(git_info())
    self._data.update(self.config)
    if extra:
        if 'source_files' in extra:
            self._data['source_files'].extend(extra['source_files'])
        else:
            log.warning('No data to be merged; does the json file contain '
                        '"source_files" data?')
    return self._data
Generate object for the API.
8,019
def debug_bad_encoding(data):
    at_fault_files = set()
    for source_file_data in data['source_files']:
        for value in source_file_data.values():
            try:
                json.dumps(value)
            except UnicodeDecodeError:
                at_fault_files.add(source_file_data['name'])
    if at_fault_files:
        log.error('HINT: Following files cannot be decoded properly into '
                  'unicode. Check their content: %s', ', '.join(at_fault_files))
Let's try to help the user figure out what is at fault.
8,020
def clean_params(params, drop_nones=True, recursive=True):
    cleaned = {}
    for key, value in six.iteritems(params):
        if drop_nones and value is None:
            continue
        if recursive and isinstance(value, dict):
            value = clean_params(value, drop_nones, recursive)
        cleaned[key] = value
    return cleaned
Clean up a dict of API parameters to be sent to the Coinbase API.
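A short usage sketch of the cleaning behavior (assuming clean_params is in scope; the parameter names here are hypothetical):

params = {'account': {'id': 'abc123', 'memo': None}, 'limit': 25, 'cursor': None}
print(clean_params(params))
# {'account': {'id': 'abc123'}, 'limit': 25}  -- Nones dropped at every level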
8,021
def encode_params(params, **kwargs):
    cleaned = clean_params(params, **kwargs)
    return json.dumps(cleaned)
Clean and JSON-encode a dict of parameters.
8,022
def check_uri_security(uri):
    if urlparse(uri).scheme != 'https':
        warning_message = (
            'WARNING: this client is sending a request to an insecure'
            ' API endpoint. Any API request you make may expose your API key and'
            ' secret to third parties. Consider using the default endpoint:\n\n'
            '  %s\n') % uri
        warnings.warn(warning_message, UserWarning)
    return uri
Warns if the URL is insecure.
8,023
def _build_session(self, auth_class, *args, **kwargs):
    session = requests.session()
    session.auth = auth_class(*args, **kwargs)
    session.headers.update({'CB-VERSION': self.API_VERSION,
                            'Accept': 'application/json',
                            'Content-Type': 'application/json',
                            'User-Agent': 'coinbase/python/2.0'})
    return session
Internal helper for creating a requests session with the correct authentication handling.
8,024
def _create_api_uri(self, *parts):
    return urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)))
Internal helper for creating fully qualified endpoint URIs.
8,025
def _request(self, method, *relative_path_parts, **kwargs):
    uri = self._create_api_uri(*relative_path_parts)
    data = kwargs.get('data', None)
    if data and isinstance(data, dict):
        kwargs['data'] = encode_params(data)
    if self.VERIFY_SSL:
        kwargs.setdefault('verify', COINBASE_CRT_PATH)
    else:
        kwargs.setdefault('verify', False)
    kwargs.update(verify=self.VERIFY_SSL)
    response = getattr(self.session, method)(uri, **kwargs)
    return self._handle_response(response)
Internal helper for creating HTTP requests to the Coinbase API.
8,026
def _handle_response(self, response):
    if not str(response.status_code).startswith('2'):
        raise build_api_error(response)
    return response
Internal helper for handling API responses from the Coinbase server.
8,027
def _get(self, *args, **kwargs):
    prev_data = kwargs.pop('prev_data', [])
    resp = self._request('get', *args, **kwargs)
    resp_content = resp._content
    if not resp_content:
        return resp
    # Track whether the body was bytes so it can be restored the same way.
    content_was_bytes = isinstance(resp_content, bytes)
    if content_was_bytes:
        resp_content = resp_content.decode('utf-8')
    content = json.loads(resp_content)
    if 'pagination' not in content:
        return resp
    page_info = content['pagination']
    if not page_info['next_uri']:
        content['data'].extend(prev_data)
        if content_was_bytes:
            resp._content = json.dumps(content).encode('utf-8')
        else:
            resp._content = json.dumps(content)
        return resp
    prev_data.extend(content['data'])
    next_page_id = page_info['next_uri'].split('=')[-1]
    kwargs.update({'prev_data': prev_data,
                   'params': {'starting_after': next_page_id}})
    return self._get(*args, **kwargs)
GET requests can be paginated; ensure we iterate through all the pages.
8,028
def _remove_non_methods():
    cur_module = sys.modules[__name__]
    my_globals = dict(globals())
    from prettytensor.pretty_tensor_class import PrettyTensor
    for name, _ in six.iteritems(my_globals):
        if not hasattr(PrettyTensor, name):
            delattr(cur_module, name)
    if hasattr(cur_module, 'bookkeeper'):
        delattr(cur_module, 'bookkeeper')
Removes any object in the module dict that is not a registered method.
8,029
def regularizer(name, regularization_fn, name_filter='weights'):
    regex = re.compile(name_filter)

    def fn(var_name, variable, phase):
        if phase is pt.Phase.train and regex.search(var_name):
            with tf.name_scope(None, name, [variable]):
                loss = regularization_fn(variable)
                if loss is not None:
                    tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, loss)
        return variable

    return fn
Wraps a regularizer in a parameter-function.
8,030
def l2_regularizer(decay, name_filter='weights'):
    return regularizer('l2_regularizer',
                       lambda x: tf.nn.l2_loss(x) * decay,
                       name_filter=name_filter)
Create an l2 regularizer.
8,031
def l1_regularizer(decay, name_filter='weights'):
    return regularizer('l1_regularizer',
                       lambda x: tf.reduce_sum(tf.abs(x)) * decay,
                       name_filter=name_filter)
Create an l1 regularizer.
8,032
def compose(*parameter_functions):
    def composed_fn(var_name, variable, phase):
        for fn in parameter_functions:
            variable = fn(var_name, variable, phase)
        return variable
    return composed_fn
Composes multiple modification functions in order.
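A toy sketch of composing two parameter functions with the (var_name, variable, phase) signature used above; both functions here are hypothetical stand-ins:

def double(var_name, variable, phase):
    return variable * 2  # stand-in for a real modification

def add_one(var_name, variable, phase):
    return variable + 1

modifier = compose(double, add_one)
assert modifier('layer/weights', 3, 'train') == 7  # applied left to right: 3*2 + 1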
8,033
def l1_regression_loss(y, target, name=None):
    with tf.name_scope(name, 'l1_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return reduce_batch_sum(tf.abs(y - target), name=scope)
Calculates the sum of absolute errors between y and target.
8,034
def l2_regression_sq_loss(y, target, name=None):
    with tf.name_scope(name, 'l2_regression_sq', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return reduce_batch_sum(tf.square(y - target), name=scope)
Calculates the sum of squared errors between y and target.
8,035
def l2_regression_loss(y, target, name=None):
    with tf.name_scope(name, 'l2_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        return tf.sqrt(l2_regression_sq_loss(y, target, name=scope))
Calculates the square root of the SSE between y and target.
8,036
def cos_distance(t1, t2, epsilon=1e-12, name=None):
    with tf.name_scope(name, 'cos_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        x_inv_norm = tf.rsqrt(
            tf.maximum(length_squared(t1) * length_squared(t2), epsilon))
        return tf.subtract(1.0, dot_product(t1, t2) * x_inv_norm, name=scope)
Cosine distance between t1 and t2; caps the gradient of the square root.
8,037
def dot_distance(t1, t2, name=None):
    with tf.name_scope(name, 'dot_distance', [t1, t2]) as scope:
        return -dot_product(t1, t2, name=scope)
Dot distance between t1 and t2.
8,038
def l2_distance_sq(t1, t2, name=None):
    with tf.name_scope(name, 'l2_distance_sq', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        return length_squared(tf.subtract(t1, t2), name=scope)
Square of l2 distance between t1 and t2.
8,039
def l2_distance(t1, t2, epsilon=1e-12, name=None):
    with tf.name_scope(name, 'l2_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        return tf.sqrt(tf.maximum(l2_distance_sq(t1, t2, scope), epsilon))
l2 distance between t1 and t2; caps the gradient of the square root.
8,040
def l1_distance(t1, t2, name=None):
    with tf.name_scope(name, 'l1_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        sub = tf.subtract(t1, t2)
        reduction_dim = _last_index(sub, 1)
        return tf.reduce_sum(tf.abs(sub), reduction_dim, name=scope)
l1 distance between t1 and t2.
8,041
def leaky_relu(x, name=None):
    with tf.name_scope(name, 'leaky_relu', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        return tf.where(tf.less(x, 0.0), 0.01 * x, x, name=scope)
Creates a leaky_relu.
8,042
def softplus(x, scale=1.0, name=None):
    if scale == 1:
        return tf.nn.softplus(x)
    else:
        with tf.name_scope(name, 'softplus', [x]):
            scale = tf.convert_to_tensor(scale, dtype=x.dtype.base_dtype)
            return tf.nn.softplus(x * scale) / scale
Computes softplus with a scale factor to sharpen the hinge.
8,043
def l1_normalize(x, dim, epsilon=1e-12, name=None):
    with tf.name_scope(name, 'l1_normalize', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        x = tf.verify_tensor_all_finite(x, 'Error at input %s' % scope)
        x_norm = tf.maximum(
            tf.reduce_sum(tf.abs(x), [dim], keep_dims=True), epsilon)
        return tf.div(x, x_norm, name=scope)
l1 normalizes x.
8,044
def every_other(x, name=None):
    with tf.name_scope(name, 'every_other', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        return tf.reshape(
            tf.slice(tf.reshape(x, [-1, 2]), [0, 0], [-1, 1]), [-1], name=scope)
Drops every other value from the tensor and returns a 1D tensor.
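A quick behavioral sketch, assuming TF1-style graph execution:

import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0, 4.0])
with tf.Session() as sess:
    print(sess.run(every_other(x)))  # [1. 3.]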
8,045
def dot_product(t1, t2, keep_dims=False, name=None, reduction_dim=None):
    with tf.name_scope(name, 'dot', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        mul = tf.multiply(t1, t2)
        if not reduction_dim:
            reduction_dim = _last_index(mul, 1)
        return tf.reduce_sum(mul, reduction_dim, name=scope, keep_dims=keep_dims)
Computes the dot product of t1 and t2.
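A small numeric check, again assuming a TF1-style session:

import tensorflow as tf

t1 = tf.constant([1.0, 2.0])
t2 = tf.constant([3.0, 4.0])
with tf.Session() as sess:
    print(sess.run(dot_product(t1, t2)))  # 1*3 + 2*4 = 11.0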
8,046
def length_squared(x, keep_dims=False, name=None, reduction_dim=None):
    with tf.name_scope(name, 'length_squared', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        if not reduction_dim:
            reduction_dim = _last_index(x, 1)
        return tf.reduce_sum(tf.square(x), reduction_dim,
                             keep_dims=keep_dims, name=scope)
Computes the squared length of x.
8,047
def unzip(x, split_dim, current_length, num_splits=2, name=None):
    with tf.name_scope(name, 'unzip', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        all_splits = tf.split(value=x, num_or_size_splits=current_length,
                              axis=split_dim, name=scope)
        splits = [[] for _ in xrange(num_splits)]
        for i in xrange(current_length):
            splits[i % num_splits].append(all_splits[i])
        return [tf.concat(s, split_dim) for s in splits]
Splits a tensor by unzipping along the split_dim.
8,048
def _last_index(x, default_dim):
    if x.get_shape().ndims is not None:
        return len(x.get_shape()) - 1
    else:
        return default_dim
Returns the last dimension's index, or default_dim if x has no shape.
8,049
def _all_dims(x, default_dims=None):
    if x.get_shape().ndims is not None:
        return list(xrange(x.get_shape().ndims))
    else:
        return default_dims
Returns a list of dims in x, or default_dims if the rank is unknown.
8,050
def he_init(n_inputs, n_outputs, activation_fn, uniform=True):
    def in_relu_family(activation_fn):
        if isinstance(activation_fn, collections.Sequence):
            activation_fn = activation_fn[0]
        return activation_fn in (tf.nn.relu, tf.nn.relu6)

    if in_relu_family(activation_fn):
        stddev = math.sqrt(2.0 / n_inputs)
        return tf.random_normal_initializer(stddev=stddev)
    else:
        return xavier_init(n_inputs, n_outputs, uniform)
Sets the parameter initialization using the method described by He et al., falling back to Xavier initialization for non-ReLU activations.
8,051
def xavier_init(n_inputs, n_outputs, uniform=True):
    if uniform:
        init_range = math.sqrt(6.0 / (n_inputs + n_outputs))
        return tf.random_uniform_initializer(-init_range, init_range)
    else:
        stddev = math.sqrt(3.0 / (n_inputs + n_outputs))
        return tf.truncated_normal_initializer(stddev=stddev)
Sets the parameter initialization using the method described by Glorot and Bengio (Xavier initialization).
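A worked example of the uniform range (plain arithmetic, no graph required):

import math

n_inputs, n_outputs = 300, 100
init_range = math.sqrt(6.0 / (n_inputs + n_outputs))  # sqrt(6/400) ~= 0.1225
# xavier_init(300, 100) then draws weights uniformly from
# [-init_range, init_range] via tf.random_uniform_initializer.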
8,052
def spatial_slice_zeros(x):
    return tf.cast(tf.reduce_all(tf.less_equal(x, 0.0), [0, 1, 2]), tf.float32)
Experimental summary that shows how many planes are unused for a batch.
8,053
def _pool(input_layer, pool_fn, kernel, stride, edges, name):
    input_layer.get_shape().assert_has_rank(4)
    if input_layer.get_shape().ndims not in (None, 4):
        raise ValueError('Pooling requires a rank 4 tensor: %s'
                         % input_layer.get_shape())
    kernel = _kernel(kernel)
    stride = _stride(stride)
    size = [1, kernel[0], kernel[1], 1]
    new_head = pool_fn(input_layer.tensor, size, stride, edges, name=name)
    return input_layer.with_tensor(new_head)
Applies a pooling function.
8,054
def average_pool(input_layer, kernel, stride, edges=PAD_SAME, name=PROVIDED):
    return _pool(input_layer, tf.nn.avg_pool, kernel, stride, edges, name)
Performs average pooling.
8,055
def max_pool(input_layer, kernel, stride, edges=PAD_SAME, name=PROVIDED):
    return _pool(input_layer, tf.nn.max_pool, kernel, stride, edges, name)
Performs max pooling.
8,056
def bilinear_sampling(input_layer, x, y, name=PROVIDED):
    input_layer.get_shape().assert_has_rank(4)
    return _interpolate(im=input_layer, x=x, y=y, name=name)
Performs bilinear sampling; the input must be a rank-4 Tensor.
8,057
def _kernel(kernel_spec):
    if isinstance(kernel_spec, tf.compat.integral_types):
        return [kernel_spec, kernel_spec]
    elif len(kernel_spec) == 1:
        return [kernel_spec[0], kernel_spec[0]]
    else:
        assert len(kernel_spec) == 2
        return kernel_spec
Expands the kernel spec into a length 2 list.
8,058
def _stride(stride_spec):
    if stride_spec is None:
        return [1, 1, 1, 1]
    elif isinstance(stride_spec, tf.compat.integral_types):
        return [1, stride_spec, stride_spec, 1]
    elif len(stride_spec) == 1:
        return [1, stride_spec[0], stride_spec[0], 1]
    elif len(stride_spec) == 2:
        return [1, stride_spec[0], stride_spec[1], 1]
    else:
        assert len(stride_spec) == 4
        return stride_spec
Expands the stride spec into a length 4 list.
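Example expansions for both spec helpers (assuming they are in scope):

_kernel(3)       # [3, 3]
_kernel([5])     # [5, 5]
_stride(None)    # [1, 1, 1, 1]
_stride(2)       # [1, 2, 2, 1]
_stride((2, 1))  # [1, 2, 1, 1]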
8,059
def _set_shape_on_tensor(tensor, shape):
    if shape is not None:
        try:
            tensor.set_shape(shape)
        except ValueError:
            raise ValueError(
                "Requested shape does not match tensor's shape: %s vs %s"
                % (shape, tensor.get_shape()))
    elif tensor.get_shape().ndims is None:
        raise ValueError('Unknown shape on tensor: %s' % tensor)
Convenience to set a shape or check it.
8,060
def unwrap(tensor):
    while isinstance(tensor, (PrettyTensor, Loss)):
        tensor = tensor.tensor
    return tensor
Returns the underlying tensor if tensor is wrapped; otherwise returns tensor.
8,061
def wrap(tensor, books=None, tensor_shape=None):
    if books is None:
        books = bookkeeper.for_default_graph()
    if isinstance(tensor, PrettyTensor):
        return tensor.as_layer()
    elif isinstance(tensor, UnboundVariable):
        def set_input_from_unbound_var(data):
            if data is not None:
                return wrap(data, books)
            else:
                return None
        return _DeferredLayer(books, set_input_from_unbound_var, [tensor], {})
    else:
        tensor = tf.convert_to_tensor(tensor, name='input')
        if tensor_shape:
            _set_shape_on_tensor(tensor, tensor_shape)
        return Layer(books, tensor=tensor, name=tensor.name)
Creates an input layer representing the given tensor.
8,062
def template(key, books=None, optional=False):
    if books is None:
        books = bookkeeper.for_default_graph()

    def set_input_from_unbound_var(data):
        if data is not None:
            return wrap(data, books)
        else:
            return None

    if optional:
        data = UnboundVariable(key=key, default=None)
    else:
        data = UnboundVariable(key=key)
    return _DeferredLayer(books, set_input_from_unbound_var, [data], {})
Starts a Pretty Tensor graph template.
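A minimal usage sketch; fully_connected is a standard Pretty Tensor method, but this particular chain and the key names are illustrative:

import tensorflow as tf
import prettytensor as pt

# Build a reusable template with an unbound 'stimulus' input.
net_template = pt.template('stimulus').fully_connected(64).fully_connected(10)

# Bind the unbound variable to a concrete tensor to construct the graph.
images = tf.placeholder(tf.float32, [None, 784])
net = net_template.construct(stimulus=images)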
8,063
def wrap_sequence(sequence, books=None, tensor_shape=None):
    if books is None:
        books = bookkeeper.for_default_graph()
    my_sequence = [wrap(t, books=books, tensor_shape=tensor_shape)
                   for t in sequence]
    return Layer(books, sequence=my_sequence, name=my_sequence[0].name)
Creates an input layer representing the given sequence of tensors.
8,064
@contextlib.contextmanager
def defaults_scope(**kwargs):
    _assert_value_not_string('summary_collections', kwargs)
    _assert_value_not_string('variable_collections', kwargs)
    _check_defaults(kwargs)
    global _defaults
    old_defaults = _defaults
    _defaults = chain_dict.ChainDict(_defaults)
    _defaults.update(kwargs)
    books = bookkeeper.for_default_graph()
    if 'summary_collections' in _defaults:
        books.summary_collections = _defaults['summary_collections']
    else:
        books.reset_summary_collections()
    try:
        yield _defaults
    finally:
        _defaults = old_defaults
Creates a scope for the defaults that are used in a with block.
8,065
def join_pretty_tensors(tensors, output, join_function=None, name='join'):
    if not tensors:
        raise ValueError('pretty_tensors must be a non-empty sequence.')
    with output.g.name_scope(name):
        if join_function is None:
            last_dim = len(tensors[0].shape) - 1
            return output.with_tensor(tf.concat(tensors, last_dim))
        else:
            return output.with_tensor(join_function(tensors))
Joins the list of pretty_tensors and sets head of output_pretty_tensor.
8,066
def _merge_unbound_var_dicts(src, dst):
    for k, v in six.iteritems(src):
        if dst.get(k, v) != v:
            trace1 = ''.join(scopes.skip_common_stack_elements(
                v.stacktrace, dst[k].stacktrace))
            trace2 = ''.join(scopes.skip_common_stack_elements(
                dst[k].stacktrace, v.stacktrace))
            raise ValueError('Key conflict: %s\nDefined At:\n%s\nand\n%s'
                             % (k, trace1, trace2))
        else:
            dst[k] = v
Merges src into dst and throws an exception if a value is incompatible.
8,067
def _assign_values_to_unbound_vars(unbound_vars, unbound_var_values):
    context = {}
    for key, value in six.iteritems(unbound_var_values):
        if key not in unbound_vars:
            raise ValueError('unexpected key: %s. Legal values are: %s'
                             % (key, list(six.iterkeys(unbound_vars))))
        context[unbound_vars[key]] = value
    unspecified = []
    for unbound_var in six.itervalues(unbound_vars):
        if unbound_var not in context:
            if unbound_var.has_default():
                context[unbound_var] = unbound_var.default
            else:
                unspecified.append(unbound_var.key)
    if unspecified:
        raise ValueError('Unspecified keys: %s' % unspecified)
    return context
Assigns values to the vars and raises ValueError if one is missing.
8,068
def construct_all(templates, **unbound_var_values):
    def _merge_dicts(src, dst):
        for k, v in six.iteritems(src):
            if dst.get(k, v) != v:
                raise ValueError('Conflicting values bound for %s: %s and %s'
                                 % (k, v, dst[k]))
            else:
                dst[k] = v

    all_unbound_vars = {}
    context = {}
    for x in templates:
        if isinstance(x, _DeferredLayer):
            _merge_unbound_var_dicts(x.unbound_vars, all_unbound_vars)
            _merge_dicts(x._partial_context, context)
        else:
            raise TypeError('Unexpected type: %s' % type(x))
    _merge_dicts(
        _assign_values_to_unbound_vars(all_unbound_vars, unbound_var_values),
        context)
    result = list(templates)
    for i, x in enumerate(result):
        if isinstance(x, _DeferredLayer):
            result[i] = x._construct(context)
    return result
Constructs all the given templates in a single pass without redundancy.
8,069
def _strip_unnecessary_contents_from_stack(result, processed):
    if isinstance(result, (PrettyTensor, Loss)):
        if result.is_sequence():
            for tensor in result.sequence:
                _strip_unnecessary_contents_from_stack(tensor, processed)
            return
        else:
            result = result.tensor
    if hasattr(result, 'op'):
        result = result.op
    if result in processed:
        return
    else:
        processed.add(result)
    trace = []
    found = False
    for f, line_no, method, _ in result._traceback:
        if (method in ('_replace_deferred', '_construct')
                and f.endswith('pretty_tensor_class.py')):
            found = True
            continue
        trace.append((f, line_no, method, {}))
    result._traceback = trace
    if not found:
        return
    for inp in result.inputs:
        _strip_unnecessary_contents_from_stack(inp, processed)
Remove the distracting lines from the stored tracebacks.
8,070
def _gen_ipython_string(func, args, defaults, original_doc):
    magic_string = '%s(' % func.__name__
    if defaults:
        default_offset = len(args) - len(defaults)
    else:
        default_offset = len(args)
    for i, value in enumerate(args):
        if i >= default_offset:
            magic_string += '%s=%s, ' % (value, defaults[i - default_offset])
        else:
            magic_string += '%s, ' % value
    if args:
        magic_string = magic_string[:-2]
    magic_string += ')\n\n'
    if original_doc is not None:
        magic_string += original_doc
    return magic_string
Provides an auto-complete hint to ipython.
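A small sketch of the generated hint string (the sample function is hypothetical):

def sample(a, b, c=1, d='x'):
    """Original docs."""

print(_gen_ipython_string(sample, ['a', 'b', 'c', 'd'], [1, 'x'], sample.__doc__))
# sample(a, b, c=1, d=x)
#
# Original docs.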
8,071
def _should_defer(input_layer, args, kwargs):
    for arg in itertools.chain([input_layer], args, six.itervalues(kwargs)):
        if isinstance(arg, (_DeferredLayer, UnboundVariable)):
            return True
        elif (isinstance(arg, collections.Sequence)
              and not isinstance(arg, six.string_types)):
            if _should_defer(None, arg, {}):
                return True
        elif isinstance(arg, collections.Mapping):
            if _should_defer(None, (), arg):
                return True
    return False
Checks to see if any of the args are templates.
8,072
@contextlib.contextmanager
def _method_scope(input_layer, name):
    global _in_method_scope
    with input_layer.g.as_default(), \
         scopes.var_and_name_scope(
             None if _in_method_scope else input_layer._scope), \
         scopes.var_and_name_scope((name, None)) as (scope, var_scope):
        was_in_method_scope = _in_method_scope
        yield scope, var_scope
        _in_method_scope = was_in_method_scope
Creates a nested set of name and id scopes and avoids repeats.
8,073
def _conversion_function(pt_wrapper, dtype=None, name=None, as_ref=False):
    _ = name, as_ref
    t = pt_wrapper.tensor
    if dtype and not dtype.is_compatible_with(t.dtype):
        raise ValueError(
            'Tensor conversion requested dtype %s for Tensor with dtype %s: %r'
            % (dtype, t.dtype, t))
    return t
Allows PrettyTensors and Loss to work as a tensor.
8,074
def bind(self, **bindings):
    found_vars = set()
    result = []
    for layer in self.flatten():
        if isinstance(layer, _DeferredLayer):
            var_keys = {var.key for var in six.itervalues(layer.unbound_vars)}
            layers_bindings = {k: v for k, v in six.iteritems(bindings)
                               if k in var_keys}
            result.append(layer.bind(**layers_bindings))
            found_vars.update(six.iterkeys(layers_bindings))
        else:
            result.append(layer)
    missing_vars = set(six.iterkeys(bindings)) - found_vars
    if missing_vars:
        raise ValueError('Unused bindings: %s' % missing_vars)
    return self.__class__(*result)
Makes the bindings to each item in this and returns a new tuple.
8,075
def as_fn(self, *binding_order):
    if len(binding_order) != len(self.unbound_vars):
        raise ValueError('All vars must be specified.')
    for arg in binding_order:
        if arg not in self.unbound_vars:
            raise ValueError('Unknown binding: %s' % arg)

    def func(*args, **kwargs):
        if len(binding_order) != len(args):
            raise ValueError('Missing values, expects: %s' % binding_order)
        values = dict(zip(binding_order, args))
        values.update(kwargs)
        return self.construct(**values)

    func.__doc__ = _gen_ipython_string(func, binding_order, [], func.__doc__)
    return func
Creates a function by binding the arguments in the given order.
8,076
def _method_complete(self, result):
    if isinstance(result, (PrettyTensor, Loss, PrettyTensorTupleMixin)):
        return result
    elif (isinstance(result, collections.Sequence)
          and not isinstance(result, six.string_types)):
        return self.with_sequence(result)
    else:
        return self.with_tensor(result)
Called after a registered method with the result.
8,077
def add_loss(self, loss, name=None):
    self.bookkeeper.add_loss(loss, name=name)
    return Loss(self.bookkeeper, tensor=loss, name=name)
Adds a loss and returns a wrapper for that loss.
8,078
def _replace_args_with_defaults(self, _args=None, **kwargs):
    if _args is None:
        _args = six.iterkeys(kwargs)
    my_defaults = self.defaults
    for k in _args:
        if k not in kwargs:
            if k in my_defaults:
                kwargs[k] = my_defaults[k]
            elif k in _defaults:
                kwargs[k] = _defaults[k]
    return kwargs
Internal method to fill absent values in the kwargs with the defaults.
8,079
def attach_template(self, _template, _key, **unbound_var_values):
    if _key in unbound_var_values:
        raise ValueError('%s specified twice.' % _key)
    unbound_var_values[_key] = self
    return _template.as_layer().construct(**unbound_var_values)
Attaches the template to this such that _key = this layer.
8,080
def _as_graph_element(self):
    if self.is_sequence():
        raise TypeError('A Pretty Tensor that holds a sequence cannot be '
                        'represented as a graph element.')
    else:
        obj = self.tensor
        conv_fn = getattr(obj, '_as_graph_element', None)
        if conv_fn and isinstance(conv_fn, collections.Callable):
            obj = conv_fn()
        return obj
Returns the underlying graph element if possible.
8,081
def mark_as_required(self):
    if self not in tf.get_collection(bookkeeper.GraphKeys.MARKED_LOSSES):
        tf.add_to_collection(bookkeeper.GraphKeys.MARKED_LOSSES, self)
Adds this loss to the MARKED_LOSSES collection.
8,082
def _construct(self, context):
    with self.g.as_default():
        if self._pass_through:
            return self._pass_through._construct(context)
        current_value = context.get(self, None)
        assert current_value is not _unspecified, 'Circular dependency'
        if current_value is not None:
            return current_value
        context[self] = _unspecified
        method_args = self._replace_deferred(self._method_args, context)
        method_kwargs = self._replace_deferred(self._method_kwargs, context)
        result = self._method(*method_args, **method_kwargs)
        _strip_unnecessary_contents_from_stack(result, set())
        context[self] = result
        return result
Constructs this by calling the deferred method.
8,083
def bind(self, **bindings):
    new_context = dict(self._partial_context)
    unknown_keys = []
    for k, v in six.iteritems(bindings):
        if k not in self._unbound_vars:
            unknown_keys.append(k)
        else:
            new_context[self._unbound_vars[k]] = v
    if unknown_keys:
        raise ValueError(
            'The following keys are not associated with any unbound vars: %s, '
            'legal values are %s'
            % (unknown_keys, list(self._unbound_vars.keys())))
    return _DeferredLayer(self.bookkeeper, None, (), {},
                          scope=self._scope,
                          defaults=self._defaults,
                          pass_through=self,
                          partial_context=new_context)
Creates a new template with the given unbound variables bound.
8,084
def attach_template(self, _template, _key, **unbound_var_values):
    if _key in unbound_var_values:
        raise ValueError('%s specified twice.' % _key)
    unbound_var_values[_key] = self
    return _DeferredLayer(self.bookkeeper,
                          _template.as_layer().construct,
                          [],
                          unbound_var_values,
                          scope=self._scope,
                          defaults=self._defaults,
                          partial_context=self._partial_context)
Attaches the template to this, with _key supplied by this layer.
8,085
def _method_complete(self, result):
    if isinstance(result, PrettyTensor):
        self._head = result
        return self
    elif isinstance(result, Loss):
        return result
    elif isinstance(result, PrettyTensorTupleMixin):
        self._head = result[0]
        return result
    else:
        self._head = self._head.with_tensor(result)
        return self
Called after an extension method with the result.
8,086
def subdivide_with(self, branches, join_function, name='mixed'):
    return _subdivide_context(self, branches, join_function, name)
Branches this pretty tensor and uses an explicit join function.
8,087
def variable(self, var_name, shape, init, dt=tf.float32, train=None):
    dt = tf.as_dtype(dt).base_dtype
    if var_name in self.vars:
        v = self.vars[var_name]
        if v.get_shape() != shape:
            raise ValueError(
                'Shape mismatch: %s vs %s. Perhaps a UnboundVariable had '
                'incompatible values within a graph.' % (v.get_shape(), shape))
        return v
    elif callable(init):
        if train is None:
            train = _defaults.get('trainable_variables', True)
        variable_collections = _defaults.get('variable_collections', ())
        if tf.GraphKeys.GLOBAL_VARIABLES not in variable_collections:
            variable_collections = (list(variable_collections)
                                    + [tf.GraphKeys.GLOBAL_VARIABLES])
        v = tf.get_variable(var_name, shape=shape, dtype=dt, initializer=init,
                            trainable=train, collections=variable_collections)
        self.vars[var_name] = v
        return v
    else:
        v = tf.convert_to_tensor(init, name=var_name, dtype=dt)
        v.get_shape().assert_is_compatible_with(shape)
        self.vars[var_name] = v
        return v
Adds a named variable to this bookkeeper or returns an existing one.
8,088
def fill_kwargs(self, input_layer, kwargs):
    return input_layer._replace_args_with_defaults(
        _args=self._assign_defaults, **kwargs)
Applies name_suffix and defaults to kwargs and returns the result.
8,089
def create_deferred(self, func, input_layer, deferred_args, deferred_kwargs, name):
    my_defaults = _defaults

    def _with_method_complete(*args, **kwargs):
        input_layer = args[0]
        with input_layer.g.as_default(), defaults_scope(**my_defaults), \
             tf.name_scope(name):
            return input_layer._method_complete(func(*args, **kwargs))

    full_args = [input_layer]
    full_args.extend(deferred_args)
    partial_context = {}
    if isinstance(input_layer, _DeferredLayer):
        partial_context = input_layer._partial_context
    return _DeferredLayer(input_layer.bookkeeper,
                          scopes.Template(None, _with_method_complete),
                          full_args,
                          deferred_kwargs,
                          scope=input_layer._scope,
                          defaults=input_layer.defaults,
                          partial_context=partial_context)
Creates a deferred node with captured scope.
8,090
def create_method(self, func):
    @functools.wraps(func)
    def method(input_layer, *args, **kwargs):
        return func(input_layer, *args, **self.fill_kwargs(input_layer, kwargs))
    return method
Creates the method.
8,091
def _make_tuple(x):
    if isinstance(x, prettytensor.PrettyTensor):
        if x.is_sequence():
            return tuple(x.sequence)
        else:
            return (x.tensor,)
    elif isinstance(x, tuple):
        return x
    elif (isinstance(x, collections.Sequence)
          and not isinstance(x, six.string_types)):
        return tuple(x)
    else:
        return (x,)
TF has an obnoxious habit of being lenient with single values vs tuples.
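A few normalization examples (assuming _make_tuple is in scope):

_make_tuple((1, 2))  # (1, 2)   -- tuples pass through
_make_tuple([1, 2])  # (1, 2)   -- other sequences are converted
_make_tuple('ab')    # ('ab',)  -- strings count as single values
_make_tuple(42)      # (42,)    -- scalars are wrapped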
8,092
def build_from_queue(cls, input_queue, replay_size, batch_size):
    return cls(lambda: input_queue.dequeue_many(batch_size),
               replay_size,
               batch_size=batch_size)
Builds a ReplayableQueue that draws from a regular input_queue.
8,093
@contextlib.contextmanager
def replay_scope(self, sess):
    current_replay = self.replay(sess)
    try:
        self.set_replay(sess, True)
        yield
    finally:
        self.set_replay(sess, current_replay)
Enters a replay scope that unsets it at the end.
8,094
def set_replay(self, sess, replay):
    sess.run(self._set_replay, {self._set_replay_ph: replay})
Changes the current replay setting on the graph.
8,095
def refill(self, sess):
    sess.run(self._clear_queue)
    while sess.run(self._fill_queue):
        pass
Clears the current queue and then refills it with new data.
8,096
def maybe_download(url, filename):
    if not os.path.exists(WORK_DIRECTORY):
        os.mkdir(WORK_DIRECTORY)
    filepath = os.path.join(WORK_DIRECTORY, filename)
    if not os.path.exists(filepath):
        filepath, _ = request.urlretrieve(url + filename, filepath)
        statinfo = os.stat(filepath)
        print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
    return filepath
Download the data from Yann's website, unless it's already here.
8,097
def permute_data(arrays, random_state=None):
    if any(len(a) != len(arrays[0]) for a in arrays):
        raise ValueError('All arrays must be the same length.')
    if not random_state:
        random_state = np.random
    order = random_state.permutation(len(arrays[0]))
    return [a[order] for a in arrays]
Permute multiple numpy arrays with the same order.
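A short usage sketch with a fixed seed (assuming permute_data is in scope):

import numpy as np

rng = np.random.RandomState(0)
images = np.arange(6).reshape(3, 2)
labels = np.array([0, 1, 2])
shuffled_images, shuffled_labels = permute_data((images, labels), rng)
# Rows of images and entries of labels are reordered with the same permutation.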
8,098
def mnist(training):
    if training:
        data_filename = 'train-images-idx3-ubyte.gz'
        labels_filename = 'train-labels-idx1-ubyte.gz'
        count = 60000
    else:
        data_filename = 't10k-images-idx3-ubyte.gz'
        labels_filename = 't10k-labels-idx1-ubyte.gz'
        count = 10000
    data_filename = maybe_download(MNIST_URL, data_filename)
    labels_filename = maybe_download(MNIST_URL, labels_filename)
    return (mnist_extract_data(data_filename, count),
            mnist_extract_labels(labels_filename, count))
Downloads MNIST and loads it into numpy arrays.
8,099
def shakespeare(chunk_size):
    file_name = maybe_download('http://cs.stanford.edu/people/karpathy/char-rnn/',
                               'shakespear.txt')
    with open(file_name) as f:
        shakespeare_full = f.read()
    # Truncate to a whole number of chunks (integer division for Python 3).
    length = (len(shakespeare_full) // chunk_size) * chunk_size
    if length < len(shakespeare_full):
        shakespeare_full = shakespeare_full[:length]
    arr = np.array([convert_to_int(c) for c in shakespeare_full])[
        0:len(shakespeare_full) // chunk_size * chunk_size]
    return arr.reshape((len(arr) // chunk_size, chunk_size))
Downloads Shakespeare, converts it into ASCII codes, and chunks it.