idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
7,600
def set_result ( self , msg , valid = True , overwrite = False ) : if self . has_result and not overwrite : log . warn ( LOG_CHECK , "Double result %r (previous %r) for %s" , msg , self . result , self ) else : self . has_result = True if not isinstance ( msg , unicode ) : log . warn ( LOG_CHECK , "Non-unicode result f...
Set result string and validity .
7,601
def get_title(self):
    """Return the page title: the last path component of the URL, or the
    whole URL when it contains no slash.  The result is cached in
    self.title."""
    if self.title is None:
        base = self.base_url or self.url or u""
        self.title = base
        if "/" in base:
            tail = base.rsplit("/", 1)[1]
            if tail:
                self.title = tail
    return self.title
Return title of page the URL refers to . This is per default the filename or the URL .
7,602
def _is_ctype ( self , ctype ) : if not self . valid : return False mime = self . content_type return self . ContentMimetypes . get ( mime ) == ctype
Return True iff content is valid and of the given type .
7,603
def add_warning(self, s, tag=None):
    """Add a warning string with an optional tag, skipping duplicates
    and tags listed in the 'ignorewarnings' configuration."""
    entry = (tag, s)
    ignored = self.aggregate.config["ignorewarnings"]
    if entry not in self.warnings and tag not in ignored:
        self.warnings.append(entry)
Add a warning string .
7,604
def add_info(self, s):
    """Record an informational message unless it was already recorded."""
    if s in self.info:
        return
    self.info.append(s)
Add an info string .
7,605
def set_cache_url(self):
    """Set self.cache_url, the URL used as the cache key.

    The anchor/fragment is dropped so that URLs differing only in the
    fragment share one cache entry.
    """
    # urlparts[:4] = (scheme, netloc, path, query); append empty fragment.
    self.cache_url = urlutil.urlunsplit(self.urlparts[:4] + [u''])
    if self.cache_url is not None:
        # Cache keys must be unicode strings (Python 2 codebase).
        assert isinstance(self.cache_url, unicode), repr(self.cache_url)
Set the URL to be used for caching .
7,606
def check_url_warnings ( self ) : effectiveurl = urlutil . urlunsplit ( self . urlparts ) if self . url != effectiveurl : self . add_warning ( _ ( "Effective URL %(url)r." ) % { "url" : effectiveurl } , tag = WARN_URL_EFFECTIVE_URL ) self . url = effectiveurl if len ( self . url ) > URL_MAX_LENGTH and self . scheme != ...
Check URL name and length .
7,607
def build_url ( self ) : base_url , is_idn = url_norm ( self . base_url , self . encoding ) if self . base_ref : if ":" not in self . base_ref : self . base_ref = urljoin ( self . parent_url , self . base_ref ) self . url = urljoin ( self . base_ref , base_url ) elif self . parent_url : urlparts = list ( urlparse . url...
Construct self . url and self . urlparts out of the given base url information self . base_url self . parent_url and self . base_ref .
7,608
def build_url_parts ( self ) : self . userinfo , host = urllib . splituser ( self . urlparts [ 1 ] ) port = urlutil . default_ports . get ( self . scheme , 0 ) host , port = urlutil . splitport ( host , port = port ) if port is None : raise LinkCheckerError ( _ ( "URL host %(host)r has invalid port" ) % { "host" : host...
Set userinfo host port and anchor from self . urlparts . Also checks for obfuscated IP addresses .
7,609
def check_obfuscated_ip(self):
    """Warn if the host of this URL is an obfuscated IP address, and
    replace self.host with the first resolved address."""
    if not iputil.is_obfuscated_ip(self.host):
        return
    resolved = iputil.resolve_host(self.host)
    if resolved:
        first_ip = resolved[0]
        self.host = first_ip
        self.add_warning(
            _("URL %(url)s has obfuscated IP address %(ip)s")
            % {"url": self.base_url, "ip": first_ip},
            tag=WARN_URL_OBFUSCATED_IP)
Warn if host of this URL is obfuscated IP address .
7,610
def check(self):
    """Main check function for checking this URL.

    Enables tracing when configured, runs the local check, and converts
    an EINTR socket/select error into KeyboardInterrupt.
    """
    if self.aggregate.config["trace"]:
        trace.trace_on()
    try:
        self.local_check()
    except (socket.error, select.error):
        etype, value = sys.exc_info()[:2]
        # BUG FIX: the original compared the exception *type* with the
        # integer errno.EINTR (`etype == errno.EINTR`), which is always
        # False, making this branch dead code.  EINTR lives in the
        # exception value's args.
        if value.args and value.args[0] == errno.EINTR:
            raise KeyboardInterrupt(value)
        else:
            raise
Main check function for checking this URL .
7,611
def local_check ( self ) : log . debug ( LOG_CHECK , "Checking %s" , unicode ( self ) ) assert not self . extern [ 1 ] , 'checking strict extern URL' log . debug ( LOG_CHECK , "checking connection" ) try : self . check_connection ( ) self . set_content_type ( ) self . add_size_info ( ) self . aggregate . plugin_manager...
Local check function can be overridden in subclasses .
7,612
def check_content ( self ) : if self . do_check_content and self . valid : try : if self . can_get_content ( ) : self . aggregate . plugin_manager . run_content_plugins ( self ) if self . allows_recursion ( ) : return True except tuple ( ExcList ) : value = self . handle_exception ( ) self . add_warning ( _ ( "could no...
Check content of URL .
7,613
def close_connection(self):
    """Close the opened URL connection, ignoring errors on close, and
    drop the reference so it cannot be reused."""
    conn = self.url_connection
    if conn is None:
        return
    try:
        conn.close()
    except Exception:
        # Best effort: a failing close must not abort the check.
        pass
    self.url_connection = None
Close an opened url connection .
7,614
def handle_exception ( self ) : etype , evalue = sys . exc_info ( ) [ : 2 ] log . debug ( LOG_CHECK , "Error in %s: %s %s" , self . url , etype , evalue , exception = True ) if ( etype in ExcNoCacheList ) or ( etype == socket . error and evalue . args [ 0 ] == errno . EBADF ) or not evalue : self . caching = False errm...
An exception occurred . Log it and set the cache flag .
7,615
def allows_simple_recursion ( self ) : rec_level = self . aggregate . config [ "recursionlevel" ] if rec_level >= 0 and self . recursion_level >= rec_level : log . debug ( LOG_CHECK , "... no, maximum recursion level reached." ) return False if self . extern [ 0 ] : log . debug ( LOG_CHECK , "... no, extern." ) return ...
Check recursion level and extern status .
7,616
def allows_recursion ( self ) : log . debug ( LOG_CHECK , "checking recursion of %r ..." , self . url ) if not self . valid : log . debug ( LOG_CHECK , "... no, invalid." ) return False if not self . can_get_content ( ) : log . debug ( LOG_CHECK , "... no, cannot get content." ) return False if not self . allows_simple...
Return True iff we can recurse into the url s content .
7,617
def read_content ( self ) : buf = StringIO ( ) data = self . read_content_chunk ( ) while data : if buf . tell ( ) + len ( data ) > self . aggregate . config [ "maxfilesizedownload" ] : raise LinkCheckerError ( _ ( "File size too large" ) ) buf . write ( data ) data = self . read_content_chunk ( ) return buf . getvalue...
Return data for this URL . Can be overridden in subclasses .
7,618
def add_url ( self , url , line = 0 , column = 0 , page = 0 , name = u"" , base = None ) : if base : base_ref = urlutil . url_norm ( base ) [ 0 ] else : base_ref = None url_data = get_url_from ( url , self . recursion_level + 1 , self . aggregate , parent_url = self . url , base_ref = base_ref , line = line , column = ...
Add new URL to queue .
7,619
def serialized ( self , sep = os . linesep ) : return unicode_safe ( sep ) . join ( [ u"%s link" % self . scheme , u"base_url=%r" % self . base_url , u"parent_url=%r" % self . parent_url , u"base_ref=%r" % self . base_ref , u"recursion_level=%d" % self . recursion_level , u"url_connection=%s" % self . url_connection , ...
Return serialized url check data as unicode string .
7,620
def add_intern_pattern ( self , url = None ) : try : pat = self . get_intern_pattern ( url = url ) if pat : log . debug ( LOG_CHECK , "Add intern pattern %r" , pat ) self . aggregate . config [ 'internlinks' ] . append ( get_link_pat ( pat ) ) except UnicodeError as msg : res = _ ( "URL has unparsable domain name: %(do...
Add intern URL regex to config .
7,621
def to_wire_dict ( self ) : return dict ( valid = self . valid , extern = self . extern [ 0 ] , result = self . result , warnings = self . warnings [ : ] , name = self . name or u"" , title = self . get_title ( ) , parent_url = self . parent_url or u"" , base_ref = self . base_ref or u"" , base_url = self . base_url or...
Return a simplified transport object for logging and caching .
7,622
def comment(self, s, **args):
    """Write an XML comment: opening marker, the comment text, then the
    closing marker plus a newline."""
    self.write(u"<!-- ")
    self.write(s, **args)
    # BUG FIX: the closing string literal was truncated (u" ) in the
    # original; restore the XML comment terminator.
    self.writeln(u" -->")
Write XML comment .
7,623
def xml_starttag ( self , name , attrs = None ) : self . write ( self . indent * self . level ) self . write ( u"<%s" % xmlquote ( name ) ) if attrs : for name , value in attrs . items ( ) : args = ( xmlquote ( name ) , xmlquoteattr ( value ) ) self . write ( u' %s="%s"' % args ) self . writeln ( u">" ) self . level +=...
Write XML start tag .
7,624
def xml_tag ( self , name , content , attrs = None ) : self . write ( self . indent * self . level ) self . write ( u"<%s" % xmlquote ( name ) ) if attrs : for aname , avalue in attrs . items ( ) : args = ( xmlquote ( aname ) , xmlquoteattr ( avalue ) ) self . write ( u' %s="%s"' % args ) self . writeln ( u">%s</%s>" %...
Write XML tag with content .
7,625
def _escapify(qstring):
    """Escape the characters in a quoted string which need it:
    backslash-escape members of __escaped, keep printable ASCII, and
    emit \\DDD decimal escapes for everything else."""
    chunks = []
    for ch in qstring:
        code = ord(ch)
        if ch in __escaped:
            chunks.append('\\' + ch)
        elif 0x20 <= code < 0x7F:
            chunks.append(ch)
        else:
            chunks.append('\\%03d' % code)
    return ''.join(chunks)
Escape the characters in a quoted string which need it .
7,626
def _truncate_bitmap(what):
    """Return the bitmap truncated after the greatest byte that is not
    all zeros.

    Scans backwards for the first non-NUL byte and joins everything up
    to and including it.  NOTE(review): if every byte is NUL, the loop
    falls through with i == 0 and a single NUL byte is returned; an
    empty input would leave i unbound and raise NameError -- presumably
    callers never pass an empty bitmap (confirm).
    """
    for i in xrange(len(what) - 1, -1, -1):
        if what[i] != '\x00':
            break
    return ''.join(what[0:i + 1])
Determine the index of greatest byte that isn t all zeros and return the bitmap that contains all the bytes less than that index .
7,627
def from_text ( rdclass , rdtype , tok , origin = None , relativize = True ) : if isinstance ( tok , str ) : tok = dns . tokenizer . Tokenizer ( tok ) cls = get_rdata_class ( rdclass , rdtype ) if cls != GenericRdata : token = tok . get ( ) tok . unget ( token ) if token . is_identifier ( ) and token . value == r'\#' :...
Build an rdata object from text format .
7,628
def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None):
    """Build an rdata object from wire format by dispatching to the
    rdata class registered for (rdclass, rdtype)."""
    rdata_cls = get_rdata_class(rdclass, rdtype)
    wrapped = dns.wiredata.maybe_wrap(wire)
    return rdata_cls.from_wire(rdclass, rdtype, wrapped, current, rdlen,
                               origin)
Build an rdata object from wire format
7,629
def do_print(self, url_data):
    """Decide whether a URL entry should be logged: always in verbose
    mode, when warning logging is on and the entry has warnings, or
    when the URL is invalid."""
    show = self.verbose or (self.warnings and url_data.warnings)
    return True if show else not url_data.valid
Determine if URL entry should be logged or not .
7,630
def log_url(self, url_data):
    """Forward a new URL entry to every configured logger."""
    self.check_active_loggers()
    should_print = self.do_print(url_data)
    # Loop variable renamed from 'log' to avoid shadowing the module-level
    # logging helper used elsewhere in this file.
    for logger in self.loggers:
        logger.log_filter_url(url_data, should_print)
Send new url to all configured loggers .
7,631
def quote_attrval(s):
    """Quote an HTML attribute value for wrapping in double quotes:
    escape '&' and '"', and emit numeric character references for
    non-ASCII characters."""
    out = []
    append = out.append
    for ch in s:
        code = ord(ch)
        if code > 127:
            append(u"&#%d;" % code)
        elif ch == u'&':
            append(u"&amp;")
        elif ch == u'"':
            append(u"&quot;")
        else:
            append(ch)
    return u"".join(out)
Quote a HTML attribute to be able to wrap it in double quotes .
7,632
def comment(self, data):
    """Print an HTML comment wrapping the encoded data."""
    data = data.encode(self.encoding, "ignore")
    # BUG FIX: the format string was truncated ("<!--%s) in the
    # original; restore the comment terminator so the emitted HTML
    # comment is closed.
    self.fd.write("<!--%s-->" % data)
Print HTML comment .
7,633
def _start_element ( self , tag , attrs , end ) : tag = tag . encode ( self . encoding , "ignore" ) self . fd . write ( "<%s" % tag . replace ( "/" , "" ) ) for key , val in attrs . items ( ) : key = key . encode ( self . encoding , "ignore" ) if val is None : self . fd . write ( " %s" % key ) else : val = val . encode...
Print HTML element with end string .
7,634
def end_element(self, tag):
    """Print an HTML end tag for the given element name."""
    encoded = tag.encode(self.encoding, "ignore")
    self.fd.write("</%s>" % encoded)
Print HTML end element .
7,635
def doctype(self, data):
    """Print an HTML document type declaration."""
    encoded = data.encode(self.encoding, "ignore")
    self.fd.write("<!DOCTYPE%s>" % encoded)
Print HTML document type .
7,636
def pi(self, data):
    """Print an HTML processing instruction."""
    encoded = data.encode(self.encoding, "ignore")
    self.fd.write("<?%s?>" % encoded)
Print HTML pi .
7,637
def cdata(self, data):
    """Print an HTML CDATA section."""
    encoded = data.encode(self.encoding, "ignore")
    self.fd.write("<![CDATA[%s]]>" % encoded)
Print HTML cdata .
7,638
def characters(self, data):
    """Print encoded character data verbatim."""
    self.fd.write(data.encode(self.encoding, "ignore"))
Print characters .
7,639
def canonical_clamav_conf():
    """Return the default clamd config path for the current platform."""
    defaults = {
        'posix': "/etc/clamav/clamd.conf",
        'nt': r"c:\clamav-devel\etc\clamd.conf",
    }
    return defaults.get(os.name, "clamd.conf")
Default clamav configs for various platforms .
7,640
def get_clamav_conf(filename):
    """Return a ClamavConfig for filename, or None (with a warning
    logged) when the file does not exist."""
    if not os.path.isfile(filename):
        log.warn(LOG_PLUGIN, "No ClamAV config file found at %r.", filename)
        return None
    return ClamavConfig(filename)
Initialize clamav configuration .
7,641
def get_sockinfo(host, port=None):
    """Return socket.getaddrinfo() results for host/port, restricted to
    IPv4 TCP streams."""
    return socket.getaddrinfo(host, port, socket.AF_INET,
                              socket.SOCK_STREAM)
Return socket . getaddrinfo for given host and port .
7,642
def scan(data, clamconf):
    """Scan data for viruses with clamd; return (infected, errors).

    When the daemon is unreachable, return no infections and a single
    connection error message.
    """
    try:
        scanner = ClamdScanner(clamconf)
    except socket.error:
        return [], [_("Could not connect to ClamAV daemon.")]
    try:
        scanner.scan(data)
    finally:
        scanner.close()
    return scanner.infected, scanner.errors
Scan data for viruses .
7,643
def new_scansock ( self ) : port = None try : self . sock . sendall ( "STREAM" ) port = None for dummy in range ( 60 ) : data = self . sock . recv ( self . sock_rcvbuf ) i = data . find ( "PORT" ) if i != - 1 : port = int ( data [ i + 5 : ] ) break except socket . error : self . sock . close ( ) raise if port is None :...
Return a connected socket for sending scan data to it .
7,644
def close(self):
    """Read scan results from clamd, collecting FOUND/ERROR lines, then
    close both daemon sockets."""
    self.wsock.close()
    while True:
        chunk = self.sock.recv(self.sock_rcvbuf)
        if not chunk:
            break
        if "FOUND\n" in chunk:
            self.infected.append(chunk)
        if "ERROR\n" in chunk:
            self.errors.append(chunk)
    self.sock.close()
Get results and close clamd daemon sockets .
7,645
def parseconf(self, filename):
    """Parse a clamav config file into self (a mapping): 'Key Value'
    lines become entries, bare keys become True; blank lines and '#'
    comments are skipped."""
    with open(filename) as fd:
        for raw in fd:
            stripped = raw.strip()
            if not stripped or stripped.startswith("#"):
                continue
            parts = stripped.split(None, 1)
            self[parts[0]] = parts[1] if len(parts) == 2 else True
Parse clamav configuration from given file .
7,646
def new_connection ( self ) : if self . get ( 'LocalSocket' ) : host = 'localhost' sock = self . create_local_socket ( ) elif self . get ( 'TCPSocket' ) : host = self . get ( 'TCPAddr' , 'localhost' ) sock = self . create_tcp_socket ( host ) else : raise ClamavError ( _ ( "one of TCPSocket or LocalSocket must be enable...
Connect to clamd for stream scanning .
7,647
def create_local_socket(self):
    """Create a unix-domain socket, connect it to the configured
    LocalSocket address, and return it; close it on connect failure."""
    addr = self['LocalSocket']
    sock = create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sock.connect(addr)
    except socket.error:
        sock.close()
        raise
    return sock
Create local socket connect to it and return socket object .
7,648
def create_tcp_socket(self, host):
    """Create a TCP socket connected to host at the configured
    TCPSocket port and return it; close it on connect failure."""
    port = int(self['TCPSocket'])
    addrinfo = get_sockinfo(host, port=port)
    sock = create_socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect(addrinfo[0][4])
    except socket.error:
        sock.close()
        raise
    return sock
Create tcp socket connect to it and return socket object .
7,649
def zonalstats ( features , raster , all_touched , band , categorical , indent , info , nodata , prefix , stats , sequence , use_rs ) : if info : logging . basicConfig ( level = logging . INFO ) if stats is not None : stats = stats . split ( " " ) if 'all' in [ x . lower ( ) for x in stats ] : stats = "ALL" zonal_resul...
zonalstats generates summary statistics of geospatial raster datasets based on vector features .
7,650
def pointquery ( features , raster , band , indent , nodata , interpolate , property_name , sequence , use_rs ) : results = gen_point_query ( features , raster , band = band , nodata = nodata , interpolate = interpolate , property_name = property_name , geojson_out = True ) if sequence : for feature in results : if use...
Queries the raster values at the points of the input GeoJSON Features . The raster values are added to the features properties and output as GeoJSON Feature Collection .
7,651
def point_window_unitxy(x, y, affine):
    """Return a 2x2 rasterio-style window whose cell centers surround
    (x, y), plus the point's coordinates on the unit square spanned by
    those centers."""
    fcol, frow = ~affine * (x, y)
    row, col = int(round(frow)), int(round(fcol))
    window = ((row - 1, row + 1), (col - 1, col + 1))
    unit_xy = (0.5 - (col - fcol), 0.5 + (row - frow))
    return window, unit_xy
Given an x y and a geotransform Returns - rasterio window representing 2x2 window whose center points encompass point - the cartesian x y coordinates of the point on the unit square defined by the array center points .
7,652
def geom_xys(geom):
    """Yield all vertices of a shapely geometry as flat (x, y) tuples,
    stripping any z dimension first."""
    if geom.has_z:
        # Round-trip through 2D WKT to drop the z coordinate.
        geom = wkt.loads(geom.to_wkt())
        assert not geom.has_z
    parts = geom.geoms if hasattr(geom, "geoms") else [geom]
    for part in parts:
        coords = part.array_interface_base['data']
        for xy in zip(coords[::2], coords[1::2]):
            yield xy
Given a shapely geometry generate a flattened series of 2D points as x y tuples
7,653
def gen_point_query ( vectors , raster , band = 1 , layer = 0 , nodata = None , affine = None , interpolate = 'bilinear' , property_name = 'value' , geojson_out = False ) : if interpolate not in [ 'nearest' , 'bilinear' ] : raise ValueError ( "interpolate must be nearest or bilinear" ) features_iter = read_features ( v...
Given a set of vector features and a raster generate raster values at each vertex of the geometry
7,654
def key_assoc_val(d, func, exclude=None):
    """Return the key of d whose value equals func(values).

    Note: `exclude` is accepted for API compatibility but is unused in
    this implementation.
    """
    values = list(d.values())
    keys = list(d.keys())
    return keys[values.index(func(values))]
return the key associated with the value returned by func
7,655
def boxify_points ( geom , rast ) : if 'Point' not in geom . type : raise ValueError ( "Points or multipoints only" ) buff = - 0.01 * abs ( min ( rast . affine . a , rast . affine . e ) ) if geom . type == 'Point' : pts = [ geom ] elif geom . type == "MultiPoint" : pts = geom . geoms geoms = [ ] for pt in pts : row , c...
Point and MultiPoint don t play well with GDALRasterize convert them into box polygons 99% cellsize centered on the raster cell
7,656
def parse_feature ( obj ) : if hasattr ( obj , '__geo_interface__' ) : gi = obj . __geo_interface__ if gi [ 'type' ] in geom_types : return wrap_geom ( gi ) elif gi [ 'type' ] == 'Feature' : return gi try : shape = wkt . loads ( obj ) return wrap_geom ( shape . __geo_interface__ ) except ( ReadingError , TypeError , At...
Given a Python object, attempt to construct a GeoJSON-like Feature from it.
7,657
def bounds_window(bounds, affine):
    """Return a full-cover rasterio-style window ((row_start, row_stop),
    (col_start, col_stop)) for bounds = (w, s, e, n) under affine."""
    west, south, east, north = bounds
    row_start, col_start = rowcol(west, north, affine)
    # Round the far corner outward so the window fully covers the bounds.
    row_stop, col_stop = rowcol(east, south, affine, op=math.ceil)
    return (row_start, row_stop), (col_start, col_stop)
Create a full cover rasterio - style window
7,658
def read ( self , bounds = None , window = None , masked = False ) : if bounds and window : raise ValueError ( "Specify either bounds or window" ) if bounds : win = bounds_window ( bounds , self . affine ) elif window : win = window else : raise ValueError ( "Specify either bounds or window" ) c , _ , _ , f = window_bo...
Performs a boundless read against the underlying array source
7,659
def chunks(data, n):
    """Yield successive n-sized chunks from a sliceable sequence."""
    start = 0
    while start < len(data):
        yield data[start:start + n]
        start += n
Yield successive n - sized chunks from a slice - able iterable .
7,660
def halt(self):
    """Switch the tag state to HALT and drop authentication."""
    buf = []
    buf.append(self.act_end)  # HALT command byte
    buf.append(0)
    # NOTE(review): the CRC is computed here but never appended to buf
    # before the transceive, unlike write() in this file which appends
    # crc[0]/crc[1] -- presumably the HALT frame should carry the CRC
    # bytes; confirm against the reader's datasheet.
    crc = self.calculate_crc(buf)
    self.clear_bitmask(0x08, 0x80)
    self.card_write(self.mode_transrec, buf)
    self.clear_bitmask(0x08, 0x08)
    self.authed = False
Switch state to HALT
7,661
def write ( self , block_address , data ) : buf = [ ] buf . append ( self . act_write ) buf . append ( block_address ) crc = self . calculate_crc ( buf ) buf . append ( crc [ 0 ] ) buf . append ( crc [ 1 ] ) ( error , back_data , back_length ) = self . card_write ( self . mode_transrec , buf ) if not ( back_length == 4...
Writes data to block . You should be authenticated before calling write . Returns error state .
7,662
def auth(self, auth_method, key):
    """Store the authentication method and key to use for the current
    tag; this does not itself talk to the tag."""
    self.method = auth_method
    self.key = key
    if self.debug:
        method_name = "A" if auth_method == self.rfid.auth_a else "B"
        print("Changing used auth key to " + str(key) + " using method "
              + method_name)
Sets authentication info for current tag
7,663
def write_trailer ( self , sector , key_a = ( 0xFF , 0xFF , 0xFF , 0xFF , 0xFF , 0xFF ) , auth_bits = ( 0xFF , 0x07 , 0x80 ) , user_data = 0x69 , key_b = ( 0xFF , 0xFF , 0xFF , 0xFF , 0xFF , 0xFF ) ) : addr = self . block_addr ( sector , 3 ) return self . rewrite ( addr , key_a [ : 6 ] + auth_bits [ : 3 ] + ( user_data...
Writes sector trailer of specified sector . Tag and auth must be set - does auth . If value is None value of byte is kept . Returns error state .
7,664
def rewrite ( self , block_address , new_bytes ) : if not self . is_tag_set_auth ( ) : return True error = self . do_auth ( block_address ) if not error : ( error , data ) = self . rfid . read ( block_address ) if not error : for i in range ( len ( new_bytes ) ) : if new_bytes [ i ] != None : if self . debug : print ( ...
Rewrites block with new bytes keeping the old ones if None is passed . Tag and auth must be set - does auth . Returns error state .
7,665
def get_access_bits ( self , c1 , c2 , c3 ) : byte_6 = ( ( ~ c2 [ 3 ] & 1 ) << 7 ) + ( ( ~ c2 [ 2 ] & 1 ) << 6 ) + ( ( ~ c2 [ 1 ] & 1 ) << 5 ) + ( ( ~ c2 [ 0 ] & 1 ) << 4 ) + ( ( ~ c1 [ 3 ] & 1 ) << 3 ) + ( ( ~ c1 [ 2 ] & 1 ) << 2 ) + ( ( ~ c1 [ 1 ] & 1 ) << 1 ) + ( ~ c1 [ 0 ] & 1 ) byte_7 = ( ( c1 [ 3 ] & 1 ) << 7 ) +...
Calculates the access bits for a sector trailer based on their access conditions c1 c2 c3 c4 are 4 items tuples containing the values for each block returns the 3 bytes for the sector trailer
7,666
def get_data(n_samples=100):
    """Return a synthetic float32 classification dataset (X, y) with
    n_samples rows, using the module-level N_FEATURES / N_CLASSES."""
    X, y = make_classification(
        n_samples=n_samples,
        n_features=N_FEATURES,
        n_classes=N_CLASSES,
        random_state=0,
    )
    return X.astype(np.float32), y
Get synthetic classification data with n_samples samples .
7,667
def get_model(with_pipeline=False):
    """Return a multi-layer perceptron classifier, optionally wrapped in
    a scaling / feature-selection sklearn Pipeline."""
    net = NeuralNetClassifier(MLPClassifier)
    if not with_pipeline:
        return net
    scaler = FeatureUnion([
        ('minmax', MinMaxScaler()),
        ('normalize', Normalizer()),
    ])
    return Pipeline([
        ('scale', scaler),
        ('select', SelectKBest(k=N_FEATURES)),
        ('net', net),
    ])
Get a multi - layer perceptron model .
7,668
def save_model(model, output_file):
    """Pickle model to output_file when a path is given; a falsy path is
    a no-op."""
    if output_file:
        with open(output_file, 'wb') as f:
            pickle.dump(model, f)
        print("Saved model to file '{}'.".format(output_file))
Save model to output_file if given
7,669
def net ( n_samples = 100 , output_file = None , ** kwargs ) : model = get_model ( with_pipeline = False ) parsed = parse_args ( kwargs , defaults = DEFAULTS_NET ) model = parsed ( model ) X , y = get_data ( n_samples = n_samples ) print ( "Training MLP classifier" ) model . fit ( X , y ) save_model ( model , output_fi...
Train an MLP classifier on synthetic data .
7,670
def pipeline ( n_samples = 100 , output_file = None , ** kwargs ) : model = get_model ( with_pipeline = True ) parsed = parse_args ( kwargs , defaults = DEFAULTS_PIPE ) model = parsed ( model ) X , y = get_data ( n_samples = n_samples ) print ( "Training MLP classifier in a pipeline" ) model . fit ( X , y ) save_model ...
Train an MLP classifier in a pipeline on synthetic data .
7,671
def predict(self, X):
    """Return predicted class labels for X by taking the argmax over
    the last axis of each forward batch."""
    batch_preds = []
    for out in self.forward_iter(X, training=False):
        logits = out[0] if isinstance(out, tuple) else out
        batch_preds.append(to_numpy(logits.max(-1)[-1]))
    return np.concatenate(batch_preds, 0)
Where applicable return class labels for samples in X .
7,672
def fit(self, X, y, **fit_params):
    """Fit the classifier; see NeuralNet.fit.

    Overridden only to re-expose the parent's signature/behavior.
    """
    return super().fit(X, y, **fit_params)
See NeuralNet . fit .
7,673
def predict_proba ( self , X ) : y_probas = [ ] bce_logits_loss = isinstance ( self . criterion_ , torch . nn . BCEWithLogitsLoss ) for yp in self . forward_iter ( X , training = False ) : yp = yp [ 0 ] if isinstance ( yp , tuple ) else yp if bce_logits_loss : yp = torch . sigmoid ( yp ) y_probas . append ( to_numpy ( ...
Where applicable return probability estimates for samples .
7,674
def _apply_to_data ( data , func , unpack_dict = False ) : apply_ = partial ( _apply_to_data , func = func , unpack_dict = unpack_dict ) if isinstance ( data , dict ) : if unpack_dict : return [ apply_ ( v ) for v in data . values ( ) ] return { k : apply_ ( v ) for k , v in data . items ( ) } if isinstance ( data , ( ...
Apply a function to data trying to unpack different data types .
7,675
def uses_placeholder_y(ds):
    """Return True when ds is a skorch Dataset (possibly nested inside a
    torch Subset) that uses None as a placeholder for y."""
    if isinstance(ds, torch.utils.data.Subset):
        # Unwrap nested Subsets recursively.
        return uses_placeholder_y(ds.dataset)
    return isinstance(ds, Dataset) and hasattr(ds, "y") and ds.y is None
If ds is a skorch . dataset . Dataset or a skorch . dataset . Dataset nested inside a torch . utils . data . Subset and uses y as a placeholder return True .
7,676
def unpack_data(data):
    """Unpack data returned by the net's iterator into an (X, y)
    2-tuple, raising a descriptive ValueError for wrong-sized input."""
    try:
        X, y = data
    except ValueError:
        if not isinstance(data, (tuple, list)) or len(data) < 2:
            raise ValueError(ERROR_MSG_1_ITEM)
        raise ValueError(ERROR_MSG_MORE_THAN_2_ITEMS.format(len(data)))
    return X, y
Unpack data returned by the net s iterator into a 2 - tuple .
7,677
def transform(self, X, y):
    """Additional transformations on X and y: substitute a zero tensor
    for a missing y and densify sparse X."""
    if y is None:
        y = torch.Tensor([0])
    if sparse.issparse(X):
        X = X.toarray().squeeze(0)
    return X, y
Additional transformations on X and y .
7,678
def check_cv(self, y):
    """Resolve which cross-validation strategy to use, depending on
    whether self.cv is a float and whether stratification is wanted."""
    y_arr = None
    if self.stratified:
        try:
            # Try to convert to numpy for sklearn's stratified splitters.
            y_arr = to_numpy(y)
        except (AttributeError, TypeError):
            y_arr = y
    if self._is_float(self.cv):
        return self._check_cv_float()
    return self._check_cv_non_float(y_arr)
Resolve which cross validation strategy is used .
7,679
def _get_span ( s , pattern ) : i , j = - 1 , - 1 match = pattern . match ( s ) if not match : return i , j for group_name in pattern . groupindex : i , j = match . span ( group_name ) if ( i , j ) != ( - 1 , - 1 ) : return i , j return i , j
Return the span of the first group that matches the pattern .
7,680
def _substitute_default ( s , new_value ) : if new_value is None : return s i , j = _get_span ( s , pattern = P_DEFAULTS ) if ( i , j ) == ( - 1 , - 1 ) : return s return '{}{}{}' . format ( s [ : i ] , new_value , s [ j : ] )
Replaces the default value in a parameter docstring by a new value .
7,681
def _resolve_dotted_name ( dotted_name ) : if not isinstance ( dotted_name , str ) : return dotted_name if '.' not in dotted_name : return dotted_name args = None params = None match = P_PARAMS . match ( dotted_name ) if match : dotted_name = match . group ( 'name' ) params = match . group ( 'params' ) module , name = ...
Returns objects from strings
7,682
def parse_net_kwargs(kwargs):
    """Parse estimator arguments: resolve each value's dotted name into
    the object it refers to.  Falsy input is returned unchanged."""
    if not kwargs:
        return kwargs
    return {key: _resolve_dotted_name(val) for key, val in kwargs.items()}
Parse arguments for the estimator .
7,683
def _yield_estimators ( model ) : yield from _yield_preproc_steps ( model ) net_prefixes = [ ] module_prefixes = [ ] if isinstance ( model , Pipeline ) : name = model . steps [ - 1 ] [ 0 ] net_prefixes . append ( name ) module_prefixes . append ( name ) net = model . steps [ - 1 ] [ 1 ] else : net = model yield '__' . ...
Yield estimator and its prefix from the model .
7,684
def _get_help_for_estimator ( prefix , estimator , defaults = None ) : from numpydoc . docscrape import ClassDoc defaults = defaults or { } estimator = _extract_estimator_cls ( estimator ) yield "<{}> options:" . format ( estimator . __name__ ) doc = ClassDoc ( estimator ) yield from _get_help_for_params ( doc [ 'Param...
Yield help lines for the given estimator and prefix .
7,685
def print_help ( model , defaults = None ) : defaults = defaults or { } print ( "This is the help for the model-specific parameters." ) print ( "To invoke help for the remaining options, run:" ) print ( "python {} -- --help" . format ( sys . argv [ 0 ] ) ) print ( ) lines = ( _get_help_for_estimator ( prefix , estimato...
Print help for the command line arguments of the given model .
7,686
def parse_args ( kwargs , defaults = None ) : try : import fire except ImportError : raise ImportError ( "Using skorch cli helpers requires the fire library," " you can install it with pip: pip install fire." ) try : import numpydoc . docscrape except ImportError : raise ImportError ( "Using skorch cli helpers requires...
Apply command line arguments or show help .
7,687
def filter_requires_grad ( pgroups ) : warnings . warn ( "For filtering gradients, please use skorch.callbacks.Freezer." , DeprecationWarning ) for pgroup in pgroups : output = { k : v for k , v in pgroup . items ( ) if k != 'params' } output [ 'params' ] = ( p for p in pgroup [ 'params' ] if p . requires_grad ) yield ...
Returns parameter groups where parameters that don t require a gradient are filtered out .
7,688
def convert_cell_to_img ( t , padding = 16 ) : std = torch . Tensor ( [ 0.229 , 0.224 , 0.225 ] ) . reshape ( - 1 , 1 , 1 ) mu = torch . Tensor ( [ 0.485 , 0.456 , 0.406 ] ) . reshape ( - 1 , 1 , 1 ) output = t . mul ( std ) output . add_ ( mu ) img = to_pil_image ( output ) w , h = img . size return img . crop ( ( pad...
Converts pytorch tensor into a Pillow Image . The padding will be removed from the resulting image
7,689
def plot_mask_cells ( mask_cells , padding = 16 ) : fig , axes = plt . subplots ( len ( mask_cells ) , 3 , figsize = ( 12 , 10 ) ) for idx , ( axes , mask_cell ) in enumerate ( zip ( axes , mask_cells ) , 1 ) : ax1 , ax2 , ax3 = axes true_mask , predicted_mask , cell = mask_cell plot_mask_cell ( true_mask , predicted_m...
Plots cells with their true mask predicted mask .
7,690
def plot_mask_cell ( true_mask , predicted_mask , cell , suffix , ax1 , ax2 , ax3 , padding = 16 ) : for ax in [ ax1 , ax2 , ax3 ] : ax . grid ( False ) ax . set_xticks ( [ ] ) ax . set_yticks ( [ ] ) ax1 . imshow ( true_mask [ padding : - padding , padding : - padding ] , cmap = 'viridis' ) ax1 . set_title ( 'True Mas...
Plots a single cell with its true mask and predicted mask.
7,691
def plot_masks ( mask_1 , mask_2 , mask_3 ) : fig , ( ( ax1 , ax2 , ax3 ) ) = plt . subplots ( 1 , 3 , figsize = ( 12 , 5 ) ) for ax in [ ax1 , ax2 , ax3 ] : ax . grid ( False ) ax . set_xticks ( [ ] ) ax . set_yticks ( [ ] ) ax1 . set_title ( "Type 1" ) ax1 . imshow ( mask_1 , cmap = 'viridis' ) ax2 . set_title ( "Typ...
Plots three masks
7,692
def plot_cells ( cell_1 , cell_2 , cell_3 ) : fig , ( ( ax1 , ax2 , ax3 ) ) = plt . subplots ( 1 , 3 , figsize = ( 12 , 5 ) ) for ax in [ ax1 , ax2 , ax3 ] : ax . grid ( False ) ax . set_xticks ( [ ] ) ax . set_yticks ( [ ] ) ax1 . set_title ( "Type 1" ) ax1 . imshow ( cell_1 ) ax2 . set_title ( "Type 2" ) ax2 . imshow...
Plots three cells
7,693
def _sorted_keys ( self , keys ) : sorted_keys = [ ] if ( 'epoch' in keys ) and ( 'epoch' not in self . keys_ignored_ ) : sorted_keys . append ( 'epoch' ) for key in sorted ( keys ) : if not ( ( key in ( 'epoch' , 'dur' ) ) or ( key in self . keys_ignored_ ) or key . endswith ( '_best' ) or key . startswith ( 'event_' ...
Sort keys dropping the ones that should be ignored .
7,694
def to_tensor ( X , device , accept_sparse = False ) : to_tensor_ = partial ( to_tensor , device = device ) if is_torch_data_type ( X ) : return X . to ( device ) if isinstance ( X , dict ) : return { key : to_tensor_ ( val ) for key , val in X . items ( ) } if isinstance ( X , ( list , tuple ) ) : return [ to_tensor_ ...
Turn input data to torch tensor .
7,695
def to_numpy(X):
    """Generic function to convert a pytorch tensor to a numpy array.

    ndarrays pass through, pandas frames return their values, and
    tensors are moved to CPU and detached from the autograd graph
    before conversion.  Raises TypeError for unsupported types.
    """
    if isinstance(X, np.ndarray):
        return X
    if is_pandas_ndframe(X):
        return X.values
    if not is_torch_data_type(X):
        raise TypeError("Cannot convert this data type to a numpy array.")
    # Tensors must live on the CPU and be detached before .numpy().
    if X.is_cuda:
        X = X.cpu()
    if X.requires_grad:
        X = X.detach()
    return X.numpy()
Generic function to convert a pytorch tensor to numpy .
7,696
def _normalize_numpy_indices ( i ) : if isinstance ( i , np . ndarray ) : if i . dtype == bool : i = tuple ( j . tolist ( ) for j in i . nonzero ( ) ) elif i . dtype == int : i = i . tolist ( ) return i
Normalize the index in case it is a numpy integer or boolean array .
7,697
def multi_indexing(data, i, indexing=None):
    """Perform indexing on multiple data structures, using the supplied
    indexing function or one inferred from the data's type."""
    i = _normalize_numpy_indices(i)
    if indexing is None:
        indexing = check_indexing(data)
    return indexing(data, i)
Perform indexing on multiple data structures .
7,698
def duplicate_items(*collections):
    """Return the set of items occurring more than once across all the
    given collections (flattened)."""
    seen, dupes = set(), set()
    for item in flatten(collections):
        (dupes if item in seen else seen).add(item)
    return dupes
Search for duplicate items in all collections .
7,699
def params_for(prefix, kwargs):
    """Extract the parameters in kwargs that belong to the given sklearn
    module prefix, stripping the prefix from the returned keys."""
    full_prefix = prefix if prefix.endswith('__') else prefix + '__'
    cut = len(full_prefix)
    return {k[cut:]: v for k, v in kwargs.items()
            if k.startswith(full_prefix)}
Extract parameters that belong to a given sklearn module prefix from kwargs . This is useful to obtain parameters that belong to a submodule .