idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
48,600 | def write_record ( self , warc_record ) : warc_record . write_to ( self . fileobj ) if isinstance ( self . fileobj , gzip2 . GzipFile ) : self . fileobj . close_member ( ) | Adds a warc record to this WARC file . |
48,601 | def tell ( self ) : if isinstance ( self . fileobj , gzip2 . GzipFile ) : return self . fileobj . fileobj . tell ( ) else : return self . fileobj . tell ( ) | Returns the file offset . If this is a compressed file then the offset in the compressed file is returned . |
48,602 | def close_member ( self ) : if self . _new_member : return self . fileobj . write ( self . compress . flush ( ) ) write32u ( self . fileobj , self . crc ) write32u ( self . fileobj , self . size & 0xffffffffL ) self . size = 0 self . compress = zlib . compressobj ( 9 , zlib . DEFLATED , - zlib . MAX_WBITS , zlib . DEF_MEM_LEVEL , 0 ) self . _new_member = True | Closes the current member being written . |
48,603 | def _start_member ( self ) : if self . _new_member : self . _init_write ( self . name ) self . _write_gzip_header ( ) self . _new_member = False | Starts writing a new member if required . |
48,604 | def close ( self ) : if self . fileobj is None : return if self . mode == WRITE : self . close_member ( ) self . fileobj = None elif self . mode == READ : self . fileobj = None if self . myfileobj : self . myfileobj . close ( ) self . myfileobj = None | Closes the gzip with care to handle multiple members . |
48,605 | def read_member ( self ) : if self . _member_lock is False : self . _member_lock = True if self . _new_member : try : BaseGzipFile . _read ( self , 1 ) assert self . _new_member is False except EOFError : return None return self | Returns a file - like object to read one member from the gzip file . |
48,606 | def write_member ( self , data ) : if isinstance ( data , basestring ) : self . write ( data ) else : for text in data : self . write ( text ) self . close_member ( ) | Writes the given data as one gzip member . The data can be a string an iterator that gives strings or a file - like object . |
48,607 | def write_to ( self , f , version = None ) : if not version : version = self . version if version == 1 : header = "%(url)s %(ip_address)s %(date)s %(content_type)s %(length)s" elif version == 2 : header = "%(url)s %(ip_address)s %(date)s %(content_type)s %(result_code)s %(checksum)s %(location)s %(offset)s %(filename)s %(length)s" header = header % dict ( url = self [ 'url' ] , ip_address = self [ 'ip_address' ] , date = self [ 'date' ] , content_type = self [ 'content_type' ] , result_code = self [ 'result_code' ] , checksum = self [ 'checksum' ] , location = self [ 'location' ] , offset = self [ 'offset' ] , filename = self [ 'filename' ] , length = self [ 'length' ] ) f . write ( header ) | Writes out the arc header to the file like object f . |
48,608 | def from_string ( cls , string , version ) : header , payload = string . split ( "\n" , 1 ) if payload [ 0 ] == '\n' : payload = payload [ 1 : ] if int ( version ) == 1 : arc_header_re = ARC1_HEADER_RE elif int ( version ) == 2 : arc_header_re = ARC2_HEADER_RE matches = arc_header_re . search ( header ) headers = matches . groupdict ( ) arc_header = ARCHeader ( ** headers ) return cls ( header = arc_header , payload = payload , version = version ) | Constructs an ARC record from a string and returns it . |
48,609 | def _write_header ( self ) : "Writes out an ARC header" if "org" not in self . file_headers : warnings . warn ( "Using 'unknown' for Archiving organisation name" ) self . file_headers [ 'org' ] = "Unknown" if "date" not in self . file_headers : now = datetime . datetime . utcnow ( ) warnings . warn ( "Using '%s' for Archiving time" % now ) self . file_headers [ 'date' ] = now if "ip_address" not in self . file_headers : warnings . warn ( "Using '127.0.0.1' as IP address of machine that's archiving" ) self . file_headers [ 'ip_address' ] = "127.0.0.1" if self . version == 1 : payload = "1 0 %(org)s\nURL IP-address Archive-date Content-type Archive-length" % dict ( org = self . file_headers [ 'org' ] ) elif self . version == 2 : payload = "2 0 %(org)s\nURL IP-address Archive-date Content-type Result-code Checksum Location Offset Filename Archive-length" else : raise IOError ( "Can't write an ARC file with version '\"%s\"'" % self . version ) fname = os . path . basename ( self . fileobj . name ) header = ARCHeader ( url = "filedesc://%s" % fname , ip_address = self . file_headers [ 'ip_address' ] , date = self . file_headers [ 'date' ] , content_type = "text/plain" , length = len ( payload ) , result_code = "200" , checksum = "-" , location = "-" , offset = str ( self . fileobj . tell ( ) ) , filename = fname ) arc_file_header_record = ARCRecord ( header , payload % self . file_headers ) self . write ( arc_file_header_record ) | Writes out an ARC header |
48,610 | def write ( self , arc_record ) : "Writes out the given arc record to the file" if not self . version : self . version = 2 if not self . header_written : self . header_written = True self . _write_header ( ) arc_record . write_to ( self . fileobj , self . version ) self . fileobj . write ( "\n" ) | Writes out the given arc record to the file |
48,611 | def _read_file_header ( self ) : header = self . fileobj . readline ( ) payload1 = self . fileobj . readline ( ) payload2 = self . fileobj . readline ( ) version , reserved , organisation = payload1 . split ( None , 2 ) self . fileobj . readline ( ) self . header_read = True if self . version and int ( self . version ) != version : raise IOError ( "Version mismatch. Requested version was '%s' but version in file was '%s'" % ( self . version , version ) ) if version == '1' : url , ip_address , date , content_type , length = header . split ( ) self . file_headers = { "ip_address" : ip_address , "date" : datetime . datetime . strptime ( date , "%Y%m%d%H%M%S" ) , "org" : organisation } self . version = 1 elif version == '2' : url , ip_address , date , content_type , result_code , checksum , location , offset , filename , length = header . split ( ) self . file_headers = { "ip_address" : ip_address , "date" : datetime . datetime . strptime ( date , "%Y%m%d%H%M%S" ) , "org" : organisation } self . version = 2 else : raise IOError ( "Unknown ARC version '%s'" % version ) | Reads out the file header for the arc file . If version was not provided this will autopopulate it . |
48,612 | def _read_arc_record ( self ) : "Reads out an arc record, formats it and returns it" header = self . fileobj . readline ( ) while header and header . strip ( ) == "" : header = self . fileobj . readline ( ) if header == "" : return None if int ( self . version ) == 1 : arc_header_re = ARC1_HEADER_RE elif int ( self . version ) == 2 : arc_header_re = ARC2_HEADER_RE matches = arc_header_re . search ( header ) headers = matches . groupdict ( ) arc_header = ARCHeader ( ** headers ) payload = self . fileobj . read ( int ( headers [ 'length' ] ) ) self . fileobj . readline ( ) return ARCRecord ( header = arc_header , payload = payload ) | Reads out an arc record formats it and returns it |
48,613 | def from_bank_code ( cls , country_code , bank_code ) : try : return cls ( registry . get ( 'bank_code' ) [ ( country_code , bank_code ) ] [ 'bic' ] ) except KeyError : raise ValueError ( "Invalid bank code {!r} for country {!r}" . format ( bank_code , country_code ) ) | Create a new BIC object from country - and bank - code . |
48,614 | def generate ( cls , country_code , bank_code , account_code ) : spec = _get_iban_spec ( country_code ) bank_code_length = code_length ( spec , 'bank_code' ) branch_code_length = code_length ( spec , 'branch_code' ) bank_and_branch_code_length = bank_code_length + branch_code_length account_code_length = code_length ( spec , 'account_code' ) if len ( bank_code ) > bank_and_branch_code_length : raise ValueError ( "Bank code exceeds maximum size {}" . format ( bank_and_branch_code_length ) ) if len ( account_code ) > account_code_length : raise ValueError ( "Account code exceeds maximum size {}" . format ( account_code_length ) ) bank_code = bank_code . rjust ( bank_and_branch_code_length , '0' ) account_code = account_code . rjust ( account_code_length , '0' ) iban = country_code + '??' + bank_code + account_code return cls ( iban ) | Generate an IBAN from it s components . |
48,615 | def tdms2rtdc ( ) : parser = tdms2rtdc_parser ( ) args = parser . parse_args ( ) path_tdms = pathlib . Path ( args . tdms_path ) . resolve ( ) path_rtdc = pathlib . Path ( args . rtdc_path ) if path_tdms . is_dir ( ) : files_tdms = fmt_tdms . get_tdms_files ( path_tdms ) if path_rtdc . is_file ( ) : raise ValueError ( "rtdc_path is a file: {}" . format ( path_rtdc ) ) files_rtdc = [ ] for ff in files_tdms : ff = pathlib . Path ( ff ) rp = ff . relative_to ( path_tdms ) rpr = path_rtdc / rp . with_suffix ( ".rtdc" ) files_rtdc . append ( rpr ) else : files_tdms = [ path_tdms ] files_rtdc = [ path_rtdc ] for ii in range ( len ( files_tdms ) ) : ff = pathlib . Path ( files_tdms [ ii ] ) fr = pathlib . Path ( files_rtdc [ ii ] ) print_info ( "Converting {:d}/{:d}: {}" . format ( ii + 1 , len ( files_tdms ) , ff ) ) ds = load . load_file ( ff ) if not fr . parent . exists ( ) : fr . parent . mkdir ( parents = True ) features = [ ] if args . compute_features : tocomp = dfn . feature_names else : tocomp = ds . _events for feat in tocomp : if feat not in dfn . scalar_feature_names : if not ds [ feat ] : continue elif feat not in ds : continue features . append ( feat ) ds . export . hdf5 ( path = fr , features = features , filtered = False , override = True ) | Convert . tdms datasets to the hdf5 - based . rtdc file format |
48,616 | def verify_dataset ( ) : parser = verify_dataset_parser ( ) args = parser . parse_args ( ) path_in = pathlib . Path ( args . path ) . resolve ( ) viol , aler , info = load . check_dataset ( path_in ) print_info ( "Checking {}" . format ( path_in ) ) for inf in info : print_info ( inf ) for ale in aler : print_alert ( ale ) for vio in viol : print_violation ( vio ) print_info ( "Check Complete: {} violations and {} alerts" . format ( len ( viol ) , len ( aler ) ) ) | Perform checks on experimental datasets |
48,617 | def load_from_file ( cfg_file ) : path = pathlib . Path ( cfg_file ) . resolve ( ) with path . open ( 'r' ) as f : code = f . readlines ( ) cfg = CaseInsensitiveDict ( ) for line in code : line = line . split ( "#" ) [ 0 ] . strip ( ) if len ( line ) != 0 : if line . startswith ( "[" ) and line . endswith ( "]" ) : section = line [ 1 : - 1 ] . lower ( ) if section not in cfg : cfg [ section ] = CaseInsensitiveDict ( ) continue var , val = line . split ( "=" , 1 ) var = var . strip ( ) . lower ( ) val = val . strip ( "' " ) . strip ( '" ' ) . strip ( ) if ( section in dfn . config_funcs and var in dfn . config_funcs [ section ] ) : val = dfn . config_funcs [ section ] [ var ] ( val ) else : var , val = keyval_str2typ ( var , val ) if len ( var ) != 0 and len ( str ( val ) ) != 0 : cfg [ section ] [ var ] = val return cfg | Load the configuration from a file |
48,618 | def keyval_str2typ ( var , val ) : if not ( isinstance ( val , str_types ) ) : return var . strip ( ) , val var = var . strip ( ) . lower ( ) val = val . strip ( ) if len ( var ) != 0 and len ( val ) != 0 : if val . startswith ( "[" ) and val . endswith ( "]" ) : if len ( val . strip ( "[]," ) ) == 0 : values = [ ] else : values = val . strip ( "[]," ) . split ( "," ) values = [ float ( v ) for v in values ] return var , values elif val . lower ( ) in [ "true" , "y" ] : return var , True elif val . lower ( ) in [ "false" , "n" ] : return var , False elif val [ 0 ] in [ "'" , '"' ] and val [ - 1 ] in [ "'" , '"' ] : return var , val . strip ( "'" ) . strip ( '"' ) . strip ( ) elif val in dfn . scalar_feature_names : return var , val else : try : return var , float ( val . replace ( "," , "." ) ) except ValueError : return var , val | Convert a variable from a string to its correct type |
48,619 | def keyval_typ2str ( var , val ) : varout = var . strip ( ) if isinstance ( val , list ) : data = ", " . join ( [ keyval_typ2str ( var , it ) [ 1 ] for it in val ] ) valout = "[" + data + "]" elif isinstance ( val , float ) : valout = "{:.12f}" . format ( val ) else : valout = "{}" . format ( val ) return varout , valout | Convert a variable to a string |
48,620 | def _init_default_values ( self ) : self [ "filtering" ] [ "remove invalid events" ] = False self [ "filtering" ] [ "enable filters" ] = True self [ "filtering" ] [ "limit events" ] = 0 self [ "filtering" ] [ "polygon filters" ] = [ ] self [ "filtering" ] [ "hierarchy parent" ] = "none" for item in dfn . scalar_feature_names : appends = [ " min" , " max" ] for a in appends : self [ "filtering" ] [ item + a ] = 0 | Set default initial values |
48,621 | def save ( self , filename ) : filename = pathlib . Path ( filename ) out = [ ] keys = sorted ( list ( self . keys ( ) ) ) for key in keys : out . append ( "[{}]" . format ( key ) ) section = self [ key ] ikeys = list ( section . keys ( ) ) ikeys . sort ( ) for ikey in ikeys : var , val = keyval_typ2str ( ikey , section [ ikey ] ) out . append ( "{} = {}" . format ( var , val ) ) out . append ( "" ) with filename . open ( "w" ) as f : for i in range ( len ( out ) ) : out [ i ] = out [ i ] + "\n" f . writelines ( out ) | Save the configuration to a file |
48,622 | def update ( self , newcfg ) : for key in newcfg . keys ( ) : if key not in self . _cfg : self . _cfg [ key ] = CaseInsensitiveDict ( ) for skey in newcfg [ key ] : self . _cfg [ key ] [ skey ] = newcfg [ key ] [ skey ] | Update current config with a dictionary |
48,623 | def convert ( area_um , deform , emodulus , channel_width_in , channel_width_out , flow_rate_in , flow_rate_out , viscosity_in , viscosity_out , inplace = False ) : copy = not inplace area_um_corr = np . array ( area_um , dtype = float , copy = copy ) deform_corr = np . array ( deform , copy = copy ) emodulus_corr = np . array ( emodulus , copy = copy ) if channel_width_in != channel_width_out : area_um_corr *= ( channel_width_out / channel_width_in ) ** 2 if ( flow_rate_in != flow_rate_out or viscosity_in != viscosity_out or channel_width_in != channel_width_out ) : emodulus_corr *= ( flow_rate_out / flow_rate_in ) * ( viscosity_out / viscosity_in ) * ( channel_width_in / channel_width_out ) ** 3 return area_um_corr , deform_corr , emodulus_corr | convert area - deformation - emodulus triplet |
48,624 | def corrpix_deform_delta ( area_um , px_um = 0.34 ) : pxcorr = ( .34 / px_um ) ** 2 offs = 0.0012 exp1 = 0.020 * np . exp ( - area_um * pxcorr / 7.1 ) exp2 = 0.010 * np . exp ( - area_um * pxcorr / 38.6 ) exp3 = 0.005 * np . exp ( - area_um * pxcorr / 296 ) delta = offs + exp1 + exp2 + exp3 return delta | Deformation correction term for pixelation effects |
48,625 | def get_emodulus ( area_um , deform , medium = "CellCarrier" , channel_width = 20.0 , flow_rate = 0.16 , px_um = 0.34 , temperature = 23.0 , copy = True ) : deform = np . array ( deform , copy = copy , dtype = float ) area_um = np . array ( area_um , copy = copy , dtype = float ) lut_path = resource_filename ( "dclab.features" , "emodulus_lut.txt" ) with pathlib . Path ( lut_path ) . open ( "rb" ) as lufd : lut = np . loadtxt ( lufd ) lut_channel_width = 20.0 lut_flow_rate = 0.04 lut_visco = 15.0 if isinstance ( medium , ( float , int ) ) : visco = medium else : visco = get_viscosity ( medium = medium , channel_width = channel_width , flow_rate = flow_rate , temperature = temperature ) convert ( area_um = lut [ : , 0 ] , deform = lut [ : , 1 ] , emodulus = lut [ : , 2 ] , channel_width_in = lut_channel_width , channel_width_out = channel_width , flow_rate_in = lut_flow_rate , flow_rate_out = flow_rate , viscosity_in = lut_visco , viscosity_out = visco , inplace = True ) if px_um : ddelt = corrpix_deform_delta ( area_um = area_um , px_um = px_um ) deform -= ddelt area_norm = lut [ : , 0 ] . max ( ) normalize ( lut [ : , 0 ] , area_norm ) normalize ( area_um , area_norm ) defo_norm = lut [ : , 1 ] . max ( ) normalize ( lut [ : , 1 ] , defo_norm ) normalize ( deform , defo_norm ) emod = spint . griddata ( ( lut [ : , 0 ] , lut [ : , 1 ] ) , lut [ : , 2 ] , ( area_um , deform ) , method = 'linear' ) return emod | Compute apparent Young s modulus using a look - up table |
48,626 | def make_exception ( method , e ) : x = e . details ( ) name = x [ : x . find ( ':' ) ] . split ( '.' ) [ - 1 ] if name in globals ( ) : cls = globals ( ) [ name ] else : cls = UnknownRpcException return cls ( method , e . code ( ) , e . details ( ) ) | Creates an exception for a given method and RpcError . |
48,627 | def text_to_int ( text , default_base = "hex" ) : if text . startswith ( "0x" ) : value = int ( text [ 2 : ] , 16 ) elif text . startswith ( "$" ) : value = int ( text [ 1 : ] , 16 ) elif text . startswith ( "#" ) : value = int ( text [ 1 : ] , 10 ) elif text . startswith ( "%" ) : value = int ( text [ 1 : ] , 2 ) else : if default_base == "dec" : value = int ( text ) else : value = int ( text , 16 ) return value | Convert text to int raising exeception on invalid input |
48,628 | def assign_sector_numbers ( self , dirent , sector_list ) : num = len ( sector_list ) order = self . reserve_space ( num ) if len ( order ) != num : raise errors . InvalidFile ( "VTOC reserved space for %d sectors. Sectors needed: %d" % ( len ( order ) , num ) ) file_length = 0 last_sector = None for sector , sector_num in zip ( sector_list . sectors , order ) : sector . sector_num = sector_num sector . file_num = dirent . file_num file_length += sector . used if last_sector is not None : last_sector . next_sector_num = sector_num last_sector = sector if last_sector is not None : last_sector . next_sector_num = 0 sector_list . file_length = file_length | Map out the sectors and link the sectors together |
48,629 | def downsample_rand ( a , samples , remove_invalid = False , ret_idx = False ) : rs = np . random . RandomState ( seed = 47 ) . get_state ( ) np . random . set_state ( rs ) samples = int ( samples ) if remove_invalid : bad = np . isnan ( a ) | np . isinf ( a ) pool = a [ ~ bad ] else : pool = a if samples and ( samples < pool . shape [ 0 ] ) : keep = np . zeros_like ( pool , dtype = bool ) keep_ids = np . random . choice ( np . arange ( pool . size ) , size = samples , replace = False ) keep [ keep_ids ] = True dsa = pool [ keep ] else : keep = np . ones_like ( pool , dtype = bool ) dsa = pool if remove_invalid : idx = np . zeros ( a . size , dtype = bool ) idx [ ~ bad ] = keep else : idx = keep if ret_idx : return dsa , idx else : return dsa | Downsampling by randomly removing points |
48,630 | def downsample_grid ( a , b , samples , ret_idx = False ) : rs = np . random . RandomState ( seed = 47 ) . get_state ( ) samples = int ( samples ) if samples and samples < a . size : keep = np . zeros_like ( a , dtype = bool ) grid_size = 300 xpx = norm ( a , a , b ) * grid_size ypx = norm ( b , b , a ) * grid_size toproc = np . ones ( ( grid_size , grid_size ) , dtype = bool ) for ii in range ( xpx . size ) : xi = xpx [ ii ] yi = ypx [ ii ] if valid ( xi , yi ) and toproc [ int ( xi - 1 ) , int ( yi - 1 ) ] : toproc [ int ( xi - 1 ) , int ( yi - 1 ) ] = False keep [ ii ] = True diff = np . sum ( keep ) - samples if diff > 0 : rem_indices = np . where ( keep ) [ 0 ] np . random . set_state ( rs ) rem = np . random . choice ( rem_indices , size = diff , replace = False ) keep [ rem ] = False elif diff < 0 : add_indices = np . where ( ~ keep ) [ 0 ] np . random . set_state ( rs ) add = np . random . choice ( add_indices , size = abs ( diff ) , replace = False ) keep [ add ] = True assert np . sum ( keep ) == samples , "sanity check" asd = a [ keep ] bsd = b [ keep ] assert np . allclose ( a [ keep ] , asd , equal_nan = True ) , "sanity check" assert np . allclose ( b [ keep ] , bsd , equal_nan = True ) , "sanity check" else : keep = np . ones_like ( a , dtype = bool ) asd = a bsd = b if ret_idx : return asd , bsd , keep else : return asd , bsd | Content - based downsampling for faster visualization |
48,631 | def valid ( a , b ) : return ~ ( np . isnan ( a ) | np . isinf ( a ) | np . isnan ( b ) | np . isinf ( b ) ) | Check whether a and b are not inf or nan |
48,632 | def parse_config ( h5path ) : with h5py . File ( h5path , mode = "r" ) as fh5 : h5attrs = dict ( fh5 . attrs ) for key in h5attrs : if isinstance ( h5attrs [ key ] , bytes ) : h5attrs [ key ] = h5attrs [ key ] . decode ( "utf-8" ) config = Configuration ( ) for key in h5attrs : section , pname = key . split ( ":" ) if pname not in dfn . config_funcs [ section ] : config [ section ] [ pname ] = h5attrs [ key ] msg = "Unknown key '{}' in section [{}]!" . format ( pname , section ) warnings . warn ( msg , UnknownKeyWarning ) else : typ = dfn . config_funcs [ section ] [ pname ] config [ section ] [ pname ] = typ ( h5attrs [ key ] ) return config | Parse the RT - DC configuration of an hdf5 file |
48,633 | def hash ( self ) : if self . _hash is None : tohash = [ self . path . name ] tohash . append ( hashfile ( self . path , blocksize = 65536 , count = 20 ) ) self . _hash = hashobj ( tohash ) return self . _hash | Hash value based on file name and content |
48,634 | def bin_num_doane ( a ) : bad = np . isnan ( a ) | np . isinf ( a ) data = a [ ~ bad ] acc = bin_width_doane ( a ) num = np . int ( np . round ( ( data . max ( ) - data . min ( ) ) / acc ) ) return num | Compute number of bins based on Doane s formula |
48,635 | def ignore_nan_inf ( kde_method ) : def new_kde_method ( events_x , events_y , xout = None , yout = None , * args , ** kwargs ) : bad_in = get_bad_vals ( events_x , events_y ) if xout is None : density = np . zeros_like ( events_x , dtype = float ) bad_out = bad_in xo = yo = None else : density = np . zeros_like ( xout , dtype = float ) bad_out = get_bad_vals ( xout , yout ) xo = xout [ ~ bad_out ] yo = yout [ ~ bad_out ] ev_x = events_x [ ~ bad_in ] ev_y = events_y [ ~ bad_in ] density [ ~ bad_out ] = kde_method ( ev_x , ev_y , xo , yo , * args , ** kwargs ) density [ bad_out ] = np . nan return density doc_add = "\n Notes\n" + " -----\n" + " This is a wrapped version that ignores nan and inf values." new_kde_method . __doc__ = kde_method . __doc__ + doc_add return new_kde_method | Ignores nans and infs from the input data |
48,636 | def kde_gauss ( events_x , events_y , xout = None , yout = None ) : valid_combi = ( ( xout is None and yout is None ) or ( xout is not None and yout is not None ) ) if not valid_combi : raise ValueError ( "Both `xout` and `yout` must be (un)set." ) if yout is None and yout is None : xout = events_x yout = events_y try : estimator = gaussian_kde ( [ events_x . flatten ( ) , events_y . flatten ( ) ] ) density = estimator . evaluate ( [ xout . flatten ( ) , yout . flatten ( ) ] ) except np . linalg . LinAlgError : density = np . zeros ( xout . shape ) * np . nan return density . reshape ( xout . shape ) | Gaussian Kernel Density Estimation |
48,637 | def kde_histogram ( events_x , events_y , xout = None , yout = None , bins = None ) : valid_combi = ( ( xout is None and yout is None ) or ( xout is not None and yout is not None ) ) if not valid_combi : raise ValueError ( "Both `xout` and `yout` must be (un)set." ) if yout is None and yout is None : xout = events_x yout = events_y if bins is None : bins = ( max ( 5 , bin_num_doane ( events_x ) ) , max ( 5 , bin_num_doane ( events_y ) ) ) hist2d , xedges , yedges = np . histogram2d ( x = events_x , y = events_y , bins = bins , normed = True ) xip = xedges [ 1 : ] - ( xedges [ 1 ] - xedges [ 0 ] ) / 2 yip = yedges [ 1 : ] - ( yedges [ 1 ] - yedges [ 0 ] ) / 2 estimator = RectBivariateSpline ( x = xip , y = yip , z = hist2d ) density = estimator . ev ( xout , yout ) density [ density < 0 ] = 0 return density . reshape ( xout . shape ) | Histogram - based Kernel Density Estimation |
48,638 | def kde_none ( events_x , events_y , xout = None , yout = None ) : valid_combi = ( ( xout is None and yout is None ) or ( xout is not None and yout is not None ) ) if not valid_combi : raise ValueError ( "Both `xout` and `yout` must be (un)set." ) if yout is None and yout is None : xout = events_x yout = events_y return np . ones ( xout . shape ) | No Kernel Density Estimation |
48,639 | def kde_multivariate ( events_x , events_y , xout = None , yout = None , bw = None ) : valid_combi = ( ( xout is None and yout is None ) or ( xout is not None and yout is not None ) ) if not valid_combi : raise ValueError ( "Both `xout` and `yout` must be (un)set." ) if yout is None and yout is None : xout = events_x yout = events_y if bw is None : bw = ( bin_width_doane ( events_x ) / 2 , bin_width_doane ( events_y ) / 2 ) positions = np . vstack ( [ xout . flatten ( ) , yout . flatten ( ) ] ) estimator_ly = KDEMultivariate ( data = [ events_x . flatten ( ) , events_y . flatten ( ) ] , var_type = 'cc' , bw = bw ) density = estimator_ly . pdf ( positions ) return density . reshape ( xout . shape ) | Multivariate Kernel Density Estimation |
48,640 | def _add ( self , isoel , col1 , col2 , method , meta ) : self . _data [ method ] [ col1 ] [ col2 ] [ "isoelastics" ] = isoel self . _data [ method ] [ col1 ] [ col2 ] [ "meta" ] = meta isoel_flip = [ iso [ : , [ 1 , 0 , 2 ] ] for iso in isoel ] self . _data [ method ] [ col2 ] [ col1 ] [ "isoelastics" ] = isoel_flip self . _data [ method ] [ col2 ] [ col1 ] [ "meta" ] = meta | Convenience method for population self . _data |
48,641 | def add_px_err ( isoel , col1 , col2 , px_um , inplace = False ) : Isoelastics . check_col12 ( col1 , col2 ) if "deform" in [ col1 , col2 ] : sign = + 1 else : sign = - 1 if col1 == "area_um" : area_ax = 0 deci_ax = 1 else : area_ax = 1 deci_ax = 0 new_isoel = [ ] for iso in isoel : iso = np . array ( iso , copy = not inplace ) ddeci = feat_emod . corrpix_deform_delta ( area_um = iso [ : , area_ax ] , px_um = px_um ) iso [ : , deci_ax ] += sign * ddeci new_isoel . append ( iso ) return new_isoel | Undo pixelation correction |
48,642 | def convert ( isoel , col1 , col2 , channel_width_in , channel_width_out , flow_rate_in , flow_rate_out , viscosity_in , viscosity_out , inplace = False ) : Isoelastics . check_col12 ( col1 , col2 ) if col1 == "area_um" : area_ax = 0 defo_ax = 1 else : area_ax = 1 defo_ax = 0 new_isoel = [ ] for iso in isoel : iso = np . array ( iso , copy = not inplace ) feat_emod . convert ( area_um = iso [ : , area_ax ] , deform = iso [ : , defo_ax ] , emodulus = iso [ : , 2 ] , channel_width_in = channel_width_in , channel_width_out = channel_width_out , flow_rate_in = flow_rate_in , flow_rate_out = flow_rate_out , viscosity_in = viscosity_in , viscosity_out = viscosity_out , inplace = True ) new_isoel . append ( iso ) return new_isoel | Convert isoelastics in area_um - deform space |
48,643 | def get_with_rtdcbase ( self , col1 , col2 , method , dataset , viscosity = None , add_px_err = False ) : cfg = dataset . config return self . get ( col1 = col1 , col2 = col2 , method = method , channel_width = cfg [ "setup" ] [ "channel width" ] , flow_rate = cfg [ "setup" ] [ "flow rate" ] , viscosity = viscosity , add_px_err = add_px_err , px_um = cfg [ "imaging" ] [ "pixel size" ] ) | Convenience method that extracts the metadata from RTDCBase |
48,644 | def load_data ( self , path ) : path = pathlib . Path ( path ) . resolve ( ) meta = { } with path . open ( ) as fd : while True : line = fd . readline ( ) . strip ( ) if line . startswith ( "# - " ) : line = line . strip ( "#- " ) var , val = line . split ( ":" ) if val . strip ( ) . replace ( "." , "" ) . isdigit ( ) : val = float ( val ) else : val = val . strip ( ) . lower ( ) meta [ var . strip ( ) ] = val elif line and not line . startswith ( "#" ) : break assert meta [ "column 1" ] in dfn . scalar_feature_names assert meta [ "column 2" ] in dfn . scalar_feature_names assert meta [ "column 3" ] == "emodulus" assert meta [ "method" ] in VALID_METHODS with path . open ( "rb" ) as isfd : isodata = np . loadtxt ( isfd ) emoduli = np . unique ( isodata [ : , 2 ] ) isoel = [ ] for emod in emoduli : where = isodata [ : , 2 ] == emod isoel . append ( isodata [ where ] ) self . add ( isoel = isoel , col1 = meta [ "column 1" ] , col2 = meta [ "column 2" ] , channel_width = meta [ "channel width [um]" ] , flow_rate = meta [ "flow rate [ul/s]" ] , viscosity = meta [ "viscosity [mPa*s]" ] , method = meta [ "method" ] ) | Load isoelastics from a text file |
48,645 | def get_compensation_matrix ( ct21 , ct31 , ct12 , ct32 , ct13 , ct23 ) : ct11 = 1 ct22 = 1 ct33 = 1 if ct21 < 0 : raise ValueError ( "ct21 matrix element must not be negative!" ) if ct31 < 0 : raise ValueError ( "ct31 matrix element must not be negative!" ) if ct12 < 0 : raise ValueError ( "ct12 matrix element must not be negative!" ) if ct32 < 0 : raise ValueError ( "ct32 matrix element must not be negative!" ) if ct13 < 0 : raise ValueError ( "ct13 matrix element must not be negative!" ) if ct23 < 0 : raise ValueError ( "ct23 matrix element must not be negative!" ) crosstalk = np . array ( [ [ ct11 , ct12 , ct13 ] , [ ct21 , ct22 , ct23 ] , [ ct31 , ct32 , ct33 ] , ] ) return np . linalg . inv ( crosstalk ) | Compute crosstalk inversion matrix |
48,646 | def correct_crosstalk ( fl1 , fl2 , fl3 , fl_channel , ct21 = 0 , ct31 = 0 , ct12 = 0 , ct32 = 0 , ct13 = 0 , ct23 = 0 ) : fl_channel = int ( fl_channel ) if fl_channel not in [ 1 , 2 , 3 ] : raise ValueError ( "`fl_channel` must be 1, 2, or 3!" ) minv = get_compensation_matrix ( ct21 = ct21 , ct31 = ct31 , ct12 = ct12 , ct32 = ct32 , ct13 = ct13 , ct23 = ct23 ) col = minv [ : , fl_channel - 1 ] . flatten ( ) flout = col [ 0 ] * fl1 + col [ 1 ] * fl2 + col [ 2 ] * fl3 return flout | Perform crosstalk correction |
48,647 | def get_inert_ratio_cvx ( cont ) : if isinstance ( cont , np . ndarray ) : cont = [ cont ] ret_list = False else : ret_list = True length = len ( cont ) inert_ratio_cvx = np . zeros ( length , dtype = float ) * np . nan for ii in range ( length ) : try : chull = ssp . ConvexHull ( cont [ ii ] ) except ssp . qhull . QhullError : pass else : hull = cont [ ii ] [ chull . vertices , : ] inert_ratio_cvx [ ii ] = get_inert_ratio_raw ( hull ) if not ret_list : inert_ratio_cvx = inert_ratio_cvx [ 0 ] return inert_ratio_cvx | Compute the inertia ratio of the convex hull of a contour |
48,648 | def get_inert_ratio_prnc ( cont ) : if isinstance ( cont , np . ndarray ) : cont = [ cont ] ret_list = False else : ret_list = True length = len ( cont ) inert_ratio_prnc = np . zeros ( length , dtype = float ) * np . nan for ii in range ( length ) : moments = cont_moments_cv ( cont [ ii ] ) if moments is not None : orient = 0.5 * np . arctan2 ( 2 * moments [ 'mu11' ] , moments [ 'mu02' ] - moments [ 'mu20' ] ) cc = np . array ( cont [ ii ] , dtype = float , copy = False ) rho = np . sqrt ( cc [ : , 0 ] ** 2 + cc [ : , 1 ] ** 2 ) phi = np . arctan2 ( cc [ : , 1 ] , cc [ : , 0 ] ) + orient + np . pi / 2 cc [ : , 0 ] = rho * np . cos ( phi ) cc [ : , 1 ] = rho * np . sin ( phi ) mprnc = cont_moments_cv ( cc ) inert_ratio_prnc [ ii ] = np . sqrt ( mprnc [ "mu20" ] / mprnc [ "mu02" ] ) if not ret_list : inert_ratio_prnc = inert_ratio_prnc [ 0 ] return inert_ratio_prnc | Compute principal inertia ratio of a contour |
48,649 | def get_inert_ratio_raw ( cont ) : if isinstance ( cont , np . ndarray ) : cont = [ cont ] ret_list = False else : ret_list = True length = len ( cont ) inert_ratio_raw = np . zeros ( length , dtype = float ) * np . nan for ii in range ( length ) : moments = cont_moments_cv ( cont [ ii ] ) if moments is not None : inert_ratio_raw [ ii ] = np . sqrt ( moments [ "mu20" ] / moments [ "mu02" ] ) if not ret_list : inert_ratio_raw = inert_ratio_raw [ 0 ] return inert_ratio_raw | Compute the inertia ratio of a contour |
48,650 | def get_tilt ( cont ) : if isinstance ( cont , np . ndarray ) : cont = [ cont ] ret_list = False else : ret_list = True length = len ( cont ) tilt = np . zeros ( length , dtype = float ) * np . nan for ii in range ( length ) : moments = cont_moments_cv ( cont [ ii ] ) if moments is not None : oii = 0.5 * np . arctan2 ( 2 * moments [ 'mu11' ] , moments [ 'mu02' ] - moments [ 'mu20' ] ) tilt [ ii ] = oii + np . pi / 2 tilt = np . mod ( tilt , np . pi ) tilt [ tilt > np . pi / 2 ] -= np . pi tilt = np . abs ( tilt ) if not ret_list : tilt = tilt [ 0 ] return tilt | Compute tilt of raw contour relative to channel axis |
48,651 | def tag ( * tags ) : def dfn ( fn ) : _tags = getattr ( fn , 'tags' , set ( ) ) _tags . update ( tags ) fn . tags = _tags return fn return dfn | Constructs a decorator that tags a function with specified strings ( |
48,652 | def raw ( mime = 'application/octet-stream' ) : def dfn ( fn ) : tags = getattr ( fn , 'tags' , set ( ) ) tags . add ( 'raw' ) fn . tags = tags fn . mime = getattr ( fn , 'mime' , mime ) return fn return dfn | Constructs a decorator that marks the fn as raw response format |
48,653 | def open ( self , pysession_id ) : self . id = id ( self ) self . funcserver = self . application . funcserver self . pysession_id = pysession_id self . state = self . funcserver . websocks [ self . id ] = { 'id' : self . id , 'sock' : self } | Called when client opens connection . Initialization is done here . |
48,654 | def on_message ( self , msg ) : msg = json . loads ( msg ) psession = self . funcserver . pysessions . get ( self . pysession_id , None ) if psession is None : interpreter = PyInterpreter ( self . funcserver . define_python_namespace ( ) ) psession = dict ( interpreter = interpreter , socks = set ( [ self . id ] ) ) self . funcserver . pysessions [ self . pysession_id ] = psession else : interpreter = psession [ 'interpreter' ] psession [ 'socks' ] . add ( self . id ) code = msg [ 'code' ] msg_id = msg [ 'id' ] stdout = sys . stdout try : sys . stdout = cStringIO . StringIO ( ) interpreter . runsource ( code ) output = sys . stdout . getvalue ( ) or interpreter . output if isinstance ( output , list ) : output = '' . join ( output ) interpreter . output = [ ] finally : sys . stdout = stdout msg = { 'type' : MSG_TYPE_CONSOLE , 'id' : msg_id , 'data' : output } self . send_message ( msg ) | Called when client sends a message . |
48,655 | def on_close ( self ) : if self . id in self . funcserver . websocks : self . funcserver . websocks [ self . id ] = None ioloop = tornado . ioloop . IOLoop . instance ( ) ioloop . add_callback ( lambda : self . funcserver . websocks . pop ( self . id , None ) ) psession = self . funcserver . pysessions . get ( self . pysession_id , None ) if psession : psession [ 'socks' ] . remove ( self . id ) if not psession [ 'socks' ] : del self . funcserver . pysessions [ self . pysession_id ] | Called when client closes this connection . Cleanup is done here . |
48,656 | def _clean_kwargs ( self , kwargs , fn ) : if not self . server . IGNORE_UNEXPECTED_KWARGS : return kwargs expected_kwargs = set ( inspect . getargspec ( fn ) . args ) got_kwargs = set ( kwargs . keys ( ) ) unexpected_kwargs = got_kwargs - expected_kwargs for k in unexpected_kwargs : del kwargs [ k ] return kwargs | Remove unexpected keyword arguments from the set of received keyword arguments . |
48,657 | def dump_stacks ( self ) : dump = [ ] threads = dict ( [ ( th . ident , th . name ) for th in threading . enumerate ( ) ] ) for thread , frame in sys . _current_frames ( ) . items ( ) : if thread not in threads : continue dump . append ( 'Thread 0x%x (%s)\n' % ( thread , threads [ thread ] ) ) dump . append ( '' . join ( traceback . format_stack ( frame ) ) ) dump . append ( '\n' ) return '' . join ( dump ) | Dumps the stack of all threads . This function is meant for debugging . Useful when a deadlock happens . |
48,658 | def define_log_pre_format_hooks ( self ) : hooks = super ( Server , self ) . define_log_pre_format_hooks ( ) if self . args . func == self . run and self . args . debug : hooks . append ( self . _send_log_to_ws ) return hooks | adds a hook to send to websocket if the run command was selected |
48,659 | def run ( self ) : self . log_id = 0 self . websocks = { } self . pysessions = { } if self . DISABLE_REQUESTS_DEBUG_LOGS : disable_requests_debug_logs ( ) self . threadpool = ThreadPool ( self . THREADPOOL_WORKERS ) self . api = None base_handlers = self . prepare_base_handlers ( ) handlers = self . prepare_handlers ( ) self . template_loader = TemplateLoader ( [ resolve_path ( self . TEMPLATE_PATH ) ] ) _ = self . prepare_template_loader ( self . template_loader ) if _ is not None : self . template_loader = _ shclass = CustomStaticFileHandler shclass . PATHS . append ( resolve_path ( self . STATIC_PATH ) ) _ = self . prepare_static_paths ( shclass . PATHS ) if _ is not None : shclass . PATHS = _ self . static_handler_class = shclass self . nav_tabs = [ ( 'Home' , '/' ) ] if self . args . debug : self . nav_tabs += [ ( 'Console' , '/console' ) , ( 'Logs' , '/logs' ) ] self . nav_tabs = self . prepare_nav_tabs ( self . nav_tabs ) settings = { 'static_path' : '<DUMMY-INEXISTENT-PATH>' , 'static_handler_class' : self . static_handler_class , 'template_loader' : self . template_loader , 'compress_response' : True , 'debug' : self . args . debug , } all_handlers = handlers + base_handlers self . app = self . APP_CLASS ( ** settings ) self . app . add_handlers ( self . VIRTUAL_HOST , all_handlers ) sys . funcserver = self . app . funcserver = self self . api = self . prepare_api ( ) if self . api is not None and not hasattr ( self . api , 'log' ) : self . api . log = self . log if self . args . port != 0 : self . app . listen ( self . args . port ) tornado . ioloop . IOLoop . instance ( ) . start ( ) | prepares the api and starts the tornado funcserver |
48,660 | def _get_sql ( filename ) : with open ( os . path . join ( SQL_DIR , filename ) , 'r' ) as f : return f . read ( ) | Returns the contents of the sql file from the given filename . |
48,661 | def verify_id_n_version ( id , version ) : stmt = _get_sql ( 'verify-id-and-version.sql' ) args = dict ( id = id , version = version ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) try : valid = cursor . fetchone ( ) [ 0 ] except TypeError : raise NotFound ( join_ident_hash ( id , version ) ) return True | Given an id and version verify the identified content exists . |
48,662 | def get_id_n_version ( ident_hash ) : try : id , version = split_ident_hash ( ident_hash ) except IdentHashMissingVersion : from pyramid . httpexceptions import HTTPNotFound from cnxarchive . views . helpers import get_latest_version try : version = get_latest_version ( ident_hash ) except HTTPNotFound : raise NotFound ( ident_hash ) id , version = split_ident_hash ( join_ident_hash ( ident_hash , version ) ) else : verify_id_n_version ( id , version ) return id , version | From the given ident_hash return the id and version . |
48,663 | def get_type ( ident_hash ) : id , version = get_id_n_version ( ident_hash ) stmt = _get_sql ( 'get-type.sql' ) args = dict ( id = id , version = version ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) type = cursor . fetchone ( ) [ 0 ] return type | Return the database type for the given ident_hash As of now this could either be a Module or Collection . |
48,664 | def get_metadata ( ident_hash ) : id , version = get_id_n_version ( ident_hash ) stmt = _get_sql ( 'get-metadata.sql' ) args = dict ( id = id , version = version ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) try : metadata = cursor . fetchone ( ) [ 0 ] except TypeError : raise NotFound ( ident_hash ) return metadata | Return the dictionary of metadata from the database . This data is keyed using the cnx - epub data structure . |
48,665 | def get_content ( ident_hash , context = None ) : id , version = get_id_n_version ( ident_hash ) filename = 'index.cnxml.html' if context is not None : stmt = _get_sql ( 'get-baked-content.sql' ) args = dict ( id = id , version = version , context = context ) else : stmt = _get_sql ( 'get-content.sql' ) args = dict ( id = id , version = version , filename = filename ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) try : content , _ = cursor . fetchone ( ) except TypeError : raise ContentNotFound ( ident_hash , context , filename ) return content [ : ] | Returns the content for the given ident_hash . context is optionally ident - hash used to find the content within the context of a Collection ident_hash . |
48,666 | def get_file ( hash ) : stmt = _get_sql ( 'get-file.sql' ) args = dict ( hash = hash ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) try : file , _ = cursor . fetchone ( ) except TypeError : raise FileNotFound ( hash ) return memoryview ( file [ : ] ) | Return the contents of the file as a memoryview . |
48,667 | def get_registered_files ( ident_hash ) : id , version = get_id_n_version ( ident_hash ) stmt = _get_sql ( 'get-registered-files-info.sql' ) args = dict ( id = id , version = version ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) rows = cursor . fetchall ( ) if rows is None : rows = [ ] hashes = list ( set ( [ sha1 for sha1 , _ , __ in rows ] ) ) return hashes | Returns a list SHA1 hashes for registered file entries identified by the given module ident_hash . |
48,668 | def get_tree ( ident_hash , baked = False ) : id , version = get_id_n_version ( ident_hash ) stmt = _get_sql ( 'get-tree.sql' ) args = dict ( id = id , version = version , baked = baked ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( stmt , args ) try : tree = cursor . fetchone ( ) [ 0 ] except TypeError : raise NotFound ( ident_hash ) if tree is None : raise NotFound ( ident_hash ) return tree | Return a tree structure of the Collection |
48,669 | def guess_media_type ( filepath ) : o = subprocess . check_output ( [ 'file' , '--mime-type' , '-Lb' , filepath ] ) o = o . strip ( ) return o | Returns the media - type of the file at the given filepath |
48,670 | def lookup_module_ident ( id , version ) : with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( "SELECT module_ident FROM modules " "WHERE uuid = %s " "AND CONCAT_WS('.', major_version, minor_version) = %s" , ( id , version ) ) try : mident = cursor . fetchone ( ) [ 0 ] except ( IndexError , TypeError ) : ident_hash = join_ident_hash ( id , version ) raise RuntimeError ( "Content at {} does not exist." . format ( ident_hash ) ) return mident | Return the module_ident for the given id & major and minor version as a tuple . |
48,671 | def insert_file ( file , media_type ) : resource_hash = get_file_sha1 ( file ) with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( "SELECT fileid FROM files WHERE sha1 = %s" , ( resource_hash , ) ) try : fileid = cursor . fetchone ( ) [ 0 ] except ( IndexError , TypeError ) : cursor . execute ( "INSERT INTO files (file, media_type) " "VALUES (%s, %s)" "RETURNING fileid" , ( psycopg2 . Binary ( file . read ( ) ) , media_type , ) ) fileid = cursor . fetchone ( ) [ 0 ] return fileid , resource_hash | Upsert the file and media_type into the files table . Returns the fileid and sha1 of the upserted file . |
48,672 | def upsert_module_file ( module_ident , fileid , filename ) : with db_connect ( ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( "SELECT true FROM module_files " "WHERE module_ident = %s " "AND filename = %s" , ( module_ident , filename , ) ) try : cursor . fetchone ( ) [ 0 ] except ( IndexError , TypeError ) : cursor . execute ( "INSERT INTO module_files " "(module_ident, fileid, filename) " "VALUES (%s, %s, %s)" , ( module_ident , fileid , filename , ) ) else : cursor . execute ( "UPDATE module_files " "SET (fileid) = (%s) " "WHERE module_ident = %s AND filename = %s" , ( fileid , module_ident , filename , ) ) | Upsert a file associated with fileid with filename as a module_files entry associated with content at module_ident . |
48,673 | def get_contour ( mask ) : if isinstance ( mask , np . ndarray ) and len ( mask . shape ) == 2 : mask = [ mask ] ret_list = False else : ret_list = True contours = [ ] for mi in mask : c0 = find_contours ( mi . transpose ( ) , level = .9999 , positive_orientation = "low" , fully_connected = "high" ) [ 0 ] c1 = np . asarray ( np . round ( c0 ) , int ) c2 = remove_duplicates ( c1 ) contours . append ( c2 ) if ret_list : return contours else : return contours [ 0 ] | Compute the image contour from a mask |
48,674 | def scan ( self , stop_on_first = True , base_ip = 0 ) : tvs = [ ] if base_ip == 0 : sock = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) sock . connect ( ( "8.8.8.8" , 80 ) ) ip = sock . getsockname ( ) [ 0 ] sock . close ( ) ip_parts = ip . split ( '.' ) base_ip = ip_parts [ 0 ] + '.' + ip_parts [ 1 ] + '.' + ip_parts [ 2 ] for ip_suffix in range ( 2 , 256 ) : ip_check = '{}.{}' . format ( base_ip , ip_suffix ) if self . check_ip ( ip_check ) : tvs . append ( ip_check ) if stop_on_first : break return tvs | Scans the local network for TVs . |
48,675 | def check_ip ( ip , log = False ) : if log : print ( 'Checking ip: {}...' . format ( ip ) ) request_timeout = 0.1 try : tv_url = 'http://{}:6095/request?action=isalive' . format ( ip ) request = requests . get ( tv_url , timeout = request_timeout ) except requests . exceptions . ConnectTimeout : return False return request . status_code == 200 | Attempts a connection to the TV and checks if there really is a TV . |
48,676 | def get_field_type ( f ) : types = ( t [ 5 : ] for t in dir ( f ) if t [ : 4 ] == 'TYPE' and getattr ( f , t ) == f . type ) return next ( types ) | Obtain the type name of a GRPC Message field . |
48,677 | def get_field_description ( f ) : type_name = get_field_type ( f ) if type_name == 'MESSAGE' and { sf . name for sf in f . message_type . fields } == { 'key' , 'value' } : return 'map<string, string>' elif type_name == 'MESSAGE' : return f . message_type . full_name elif type_name == 'ENUM' : return f . enum_type . full_name else : return type_name . lower ( ) | Get the type description of a GRPC Message field . |
48,678 | def make_static_request ( method , * args , ** kwargs ) : if args and not use_signature : raise NotImplementedError ( "Only keyword arguments allowed in Python2" ) if use_signature : new_kwargs = { kw : unwrap ( value ) for kw , value in kwargs . items ( ) } new_args = tuple ( unwrap ( value ) for value in args ) bound_args = method . signature . bind ( None , * new_args , ** new_kwargs ) . arguments for k in bound_args : if isinstance ( bound_args [ k ] , Enum ) : bound_args [ k ] = bound_args [ k ] . value new_kwargs = { kw : v for kw , v in bound_args . items ( ) if kw != 'cls' } else : new_kwargs = { kw : unwrap ( value ) for kw , value in kwargs . items ( ) } return method . request_type ( ** new_kwargs ) | Creates a request from a static method function call . |
48,679 | def make_request ( self , method , * args , ** kwargs ) : if args and not use_signature : raise NotImplementedError ( "Only keyword arguments allowed in Python2" ) new_kwargs = { kw : unwrap ( value ) for kw , value in kwargs . items ( ) } if use_signature : new_args = tuple ( unwrap ( value ) for value in args ) bound_args = method . signature . bind ( unwrap ( self ) , * new_args , ** new_kwargs ) . arguments def translate_enum ( arg ) : return arg . value if isinstance ( arg , Enum ) else arg for k in bound_args : if isinstance ( bound_args [ k ] , str ) : continue if isinstance ( bound_args [ k ] , dict ) : continue try : x = [ translate_enum ( arg ) for arg in bound_args [ k ] ] bound_args [ k ] = x except TypeError : bound_args [ k ] = translate_enum ( bound_args [ k ] ) new_kwargs = { ( kw if kw != 'self' else method . field_name ) : v for kw , v in bound_args . items ( ) } else : new_kwargs [ self . field_name ] = unwrap ( self ) return method . request_type ( ** new_kwargs ) | Creates a request from a method function call . |
48,680 | def method_wrapper ( m ) : if m . is_simple : def simple_method ( self ) : return apply_transform ( self . __service__ , m . output_transform , grpc_call ( self . __service__ , m , unwrap ( self ) ) ) return simple_method elif m . input_transform is not None : def transform_method ( self , * args , ** kwargs ) : request = m . input_transform ( self , * args , ** kwargs ) return apply_transform ( self . __service__ , m . output_transform , grpc_call ( self . __service__ , m , request ) ) return transform_method elif m . static : def static_method ( cls , * args , ** kwargs ) : request = make_static_request ( m , * args , ** kwargs ) return apply_transform ( cls . __stub__ ( __server__ ) , m . output_transform , grpc_call ( cls . __stub__ ( __server__ ) , m , request ) ) return static_method else : def request_method ( self , * args , ** kwargs ) : request = make_request ( self , m , * args , ** kwargs ) return apply_transform ( self . __service__ , m . output_transform , grpc_call ( self . __service__ , m , request ) ) return request_method | Generates a method from a GrpcMethod definition . |
48,681 | def request_name ( self ) : if self . static and not self . uses_request : return 'Empty' if not self . uses_request : return None if isinstance ( self . uses_request , str ) : return self . uses_request return to_camel_case ( self . name ) + "Request" | Generate the name of the request . |
48,682 | def request_type ( self ) : if self . static and not self . uses_request : return getattr ( xenon_pb2 , 'Empty' ) if not self . uses_request : return None return getattr ( xenon_pb2 , self . request_name ) | Retrieve the type of the request by fetching it from xenon . proto . xenon_pb2 . |
48,683 | def signature ( self ) : if not use_signature : raise NotImplementedError ( "Python 3 only." ) if self . static : parameters = ( Parameter ( name = 'cls' , kind = Parameter . POSITIONAL_ONLY ) , ) else : parameters = ( Parameter ( name = 'self' , kind = Parameter . POSITIONAL_ONLY ) , ) if self . input_transform : return signature ( self . input_transform ) if self . uses_request : fields = get_fields ( self . request_type ) if not self . static : if self . field_name not in fields : raise NameError ( "field '{}' not found in {}" . format ( self . field_name , self . request_name ) ) fields . remove ( self . field_name ) parameters += tuple ( Parameter ( name = name , kind = Parameter . POSITIONAL_OR_KEYWORD , default = None ) for name in fields ) return Signature ( parameters ) | Create a signature for this method only in Python > 3 . 4 |
48,684 | def docstring ( self , servicer ) : s = getattr ( servicer , to_lower_camel_case ( self . name ) ) . __doc__ or "TODO: no docstring in .proto file" if self . uses_request : s += "\n" for field in get_fields ( self . request_type ) : if field != self . field_name : type_info = get_field_description ( self . request_type . DESCRIPTOR . fields_by_name [ field ] ) s += " :param {}: {}\n" . format ( field , field ) s += " :type {0}: {1}\n" . format ( field , type_info ) return s | Generate a doc - string . |
48,685 | def slugify ( string ) : filtered_string = [ ] if isinstance ( string , str ) : string = unicode ( string , 'utf-8' ) for i in unicodedata . normalize ( 'NFKC' , string ) : cat = unicodedata . category ( i ) [ 0 ] if cat in 'LN' or i in '-_' : filtered_string . append ( i ) elif cat in 'Z' : filtered_string . append ( ' ' ) return re . sub ( '\s+' , '-' , '' . join ( filtered_string ) ) . lower ( ) | Return a slug for the unicode_string . |
48,686 | def find_contours ( array , level , fully_connected = 'low' , positive_orientation = 'low' ) : array = np . asarray ( array , dtype = np . double ) if array . ndim != 2 : raise ValueError ( 'Only 2D arrays are supported.' ) level = float ( level ) if ( fully_connected not in _param_options or positive_orientation not in _param_options ) : raise ValueError ( 'Parameters "fully_connected" and' ' "positive_orientation" must be either "high" or' ' "low".' ) point_list = _find_contours_cy . iterate_and_store ( array , level , fully_connected == 'high' ) contours = _assemble_contours ( _take_2 ( point_list ) ) if positive_orientation == 'high' : contours = [ c [ : : - 1 ] for c in contours ] return contours | Find iso - valued contours in a 2D array for a given level value . |
48,687 | def get_module_ident_from_ident_hash ( ident_hash , cursor ) : try : uuid , ( mj_ver , mn_ver ) = split_ident_hash ( ident_hash , split_version = True ) except IdentHashMissingVersion as e : uuid , mj_ver , mn_ver = e . id , None , None args = [ uuid ] stmt = "SELECT module_ident FROM {} WHERE uuid = %s" table_name = 'modules' if mj_ver is None : table_name = 'latest_modules' else : args . append ( mj_ver ) stmt += " AND major_version = %s" if mn_ver is not None : args . append ( mn_ver ) stmt += " AND minor_version = %s" stmt = stmt . format ( table_name ) cursor . execute ( stmt , args ) try : module_ident = cursor . fetchone ( ) [ 0 ] except TypeError : module_ident = None return module_ident | Return the moduleid for a given ident_hash . |
48,688 | def get_tree ( ident_hash , cursor , as_collated = False ) : uuid , version = split_ident_hash ( ident_hash ) cursor . execute ( SQL [ 'get-tree-by-uuid-n-version' ] , ( uuid , version , as_collated , ) ) try : tree = cursor . fetchone ( ) [ 0 ] except TypeError : raise ContentNotFound ( ) if type ( tree ) in ( type ( '' ) , type ( u'' ) ) : return json . loads ( tree ) else : return tree | Return a JSON representation of the binder tree for ident_hash . |
48,689 | def get_collated_content ( ident_hash , context_ident_hash , cursor ) : cursor . execute ( SQL [ 'get-collated-content' ] , ( ident_hash , context_ident_hash , ) ) try : return cursor . fetchone ( ) [ 0 ] except TypeError : return | Return collated content for ident_hash . |
48,690 | def get_module_uuid ( plpy , moduleid ) : plan = plpy . prepare ( "SELECT uuid FROM modules WHERE moduleid = $1;" , ( 'text' , ) ) result = plpy . execute ( plan , ( moduleid , ) , 1 ) if result : return result [ 0 ] [ 'uuid' ] | Retrieve page uuid from legacy moduleid . |
48,691 | def set_version ( portal_type , legacy_version , td ) : modified = 'OK' legacy_major , legacy_minor = legacy_version . split ( '.' ) if portal_type == 'Collection' : modified = 'MODIFY' td [ 'new' ] [ 'major_version' ] = int ( legacy_minor ) if td [ 'new' ] [ 'minor_version' ] is None : td [ 'new' ] [ 'minor_version' ] = 1 elif portal_type == 'Module' : modified = 'MODIFY' td [ 'new' ] [ 'major_version' ] = int ( legacy_minor ) + ( int ( legacy_major ) - 1 ) td [ 'new' ] [ 'minor_version' ] = None return modified | Set the major_version and minor_version if they are not set . |
48,692 | def republish_module ( td , plpy ) : portal_type = td [ 'new' ] [ 'portal_type' ] modified = 'OK' moduleid = td [ 'new' ] [ 'moduleid' ] legacy_version = td [ 'new' ] [ 'version' ] submitter = td [ 'new' ] [ 'submitter' ] submitlog = td [ 'new' ] [ 'submitlog' ] modified = set_version ( portal_type , legacy_version , td ) current_module_ident = get_current_module_ident ( moduleid , plpy ) if current_module_ident : uuid = get_module_uuid ( plpy , moduleid ) td [ 'new' ] [ 'uuid' ] = uuid modified = 'MODIFY' else : return modified if portal_type != 'Module' : return modified replace_map = { current_module_ident : td [ 'new' ] [ 'module_ident' ] } for sub_id in get_subcols ( current_module_ident , plpy ) : minor = next_version ( sub_id , plpy ) new_subcol_ident = republish_collection ( submitter , submitlog , minor , sub_id , plpy ) replace_map [ sub_id ] = new_subcol_ident for collection_id in get_collections ( current_module_ident , plpy ) : minor = next_version ( collection_id , plpy ) new_ident = republish_collection ( submitter , submitlog , minor , collection_id , plpy ) replace_map [ collection_id ] = new_ident rebuild_collection_tree ( collection_id , replace_map , plpy ) return modified | When a module is republished create new minor versions of collections . |
48,693 | def republish_module_trigger ( plpy , td ) : is_legacy_publication = td [ 'new' ] [ 'version' ] is not None if not is_legacy_publication : return "OK" plpy . log ( 'Trigger fired on %s' % ( td [ 'new' ] [ 'moduleid' ] , ) ) modified = republish_module ( td , plpy ) plpy . log ( 'modified: {}' . format ( modified ) ) plpy . log ( 'insert values:\n{}\n' . format ( '\n' . join ( [ '{}: {}' . format ( key , value ) for key , value in td [ 'new' ] . items ( ) ] ) ) ) return modified | Trigger called from postgres database when republishing a module . |
48,694 | def assign_version_default_trigger ( plpy , td ) : modified_state = "OK" portal_type = td [ 'new' ] [ 'portal_type' ] version = td [ 'new' ] [ 'version' ] minor_version = td [ 'new' ] [ 'minor_version' ] if minor_version is None and portal_type in ( 'Collection' , 'SubCollection' ) : modified_state = "MODIFY" td [ 'new' ] [ 'minor_version' ] = 1 if version is None : major_version = td [ 'new' ] [ 'major_version' ] version = "1.{}" . format ( major_version ) modified_state = "MODIFY" td [ 'new' ] [ 'version' ] = version return modified_state | Trigger to fill in legacy data fields . |
48,695 | def get_export ( request ) : settings = get_current_registry ( ) . settings exports_dirs = settings [ 'exports-directories' ] . split ( ) args = request . matchdict ident_hash , type = args [ 'ident_hash' ] , args [ 'type' ] id , version = split_ident_hash ( ident_hash ) with db_connect ( ) as db_connection : with db_connection . cursor ( ) as cursor : try : results = get_export_files ( cursor , id , version , [ type ] , exports_dirs , read_file = True ) if not results : raise httpexceptions . HTTPNotFound ( ) filename , mimetype , size , modtime , state , file_content = results [ 0 ] except ExportError as e : logger . debug ( str ( e ) ) raise httpexceptions . HTTPNotFound ( ) if state == 'missing' : raise httpexceptions . HTTPNotFound ( ) encoded_filename = urllib . quote ( filename . encode ( 'utf-8' ) ) resp = request . response resp . status = "200 OK" resp . content_type = mimetype resp . content_disposition = "attachment; filename={fname};" " filename*=UTF-8''{fname}" . format ( fname = encoded_filename ) resp . body = file_content slug_title = '-' . join ( encoded_filename . split ( '-' ) [ : - 1 ] ) resp . headerlist . append ( ( 'Link' , '<https://{}/contents/{}/{}> ;rel="Canonical"' . format ( request . host , id , slug_title ) ) ) return resp | Retrieve an export file . |
48,696 | def get_export_files ( cursor , id , version , types , exports_dirs , read_file = True ) : request = get_current_request ( ) type_info = dict ( request . registry . settings [ '_type_info' ] ) metadata = get_content_metadata ( id , version , cursor ) legacy_id = metadata [ 'legacy_id' ] legacy_version = metadata [ 'legacy_version' ] reachable_dirs = [ dir for dir in exports_dirs if safe_stat ( dir ) ] results = [ ] for type in list ( types ) : if type not in type_info : raise ExportError ( "invalid type '{}' requested." . format ( type ) ) file_extension = type_info [ type ] [ 'file_extension' ] if metadata [ 'mediaType' ] == MODULE_MIMETYPE and file_extension == 'pdf' : continue mimetype = type_info [ type ] [ 'mimetype' ] filename = '{}@{}.{}' . format ( id , version , file_extension ) legacy_filenames = [ '{}-{}.{}' . format ( legacy_id , legacy_version , ext ) for ext in LEGACY_EXTENSION_MAP [ file_extension ] ] slugify_title_filename = u'{}-{}.{}' . format ( slugify ( metadata [ 'title' ] ) , version , file_extension ) for dir in reachable_dirs : filepath = os . path . join ( dir , filename ) try : if read_file : with open ( filepath , 'r' ) as file : stats = os . fstat ( file . fileno ( ) ) contents = file . read ( ) else : stats = os . stat ( filepath ) contents = None modtime = fromtimestamp ( int ( stats . st_mtime ) ) results . append ( ( slugify_title_filename , mimetype , stats . st_size , modtime , 'good' , contents ) ) break except EnvironmentError : pass else : legacy_file_found = False for dir in reachable_dirs : filepath = os . path . join ( dir , filename ) legacy_filepaths = [ os . path . join ( dir , fn ) for fn in legacy_filenames ] for legacy_filepath in legacy_filepaths : try : if read_file : with open ( legacy_filepath , 'r' ) as file : stats = os . fstat ( file . fileno ( ) ) contents = file . read ( ) else : stats = os . stat ( legacy_filepath ) contents = None modtime = fromtimestamp ( stats . st_mtime ) os . link ( legacy_filepath , filepath ) results . append ( ( slugify_title_filename , mimetype , stats . st_size , modtime , 'good' , contents ) ) legacy_file_found = True break except EnvironmentError : pass if legacy_file_found : break else : filenames = [ filename ] + legacy_filenames log_formatted_filenames = '\n' . join ( [ ' - {}' . format ( x ) for x in filenames ] ) logger . error ( "Could not find a file for '{}' at version '{}' " "with any of the following file names:\n{}" . format ( id , version , log_formatted_filenames ) ) results . append ( ( slugify_title_filename , mimetype , 0 , None , 'missing' , None ) ) return results | Retrieve files associated with document . |
48,697 | def get_content_metadata ( id , version , cursor ) : args = dict ( id = id , version = version ) cursor . execute ( SQL [ 'get-module-metadata' ] , args ) try : result = cursor . fetchone ( ) [ 0 ] result [ 'version' ] = result . pop ( 'current_version' ) result [ 'mediaType' ] = portaltype_to_mimetype ( result [ 'mediaType' ] ) return result except ( TypeError , IndexError , ) : raise httpexceptions . HTTPNotFound ( ) | Return metadata related to the content from the database . |
48,698 | def find_contours_level ( density , x , y , level , closed = False ) : if level >= 1 or level <= 0 : raise ValueError ( "`level` must be in (0,1), got '{}'!" . format ( level ) ) level = level * density . max ( ) if len ( x . shape ) == 2 : assert np . all ( x [ : , 0 ] == x [ : , 1 ] ) x = x [ : , 0 ] if len ( y . shape ) == 2 : assert np . all ( y [ 0 , : ] == y [ 1 , : ] ) y = y [ 0 , : ] if closed : density = np . pad ( density , ( ( 1 , 1 ) , ( 1 , 1 ) ) , mode = "constant" ) offset = 1 else : offset = 0 conts_idx = find_contours ( density , level ) conts_xy = [ ] for cc in conts_idx : cx = np . interp ( x = cc [ : , 0 ] - offset , xp = range ( x . size ) , fp = x ) cy = np . interp ( x = cc [ : , 1 ] - offset , xp = range ( y . size ) , fp = y ) conts_xy . append ( np . stack ( ( cx , cy ) , axis = 1 ) ) return conts_xy | Find iso - valued density contours for a given level value |
48,699 | def get_quantile_levels ( density , x , y , xp , yp , q , normalize = True ) : if len ( x . shape ) == 2 : assert np . all ( x [ : , 0 ] == x [ : , 1 ] ) x = x [ : , 0 ] if len ( y . shape ) == 2 : assert np . all ( y [ 0 , : ] == y [ 1 , : ] ) y = y [ 0 , : ] bad = get_bad_vals ( xp , yp ) xp = xp [ ~ bad ] yp = yp [ ~ bad ] x_norm = x . max ( ) x = x / x_norm xp = xp / x_norm y_norm = y . max ( ) y = y / y_norm yp = yp / y_norm dp = spint . interpn ( ( x , y ) , density , ( xp , yp ) , method = 'linear' , bounds_error = False , fill_value = 0 ) if normalize : dp /= density . max ( ) if not np . isscalar ( q ) : q = np . array ( q ) plev = np . nanpercentile ( dp , q = q * 100 ) return plev | Compute density levels for given quantiles by interpolation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.