idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
5,900 | def check_orthogonal ( angle ) : flow_dir_taudem = - 1 flow_dir = - 1 if MathClass . floatequal ( angle , FlowModelConst . e ) : flow_dir_taudem = FlowModelConst . e flow_dir = 1 elif MathClass . floatequal ( angle , FlowModelConst . ne ) : flow_dir_taudem = FlowModelConst . ne flow_dir = 128 elif MathClass . floatequal ( angle , FlowModelConst . n ) : flow_dir_taudem = FlowModelConst . n flow_dir = 64 elif MathClass . floatequal ( angle , FlowModelConst . nw ) : flow_dir_taudem = FlowModelConst . nw flow_dir = 32 elif MathClass . floatequal ( angle , FlowModelConst . w ) : flow_dir_taudem = FlowModelConst . w flow_dir = 16 elif MathClass . floatequal ( angle , FlowModelConst . sw ) : flow_dir_taudem = FlowModelConst . sw flow_dir = 8 elif MathClass . floatequal ( angle , FlowModelConst . s ) : flow_dir_taudem = FlowModelConst . s flow_dir = 4 elif MathClass . floatequal ( angle , FlowModelConst . se ) : flow_dir_taudem = FlowModelConst . se flow_dir = 2 return flow_dir_taudem , flow_dir | Check the given Dinf angle based on D8 flow direction encoding code by ArcGIS |
5,901 | def start ( self , * args , ** kwargs ) : self . _stop = False super ( Plant , self ) . start ( * args , ** kwargs ) | start the instrument thread |
5,902 | def quit ( self , * args , ** kwargs ) : self . stop ( ) self . _stop = True self . msleep ( 2 * int ( 1e3 / self . settings [ 'update frequency' ] ) ) super ( Plant , self ) . quit ( * args , ** kwargs ) | quit the instrument thread |
5,903 | def controler_output ( self , current_value ) : set_point = self . settings [ 'set_point' ] Kp = self . settings [ 'gains' ] [ 'proportional' ] Ki = self . settings [ 'gains' ] [ 'integral' ] output_range = self . settings [ 'output_range' ] time_step = self . settings [ 'time_step' ] error_new = set_point - current_value print ( ( 'PD- error:\t' , error_new , Ki , Kp , time_step ) ) self . u_P = Kp * error_new * time_step print ( ( 'PD- self.u_P:\t' , self . u_P , self . u_I ) ) self . u_I += Kp * Ki * ( error_new + self . error ) / 2.0 * time_step self . error = error_new print ( ( 'PD- self.u_P:\t' , self . u_P , self . u_I ) ) if self . u_P + self . u_I > output_range [ 'max' ] : self . u_I = output_range [ 'max' ] - self . u_P if self . u_P + self . u_I < output_range [ 'min' ] : self . u_I = output_range [ 'min' ] - self . u_P output = self . u_P + self . u_I print ( ( 'PD- output:\t' , output ) ) return output | Calculate PI output value for given reference input and feedback |
5,904 | def get_opts ( opts ) : defaults = { 'board' : None , 'terrain' : Opt . random , 'numbers' : Opt . preset , 'ports' : Opt . preset , 'pieces' : Opt . preset , 'players' : Opt . preset , } _opts = defaults . copy ( ) if opts is None : opts = dict ( ) try : for key , val in opts . copy ( ) . items ( ) : if key == 'board' : continue opts [ key ] = Opt ( val ) _opts . update ( opts ) except Exception : raise ValueError ( 'Invalid options={}' . format ( opts ) ) logging . debug ( 'used defaults=\n{}\n on opts=\n{}\nreturned total opts=\n{}' . format ( pprint . pformat ( defaults ) , pprint . pformat ( opts ) , pprint . pformat ( _opts ) ) ) return _opts | Validate options and apply defaults for options not supplied . |
5,905 | def _get_tiles ( board = None , terrain = None , numbers = None ) : if board is not None : tiles = _read_tiles_from_string ( board ) else : tiles = _generate_tiles ( terrain , numbers ) return tiles | Generate a list of tiles using the given terrain and numbers options . |
5,906 | def _get_ports ( port_opts ) : if port_opts in [ Opt . preset , Opt . debug ] : _preset_ports = [ ( 1 , 'NW' , catan . board . PortType . any3 ) , ( 2 , 'W' , catan . board . PortType . wood ) , ( 4 , 'W' , catan . board . PortType . brick ) , ( 5 , 'SW' , catan . board . PortType . any3 ) , ( 6 , 'SE' , catan . board . PortType . any3 ) , ( 8 , 'SE' , catan . board . PortType . sheep ) , ( 9 , 'E' , catan . board . PortType . any3 ) , ( 10 , 'NE' , catan . board . PortType . ore ) , ( 12 , 'NE' , catan . board . PortType . wheat ) ] return [ catan . board . Port ( tile , dir , port_type ) for tile , dir , port_type in _preset_ports ] elif port_opts in [ Opt . empty , Opt . random ] : logging . warning ( '{} option not yet implemented' . format ( port_opts ) ) return [ ] | Generate a list of ports using the given options . |
5,907 | def _get_pieces ( tiles , ports , players_opts , pieces_opts ) : if pieces_opts == Opt . empty : return dict ( ) elif pieces_opts == Opt . debug : players = catan . game . Game . get_debug_players ( ) return { ( hexgrid . NODE , 0x23 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 0 ] ) , ( hexgrid . EDGE , 0x22 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 0 ] ) , ( hexgrid . NODE , 0x67 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 1 ] ) , ( hexgrid . EDGE , 0x98 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 1 ] ) , ( hexgrid . NODE , 0x87 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 2 ] ) , ( hexgrid . EDGE , 0x89 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 2 ] ) , ( hexgrid . EDGE , 0xA9 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 3 ] ) , ( hexgrid . TILE , 0x77 ) : catan . pieces . Piece ( catan . pieces . PieceType . robber , None ) , } elif pieces_opts in ( Opt . preset , ) : deserts = filter ( lambda tile : tile . terrain == catan . board . Terrain . desert , tiles ) coord = hexgrid . tile_id_to_coord ( list ( deserts ) [ 0 ] . tile_id ) return { ( hexgrid . TILE , coord ) : catan . pieces . Piece ( catan . pieces . PieceType . robber , None ) } elif pieces_opts in ( Opt . random , ) : logging . warning ( '{} option not yet implemented' . format ( pieces_opts ) ) | Generate a dictionary of pieces using the given options . |
5,908 | def create_feature ( self , ** kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . create_feature_with_http_info ( ** kwargs ) else : ( data ) = self . create_feature_with_http_info ( ** kwargs ) return data | Create an enumerated sequence feature |
5,909 | def list_features ( self , locus , ** kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_with_http_info ( locus , ** kwargs ) else : ( data ) = self . list_features_with_http_info ( locus , ** kwargs ) return data | List the enumerated sequence features at a locus |
5,910 | def list_features_0 ( self , locus , term , ** kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_0_with_http_info ( locus , term , ** kwargs ) else : ( data ) = self . list_features_0_with_http_info ( locus , term , ** kwargs ) return data | List the enumerated sequence features matching a term at a locus |
5,911 | def list_features_1 ( self , locus , term , rank , ** kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_1_with_http_info ( locus , term , rank , ** kwargs ) else : ( data ) = self . list_features_1_with_http_info ( locus , term , rank , ** kwargs ) return data | List the enumerated sequence features matching a term and rank at a locus |
5,912 | def request ( self , method , uri , headers = None , bodyProducer = None ) : if self . _parent_trace is None : trace = Trace ( method ) else : trace = self . _parent_trace . child ( method ) if self . _endpoint is not None : trace . set_endpoint ( self . _endpoint ) if headers is None : headers = Headers ( { } ) headers . setRawHeaders ( 'X-B3-TraceId' , [ hex_str ( trace . trace_id ) ] ) headers . setRawHeaders ( 'X-B3-SpanId' , [ hex_str ( trace . span_id ) ] ) if trace . parent_span_id is not None : headers . setRawHeaders ( 'X-B3-ParentSpanId' , [ hex_str ( trace . parent_span_id ) ] ) trace . record ( Annotation . string ( 'http.uri' , uri ) ) trace . record ( Annotation . client_send ( ) ) def _finished ( resp ) : trace . record ( Annotation . string ( 'http.responsecode' , '{0} {1}' . format ( resp . code , resp . phrase ) ) ) trace . record ( Annotation . client_recv ( ) ) return resp d = self . _agent . request ( method , uri , headers , bodyProducer ) d . addBoth ( _finished ) return d | Send a client request following HTTP redirects . |
5,913 | def stringify ( data ) : def serialize ( k , v ) : if k == "candidates" : return int ( v ) if isinstance ( v , numbers . Number ) : if k == "zipcode" : return str ( v ) . zfill ( 5 ) return str ( v ) return v return [ { k : serialize ( k , v ) for k , v in json_dict . items ( ) } for json_dict in data ] | Ensure all values in the dictionary are strings except for the value for candidate which should just be an integer . |
5,914 | def post ( self , endpoint , data ) : headers = { "Content-Type" : "application/json" , "Accept" : "application/json" , "x-standardize-only" : "true" if self . standardize else "false" , "x-include-invalid" : "true" if self . invalid else "false" , "x-accept-keypair" : "true" if self . accept_keypair else "false" , } if not self . logging : headers [ "x-suppress-logging" ] = "true" params = { "auth-id" : self . auth_id , "auth-token" : self . auth_token } url = self . BASE_URL + endpoint response = self . session . post ( url , json . dumps ( stringify ( data ) ) , params = params , headers = headers , timeout = self . timeout , ) if response . status_code == 200 : return response . json ( ) raise ERROR_CODES . get ( response . status_code , SmartyStreetsError ) | Executes the HTTP POST request |
5,915 | def street_addresses ( self , addresses ) : if type ( addresses [ 0 ] ) != dict : addresses = [ { "street" : arg for arg in addresses } ] return AddressCollection ( self . post ( "street-address" , data = addresses ) ) | API method for verifying street address and geolocating |
5,916 | def street_address ( self , address ) : address = self . street_addresses ( [ address ] ) if not len ( address ) : return None return Address ( address [ 0 ] ) | Geocode one and only address get a single Address object back |
5,917 | def load ( schema , uri = None , spec = None , provider = None ) : factory = Factory ( provider , spec ) return factory ( schema , uri or '#' ) | Scaffold a validator against a schema . |
5,918 | def sizeHint ( self ) : w , h = self . get_width_height ( ) return QtCore . QSize ( w , h ) | gives qt a starting point for widget size during window resizing |
5,919 | def stage ( obj , parent = None , member = None ) : obj = Staged ( obj , parent , member ) if isinstance ( obj , Mapping ) : for key , value in obj . items ( ) : stage ( value , obj , key ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : for index , value in enumerate ( obj ) : stage ( value , obj , index ) elif isinstance ( obj , Set ) : for value in obj : stage ( value , obj , None ) return obj | Prepare obj to be staged . |
5,920 | def rotate ( key_prefix , key_ext , bucket_name , daily_backups = 7 , weekly_backups = 4 , aws_key = None , aws_secret = None ) : session = boto3 . Session ( aws_access_key_id = aws_key , aws_secret_access_key = aws_secret ) s3 = session . resource ( 's3' ) bucket = s3 . Bucket ( bucket_name ) keys = bucket . objects . filter ( Prefix = key_prefix ) regex = '{0}-(?P<year>[\d]+?)-(?P<month>[\d]+?)-(?P<day>[\d]+?){1}' . format ( key_prefix , key_ext ) backups = [ ] for key in keys : match = re . match ( regex , str ( key . key ) ) if not match : continue year = int ( match . group ( 'year' ) ) month = int ( match . group ( 'month' ) ) day = int ( match . group ( 'day' ) ) key_date = datetime ( year , month , day ) backups [ : 0 ] = [ key_date ] backups = sorted ( backups , reverse = True ) if len ( backups ) > daily_backups + 1 and backups [ daily_backups ] - backups [ daily_backups + 1 ] < timedelta ( days = 7 ) : key = bucket . Object ( "{0}{1}{2}" . format ( key_prefix , backups [ daily_backups ] . strftime ( "-%Y-%m-%d" ) , key_ext ) ) logger . debug ( "deleting {0}" . format ( key ) ) key . delete ( ) del backups [ daily_backups ] month_offset = daily_backups + weekly_backups if len ( backups ) > month_offset + 1 and backups [ month_offset ] - backups [ month_offset + 1 ] < timedelta ( days = 30 ) : key = bucket . Object ( "{0}{1}{2}" . format ( key_prefix , backups [ month_offset ] . strftime ( "-%Y-%m-%d" ) , key_ext ) ) logger . debug ( "deleting {0}" . format ( key ) ) key . delete ( ) del backups [ month_offset ] | Delete old files we ve uploaded to S3 according to grandfather father sun strategy |
5,921 | def splitext ( filename ) : index = filename . find ( '.' ) if index == 0 : index = 1 + filename [ 1 : ] . find ( '.' ) if index == - 1 : return filename , '' return filename [ : index ] , filename [ index : ] return os . path . splitext ( filename ) | Return the filename and extension according to the first dot in the filename . This helps date stamping . tar . bz2 or . ext . gz files properly . |
5,922 | def start_fitting ( self ) : self . queue = queue . Queue ( ) self . peak_vals = [ ] self . fit_thread = QThread ( ) self . fitobj = self . do_fit ( str ( self . data_filepath . text ( ) ) , self . matplotlibwidget , self . queue , self . peak_vals , self . peak_locs ) self . fitobj . moveToThread ( self . fit_thread ) self . fit_thread . started . connect ( self . fitobj . run ) self . fitobj . finished . connect ( self . fit_thread . quit ) self . fitobj . status . connect ( self . update_status ) self . fit_thread . start ( ) | Launches the fitting routine on another thread |
5,923 | def _get_bandgap_from_bands ( energies , nelec ) : nelec = int ( nelec ) valence = [ x [ nelec - 1 ] for x in energies ] conduction = [ x [ nelec ] for x in energies ] return max ( min ( conduction ) - max ( valence ) , 0.0 ) | Compute difference in conduction band min and valence band max |
5,924 | def _get_bandgap_eigenval ( eigenval_fname , outcar_fname ) : with open ( outcar_fname , "r" ) as f : parser = OutcarParser ( ) nelec = next ( iter ( filter ( lambda x : "number of electrons" in x , parser . parse ( f . readlines ( ) ) ) ) ) [ "number of electrons" ] with open ( eigenval_fname , "r" ) as f : eigenval_info = list ( EigenvalParser ( ) . parse ( f . readlines ( ) ) ) all_energies = [ zip ( * x [ "energies" ] ) for x in eigenval_info if "energies" in x ] spin_energies = zip ( * all_energies ) gaps = [ VaspParser . _get_bandgap_from_bands ( x , nelec / 2.0 ) for x in spin_energies ] return min ( gaps ) | Get the bandgap from the EIGENVAL file |
5,925 | def _get_bandgap_doscar ( filename ) : with open ( filename ) as fp : for i in range ( 6 ) : l = fp . readline ( ) efermi = float ( l . split ( ) [ 3 ] ) step1 = fp . readline ( ) . split ( ) [ 0 ] step2 = fp . readline ( ) . split ( ) [ 0 ] step_size = float ( step2 ) - float ( step1 ) not_found = True while not_found : l = fp . readline ( ) . split ( ) e = float ( l . pop ( 0 ) ) dens = 0.0 for i in range ( int ( len ( l ) / 2 ) ) : dens += float ( l [ i ] ) if e < efermi and dens > 1e-3 : bot = e elif e > efermi and dens > 1e-3 : top = e not_found = False if top - bot < step_size * 2 : bandgap = 0.0 else : bandgap = float ( top - bot ) return bandgap | Get the bandgap from the DOSCAR file |
5,926 | def get_band_gap ( self ) : if self . outcar is not None and self . eignval is not None : bandgap = VaspParser . _get_bandgap_eigenval ( self . eignval , self . outcar ) elif self . doscar is not None : bandgap = VaspParser . _get_bandgap_doscar ( self . doscar ) else : return None return Property ( scalars = [ Scalar ( value = round ( bandgap , 3 ) ) ] , units = 'eV' ) | Get the bandgap either from the EIGENVAL or DOSCAR files |
5,927 | def get_value_by_xy ( self , x , y ) : if x < self . xMin or x > self . xMax or y < self . yMin or y > self . yMax : return None else : row = self . nRows - int ( numpy . ceil ( ( y - self . yMin ) / self . dx ) ) col = int ( numpy . floor ( ( x - self . xMin ) / self . dx ) ) value = self . data [ row ] [ col ] if value == self . noDataValue : return None else : return value | Get raster value by xy coordinates . |
5,928 | def get_central_coors ( self , row , col ) : if row < 0 or row >= self . nRows or col < 0 or col >= self . nCols : raise ValueError ( "The row (%d) or col (%d) must be >=0 and less than " "nRows (%d) or nCols (%d)!" % ( row , col , self . nRows , self . nCols ) ) else : tmpx = self . xMin + ( col + 0.5 ) * self . dx tmpy = self . yMax - ( row + 0.5 ) * self . dx return tmpx , tmpy | Get the coordinates of central grid . |
5,929 | def read_raster ( raster_file ) : ds = gdal_Open ( raster_file ) band = ds . GetRasterBand ( 1 ) data = band . ReadAsArray ( ) xsize = band . XSize ysize = band . YSize nodata_value = band . GetNoDataValue ( ) geotrans = ds . GetGeoTransform ( ) dttype = band . DataType srs = osr_SpatialReference ( ) srs . ImportFromWkt ( ds . GetProjection ( ) ) if nodata_value is None : nodata_value = DEFAULT_NODATA band = None ds = None return Raster ( ysize , xsize , data , nodata_value , geotrans , srs , dttype ) | Read raster by GDAL . |
5,930 | def get_mask_from_raster ( rasterfile , outmaskfile , keep_nodata = False ) : raster_r = RasterUtilClass . read_raster ( rasterfile ) xsize = raster_r . nCols ysize = raster_r . nRows nodata_value = raster_r . noDataValue srs = raster_r . srs x_min = raster_r . xMin y_max = raster_r . yMax dx = raster_r . dx data = raster_r . data if not keep_nodata : i_min = ysize - 1 i_max = 0 j_min = xsize - 1 j_max = 0 for i in range ( ysize ) : for j in range ( xsize ) : if abs ( data [ i ] [ j ] - nodata_value ) > DELTA : i_min = min ( i , i_min ) i_max = max ( i , i_max ) j_min = min ( j , j_min ) j_max = max ( j , j_max ) y_size_mask = i_max - i_min + 1 x_size_mask = j_max - j_min + 1 x_min_mask = x_min + j_min * dx y_max_mask = y_max - i_min * dx else : y_size_mask = ysize x_size_mask = xsize x_min_mask = x_min y_max_mask = y_max i_min = 0 j_min = 0 print ( '%dx%d -> %dx%d' % ( xsize , ysize , x_size_mask , y_size_mask ) ) mask = numpy . zeros ( ( y_size_mask , x_size_mask ) ) for i in range ( y_size_mask ) : for j in range ( x_size_mask ) : if abs ( data [ i + i_min ] [ j + j_min ] - nodata_value ) > DELTA : mask [ i ] [ j ] = 1 else : mask [ i ] [ j ] = DEFAULT_NODATA mask_geotrans = [ x_min_mask , dx , 0 , y_max_mask , 0 , - dx ] RasterUtilClass . write_gtiff_file ( outmaskfile , y_size_mask , x_size_mask , mask , mask_geotrans , srs , DEFAULT_NODATA , GDT_Int32 ) return Raster ( y_size_mask , x_size_mask , mask , DEFAULT_NODATA , mask_geotrans , srs ) | Generate mask data from a given raster data . |
5,931 | def raster_reclassify ( srcfile , v_dict , dstfile , gdaltype = GDT_Float32 ) : src_r = RasterUtilClass . read_raster ( srcfile ) src_data = src_r . data dst_data = numpy . copy ( src_data ) if gdaltype == GDT_Float32 and src_r . dataType != GDT_Float32 : gdaltype = src_r . dataType no_data = src_r . noDataValue new_no_data = DEFAULT_NODATA if gdaltype in [ GDT_Unknown , GDT_Byte , GDT_UInt16 , GDT_UInt32 ] : new_no_data = 0 if not MathClass . floatequal ( new_no_data , src_r . noDataValue ) : if src_r . noDataValue not in v_dict : v_dict [ src_r . noDataValue ] = new_no_data no_data = new_no_data for ( k , v ) in iteritems ( v_dict ) : dst_data [ src_data == k ] = v RasterUtilClass . write_gtiff_file ( dstfile , src_r . nRows , src_r . nCols , dst_data , src_r . geotrans , src_r . srs , no_data , gdaltype ) | Reclassify raster by given classifier dict . |
5,932 | def write_gtiff_file ( f_name , n_rows , n_cols , data , geotransform , srs , nodata_value , gdal_type = GDT_Float32 ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( f_name ) ) ) driver = gdal_GetDriverByName ( str ( 'GTiff' ) ) try : ds = driver . Create ( f_name , n_cols , n_rows , 1 , gdal_type ) except Exception : print ( 'Cannot create output file %s' % f_name ) return ds . SetGeoTransform ( geotransform ) try : ds . SetProjection ( srs . ExportToWkt ( ) ) except AttributeError or Exception : ds . SetProjection ( srs ) ds . GetRasterBand ( 1 ) . SetNoDataValue ( nodata_value ) if isinstance ( data , numpy . ndarray ) and data . dtype in [ numpy . dtype ( 'int' ) , numpy . dtype ( 'float' ) ] : data = numpy . where ( numpy . isnan ( data ) , nodata_value , data ) ds . GetRasterBand ( 1 ) . WriteArray ( data ) ds = None | Output Raster to GeoTiff format file . |
5,933 | def write_asc_file ( filename , data , xsize , ysize , geotransform , nodata_value ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( filename ) ) ) header = 'NCOLS %d\n' 'NROWS %d\n' 'XLLCENTER %f\n' 'YLLCENTER %f\n' 'CELLSIZE %f\n' 'NODATA_VALUE %f' % ( xsize , ysize , geotransform [ 0 ] + 0.5 * geotransform [ 1 ] , geotransform [ 3 ] - ( ysize - 0.5 ) * geotransform [ 1 ] , geotransform [ 1 ] , nodata_value ) with open ( filename , 'w' , encoding = 'utf-8' ) as f : f . write ( header ) for i in range ( 0 , ysize ) : for j in range ( 0 , xsize ) : f . write ( '%s\t' % repr ( data [ i ] [ j ] ) ) f . write ( '\n' ) f . close ( ) | Output Raster to ASCII file . |
5,934 | def raster_to_gtiff ( tif , geotif , change_nodata = False , change_gdal_type = False ) : rst_file = RasterUtilClass . read_raster ( tif ) nodata = rst_file . noDataValue if change_nodata : if not MathClass . floatequal ( rst_file . noDataValue , DEFAULT_NODATA ) : nodata = DEFAULT_NODATA rst_file . data [ rst_file . data == rst_file . noDataValue ] = DEFAULT_NODATA gdal_type = rst_file . dataType if change_gdal_type : gdal_type = GDT_Float32 RasterUtilClass . write_gtiff_file ( geotif , rst_file . nRows , rst_file . nCols , rst_file . data , rst_file . geotrans , rst_file . srs , nodata , gdal_type ) | Converting Raster format to GeoTIFF . |
5,935 | def raster_to_asc ( raster_f , asc_f ) : raster_r = RasterUtilClass . read_raster ( raster_f ) RasterUtilClass . write_asc_file ( asc_f , raster_r . data , raster_r . nCols , raster_r . nRows , raster_r . geotrans , raster_r . noDataValue ) | Converting Raster format to ASCII raster . |
5,936 | def raster_statistics ( raster_file ) : ds = gdal_Open ( raster_file ) band = ds . GetRasterBand ( 1 ) minv , maxv , meanv , std = band . ComputeStatistics ( False ) return minv , maxv , meanv , std | Get basic statistics of raster data . |
5,937 | def split_raster ( rs , split_shp , field_name , temp_dir ) : UtilClass . rmmkdir ( temp_dir ) ds = ogr_Open ( split_shp ) lyr = ds . GetLayer ( 0 ) lyr . ResetReading ( ) ft = lyr . GetNextFeature ( ) while ft : cur_field_name = ft . GetFieldAsString ( field_name ) for r in rs : cur_file_name = r . split ( os . sep ) [ - 1 ] outraster = temp_dir + os . sep + cur_file_name . replace ( '.tif' , '_%s.tif' % cur_field_name . replace ( ' ' , '_' ) ) subprocess . call ( [ 'gdalwarp' , r , outraster , '-cutline' , split_shp , '-crop_to_cutline' , '-cwhere' , "'%s'='%s'" % ( field_name , cur_field_name ) , '-dstnodata' , '-9999' ] ) ft = lyr . GetNextFeature ( ) ds = None | Split raster by given shapefile and field name . |
5,938 | def get_negative_dem ( raw_dem , neg_dem ) : origin = RasterUtilClass . read_raster ( raw_dem ) max_v = numpy . max ( origin . data ) temp = origin . data < 0 neg = numpy . where ( temp , origin . noDataValue , max_v - origin . data ) RasterUtilClass . write_gtiff_file ( neg_dem , origin . nRows , origin . nCols , neg , origin . geotrans , origin . srs , origin . noDataValue , origin . dataType ) | Get negative DEM data . |
5,939 | def raster_binarization ( given_value , rasterfilename ) : origin_raster = RasterUtilClass . read_raster ( rasterfilename ) binary_raster = numpy . where ( origin_raster . data == given_value , 1 , 0 ) return binary_raster | Make the raster into binarization . |
5,940 | def raster_erosion ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return "Your rasterfile has a wrong type. Type must be string or " "numpy.array or class Raster in pygeoc." max_value_raster = origin_raster . max ( ) erosion_raster = numpy . zeros ( ( origin_raster . shape [ 0 ] , origin_raster . shape [ 1 ] ) ) add_row = numpy . full ( ( 1 , origin_raster . shape [ 1 ] ) , max_value_raster ) temp_origin_raster = numpy . vstack ( ( numpy . vstack ( ( add_row , origin_raster ) ) , add_row ) ) add_col = numpy . full ( ( origin_raster . shape [ 0 ] + 2 , 1 ) , max_value_raster ) expand_origin_raster = numpy . hstack ( ( numpy . hstack ( ( add_col , temp_origin_raster ) ) , add_col ) ) for i in range ( origin_raster . shape [ 0 ] ) : for j in range ( origin_raster . shape [ 1 ] ) : min_pixel_value = max_value_raster for k in range ( 3 ) : for l in range ( 3 ) : if expand_origin_raster [ i + k , j + l ] <= min_pixel_value : min_pixel_value = expand_origin_raster [ i + k , j + l ] erosion_raster [ i , j ] = min_pixel_value return erosion_raster | Erode the raster image . |
5,941 | def raster_dilation ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return 'Your rasterfile has a wrong type. Type must be string or ' 'numpy.array or class Raster in pygeoc.' min_value_raster = origin_raster . min ( ) dilation_raster = numpy . zeros ( ( origin_raster . shape [ 0 ] , origin_raster . shape [ 1 ] ) ) add_row = numpy . full ( ( 1 , origin_raster . shape [ 1 ] ) , min_value_raster ) temp_origin_raster = numpy . vstack ( ( numpy . vstack ( ( add_row , origin_raster ) ) , add_row ) ) add_col = numpy . full ( ( origin_raster . shape [ 0 ] + 2 , 1 ) , min_value_raster ) expand_origin_raster = numpy . hstack ( ( numpy . hstack ( ( add_col , temp_origin_raster ) ) , add_col ) ) for i in range ( origin_raster . shape [ 0 ] ) : for j in range ( origin_raster . shape [ 1 ] ) : max_pixel_value = min_value_raster for k in range ( 3 ) : for l in range ( 3 ) : if expand_origin_raster [ i + k , j + l ] >= max_pixel_value : max_pixel_value = expand_origin_raster [ i + k , j + l ] dilation_raster [ i , j ] = max_pixel_value return dilation_raster | Dilate the raster image . |
5,942 | def openning ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) openning_raster = input_raster for i in range ( times ) : openning_raster = RasterUtilClass . raster_erosion ( openning_raster ) for i in range ( times ) : openning_raster = RasterUtilClass . raster_dilation ( openning_raster ) return openning_raster | Do openning . |
5,943 | def closing ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) closing_raster = input_raster for i in range ( times ) : closing_raster = RasterUtilClass . raster_dilation ( closing_raster ) for i in range ( times ) : closing_raster = RasterUtilClass . raster_erosion ( closing_raster ) return closing_raster | Do closing . |
5,944 | def calculate_tx_fee ( tx_size : int ) -> Decimal : per_kb_cost = 0.01 min_fee = Decimal ( 0.001 ) fee = Decimal ( ( tx_size / 1000 ) * per_kb_cost ) if fee <= min_fee : return min_fee else : return fee | return tx fee from tx size in bytes |
5,945 | def p2sh_p2pkh_script ( network : str , address : str ) -> P2shScript : network_params = net_query ( network ) addr = Address . from_string ( network = network_params , string = address ) p2pkh = P2pkhScript ( addr ) return P2shScript ( p2pkh ) | p2sh embedding p2pkh |
5,946 | def tx_output ( network : str , value : Decimal , n : int , script : ScriptSig ) -> TxOut : network_params = net_query ( network ) return TxOut ( network = network_params , value = int ( value * network_params . to_unit ) , n = n , script_pubkey = script ) | create TxOut object |
5,947 | def make_raw_transaction ( network : str , inputs : list , outputs : list , locktime : Locktime , timestamp : int = int ( time ( ) ) , version : int = 1 , ) -> MutableTransaction : network_params = net_query ( network ) if network_params . name . startswith ( "peercoin" ) : return MutableTransaction ( version = version , ins = inputs , outs = outputs , locktime = locktime , network = network_params , timestamp = timestamp , ) return MutableTransaction ( version = version , ins = inputs , outs = outputs , locktime = locktime , network = network_params , ) | create raw transaction |
5,948 | def find_parent_outputs ( provider : Provider , utxo : TxIn ) -> TxOut : network_params = net_query ( provider . network ) index = utxo . txout return TxOut . from_json ( provider . getrawtransaction ( utxo . txid , 1 ) [ 'vout' ] [ index ] , network = network_params ) | due to design of the btcpy library TxIn object must be converted to TxOut object before signing |
5,949 | def sign_transaction ( provider : Provider , unsigned : MutableTransaction , key : Kutil ) -> Transaction : parent_outputs = [ find_parent_outputs ( provider , i ) for i in unsigned . ins ] return key . sign_transaction ( parent_outputs , unsigned ) | sign transaction with Kutil |
5,950 | def set_style ( style = 'basic' , ** kwargs ) : style = _read_style ( style ) if style [ 0 ] != 'basic' : style = [ 'basic' ] + style for s in style : _set_style ( s , ** kwargs ) | Changes Matplotlib basic style to produce high quality graphs . Call this function at the beginning of your script . You can even further improve graphs with a call to fix_style at the end of your script . |
5,951 | def fix_style ( style = 'basic' , ax = None , ** kwargs ) : style = _read_style ( style ) for s in style : if not s in style_params . keys ( ) : avail = [ f . replace ( '.mplstyle' , '' ) for f in os . listdir ( _get_lib ( ) ) if f . endswith ( '.mplstyle' ) ] raise ValueError ( '{0} is not a valid style. ' . format ( s ) + 'Please pick a style from the list available in ' + '{0}: {1}' . format ( _get_lib ( ) , avail ) ) _fix_style ( style , ax , ** kwargs ) | Add an extra formatting layer to an axe that couldn t be changed directly in matplotlib . rcParams or with styles . Apply this function to every axe you created . |
5,952 | def _get_label ( self ) : if self . _label is None : foundfiles = False for f in self . _files : if ".files" in f : foundfiles = True self . _label = f . split ( "." ) [ 0 ] with open ( self . _label + '.files' , 'r' ) as fp : line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + ".in" : fp . close ( ) raise Exception ( 'first line must be label.in' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + ".txt" : fp . close ( ) raise Exception ( 'second line must be label.txt' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + "i" : fp . close ( ) raise Exception ( 'third line must be labeli' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + "o" : fp . close ( ) raise Exception ( 'fourth line must be labelo' ) fp . close ( ) if foundfiles : return self . _label else : raise Exception ( 'label.files not found' ) else : return self . _label | Find the label for the output files for this calculation |
5,953 | def next_player ( self ) : logging . warning ( 'turn={}, players={}' . format ( self . game . _cur_turn , self . game . players ) ) return self . game . players [ ( self . game . _cur_turn + 1 ) % len ( self . game . players ) ] | Returns the player whose turn it will be next . |
5,954 | def _estimate_progress ( self ) : estimate = True current_subscript = self . _current_subscript_stage [ 'current_subscript' ] num_subscripts = len ( self . scripts ) if self . iterator_type == 'loop' : num_iterations = self . settings [ 'num_loops' ] elif self . iterator_type == 'sweep' : sweep_range = self . settings [ 'sweep_range' ] if self . settings [ 'stepping_mode' ] == 'value_step' : num_iterations = int ( ( sweep_range [ 'max_value' ] - sweep_range [ 'min_value' ] ) / sweep_range [ 'N/value_step' ] ) + 1 elif self . settings [ 'stepping_mode' ] == 'N' : num_iterations = sweep_range [ 'N/value_step' ] else : raise KeyError ( 'unknown key' + self . settings [ 'stepping_mode' ] ) else : print ( 'unknown iterator type in Iterator receive signal - can\'t estimate ramining time' ) estimate = False if estimate : loop_index = self . loop_index if num_subscripts > 1 : loop_execution_time = 0. sub_progress_time = 0. if current_subscript is not None : current_subscript_exec_duration = self . _current_subscript_stage [ 'subscript_exec_duration' ] [ current_subscript . name ] . total_seconds ( ) else : current_subscript_exec_duration = 0.0 current_subscript_elapsed_time = ( datetime . datetime . now ( ) - current_subscript . start_time ) . total_seconds ( ) if current_subscript_exec_duration == 0.0 : remaining_time = current_subscript . remaining_time . total_seconds ( ) current_subscript_exec_duration = remaining_time + current_subscript_elapsed_time remaining_scripts = 0 for subscript_name , duration in self . _current_subscript_stage [ 'subscript_exec_duration' ] . items ( ) : if duration . total_seconds ( ) == 0.0 : remaining_scripts += 1 loop_execution_time += duration . total_seconds ( ) if self . _current_subscript_stage [ 'subscript_exec_count' ] [ subscript_name ] == loop_index and subscript_name is not current_subscript . name : sub_progress_time += duration . 
total_seconds ( ) sub_progress_time += current_subscript_elapsed_time if remaining_scripts == num_subscripts : loop_execution_time = num_subscripts * current_subscript_exec_duration elif remaining_scripts > 1 : loop_execution_time = 1. * num_subscripts / ( num_subscripts - remaining_scripts ) elif remaining_scripts == 1 : loop_execution_time += current_subscript_exec_duration if loop_execution_time > 0 : progress_subscript = 100. * sub_progress_time / loop_execution_time else : progress_subscript = 1. * progress_subscript / num_subscripts progress = 100. * ( loop_index - 1. + 0.01 * progress_subscript ) / num_iterations else : progress = 50 return progress | estimates the current progress that is then used in _receive_signal |
5,955 | def plot ( self , figure_list ) : if self . _current_subscript_stage is not None : if self . _current_subscript_stage [ 'current_subscript' ] is not None : self . _current_subscript_stage [ 'current_subscript' ] . plot ( figure_list ) if ( self . is_running is False ) and not ( self . data == { } or self . data is None ) : script_names = list ( self . settings [ 'script_order' ] . keys ( ) ) script_indices = [ self . settings [ 'script_order' ] [ name ] for name in script_names ] _ , sorted_script_names = list ( zip ( * sorted ( zip ( script_indices , script_names ) ) ) ) last_script = self . scripts [ sorted_script_names [ - 1 ] ] last_script . force_update ( ) axes_list = last_script . get_axes_layout ( figure_list ) try : last_script . _plot ( axes_list , self . data ) except TypeError as err : print ( ( warnings . warn ( 'can\'t plot average script data because script.plot function doens\'t take data as optional argument. Plotting last data set instead' ) ) ) print ( ( err . message ) ) last_script . plot ( figure_list ) | When each subscript is called uses its standard plotting |
5,956 | def get_default_settings ( sub_scripts , script_order , script_execution_freq , iterator_type ) : def populate_sweep_param ( scripts , parameter_list , trace = '' ) : def get_parameter_from_dict ( trace , dic , parameter_list , valid_values = None ) : if valid_values is None and isinstance ( dic , Parameter ) : valid_values = dic . valid_values for key , value in dic . items ( ) : if isinstance ( value , dict ) : parameter_list = get_parameter_from_dict ( trace + '.' + key , value , parameter_list , dic . valid_values [ key ] ) elif ( valid_values [ key ] in ( float , int ) ) or ( isinstance ( valid_values [ key ] , list ) and valid_values [ key ] [ 0 ] in ( float , int ) ) : parameter_list . append ( trace + '.' + key ) else : print ( ( 'ignoring sweep parameter' , key ) ) return parameter_list for script_name in list ( scripts . keys ( ) ) : from pylabcontrol . core import ScriptIterator script_trace = trace if script_trace == '' : script_trace = script_name else : script_trace = script_trace + '->' + script_name if issubclass ( scripts [ script_name ] , ScriptIterator ) : populate_sweep_param ( vars ( scripts [ script_name ] ) [ '_SCRIPTS' ] , parameter_list = parameter_list , trace = script_trace ) else : for setting in [ elem [ 1 ] for elem in inspect . 
getmembers ( scripts [ script_name ] ) if elem [ 0 ] == '_DEFAULT_SETTINGS' ] [ 0 ] : parameter_list = get_parameter_from_dict ( script_trace , setting , parameter_list ) return parameter_list if iterator_type == 'loop' : script_default_settings = [ Parameter ( 'script_order' , script_order ) , Parameter ( 'script_execution_freq' , script_execution_freq ) , Parameter ( 'num_loops' , 0 , int , 'times the subscripts will be executed' ) , Parameter ( 'run_all_first' , True , bool , 'Run all scripts with nonzero frequency in first pass' ) ] elif iterator_type == 'sweep' : sweep_params = populate_sweep_param ( sub_scripts , [ ] ) script_default_settings = [ Parameter ( 'script_order' , script_order ) , Parameter ( 'script_execution_freq' , script_execution_freq ) , Parameter ( 'sweep_param' , sweep_params [ 0 ] , sweep_params , 'variable over which to sweep' ) , Parameter ( 'sweep_range' , [ Parameter ( 'min_value' , 0 , float , 'min parameter value' ) , Parameter ( 'max_value' , 0 , float , 'max parameter value' ) , Parameter ( 'N/value_step' , 0 , float , 'either number of steps or parameter value step, depending on mode' ) ] ) , Parameter ( 'stepping_mode' , 'N' , [ 'N' , 'value_step' ] , 'Switch between number of steps and step amount' ) , Parameter ( 'run_all_first' , True , bool , 'Run all scripts with nonzero frequency in first pass' ) ] else : print ( ( 'unknown iterator type ' + iterator_type ) ) raise TypeError ( 'unknown iterator type ' + iterator_type ) return script_default_settings | assigning the actual script settings depending on the iterator type |
5,957 | def raster2shp ( rasterfile , vectorshp , layername = None , fieldname = None , band_num = 1 , mask = 'default' ) : FileClass . remove_files ( vectorshp ) FileClass . check_file_exists ( rasterfile ) gdal . UseExceptions ( ) src_ds = gdal . Open ( rasterfile ) if src_ds is None : print ( 'Unable to open %s' % rasterfile ) sys . exit ( 1 ) try : srcband = src_ds . GetRasterBand ( band_num ) except RuntimeError as e : print ( 'Band ( %i ) not found, %s' % ( band_num , e ) ) sys . exit ( 1 ) if mask == 'default' : maskband = srcband . GetMaskBand ( ) elif mask is None or mask . upper ( ) == 'NONE' : maskband = None else : mask_ds = gdal . Open ( mask ) maskband = mask_ds . GetRasterBand ( 1 ) if layername is None : layername = FileClass . get_core_name_without_suffix ( rasterfile ) drv = ogr_GetDriverByName ( str ( 'ESRI Shapefile' ) ) dst_ds = drv . CreateDataSource ( vectorshp ) srs = None if src_ds . GetProjection ( ) != '' : srs = osr_SpatialReference ( ) srs . ImportFromWkt ( src_ds . GetProjection ( ) ) dst_layer = dst_ds . CreateLayer ( str ( layername ) , srs = srs ) if fieldname is None : fieldname = layername . upper ( ) fd = ogr_FieldDefn ( str ( fieldname ) , OFTInteger ) dst_layer . CreateField ( fd ) dst_field = 0 result = gdal . Polygonize ( srcband , maskband , dst_layer , dst_field , [ '8CONNECTED=8' ] , callback = None ) return result | Convert raster to ESRI shapefile |
5,958 | def convert2geojson ( jsonfile , src_srs , dst_srs , src_file ) : if os . path . exists ( jsonfile ) : os . remove ( jsonfile ) if sysstr == 'Windows' : exepath = '"%s/Lib/site-packages/osgeo/ogr2ogr"' % sys . exec_prefix else : exepath = FileClass . get_executable_fullpath ( 'ogr2ogr' ) s = '%s -f GeoJSON -s_srs "%s" -t_srs %s %s %s' % ( exepath , src_srs , dst_srs , jsonfile , src_file ) UtilClass . run_command ( s ) | convert shapefile to geojson file |
5,959 | def consensus ( aln , weights = None , gap_threshold = 0.5 , simple = False , trim_ends = True ) : if simple : col_consensus = make_simple_col_consensus ( alnutils . aa_frequencies ( aln ) ) def is_majority_gap ( col ) : return ( float ( col . count ( '-' ) ) / len ( col ) >= gap_threshold ) else : if weights is None : seq_weights = alnutils . sequence_weights ( aln , 'avg1' ) else : seq_weights = weights aa_frequencies = alnutils . aa_frequencies ( aln , weights = seq_weights ) col_consensus = make_entropy_col_consensus ( aa_frequencies ) def is_majority_gap ( col ) : gap_count = 0.0 for wt , char in zip ( seq_weights , col ) : if char == '-' : gap_count += wt return ( gap_count / sum ( seq_weights ) >= gap_threshold ) def col_wise_consensus ( columns ) : if not trim_ends : in_left_end = True maybe_right_tail = [ ] for col in columns : if all ( c . islower ( ) for c in col if c not in '.-' ) : yield '-' continue if any ( c . islower ( ) for c in col ) : logging . warn ( 'Mixed lowercase and uppercase letters in a ' 'column: ' + '' . join ( col ) ) col = map ( str . upper , col ) is_gap = is_majority_gap ( col ) if not trim_ends : if in_left_end : if not is_gap : in_left_end = False is_gap = False if is_gap and trim_ends : yield '-' continue cons_char = col_consensus ( col ) if trim_ends : yield cons_char else : if is_gap : maybe_right_tail . append ( cons_char ) else : for char in maybe_right_tail : yield '-' maybe_right_tail = [ ] yield cons_char if not trim_ends : for char in maybe_right_tail : yield char return '' . join ( col_wise_consensus ( zip ( * aln ) ) ) | Get the consensus of an alignment as a string . |
5,960 | def make_simple_col_consensus ( bg_freqs ) : def col_consensus ( col , prev_col = [ ] , prev_char = [ ] ) : aa_counts = sequtils . aa_frequencies ( col ) assert aa_counts , "Column is all gaps! That's not allowed." best_char , best_score = max ( aa_counts . iteritems ( ) , key = lambda kv : kv [ 1 ] ) ties = [ aa for aa in aa_counts if aa_counts [ aa ] == best_score ] if len ( ties ) > 1 : if prev_char and prev_col : mc_next = Counter ( [ b for a , b in zip ( prev_col , col ) if a == prev_char [ 0 ] and b in ties ] ) . most_common ( ) ties_next = [ x [ 0 ] for x in mc_next if x [ 1 ] == mc_next [ 0 ] [ 1 ] ] if ties_next : ties = ties_next if len ( ties ) > 1 : ties . sort ( key = lambda aa : bg_freqs [ aa ] ) best_char = ties [ 0 ] else : assert best_char == ties [ 0 ] , 'WTF %s != %s[0]' % ( best_char , ties ) prev_col [ : ] = col prev_char [ : ] = best_char return best_char return col_consensus | Consensus by simple plurality unweighted . |
5,961 | def supported ( aln ) : def col_consensus ( columns ) : for col in columns : if ( ( col . count ( '-' ) >= len ( col ) / 2 ) or all ( c . islower ( ) for c in col if c not in '.-' ) ) : yield '-' continue if any ( c . islower ( ) for c in col ) : logging . warn ( 'Mixed lowercase and uppercase letters in a ' 'column: ' + '' . join ( col ) ) col = map ( str . upper , col ) most_common = Counter ( [ c for c in col if c not in '-' ] ) . most_common ( ) if not most_common : logging . warn ( "Column is all gaps! How did that happen?" ) if most_common [ 0 ] [ 1 ] == 1 : yield '-' elif ( len ( most_common ) > 1 and most_common [ 0 ] [ 1 ] == most_common [ 1 ] [ 1 ] ) : ties = [ x [ 0 ] for x in most_common if x [ 1 ] == most_common [ 0 ] [ 1 ] ] yield '' . join ( ties ) else : yield most_common [ 0 ] [ 0 ] return list ( col_consensus ( zip ( * aln ) ) ) | Get only the supported consensus residues in each column . |
5,962 | def detect_clause ( parser , clause_name , tokens , as_filter_expr = True ) : if clause_name in tokens : t_index = tokens . index ( clause_name ) clause_value = tokens [ t_index + 1 ] if as_filter_expr : clause_value = parser . compile_filter ( clause_value ) del tokens [ t_index : t_index + 2 ] else : clause_value = None return clause_value | Helper function detects a certain clause in tag tokens list . Returns its value . |
5,963 | def install_kernel_spec ( self , app , dir_name , display_name , settings_module , ipython_arguments ) : ksm = app . kernel_spec_manager try_spec_names = [ 'python3' if six . PY3 else 'python2' , 'python' ] if isinstance ( try_spec_names , six . string_types ) : try_spec_names = [ try_spec_names ] ks = None for spec_name in try_spec_names : try : ks = ksm . get_kernel_spec ( spec_name ) break except Exception : continue if not ks : self . parser . error ( "No notebook (Python) kernel specs found" ) ks . display_name = display_name ks . env [ "CORRAL_SETTINGS_MODULE" ] = settings_module ks . argv . extend ( ipython_arguments ) in_corral_dir , in_corral = os . path . split ( os . path . realpath ( sys . argv [ 0 ] ) ) pythonpath = ks . env . get ( 'PYTHONPATH' , os . environ . get ( 'PYTHONPATH' , '' ) ) pythonpath = pythonpath . split ( ':' ) if in_corral_dir not in pythonpath : pythonpath . append ( in_corral_dir ) ks . env [ 'PYTHONPATH' ] = ':' . join ( filter ( None , pythonpath ) ) kernel_dir = os . path . join ( ksm . user_kernel_dir , conf . PACKAGE ) if not os . path . exists ( kernel_dir ) : os . makedirs ( kernel_dir ) shutil . copy ( res . fullpath ( "logo-64x64.png" ) , kernel_dir ) with open ( os . path . join ( kernel_dir , 'kernel.json' ) , 'w' ) as f : f . write ( ks . to_json ( ) ) | install an IPython > = 3 . 0 kernelspec that loads corral env |
5,964 | def _cache_init ( self ) : cache_ = cache . get ( self . CACHE_ENTRY_NAME ) if cache_ is None : categories = get_category_model ( ) . objects . order_by ( 'sort_order' ) ids = { category . id : category for category in categories } aliases = { category . alias : category for category in categories if category . alias } parent_to_children = OrderedDict ( ) for category in categories : parent_category = ids . get ( category . parent_id , False ) parent_alias = None if parent_category : parent_alias = parent_category . alias if parent_alias not in parent_to_children : parent_to_children [ parent_alias ] = [ ] parent_to_children [ parent_alias ] . append ( category . id ) cache_ = { self . CACHE_NAME_IDS : ids , self . CACHE_NAME_PARENTS : parent_to_children , self . CACHE_NAME_ALIASES : aliases } cache . set ( self . CACHE_ENTRY_NAME , cache_ , self . CACHE_TIMEOUT ) self . _cache = cache_ | Initializes local cache from Django cache if required . |
5,965 | def _cache_get_entry ( self , entry_name , key = ENTIRE_ENTRY_KEY , default = False ) : if key is self . ENTIRE_ENTRY_KEY : return self . _cache [ entry_name ] return self . _cache [ entry_name ] . get ( key , default ) | Returns cache entry parameter value by its name . |
5,966 | def sort_aliases ( self , aliases ) : self . _cache_init ( ) if not aliases : return aliases parent_aliases = self . _cache_get_entry ( self . CACHE_NAME_PARENTS ) . keys ( ) return [ parent_alias for parent_alias in parent_aliases if parent_alias in aliases ] | Sorts the given aliases list returns a sorted list . |
5,967 | def get_parents_for ( self , child_ids ) : self . _cache_init ( ) parent_candidates = [ ] for parent , children in self . _cache_get_entry ( self . CACHE_NAME_PARENTS ) . items ( ) : if set ( children ) . intersection ( child_ids ) : parent_candidates . append ( parent ) return set ( parent_candidates ) | Returns parent aliases for a list of child IDs . |
5,968 | def get_children_for ( self , parent_alias = None , only_with_aliases = False ) : self . _cache_init ( ) child_ids = self . get_child_ids ( parent_alias ) if only_with_aliases : children = [ ] for cid in child_ids : category = self . get_category_by_id ( cid ) if category . alias : children . append ( category ) return children return [ self . get_category_by_id ( cid ) for cid in child_ids ] | Returns a list with with categories under the given parent . |
5,969 | def get_child_ids ( self , parent_alias ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_PARENTS , parent_alias , [ ] ) | Returns child IDs of the given parent category |
5,970 | def get_category_by_alias ( self , alias ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_ALIASES , alias , None ) | Returns Category object by its alias . |
5,971 | def get_category_by_id ( self , cid ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_IDS , cid ) | Returns Category object by its id . |
5,972 | def get_ties_stats ( self , categories , target_model = None ) : filter_kwargs = { 'category_id__in' : categories } if target_model is not None : is_cls = hasattr ( target_model , '__name__' ) if is_cls : concrete = False else : concrete = True filter_kwargs [ 'object_id' ] = target_model . id filter_kwargs [ 'content_type' ] = ContentType . objects . get_for_model ( target_model , for_concrete_model = concrete ) return { item [ 'category_id' ] : item [ 'ties_num' ] for item in get_tie_model ( ) . objects . filter ( ** filter_kwargs ) . values ( 'category_id' ) . annotate ( ties_num = Count ( 'category' ) ) } | Returns a dict with categories popularity stats . |
5,973 | def load_p2th_privkey_into_local_node ( provider : RpcNode , prod : bool = True ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "Import only works with local node." } error = { "error" : "Loading P2TH privkey failed." } pa_params = param_query ( provider . network ) if prod : provider . importprivkey ( pa_params . P2TH_wif , "PAPROD" ) if not provider . validateaddress ( pa_params . P2TH_addr ) [ 'ismine' ] : raise P2THImportFailed ( error ) else : provider . importprivkey ( pa_params . test_P2TH_wif , "PATEST" ) if not provider . validateaddress ( pa_params . test_P2TH_addr ) [ 'ismine' ] : raise P2THImportFailed ( error ) | Load PeerAssets P2TH privkey into the local node . |
5,974 | def find_deck_spawns ( provider : Provider , prod : bool = True ) -> Iterable [ str ] : pa_params = param_query ( provider . network ) if isinstance ( provider , RpcNode ) : if prod : decks = ( i [ "txid" ] for i in provider . listtransactions ( "PAPROD" ) ) else : decks = ( i [ "txid" ] for i in provider . listtransactions ( "PATEST" ) ) if isinstance ( provider , Cryptoid ) or isinstance ( provider , Explorer ) : if prod : decks = ( i for i in provider . listtransactions ( pa_params . P2TH_addr ) ) else : decks = ( i for i in provider . listtransactions ( pa_params . test_P2TH_addr ) ) return decks | find deck spawn transactions via Provider it requires that Deck spawn P2TH were imported in local node or that remote API knows about P2TH address . |
5,975 | def deck_parser ( args : Tuple [ Provider , dict , int , str ] , prod : bool = True ) -> Optional [ Deck ] : provider = args [ 0 ] raw_tx = args [ 1 ] deck_version = args [ 2 ] p2th = args [ 3 ] try : validate_deckspawn_p2th ( provider , raw_tx , p2th ) d = parse_deckspawn_metainfo ( read_tx_opreturn ( raw_tx [ 'vout' ] [ 1 ] ) , deck_version ) if d : d [ "id" ] = raw_tx [ "txid" ] try : d [ "issue_time" ] = raw_tx [ "blocktime" ] except KeyError : d [ "time" ] = 0 d [ "issuer" ] = find_tx_sender ( provider , raw_tx ) d [ "network" ] = provider . network d [ "production" ] = prod d [ "tx_confirmations" ] = raw_tx [ "confirmations" ] return Deck ( ** d ) except ( InvalidDeckSpawn , InvalidDeckMetainfo , InvalidDeckVersion , InvalidNulldataOutput ) as err : pass return None | deck parser function |
5,976 | def tx_serialization_order ( provider : Provider , blockhash : str , txid : str ) -> int : return provider . getblock ( blockhash ) [ "tx" ] . index ( txid ) | find index of this tx in the blockid |
5,977 | def deck_issue_mode ( proto : DeckSpawnProto ) -> Iterable [ str ] : if proto . issue_mode == 0 : yield "NONE" return for mode , value in proto . MODE . items ( ) : if value > proto . issue_mode : continue if value & proto . issue_mode : yield mode | interpret issue mode bit flag
5,978 | def parse_deckspawn_metainfo ( protobuf : bytes , version : int ) -> dict : deck = DeckSpawnProto ( ) deck . ParseFromString ( protobuf ) error = { "error" : "Deck ({deck}) metainfo incomplete, deck must have a name." . format ( deck = deck . name ) } if deck . name == "" : raise InvalidDeckMetainfo ( error ) if deck . version != version : raise InvalidDeckVersion ( { "error" , "Deck version mismatch." } ) return { "version" : deck . version , "name" : deck . name , "issue_mode" : deck . issue_mode , "number_of_decimals" : deck . number_of_decimals , "asset_specific_data" : deck . asset_specific_data } | Decode deck_spawn tx op_return protobuf message and validate it . Raise error if deck_spawn metainfo incomplete or version mismatch .
5,979 | def load_deck_p2th_into_local_node ( provider : RpcNode , deck : Deck ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "You can load privkeys only into local node." } error = { "error" : "Deck P2TH import went wrong." } provider . importprivkey ( deck . p2th_wif , deck . id ) check_addr = provider . validateaddress ( deck . p2th_address ) if not check_addr [ "isvalid" ] and not check_addr [ "ismine" ] : raise DeckP2THImportError ( error ) | load deck p2th into local node via importprivkey ; this allows building of proof - of - timeline for this deck
5,980 | def card_bundle_parser ( bundle : CardBundle , debug = False ) -> Iterator : try : validate_card_transfer_p2th ( bundle . deck , bundle . vouts [ 0 ] ) card_metainfo = parse_card_transfer_metainfo ( read_tx_opreturn ( bundle . vouts [ 1 ] ) , bundle . deck . version ) except ( InvalidCardTransferP2TH , CardVersionMismatch , CardNumberOfDecimalsMismatch , RecieverAmountMismatch , DecodeError , TypeError , InvalidNulldataOutput ) as e : if debug : print ( e ) return yield if not card_metainfo [ "number_of_decimals" ] == bundle . deck . number_of_decimals : raise CardNumberOfDecimalsMismatch ( { "error" : "Number of decimals does not match." } ) cards = card_postprocess ( card_metainfo , bundle . vouts ) del bundle . __dict__ [ 'vouts' ] for c in cards : d = { ** c , ** bundle . __dict__ } try : yield CardTransfer ( ** d ) except InvalidCardIssue as e : if debug : print ( e ) | this function wraps all the card transfer parsing |
5,981 | def param_query ( name : str ) -> PAParams : for pa_params in params : if name in ( pa_params . network_name , pa_params . network_shortname , ) : return pa_params raise UnsupportedNetwork | Find the PAParams for a network by its long or short name . Raises UnsupportedNetwork if no PAParams is found . |
5,982 | def load_scripts ( self ) : for index in range ( self . tree_scripts . topLevelItemCount ( ) ) : script_item = self . tree_scripts . topLevelItem ( index ) self . update_script_from_item ( script_item ) dialog = LoadDialog ( elements_type = "scripts" , elements_old = self . scripts , filename = self . gui_settings [ 'scripts_folder' ] ) if dialog . exec_ ( ) : self . gui_settings [ 'scripts_folder' ] = str ( dialog . txt_probe_log_path . text ( ) ) scripts = dialog . get_values ( ) added_scripts = set ( scripts . keys ( ) ) - set ( self . scripts . keys ( ) ) removed_scripts = set ( self . scripts . keys ( ) ) - set ( scripts . keys ( ) ) if 'data_folder' in list ( self . gui_settings . keys ( ) ) and os . path . exists ( self . gui_settings [ 'data_folder' ] ) : data_folder_name = self . gui_settings [ 'data_folder' ] else : data_folder_name = None self . scripts , loaded_failed , self . instruments = Script . load_and_append ( script_dict = { name : scripts [ name ] for name in added_scripts } , scripts = self . scripts , instruments = self . instruments , log_function = self . log , data_path = data_folder_name ) for name in removed_scripts : del self . scripts [ name ] | opens file dialog to load scripts into gui |
5,983 | def getblockhash ( self , index : int ) -> str : return cast ( str , self . api_fetch ( 'getblockhash?index=' + str ( index ) ) ) | Returns the hash of the block at the given index ; index 0 is the genesis block .
5,984 | def getblock ( self , hash : str ) -> dict : return cast ( dict , self . api_fetch ( 'getblock?hash=' + hash ) ) | Returns information about the block with the given hash . |
5,985 | def getaddress ( self , address : str ) -> dict : return cast ( dict , self . ext_fetch ( 'getaddress/' + address ) ) | Returns information for given address . |
5,986 | def listunspent ( self , address : str ) -> list : try : return cast ( dict , self . ext_fetch ( 'listunspent/' + address ) ) [ 'unspent_outputs' ] except KeyError : raise InsufficientFunds ( 'Insufficient funds.' ) | Returns unspent transactions for given address . |
5,987 | def txinfo ( self , txid : str ) -> dict : return cast ( dict , self . ext_fetch ( 'txinfo/' + txid ) ) | Returns information about given transaction . |
5,988 | def getbalance ( self , address : str ) -> Decimal : try : return Decimal ( cast ( float , self . ext_fetch ( 'getbalance/' + address ) ) ) except TypeError : return Decimal ( 0 ) | Returns current balance of given address . |
5,989 | def extract ( self , obj , bypass_ref = False ) : return self . pointer . extract ( obj , bypass_ref ) | Extract subelement from obj according to pointer . It assumes that document is the object .
5,990 | def parse ( self , pointer ) : if isinstance ( pointer , Pointer ) : return pointer . tokens [ : ] elif pointer == '' : return [ ] tokens = [ ] staged , _ , children = pointer . partition ( '/' ) if staged : try : token = StagesToken ( staged ) token . last = False tokens . append ( token ) except ValueError : raise ParseError ( 'pointer must start with / or int' , pointer ) if _ : for part in children . split ( '/' ) : part = part . replace ( '~1' , '/' ) part = part . replace ( '~0' , '~' ) token = ChildToken ( part ) token . last = False tokens . append ( token ) return tokens | parse pointer into tokens |
5,991 | def extract ( self , obj , bypass_ref = False ) : for token in self . tokens : obj = token . extract ( obj , bypass_ref ) return obj | Extract subelement from obj according to tokens . |
5,992 | def extract ( self , obj , bypass_ref = False ) : for i in range ( 0 , self . stages ) : try : obj = obj . parent_obj except AttributeError : raise UnstagedError ( obj , '{!r} must be staged before ' 'exploring its parents' . format ( obj ) ) if self . member : return obj . parent_member return obj | Extract parent of obj according to current token . |
5,993 | def extract ( self , obj , bypass_ref = False ) : try : if isinstance ( obj , Mapping ) : if not bypass_ref and '$ref' in obj : raise RefError ( obj , 'presence of a $ref member' ) obj = self . extract_mapping ( obj ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : obj = self . extract_sequence ( obj ) else : raise WrongType ( obj , '{!r} does not apply ' 'for {!r}' . format ( str ( self ) , obj ) ) if isinstance ( obj , Mapping ) : if not bypass_ref and '$ref' in obj : raise RefError ( obj , 'presence of a $ref member' ) return obj except ExtractError as error : logger . exception ( error ) raise except Exception as error : logger . exception ( error ) args = [ arg for arg in error . args if arg not in ( self , obj ) ] raise ExtractError ( obj , * args ) | Extract subelement from obj according to current token . |
5,994 | def digester ( data ) : if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) hashof = hashlib . sha1 ( data ) . digest ( ) encoded_hash = base64 . b64encode ( hashof ) if not isinstance ( encoded_hash , six . string_types ) : encoded_hash = encoded_hash . decode ( 'utf_8' ) chunked = splitter ( encoded_hash , chunksize = 60 ) lines = '\n' . join ( chunked ) return lines | Create SHA - 1 hash get digest b64 encode split every 60 char . |
5,995 | def splitter ( iterable , chunksize = 60 ) : return ( iterable [ 0 + i : chunksize + i ] for i in range ( 0 , len ( iterable ) , chunksize ) ) | Split an iterable that supports indexing into chunks of chunksize . |
5,996 | def canonical_request ( self , method , path , content , timestamp ) : request = collections . OrderedDict ( [ ( 'Method' , method . upper ( ) ) , ( 'Hashed Path' , path ) , ( 'X-Ops-Content-Hash' , content ) , ( 'X-Ops-Timestamp' , timestamp ) , ( 'X-Ops-UserId' , self . user_id ) , ] ) return '\n' . join ( [ '%s:%s' % ( key , value ) for key , value in request . items ( ) ] ) | Return the canonical request string . |
5,997 | def load_pem ( cls , private_key , password = None ) : maybe_path = normpath ( private_key ) if os . path . isfile ( maybe_path ) : with open ( maybe_path , 'rb' ) as pkf : private_key = pkf . read ( ) if not isinstance ( private_key , six . binary_type ) : private_key = private_key . encode ( 'utf-8' ) pkey = serialization . load_pem_private_key ( private_key , password = password , backend = crypto_backends . default_backend ( ) ) return cls ( pkey ) | Return a PrivateKey instance . |
5,998 | def sign ( self , data , b64 = True ) : padder = padding . PKCS1v15 ( ) signer = self . private_key . signer ( padder , None ) if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) signer . update ( data ) signed = signer . finalize ( ) if b64 : signed = base64 . b64encode ( signed ) return signed | Sign data with the private key and return the signed data . |
5,999 | def dump ( obj , fp , ** kw ) : r xml = dumps ( obj , ** kw ) if isinstance ( fp , basestring ) : with open ( fp , 'w' ) as fobj : fobj . write ( xml ) else : fp . write ( xml ) | r Dump python object to file . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.