idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
58,900 | def _get_addresses ( self , text ) : addresses = [ ] matches = utils . findall ( self . rules , text , flags = re . VERBOSE | re . U ) if ( matches ) : for match in matches : addresses . append ( match [ 0 ] . strip ( ) ) return addresses | Returns a list of addresses found in text |
58,901 | def parse ( some_text , ** kwargs ) : ap = parser . AddressParser ( ** kwargs ) return ap . parse ( some_text ) | Creates request to AddressParser and returns list of Address objects |
58,902 | def setAttribute ( values , value ) : if isinstance ( value , int ) : values . add ( ) . int32_value = value elif isinstance ( value , float ) : values . add ( ) . double_value = value elif isinstance ( value , long ) : values . add ( ) . int64_value = value elif isinstance ( value , str ) : values . add ( ) . string_v... | Takes the values of an attribute value list and attempts to append attributes of the proper type inferred from their Python type . |
58,903 | def deepSetAttr ( obj , path , val ) : first , _ , rest = path . rpartition ( '.' ) return setattr ( deepGetAttr ( obj , first ) if first else obj , rest , val ) | Sets a deep attribute on an object by resolving a dot - delimited path . If path does not exist an AttributeError will be raised . |
58,904 | def convertDatetime ( t ) : epoch = datetime . datetime . utcfromtimestamp ( 0 ) delta = t - epoch millis = delta . total_seconds ( ) * 1000 return int ( millis ) | Converts the specified datetime object into its appropriate protocol value . This is the number of milliseconds from the epoch . |
58,905 | def getValueFromValue ( value ) : if type ( value ) != common . AttributeValue : raise TypeError ( "Expected an AttributeValue, but got {}" . format ( type ( value ) ) ) if value . WhichOneof ( "value" ) is None : raise AttributeError ( "Nothing set for {}" . format ( value ) ) return getattr ( value , value . WhichOne... | Extract the currently set field from a Value structure |
58,906 | def toJson ( protoObject , indent = None ) : js = json_format . MessageToDict ( protoObject , False ) return json . dumps ( js , indent = indent ) | Serialises a protobuf object as json |
58,907 | def getProtocolClasses ( superclass = message . Message ) : superclasses = set ( [ message . Message ] ) thisModule = sys . modules [ __name__ ] subclasses = [ ] for name , class_ in inspect . getmembers ( thisModule ) : if ( ( inspect . isclass ( class_ ) and issubclass ( class_ , superclass ) and class_ not in superc... | Returns all the protocol classes that are subclasses of the specified superclass . Only leaf classes are returned corresponding directly to the classes defined in the protocol . |
58,908 | def runCommandSplits ( splits , silent = False , shell = False ) : try : if silent : with open ( os . devnull , 'w' ) as devnull : subprocess . check_call ( splits , stdout = devnull , stderr = devnull , shell = shell ) else : subprocess . check_call ( splits , shell = shell ) except OSError as exception : if exception... | Run a shell command given the command s parsed command line |
58,909 | def _createSchemaFiles ( self , destPath , schemasPath ) : ga4ghPath = os . path . join ( destPath , 'ga4gh' ) if not os . path . exists ( ga4ghPath ) : os . mkdir ( ga4ghPath ) ga4ghSchemasPath = os . path . join ( ga4ghPath , 'schemas' ) if not os . path . exists ( ga4ghSchemasPath ) : os . mkdir ( ga4ghSchemasPath )... | Create a hierarchy of proto files in a destination directory copied from the schemasPath hierarchy |
58,910 | def _doLineReplacements ( self , line ) : packageString = 'package ga4gh;' if packageString in line : return line . replace ( packageString , 'package ga4gh.schemas.ga4gh;' ) importString = 'import "ga4gh/' if importString in line : return line . replace ( importString , 'import "ga4gh/schemas/ga4gh/' ) googlePackageSt... | Given a line of a proto file replace the line with one that is appropriate for the hierarchy that we want to compile |
58,911 | def _copySchemaFile ( self , src , dst ) : with open ( src ) as srcFile , open ( dst , 'w' ) as dstFile : srcLines = srcFile . readlines ( ) for srcLine in srcLines : toWrite = self . _doLineReplacements ( srcLine ) dstFile . write ( toWrite ) | Copy a proto file to the temporary directory with appropriate line replacements |
58,912 | def convert_protodef_to_editable ( proto ) : class Editable ( object ) : def __init__ ( self , prot ) : self . kind = type ( prot ) self . name = prot . name self . comment = "" self . options = dict ( [ ( key . name , value ) for ( key , value ) in prot . options . ListFields ( ) ] ) if isinstance ( prot , EnumDescrip... | Protobuf objects can t have arbitrary fields addedd and we need to later on add comments to them so we instead make Editable objects that can do so |
58,913 | def haversine ( point1 , point2 , unit = 'km' ) : AVG_EARTH_RADIUS_KM = 6371.0088 conversions = { 'km' : 1 , 'm' : 1000 , 'mi' : 0.621371192 , 'nmi' : 0.539956803 , 'ft' : 3280.839895013 , 'in' : 39370.078740158 } avg_earth_radius = AVG_EARTH_RADIUS_KM * conversions [ unit ] lat1 , lng1 = point1 lat2 , lng2 = point2 la... | Calculate the great - circle distance between two points on the Earth surface . |
58,914 | def main ( ) : logging . basicConfig ( level = logging . INFO ) run_metrics = py_interop_run_metrics . run_metrics ( ) summary = py_interop_summary . run_summary ( ) valid_to_load = py_interop_run . uchar_vector ( py_interop_run . MetricCount , 0 ) py_interop_run_metrics . list_summary_metrics_to_load ( valid_to_load )... | Retrieve run folder paths from the command line Ensure only metrics required for summary are loaded Load the run metrics Calculate the summary metrics Display error by lane read |
58,915 | def gen_csv ( sc , filename , field_list , source , filters ) : datafile = open ( filename , 'wb' ) csvfile = csv . writer ( datafile ) header = [ ] for field in field_list : header . append ( fields . fields [ field ] [ 'name' ] ) csvfile . writerow ( header ) debug . write ( 'Generating %s: ' % filename ) fparams = {... | csv SecurityCenterObj AssetListName CSVFields EmailAddress |
58,916 | def login ( self , user , passwd ) : resp = self . post ( 'token' , json = { 'username' : user , 'password' : passwd } ) self . _token = resp . json ( ) [ 'response' ] [ 'token' ] | Logs the user into SecurityCenter and stores the needed token and cookies . |
58,917 | def download_scans ( sc , age = 0 , unzip = False , path = 'scans' ) : if not os . path . exists ( path ) : logger . debug ( 'scan path didn\'t exist. creating it.' ) os . makedirs ( path ) findate = ( date . today ( ) - timedelta ( days = age ) ) logger . debug ( 'getting scan results for parsing' ) resp = sc . get ( ... | Scan Downloader Here we will attempt to download all of the scans that have completed between now and AGE days ago . |
58,918 | def update ( sc , filename , asset_id ) : addresses = [ ] with open ( filename ) as hostfile : for line in hostfile . readlines ( ) : addresses . append ( line . strip ( '\n' ) ) sc . asset_update ( asset_id , dns = addresses ) | Updates a DNS Asset List with the contents of the filename . The assumed format of the file is 1 entry per line . This function will convert the file contents into an array of entries and then upload that array into SecurityCenter . |
58,919 | def generate_html_report ( base_path , asset_id ) : jenv = Environment ( loader = PackageLoader ( 'swchange' , 'templates' ) ) s = Session ( ) asset = s . query ( AssetList ) . filter_by ( id = asset_id ) . first ( ) if not asset : print 'Invalid Asset ID (%s)!' % asset_id return filename = os . path . join ( base_path... | Generates the HTML report and dumps it into the specified filename |
58,920 | def gen_csv ( sc , filename ) : datafile = open ( filename , 'wb' ) csvfile = csv . writer ( datafile ) csvfile . writerow ( [ 'Software Package Name' , 'Count' ] ) debug . write ( 'Generating %s: ' % filename ) fparams = { 'fobj' : csvfile } sc . query ( 'listsoftware' , func = writer , func_params = fparams ) debug .... | csv SecurityCenterObj EmailAddress |
58,921 | def download ( sc , age = 0 , path = 'reports' , ** args ) : if not os . path . exists ( path ) : logger . debug ( 'report path didn\'t exist. creating it.' ) os . makedirs ( path ) findate = ( date . today ( ) - timedelta ( days = age ) ) reports = sc . get ( 'report' , params = { 'startTime' : findate . strftime ( '%... | Report Downloader The report downloader will pull reports down from SecurityCenter based on the conditions provided to the path provided . |
58,922 | def post ( self , path , ** kwargs ) : resp = self . _session . post ( self . _url ( path ) , ** self . _builder ( ** kwargs ) ) if 'stream' in kwargs : return resp else : return self . _resp_error_check ( resp ) | Calls the specified path with the POST method |
58,923 | def import_repo ( self , repo_id , fileobj ) : filename = self . upload ( fileobj ) . json ( ) [ 'response' ] [ 'filename' ] return self . post ( 'repository/{}/import' . format ( repo_id ) , json = { 'file' : filename } ) | Imports a repository package using the repository ID specified . |
58,924 | def _revint ( self , version ) : intrev = 0 vsplit = version . split ( '.' ) for c in range ( len ( vsplit ) ) : item = int ( vsplit [ c ] ) * ( 10 ** ( ( ( len ( vsplit ) - c - 1 ) * 2 ) ) ) intrev += item return intrev | Internal function to convert a version string to an integer . |
58,925 | def _revcheck ( self , func , version ) : current = self . _revint ( self . version ) check = self . _revint ( version ) if func in ( 'lt' , '<=' , ) : return check <= current elif func in ( 'gt' , '>=' ) : return check >= current elif func in ( 'eq' , '=' , 'equals' ) : return check == current else : return False | Internal function to see if a version is func than what we have determined to be talking to . This is very useful for newer API calls to make sure we don t accidentally make a call to something that doesnt exist . |
58,926 | def _build_xrefs ( self ) : xrefs = set ( ) plugins = self . plugins ( ) for plugin in plugins : for xref in plugin [ 'xrefs' ] . split ( ', ' ) : xrf = xref . replace ( '-' , '_' ) . split ( ':' ) [ 0 ] if xrf is not '' : xrefs . add ( xrf ) self . _xrefs = list ( xrefs ) | Internal function to populate the xrefs list with the external references to be used in searching plugins and potentially other functions as well . |
58,927 | def login ( self , user , passwd ) : data = self . raw_query ( 'auth' , 'login' , data = { 'username' : user , 'password' : passwd } ) self . _token = data [ "token" ] self . _user = data | login user passwd Performs the login operation for Security Center storing the token that Security Center has generated for this login session for future queries . |
58,928 | def credential_add ( self , name , cred_type , ** options ) : if 'pirvateKey' in options : options [ 'privateKey' ] = self . _upload ( options [ 'privateKey' ] ) [ 'filename' ] if 'publicKey' in options : options [ 'publicKey' ] = self . _upload ( options [ 'publicKey' ] ) [ 'filename' ] return self . raw_query ( "cred... | Adds a new credential into SecurityCenter . As credentials can be of multiple types we have different options to specify for each type of credential . |
58,929 | def credential_delete_simulate ( self , * ids ) : return self . raw_query ( "credential" , "deleteSimulate" , data = { "credentials" : [ { "id" : str ( id ) } for id in ids ] } ) | Show the relationships and dependencies for one or more credentials . |
58,930 | def credential_delete ( self , * ids ) : return self . raw_query ( "credential" , "delete" , data = { "credentials" : [ { "id" : str ( id ) } for id in ids ] } ) | Delete one or more credentials . |
58,931 | def plugins ( self , plugin_type = 'all' , sort = 'id' , direction = 'asc' , size = 1000 , offset = 0 , all = True , loops = 0 , since = None , ** filterset ) : plugins = [ ] payload = { 'size' : size , 'offset' : offset , 'type' : plugin_type , 'sortField' : sort , 'sortDirection' : direction . upper ( ) , } if len ( ... | plugins Returns a list of of the plugins and their associated families . For simplicity purposes the plugin family names will be injected into the plugin data so that only 1 list is returned back with all of the information . |
58,932 | def plugin_counts ( self ) : ret = { 'total' : 0 , } data = self . raw_query ( 'plugin' , 'init' ) ret [ 'total' ] = data [ 'pluginCount' ] if 'lastUpdates' in data : for item in [ 'active' , 'passive' , 'compliance' , 'custom' , 'event' ] : itemdata = { } if item in data [ 'lastUpdates' ] : itemdata = data [ 'lastUpda... | plugin_counts Returns the plugin counts as dictionary with the last updated info if its available . |
58,933 | def ip_info ( self , ip , repository_ids = None ) : if not repository_ids : repository_ids = [ ] repos = [ ] for rid in repository_ids : repos . append ( { 'id' : rid } ) return self . raw_query ( 'vuln' , 'getIP' , data = { 'ip' : ip , 'repositories' : repos } ) | ip_info Returns information about the IP specified in the repository ids defined . |
58,934 | def scan_list ( self , start_time = None , end_time = None , ** kwargs ) : try : end_time = datetime . utcfromtimestamp ( int ( end_time ) ) except TypeError : if end_time is None : end_time = datetime . utcnow ( ) try : start_time = datetime . utcfromtimestamp ( int ( start_time ) ) except TypeError : if start_time is... | List scans stored in Security Center in a given time range . |
58,935 | def dashboard_import ( self , name , fileobj ) : data = self . _upload ( fileobj ) return self . raw_query ( 'dashboard' , 'importTab' , data = { 'filename' : data [ 'filename' ] , 'name' : name , } ) | dashboard_import Dashboard_Name filename Uploads a dashboard template to the current user s dashboard tabs . |
58,936 | def report_import ( self , name , filename ) : data = self . _upload ( filename ) return self . raw_query ( 'report' , 'import' , data = { 'filename' : data [ 'filename' ] , 'name' : name , } ) | report_import Report_Name filename Uploads a report template to the current user s reports |
58,937 | def asset_create ( self , name , items , tag = '' , description = '' , atype = 'static' ) : data = { 'name' : name , 'description' : description , 'type' : atype , 'tags' : tag } if atype == 'static' : data [ 'definedIPs' ] = ',' . join ( items ) if atype == 'dns' : data [ 'type' ] = 'dnsname' data [ 'definedDNSNames' ... | asset_create_static name ips tags description Create a new asset list with the defined information . |
58,938 | def asset_create_combo ( self , name , combo , tag = '' , description = '' ) : return self . raw_query ( 'asset' , 'add' , data = { 'name' : name , 'description' : description , 'type' : 'combination' , 'combinations' : combo , } ) | asset_create_combo name combination tag description Creates a new combination asset list . Operands can be either asset list IDs or be a nested combination asset list . |
58,939 | def risk_rule ( self , rule_type , rule_value , port , proto , plugin_id , repo_ids , comment = '' , expires = '-1' , severity = None ) : data = { 'hostType' : rule_type , 'port' : port , 'comments' : comment , 'protocol' : proto , 'pluginID' : plugin_id , 'repIDs' : [ { 'id' : i } for i in repo_ids ] } if rule_type !=... | accept_risk rule_type rule_value port proto plugin_id comment Creates an accept rick rule based on information provided . |
58,940 | def group_add ( self , name , restrict , repos , lces = [ ] , assets = [ ] , queries = [ ] , policies = [ ] , dashboards = [ ] , credentials = [ ] , description = '' ) : return self . raw_query ( 'group' , 'add' , data = { 'lces' : [ { 'id' : i } for i in lces ] , 'assets' : [ { 'id' : i } for i in assets ] , 'queries'... | group_add name restrict repos |
58,941 | def get_geo_info ( filename , band = 1 ) : sourceds = gdal . Open ( filename , GA_ReadOnly ) ndv = sourceds . GetRasterBand ( band ) . GetNoDataValue ( ) xsize = sourceds . RasterXSize ysize = sourceds . RasterYSize geot = sourceds . GetGeoTransform ( ) projection = osr . SpatialReference ( ) projection . ImportFromWkt... | Gets information from a Raster data set |
58,942 | def create_geotiff ( name , Array , driver , ndv , xsize , ysize , geot , projection , datatype , band = 1 ) : if isinstance ( datatype , np . int ) == False : if datatype . startswith ( 'gdal.GDT_' ) == False : datatype = eval ( 'gdal.GDT_' + datatype ) newfilename = name + '.tif' Array [ np . isnan ( Array ) ] = ndv ... | Creates new geotiff from array |
58,943 | def load_tiff ( file ) : ndv , xsize , ysize , geot , projection , datatype = get_geo_info ( file ) data = gdalnumeric . LoadFile ( file ) data = np . ma . masked_array ( data , mask = data == ndv , fill_value = ndv ) return data | Load a geotiff raster keeping ndv values using a masked array |
58,944 | def from_file ( filename , ** kwargs ) : ndv , xsize , ysize , geot , projection , datatype = get_geo_info ( filename , ** kwargs ) data = gdalnumeric . LoadFile ( filename , ** kwargs ) data = np . ma . masked_array ( data , mask = data == ndv , fill_value = ndv ) return GeoRaster ( data , geot , nodata_value = ndv , ... | Create a GeoRaster object from a file |
58,945 | def copy ( self ) : return GeoRaster ( self . raster . copy ( ) , self . geot , nodata_value = self . nodata_value , projection = self . projection , datatype = self . datatype ) | Returns copy of itself |
58,946 | def clip ( self , shp , keep = False , * args , ** kwargs ) : df = pd . DataFrame ( zonal_stats ( shp , self . raster , nodata = self . nodata_value , all_touched = True , raster_out = True , affine = Affine . from_gdal ( * self . geot ) , geojson_out = keep , ) ) if keep : df [ 'GeoRaster' ] = df . properties . apply ... | Clip raster using shape where shape is either a GeoPandas DataFrame shapefile or some other geometry format used by python - raster - stats |
58,947 | def pysal_Gamma ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Gamma = pysal . Gamma ( rasterf , self . weights , ** kwargs ) | Compute Gamma Index of Spatial Autocorrelation for GeoRaster |
58,948 | def pysal_Join_Counts ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Join_Counts = pysal . Join_Counts ( rasterf , self . weights , ** kwargs ) | Compute join count statistics for GeoRaster |
58,949 | def pysal_Moran ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Moran = pysal . Moran ( rasterf , self . weights , ** kwargs ) | Compute Moran s I measure of global spatial autocorrelation for GeoRaster |
58,950 | def pysal_Moran_Local ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Moran_Local = pysal . Moran_Local ( rasterf , self . weights , ** kwargs ) for i in self . Moran_Local . __dict__ . keys... | Compute Local Moran s I measure of local spatial autocorrelation for GeoRaster |
58,951 | def mcp ( self , * args , ** kwargs ) : self . mcp_cost = graph . MCP_Geometric ( self . raster , * args , ** kwargs ) | Setup MCP_Geometric object from skimage for optimal travel time computations |
58,952 | def notify ( self , method , params = None ) : log . debug ( 'Sending notification: %s %s' , method , params ) message = { 'jsonrpc' : JSONRPC_VERSION , 'method' : method , } if params is not None : message [ 'params' ] = params self . _consumer ( message ) | Send a JSON RPC notification to the client . |
58,953 | def request ( self , method , params = None ) : msg_id = self . _id_generator ( ) log . debug ( 'Sending request with id %s: %s %s' , msg_id , method , params ) message = { 'jsonrpc' : JSONRPC_VERSION , 'id' : msg_id , 'method' : method , } if params is not None : message [ 'params' ] = params request_future = futures ... | Send a JSON RPC request to the client . |
58,954 | def _cancel_callback ( self , request_id ) : def callback ( future ) : if future . cancelled ( ) : self . notify ( CANCEL_METHOD , { 'id' : request_id } ) future . set_exception ( JsonRpcRequestCancelled ( ) ) return callback | Construct a cancellation callback for the given request ID . |
58,955 | def consume ( self , message ) : if 'jsonrpc' not in message or message [ 'jsonrpc' ] != JSONRPC_VERSION : log . warn ( "Unknown message type %s" , message ) return if 'id' not in message : log . debug ( "Handling notification from client %s" , message ) self . _handle_notification ( message [ 'method' ] , message . ge... | Consume a JSON RPC message from the client . |
58,956 | def _handle_notification ( self , method , params ) : if method == CANCEL_METHOD : self . _handle_cancel_notification ( params [ 'id' ] ) return try : handler = self . _dispatcher [ method ] except KeyError : log . warn ( "Ignoring notification for unknown method %s" , method ) return try : handler_result = handler ( p... | Handle a notification from the client . |
58,957 | def _notification_callback ( method , params ) : def callback ( future ) : try : future . result ( ) log . debug ( "Successfully handled async notification %s %s" , method , params ) except Exception : log . exception ( "Failed to handle async notification %s %s" , method , params ) return callback | Construct a notification callback for the given request ID . |
58,958 | def _handle_cancel_notification ( self , msg_id ) : request_future = self . _client_request_futures . pop ( msg_id , None ) if not request_future : log . warn ( "Received cancel notification for unknown message id %s" , msg_id ) return if request_future . cancel ( ) : log . debug ( "Cancelled request with id %s" , msg_... | Handle a cancel notification from the client . |
58,959 | def _handle_request ( self , msg_id , method , params ) : try : handler = self . _dispatcher [ method ] except KeyError : raise JsonRpcMethodNotFound . of ( method ) handler_result = handler ( params ) if callable ( handler_result ) : log . debug ( "Executing async request handler %s" , handler_result ) request_future ... | Handle a request from the client . |
58,960 | def _request_callback ( self , request_id ) : def callback ( future ) : self . _client_request_futures . pop ( request_id , None ) if future . cancelled ( ) : future . set_exception ( JsonRpcRequestCancelled ( ) ) message = { 'jsonrpc' : JSONRPC_VERSION , 'id' : request_id , } try : message [ 'result' ] = future . resu... | Construct a request callback for the given request ID . |
58,961 | def _handle_response ( self , msg_id , result = None , error = None ) : request_future = self . _server_request_futures . pop ( msg_id , None ) if not request_future : log . warn ( "Received response to unknown message id %s" , msg_id ) return if error is not None : log . debug ( "Received error response to message %s:... | Handle a response from the client . |
58,962 | def listen ( self , message_consumer ) : while not self . _rfile . closed : request_str = self . _read_message ( ) if request_str is None : break try : message_consumer ( json . loads ( request_str . decode ( 'utf-8' ) ) ) except ValueError : log . exception ( "Failed to parse JSON message %s" , request_str ) continue | Blocking call to listen for messages on the rfile . |
58,963 | def _read_message ( self ) : line = self . _rfile . readline ( ) if not line : return None content_length = self . _content_length ( line ) while line and line . strip ( ) : line = self . _rfile . readline ( ) if not line : return None return self . _rfile . read ( content_length ) | Reads the contents of a message . |
58,964 | def _content_length ( line ) : if line . startswith ( b'Content-Length: ' ) : _ , value = line . split ( b'Content-Length: ' ) value = value . strip ( ) try : return int ( value ) except ValueError : raise ValueError ( "Invalid Content-Length header: {}" . format ( value ) ) return None | Extract the content length from an input line . |
58,965 | def hostapi_info ( index = None ) : if index is None : return ( hostapi_info ( i ) for i in range ( _pa . Pa_GetHostApiCount ( ) ) ) else : info = _pa . Pa_GetHostApiInfo ( index ) if not info : raise RuntimeError ( "Invalid host API" ) assert info . structVersion == 1 return { 'name' : ffi . string ( info . name ) . d... | Return a generator with information about each host API . |
58,966 | def device_info ( index = None ) : if index is None : return ( device_info ( i ) for i in range ( _pa . Pa_GetDeviceCount ( ) ) ) else : info = _pa . Pa_GetDeviceInfo ( index ) if not info : raise RuntimeError ( "Invalid device" ) assert info . structVersion == 2 if 'DirectSound' in hostapi_info ( info . hostApi ) [ 'n... | Return a generator with information about each device . |
58,967 | def _get_stream_parameters ( kind , device , channels , dtype , latency , samplerate ) : if device is None : if kind == 'input' : device = _pa . Pa_GetDefaultInputDevice ( ) elif kind == 'output' : device = _pa . Pa_GetDefaultOutputDevice ( ) info = device_info ( device ) if channels is None : channels = info [ 'max_' ... | Generate PaStreamParameters struct . |
58,968 | def _frombuffer ( ptr , frames , channels , dtype ) : framesize = channels * dtype . itemsize data = np . frombuffer ( ffi . buffer ( ptr , frames * framesize ) , dtype = dtype ) data . shape = - 1 , channels return data | Create NumPy array from a pointer to some memory . |
58,969 | def start ( self ) : err = _pa . Pa_StartStream ( self . _stream ) if err == _pa . paStreamIsNotStopped : return self . _handle_error ( err ) | Commence audio processing . |
58,970 | def stop ( self ) : err = _pa . Pa_StopStream ( self . _stream ) if err == _pa . paStreamIsStopped : return self . _handle_error ( err ) | Terminate audio processing . |
58,971 | def abort ( self ) : err = _pa . Pa_AbortStream ( self . _stream ) if err == _pa . paStreamIsStopped : return self . _handle_error ( err ) | Terminate audio processing immediately . |
58,972 | def read ( self , frames , raw = False ) : channels , _ = _split ( self . channels ) dtype , _ = _split ( self . dtype ) data = ffi . new ( "signed char[]" , channels * dtype . itemsize * frames ) self . _handle_error ( _pa . Pa_ReadStream ( self . _stream , data , frames ) ) if not raw : data = np . frombuffer ( ffi .... | Read samples from an input stream . |
58,973 | def write ( self , data ) : frames = len ( data ) _ , channels = _split ( self . channels ) _ , dtype = _split ( self . dtype ) if ( not isinstance ( data , np . ndarray ) or data . dtype != dtype ) : data = np . array ( data , dtype = dtype ) if len ( data . shape ) == 1 : data = np . tile ( data , ( channels , 1 ) ) ... | Write samples to an output stream . |
58,974 | def _handle_shell ( self , cfg_file , * args , ** options ) : args = ( "--interactive" , ) + args return supervisorctl . main ( ( "-c" , cfg_file ) + args ) | Command supervisord shell runs the interactive command shell . |
58,975 | def _handle_getconfig ( self , cfg_file , * args , ** options ) : if args : raise CommandError ( "supervisor getconfig takes no arguments" ) print cfg_file . read ( ) return 0 | Command supervisor getconfig prints merged config to stdout . |
58,976 | def _handle_autoreload ( self , cfg_file , * args , ** options ) : if args : raise CommandError ( "supervisor autoreload takes no arguments" ) live_dirs = self . _find_live_code_dirs ( ) reload_progs = self . _get_autoreload_programs ( cfg_file ) def autoreloader ( ) : if os . fork ( ) == 0 : sys . exit ( self . handle... | Command supervisor autoreload watches for code changes . |
58,977 | def _get_autoreload_programs ( self , cfg_file ) : cfg = RawConfigParser ( ) cfg . readfp ( cfg_file ) reload_progs = [ ] for section in cfg . sections ( ) : if section . startswith ( "program:" ) : try : if cfg . getboolean ( section , "autoreload" ) : reload_progs . append ( section . split ( ":" , 1 ) [ 1 ] ) except... | Get the set of programs to auto - reload when code changes . |
58,978 | def _find_live_code_dirs ( self ) : live_dirs = [ ] for mod in sys . modules . values ( ) : try : dirnm = os . path . dirname ( mod . __file__ ) except AttributeError : continue dirnm = os . path . realpath ( os . path . abspath ( dirnm ) ) if not dirnm . endswith ( os . sep ) : dirnm += os . sep if not os . path . isd... | Find all directories in which we might have live python code . |
58,979 | def render_config ( data , ctx ) : djsupervisor_tags . current_context = ctx data = "{% load djsupervisor_tags %}" + data t = template . Template ( data ) c = template . Context ( ctx ) return t . render ( c ) . encode ( "ascii" ) | Render the given config data using Django s template system . |
58,980 | def get_config_from_options ( ** options ) : data = [ ] data . append ( "[supervisord]\n" ) if options . get ( "daemonize" , False ) : data . append ( "nodaemon=false\n" ) else : data . append ( "nodaemon=true\n" ) if options . get ( "pidfile" , None ) : data . append ( "pidfile=%s\n" % ( options [ "pidfile" ] , ) ) if... | Get config file fragment reflecting command - line options . |
58,981 | def guess_project_dir ( ) : projname = settings . SETTINGS_MODULE . split ( "." , 1 ) [ 0 ] projmod = import_module ( projname ) projdir = os . path . dirname ( projmod . __file__ ) if os . path . isfile ( os . path . join ( projdir , "manage.py" ) ) : return projdir projdir = os . path . abspath ( os . path . join ( p... | Find the top - level Django project directory . |
58,982 | def set_if_missing ( cfg , section , option , value ) : try : cfg . get ( section , option ) except NoSectionError : cfg . add_section ( section ) cfg . set ( section , option , value ) except NoOptionError : cfg . set ( section , option , value ) | If the given option is missing set to the given value . |
58,983 | def rerender_options ( options ) : args = [ ] for name , value in options . iteritems ( ) : name = name . replace ( "_" , "-" ) if value is None : pass elif isinstance ( value , bool ) : if value : args . append ( "--%s" % ( name , ) ) elif isinstance ( value , list ) : for item in value : args . append ( "--%s=%s" % (... | Helper function to re - render command - line options . |
58,984 | def login ( self , email = None , password = None , user = None ) : if user is not None : data = { 'login' : user , 'password' : password } elif email is not None : data = { 'email' : email , 'password' : password } else : raise ValueError ( 'Neither username nor email provided to login' ) self . headers = { 'connectio... | Logs the user in and setups the header with the private token |
58,985 | def getuser ( self , user_id ) : request = requests . get ( '{0}/{1}' . format ( self . users_url , user_id ) , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 200 : return request . json ( ) else : return False | Get info for a user identified by id |
58,986 | def deleteuser ( self , user_id ) : deleted = self . delete_user ( user_id ) if deleted is False : return False else : return True | Deletes a user . Available only for administrators . This is an idempotent function calling this function for a non - existent user id still returns a status code 200 OK . The JSON response differs if the user was actually deleted or not . In the former the user is returned and in the latter not . |
58,987 | def currentuser ( self ) : request = requests . get ( '{0}/api/v3/user' . format ( self . host ) , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return request . json ( ) | Returns the current user parameters . The current user is linked to the secret token |
58,988 | def edituser ( self , user_id , ** kwargs ) : data = { } if kwargs : data . update ( kwargs ) request = requests . put ( '{0}/{1}' . format ( self . users_url , user_id ) , headers = self . headers , data = data , timeout = self . timeout , verify = self . verify_ssl , auth = self . auth ) if request . status_code == 2... | Edits an user data . |
def getsshkeys(self):
    """List all SSH keys of the current user.

    Returns the decoded JSON list on HTTP 200, otherwise False.
    """
    response = requests.get(
        self.keys_url,
        headers=self.headers,
        verify=self.verify_ssl,
        auth=self.auth,
        timeout=self.timeout,
    )
    return response.json() if response.status_code == 200 else False
def addsshkey(self, title, key):
    """Register a new SSH key (*title*, *key*) for the current user.

    Returns True when the server answers 201 Created, else False.
    """
    payload = {'title': title, 'key': key}
    response = requests.post(
        self.keys_url,
        headers=self.headers,
        data=payload,
        verify=self.verify_ssl,
        auth=self.auth,
        timeout=self.timeout,
    )
    return response.status_code == 201
58,991 | def addsshkeyuser ( self , user_id , title , key ) : data = { 'title' : title , 'key' : key } request = requests . post ( '{0}/{1}/keys' . format ( self . users_url , user_id ) , headers = self . headers , data = data , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_cod... | Add a new ssh key for the user identified by id |
def deletesshkey(self, key_id):
    """Delete the current user's SSH key identified by *key_id*.

    The server answers the literal body ``null`` when nothing was deleted;
    that case maps to False, anything else to True.
    """
    response = requests.delete(
        '{0}/{1}'.format(self.keys_url, key_id),
        headers=self.headers,
        verify=self.verify_ssl,
        auth=self.auth,
        timeout=self.timeout,
    )
    return response.content != b'null'
58,993 | def get ( self , uri , default_response = None , ** kwargs ) : url = self . api_url + uri response = requests . get ( url , params = kwargs , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return self . success_or_raise ( response , default_response = default_res... | Call GET on the Gitlab server |
58,994 | def post ( self , uri , default_response = None , ** kwargs ) : url = self . api_url + uri response = requests . post ( url , headers = self . headers , data = kwargs , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return self . success_or_raise ( response , default_response = default_res... | Call POST on the Gitlab server |
def delete(self, uri, default_response=None):
    """Issue DELETE for *uri* (appended to api_url) on the Gitlab server.

    The raw response is funneled through success_or_raise, which decides
    between returning JSON, *default_response*, or raising.
    """
    response = requests.delete(
        self.api_url + uri,
        headers=self.headers,
        verify=self.verify_ssl,
        auth=self.auth,
        timeout=self.timeout,
    )
    return self.success_or_raise(response, default_response=default_response)
58,996 | def success_or_raise ( self , response , default_response = None ) : if self . suppress_http_error and not response . ok : return False response_json = default_response if response_json is None : response_json = { } response . raise_for_status ( ) try : response_json = response . json ( ) except ValueError : pass retur... | Check if request was successful or raises an HttpError |
def getall(fn, page=None, *args, **kwargs):
    """Lazily yield every item from the paginated API method *fn*.

    *page* selects the first page to poll (defaults to 1); remaining
    positional/keyword arguments — including per_page — are forwarded to
    *fn* on every call. Iteration stops at the first empty page.
    """
    current = page or 1
    while True:
        batch = fn(*args, page=current, **kwargs)
        if not batch:
            return
        for item in batch:
            yield item
        current += 1
def setsudo(self, user=None):
    """Make subsequent API calls run on behalf of *user* via the SUDO header.

    Passing None removes any impersonation currently in effect; clearing
    when no SUDO header exists is a silent no-op.
    """
    if user is None:
        self.headers.pop('SUDO', None)
    else:
        self.headers['SUDO'] = user
58,999 | def createproject ( self , name , ** kwargs ) : data = { 'name' : name } if kwargs : data . update ( kwargs ) request = requests . post ( self . projects_url , headers = self . headers , data = data , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 201 : return r... | Creates a new project owned by the authenticated user . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.