Columns: idx (int64, 0 to 63k), question (string, lengths 53 to 5.28k), target (string, lengths 5 to 805).
58,900
def _get_addresses ( self , text ) : addresses = [ ] matches = utils . findall ( self . rules , text , flags = re . VERBOSE | re . U ) if ( matches ) : for match in matches : addresses . append ( match [ 0 ] . strip ( ) ) return addresses
Returns a list of addresses found in text
58,901
def parse ( some_text , ** kwargs ) : ap = parser . AddressParser ( ** kwargs ) return ap . parse ( some_text )
Creates a request to AddressParser and returns a list of Address objects
58,902
def setAttribute ( values , value ) : if isinstance ( value , bool ) : values . add ( ) . bool_value = value elif isinstance ( value , int ) : values . add ( ) . int32_value = value elif isinstance ( value , float ) : values . add ( ) . double_value = value elif isinstance ( value , long ) : values . add ( ) . int64_value = value elif isinstance ( value , str ) : values . add ( ) . string_value = value elif isinstance ( value , ( list , tuple , array . array ) ) : for v in value : setAttribute ( values , v ) elif isinstance ( value , dict ) : for key in value : setAttribute ( values . add ( ) . attributes . attr [ key ] . values , value [ key ] ) else : values . add ( ) . string_value = str ( value )
Takes an attribute value list and attempts to append values of the proper type, inferred from each value's Python type. (Note the bool check must precede the int check, since bool is a subclass of int.)
58,903
def deepSetAttr ( obj , path , val ) : first , _ , rest = path . rpartition ( '.' ) return setattr ( deepGetAttr ( obj , first ) if first else obj , rest , val )
Sets a deep attribute on an object by resolving a dot-delimited path. If the path does not exist, an AttributeError is raised.
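deepSetAttr leans on a companion deepGetAttr to resolve everything before the final path segment. A minimal runnable sketch, where deepGetAttr and the Node class are hypothetical stand-ins (assuming the helper is just a reduce over getattr):

import functools

def deepGetAttr(obj, path):
    # Hypothetical companion: resolve 'a.b' by chaining getattr calls.
    return functools.reduce(getattr, path.split('.'), obj)

def deepSetAttr(obj, path, val):
    first, _, rest = path.rpartition('.')
    return setattr(deepGetAttr(obj, first) if first else obj, rest, val)

class Node(object):
    pass

root = Node()
root.child = Node()
deepSetAttr(root, 'child.name', 'leaf')
print(root.child.name)  # leaf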
58,904
def convertDatetime ( t ) : epoch = datetime . datetime . utcfromtimestamp ( 0 ) delta = t - epoch millis = delta . total_seconds ( ) * 1000 return int ( millis )
Converts the specified datetime object into its appropriate protocol value: the number of milliseconds since the epoch.
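As a quick check of the contract above, one second past the epoch should map to 1000 milliseconds; a self-contained sketch (the function is restated here for illustration):

import datetime

def convertDatetime(t):
    # Milliseconds since the Unix epoch, as in the entry above.
    epoch = datetime.datetime.utcfromtimestamp(0)
    return int((t - epoch).total_seconds() * 1000)

print(convertDatetime(datetime.datetime(1970, 1, 1, 0, 0, 1)))  # 1000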
58,905
def getValueFromValue ( value ) : if type ( value ) != common . AttributeValue : raise TypeError ( "Expected an AttributeValue, but got {}" . format ( type ( value ) ) ) if value . WhichOneof ( "value" ) is None : raise AttributeError ( "Nothing set for {}" . format ( value ) ) return getattr ( value , value . WhichOneof ( "value" ) )
Extract the currently set field from a Value structure
58,906
def toJson ( protoObject , indent = None ) : js = json_format . MessageToDict ( protoObject , False ) return json . dumps ( js , indent = indent )
Serialises a protobuf object as JSON
58,907
def getProtocolClasses ( superclass = message . Message ) : superclasses = set ( [ message . Message ] ) thisModule = sys . modules [ __name__ ] subclasses = [ ] for name , class_ in inspect . getmembers ( thisModule ) : if ( ( inspect . isclass ( class_ ) and issubclass ( class_ , superclass ) and class_ not in superclasses ) ) : subclasses . append ( class_ ) return subclasses
Returns all the protocol classes that are subclasses of the specified superclass. Only leaf classes are returned, corresponding directly to the classes defined in the protocol.
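The inspect-based scan can be demonstrated without protobuf; Base and Leaf below are hypothetical stand-ins for message.Message and a generated protocol class:

import inspect
import sys

class Base(object):
    pass

class Leaf(Base):
    pass

thisModule = sys.modules[__name__]
subclasses = [class_ for name, class_ in inspect.getmembers(thisModule, inspect.isclass)
              if issubclass(class_, Base) and class_ is not Base]
print(subclasses)  # [<class '__main__.Leaf'>]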
58,908
def runCommandSplits ( splits , silent = False , shell = False ) : try : if silent : with open ( os . devnull , 'w' ) as devnull : subprocess . check_call ( splits , stdout = devnull , stderr = devnull , shell = shell ) else : subprocess . check_call ( splits , shell = shell ) except OSError as exception : if exception . errno == 2 : raise Exception ( "Can't find command while trying to run {}" . format ( splits ) ) else : raise
Run a shell command given the command's parsed command-line splits
58,909
def _createSchemaFiles ( self , destPath , schemasPath ) : ga4ghPath = os . path . join ( destPath , 'ga4gh' ) if not os . path . exists ( ga4ghPath ) : os . mkdir ( ga4ghPath ) ga4ghSchemasPath = os . path . join ( ga4ghPath , 'schemas' ) if not os . path . exists ( ga4ghSchemasPath ) : os . mkdir ( ga4ghSchemasPath ) ga4ghSchemasGa4ghPath = os . path . join ( ga4ghSchemasPath , 'ga4gh' ) if not os . path . exists ( ga4ghSchemasGa4ghPath ) : os . mkdir ( ga4ghSchemasGa4ghPath ) ga4ghSchemasGooglePath = os . path . join ( ga4ghSchemasPath , 'google' ) if not os . path . exists ( ga4ghSchemasGooglePath ) : os . mkdir ( ga4ghSchemasGooglePath ) ga4ghSchemasGoogleApiPath = os . path . join ( ga4ghSchemasGooglePath , 'api' ) if not os . path . exists ( ga4ghSchemasGoogleApiPath ) : os . mkdir ( ga4ghSchemasGoogleApiPath ) for root , dirs , files in os . walk ( schemasPath ) : for protoFilePath in fnmatch . filter ( files , '*.proto' ) : src = os . path . join ( root , protoFilePath ) dst = os . path . join ( ga4ghSchemasPath , os . path . relpath ( root , schemasPath ) , protoFilePath ) self . _copySchemaFile ( src , dst )
Create a hierarchy of proto files in a destination directory, copied from the schemasPath hierarchy
58,910
def _doLineReplacements ( self , line ) : packageString = 'package ga4gh;' if packageString in line : return line . replace ( packageString , 'package ga4gh.schemas.ga4gh;' ) importString = 'import "ga4gh/' if importString in line : return line . replace ( importString , 'import "ga4gh/schemas/ga4gh/' ) googlePackageString = 'package google.api;' if googlePackageString in line : return line . replace ( googlePackageString , 'package ga4gh.schemas.google.api;' ) googleImportString = 'import "google/api/' if googleImportString in line : return line . replace ( googleImportString , 'import "ga4gh/schemas/google/api/' ) optionString = 'option (google.api.http)' if optionString in line : return line . replace ( optionString , 'option (.ga4gh.schemas.google.api.http)' ) return line
Given a line of a proto file, replace it with one appropriate for the hierarchy we want to compile
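For a concrete sense of the import rewrite (the .proto filename is hypothetical):

line = 'import "ga4gh/variants.proto";'
print(line.replace('import "ga4gh/', 'import "ga4gh/schemas/ga4gh/'))
# import "ga4gh/schemas/ga4gh/variants.proto";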
58,911
def _copySchemaFile ( self , src , dst ) : with open ( src ) as srcFile , open ( dst , 'w' ) as dstFile : srcLines = srcFile . readlines ( ) for srcLine in srcLines : toWrite = self . _doLineReplacements ( srcLine ) dstFile . write ( toWrite )
Copy a proto file to the temporary directory with appropriate line replacements
58,912
def convert_protodef_to_editable ( proto ) : class Editable ( object ) : def __init__ ( self , prot ) : self . kind = type ( prot ) self . name = prot . name self . comment = "" self . options = dict ( [ ( key . name , value ) for ( key , value ) in prot . options . ListFields ( ) ] ) if isinstance ( prot , EnumDescriptorProto ) : self . value = [ convert_protodef_to_editable ( x ) for x in prot . value ] elif isinstance ( prot , DescriptorProto ) : self . field = [ convert_protodef_to_editable ( x ) for x in prot . field ] self . enum_type = [ convert_protodef_to_editable ( x ) for x in prot . enum_type ] self . nested_type = prot . nested_type self . oneof_decl = prot . oneof_decl elif isinstance ( prot , EnumValueDescriptorProto ) : self . number = prot . number elif isinstance ( prot , FieldDescriptorProto ) : if prot . type in [ 11 , 14 ] : self . ref_type = prot . type_name [ 1 : ] self . type = prot . type self . label = prot . label elif isinstance ( prot , ServiceDescriptorProto ) : self . method = [ convert_protodef_to_editable ( x ) for x in prot . method ] elif isinstance ( prot , MethodDescriptorProto ) : self . input_type = prot . input_type self . output_type = prot . output_type else : raise Exception , type ( prot ) return Editable ( proto )
Protobuf objects can't have arbitrary fields added, and we need to add comments to them later on, so we instead make Editable objects that can
58,913
def haversine ( point1 , point2 , unit = 'km' ) : AVG_EARTH_RADIUS_KM = 6371.0088 conversions = { 'km' : 1 , 'm' : 1000 , 'mi' : 0.621371192 , 'nmi' : 0.539956803 , 'ft' : 3280.839895013 , 'in' : 39370.078740158 } avg_earth_radius = AVG_EARTH_RADIUS_KM * conversions [ unit ] lat1 , lng1 = point1 lat2 , lng2 = point2 lat1 , lng1 , lat2 , lng2 = map ( radians , ( lat1 , lng1 , lat2 , lng2 ) ) lat = lat2 - lat1 lng = lng2 - lng1 d = sin ( lat * 0.5 ) ** 2 + cos ( lat1 ) * cos ( lat2 ) * sin ( lng * 0.5 ) ** 2 return 2 * avg_earth_radius * asin ( sqrt ( d ) )
Calculate the great-circle distance between two points on the Earth's surface.
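A usage sketch for the haversine function above, with approximate coordinates for Paris and London; the function is restated so the snippet runs on its own, and the expected outputs are approximate:

from math import radians, sin, cos, asin, sqrt

def haversine(point1, point2, unit='km'):
    # Restated from the entry above for a runnable example.
    AVG_EARTH_RADIUS_KM = 6371.0088
    conversions = {'km': 1, 'm': 1000, 'mi': 0.621371192,
                   'nmi': 0.539956803, 'ft': 3280.839895013, 'in': 39370.078740158}
    avg_earth_radius = AVG_EARTH_RADIUS_KM * conversions[unit]
    lat1, lng1, lat2, lng2 = map(radians, (*point1, *point2))
    lat, lng = lat2 - lat1, lng2 - lng1
    d = sin(lat * 0.5) ** 2 + cos(lat1) * cos(lat2) * sin(lng * 0.5) ** 2
    return 2 * avg_earth_radius * asin(sqrt(d))

paris, london = (48.8566, 2.3522), (51.5074, -0.1278)
print(haversine(paris, london))        # roughly 343.6 (km)
print(haversine(paris, london, 'mi'))  # roughly 213.5 (mi)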
58,914
def main ( ) : logging . basicConfig ( level = logging . INFO ) run_metrics = py_interop_run_metrics . run_metrics ( ) summary = py_interop_summary . run_summary ( ) valid_to_load = py_interop_run . uchar_vector ( py_interop_run . MetricCount , 0 ) py_interop_run_metrics . list_summary_metrics_to_load ( valid_to_load ) for run_folder_path in sys . argv [ 1 : ] : run_folder = os . path . basename ( run_folder_path ) try : run_metrics . read ( run_folder_path , valid_to_load ) except Exception , ex : logging . warn ( "Skipping - cannot read RunInfo.xml: %s - %s" % ( run_folder , str ( ex ) ) ) continue py_interop_summary . summarize_run_metrics ( run_metrics , summary ) error_rate_read_lane_surface = numpy . zeros ( ( summary . size ( ) , summary . lane_count ( ) , summary . surface_count ( ) ) ) for read_index in xrange ( summary . size ( ) ) : for lane_index in xrange ( summary . lane_count ( ) ) : for surface_index in xrange ( summary . surface_count ( ) ) : error_rate_read_lane_surface [ read_index , lane_index , surface_index ] = summary . at ( read_index ) . at ( lane_index ) . at ( surface_index ) . error_rate ( ) . mean ( ) logging . info ( "Run Folder: " + run_folder ) for read_index in xrange ( summary . size ( ) ) : read_summary = summary . at ( read_index ) logging . info ( "Read " + str ( read_summary . read ( ) . number ( ) ) + " - Top Surface Mean Error: " + str ( error_rate_read_lane_surface [ read_index , : , 0 ] . mean ( ) ) )
Retrieve run folder paths from the command line; ensure only metrics required for the summary are loaded; load the run metrics; calculate the summary metrics; display error by lane and read
58,915
def gen_csv ( sc , filename , field_list , source , filters ) : datafile = open ( filename , 'wb' ) csvfile = csv . writer ( datafile ) header = [ ] for field in field_list : header . append ( fields . fields [ field ] [ 'name' ] ) csvfile . writerow ( header ) debug . write ( 'Generating %s: ' % filename ) fparams = { 'fobj' : csvfile , 'flist' : field_list } sc . query ( 'vulndetails' , source = source , func = writer , func_params = fparams , ** filters ) debug . write ( '\n' ) datafile . close ( )
Generates a CSV file of vulnerability details for the given field list, source, and filters
58,916
def login ( self , user , passwd ) : resp = self . post ( 'token' , json = { 'username' : user , 'password' : passwd } ) self . _token = resp . json ( ) [ 'response' ] [ 'token' ]
Logs the user into SecurityCenter and stores the needed token and cookies .
58,917
def download_scans ( sc , age = 0 , unzip = False , path = 'scans' ) : if not os . path . exists ( path ) : logger . debug ( 'scan path didn\'t exist. creating it.' ) os . makedirs ( path ) findate = ( date . today ( ) - timedelta ( days = age ) ) logger . debug ( 'getting scan results for parsing' ) resp = sc . get ( 'scanResult' , params = { 'startTime' : int ( time . mktime ( findate . timetuple ( ) ) ) , 'fields' : 'name,finishTime,downloadAvailable,repository' , } ) for scan in resp . json ( ) [ 'response' ] [ 'usable' ] : if scan [ 'downloadAvailable' ] == 'false' : logger . debug ( '%s/"%s" not available for download' % ( scan [ 'id' ] , scan [ 'name' ] ) ) else : logger . debug ( '%s/"%s" downloading' % ( scan [ 'id' ] , scan [ 'name' ] ) ) scandata = sc . post ( 'scanResult/%s/download' % scan [ 'id' ] , json = { 'downloadType' : 'v2' } ) sfin = datetime . fromtimestamp ( int ( scan [ 'finishTime' ] ) ) filename = '%s-%s.%s.%s' % ( scan [ 'id' ] , scan [ 'name' ] . replace ( ' ' , '_' ) , scan [ 'repository' ] [ 'id' ] , sfin . strftime ( '%Y.%m.%d-%H.%M' ) ) if unzip : logger . debug ( 'extracting %s/%s' % ( scan [ 'id' ] , scan [ 'name' ] ) ) zfile = ZipFile ( StringIO ( buf = scandata . content ) ) scanfile = zfile . filelist [ 0 ] scanfile . filename = '%s.nessus' % filename zfile . extract ( scanfile , path = path ) else : logger . debug ( 'writing zip for %s/%s' % ( scan [ 'id' ] , scan [ 'name' ] ) ) with open ( '%s.zip' % filename , 'wb' ) as zfile : zfile . write ( scandata . content ) logger . info ( '%s/"%s" downloaded' % ( scan [ 'id' ] , scan [ 'name' ] ) )
Scan downloader. Attempts to download all of the scans that have completed between now and AGE days ago.
58,918
def update ( sc , filename , asset_id ) : addresses = [ ] with open ( filename ) as hostfile : for line in hostfile . readlines ( ) : addresses . append ( line . strip ( '\n' ) ) sc . asset_update ( asset_id , dns = addresses )
Updates a DNS Asset List with the contents of the filename. The assumed format of the file is 1 entry per line. This function will convert the file contents into an array of entries and then upload that array into SecurityCenter.
58,919
def generate_html_report ( base_path , asset_id ) : jenv = Environment ( loader = PackageLoader ( 'swchange' , 'templates' ) ) s = Session ( ) asset = s . query ( AssetList ) . filter_by ( id = asset_id ) . first ( ) if not asset : print 'Invalid Asset ID (%s)!' % asset_id return filename = os . path . join ( base_path , '%s-INV-CHANGE-%s.html' % ( asset . name , datetime . now ( ) . strftime ( '%Y-%m-%d.%H.%M.%S' ) ) ) print 'Generating Report : %s' % filename with open ( filename , 'wb' ) as report : report . write ( jenv . get_template ( 'layout.html' ) . render ( asset = asset , current_date = datetime . now ( ) ) )
Generates the HTML report and dumps it into the specified filename
58,920
def gen_csv ( sc , filename ) : datafile = open ( filename , 'wb' ) csvfile = csv . writer ( datafile ) csvfile . writerow ( [ 'Software Package Name' , 'Count' ] ) debug . write ( 'Generating %s: ' % filename ) fparams = { 'fobj' : csvfile } sc . query ( 'listsoftware' , func = writer , func_params = fparams ) debug . write ( '\n' ) datafile . close ( )
Generates a CSV file listing software package names and their counts
58,921
def download ( sc , age = 0 , path = 'reports' , ** args ) : if not os . path . exists ( path ) : logger . debug ( 'report path didn\'t exist. creating it.' ) os . makedirs ( path ) findate = ( date . today ( ) - timedelta ( days = age ) ) reports = sc . get ( 'report' , params = { 'startTime' : findate . strftime ( '%s' ) , 'fields' : 'name,type,status,finishTime' } ) for report in reports . json ( ) [ 'response' ] [ 'usable' ] : if report [ 'status' ] == 'Completed' : if 'name' in args and args [ 'name' ] not in report [ 'name' ] : continue if 'type' in args and args [ 'type' ] . lower ( ) != report [ 'type' ] . lower ( ) : continue report_data = sc . post ( 'report/%s/download' % report [ 'id' ] , json = { 'id' : int ( report [ 'id' ] ) } ) report_name = '%s-%s.%s' % ( report [ 'name' ] . replace ( ' ' , '_' ) , report [ 'finishTime' ] , report [ 'type' ] ) logger . info ( 'writing %s to disk' % report_name ) with open ( os . path . join ( path , report_name ) , 'wb' ) as report_file : report_file . write ( report_data . content )
Report downloader. Pulls reports down from SecurityCenter, based on the conditions provided, into the path provided.
58,922
def post ( self , path , ** kwargs ) : resp = self . _session . post ( self . _url ( path ) , ** self . _builder ( ** kwargs ) ) if 'stream' in kwargs : return resp else : return self . _resp_error_check ( resp )
Calls the specified path with the POST method
58,923
def import_repo ( self , repo_id , fileobj ) : filename = self . upload ( fileobj ) . json ( ) [ 'response' ] [ 'filename' ] return self . post ( 'repository/{}/import' . format ( repo_id ) , json = { 'file' : filename } )
Imports a repository package using the repository ID specified .
58,924
def _revint ( self , version ) : intrev = 0 vsplit = version . split ( '.' ) for c in range ( len ( vsplit ) ) : item = int ( vsplit [ c ] ) * ( 10 ** ( ( ( len ( vsplit ) - c - 1 ) * 2 ) ) ) intrev += item return intrev
Internal function to convert a version string to an integer.
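Each dotted component is given two decimal digits, so '4.2.1' maps to 40201 and version comparisons become plain integer comparisons; a standalone sketch:

def revint(version):
    # Restated from the entry above: two decimal digits per component.
    intrev = 0
    vsplit = version.split('.')
    for c in range(len(vsplit)):
        intrev += int(vsplit[c]) * (10 ** ((len(vsplit) - c - 1) * 2))
    return intrev

print(revint('4.2.1'))  # 40201
print(revint('4.2.1') < revint('4.10.0'))  # True, unlike a naive string comparison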
58,925
def _revcheck ( self , func , version ) : current = self . _revint ( self . version ) check = self . _revint ( version ) if func in ( 'lt' , '<=' , ) : return check <= current elif func in ( 'gt' , '>=' ) : return check >= current elif func in ( 'eq' , '=' , 'equals' ) : return check == current else : return False
Internal function to check a version against the one we have determined we are talking to, using the comparison given by func. This is very useful for newer API calls, to make sure we don't accidentally make a call to something that doesn't exist.
58,926
def _build_xrefs ( self ) : xrefs = set ( ) plugins = self . plugins ( ) for plugin in plugins : for xref in plugin [ 'xrefs' ] . split ( ', ' ) : xrf = xref . replace ( '-' , '_' ) . split ( ':' ) [ 0 ] if xrf is not '' : xrefs . add ( xrf ) self . _xrefs = list ( xrefs )
Internal function to populate the xrefs list with the external references to be used in searching plugins and potentially other functions as well .
58,927
def login ( self , user , passwd ) : data = self . raw_query ( 'auth' , 'login' , data = { 'username' : user , 'password' : passwd } ) self . _token = data [ "token" ] self . _user = data
login user passwd: Performs the login operation for Security Center, storing the token that Security Center has generated for this login session for future queries.
58,928
def credential_add ( self , name , cred_type , ** options ) : if 'privateKey' in options : options [ 'privateKey' ] = self . _upload ( options [ 'privateKey' ] ) [ 'filename' ] if 'publicKey' in options : options [ 'publicKey' ] = self . _upload ( options [ 'publicKey' ] ) [ 'filename' ] return self . raw_query ( "credential" , "add" , data = options )
Adds a new credential into SecurityCenter. As credentials can be of multiple types, there are different options to specify for each type of credential.
58,929
def credential_delete_simulate ( self , * ids ) : return self . raw_query ( "credential" , "deleteSimulate" , data = { "credentials" : [ { "id" : str ( id ) } for id in ids ] } )
Show the relationships and dependencies for one or more credentials .
58,930
def credential_delete ( self , * ids ) : return self . raw_query ( "credential" , "delete" , data = { "credentials" : [ { "id" : str ( id ) } for id in ids ] } )
Delete one or more credentials .
58,931
def plugins ( self , plugin_type = 'all' , sort = 'id' , direction = 'asc' , size = 1000 , offset = 0 , all = True , loops = 0 , since = None , ** filterset ) : plugins = [ ] payload = { 'size' : size , 'offset' : offset , 'type' : plugin_type , 'sortField' : sort , 'sortDirection' : direction . upper ( ) , } if len ( filterset ) > 0 : fname = list ( filterset . keys ( ) ) [ 0 ] if fname in self . _xrefs : fname = 'xrefs:%s' % fname . replace ( '_' , '-' ) payload [ 'filterField' ] = fname payload [ 'filterString' ] = filterset [ list ( filterset . keys ( ) ) [ 0 ] ] if since is not None and isinstance ( since , date ) : payload [ 'since' ] = calendar . timegm ( since . utctimetuple ( ) ) while all or loops > 0 : data = self . raw_query ( 'plugin' , 'init' , data = payload ) if not data : return [ ] for plugin in data [ 'plugins' ] : plugins . append ( plugin ) if len ( data [ 'plugins' ] ) < size : all = False loops = 0 else : loops -= 1 payload [ 'offset' ] += len ( data [ 'plugins' ] ) return plugins
plugins: Returns a list of the plugins and their associated families. For simplicity, the plugin family names will be injected into the plugin data so that only one list is returned with all of the information.
58,932
def plugin_counts ( self ) : ret = { 'total' : 0 , } data = self . raw_query ( 'plugin' , 'init' ) ret [ 'total' ] = data [ 'pluginCount' ] if 'lastUpdates' in data : for item in [ 'active' , 'passive' , 'compliance' , 'custom' , 'event' ] : itemdata = { } if item in data [ 'lastUpdates' ] : itemdata = data [ 'lastUpdates' ] [ item ] if item in data : itemdata [ 'count' ] = data [ item ] else : itemdata [ 'count' ] = 0 ret [ item ] = itemdata return ret
plugin_counts: Returns the plugin counts as a dictionary, with the last-updated info if it's available.
58,933
def ip_info ( self , ip , repository_ids = None ) : if not repository_ids : repository_ids = [ ] repos = [ ] for rid in repository_ids : repos . append ( { 'id' : rid } ) return self . raw_query ( 'vuln' , 'getIP' , data = { 'ip' : ip , 'repositories' : repos } )
ip_info: Returns information about the specified IP from the defined repository ids.
58,934
def scan_list ( self , start_time = None , end_time = None , ** kwargs ) : try : end_time = datetime . utcfromtimestamp ( int ( end_time ) ) except TypeError : if end_time is None : end_time = datetime . utcnow ( ) try : start_time = datetime . utcfromtimestamp ( int ( start_time ) ) except TypeError : if start_time is None : start_time = end_time - timedelta ( days = 30 ) data = { "startTime" : calendar . timegm ( start_time . utctimetuple ( ) ) , "endTime" : calendar . timegm ( end_time . utctimetuple ( ) ) } data . update ( kwargs ) result = self . raw_query ( "scanResult" , "getRange" , data = data ) return result [ "scanResults" ]
List scans stored in Security Center in a given time range.
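The try/except TypeError pattern above lets callers pass an epoch timestamp or nothing at all; a minimal sketch of that coercion (the helper name is hypothetical):

from datetime import datetime, timedelta

def coerce_time(ts, default):
    # int(None) raises TypeError, so None falls through to the default,
    # mirroring the entry above.
    try:
        return datetime.utcfromtimestamp(int(ts))
    except TypeError:
        return default

end_time = coerce_time(None, datetime.utcnow())
start_time = coerce_time(None, end_time - timedelta(days=30))
print(start_time < end_time)  # True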
58,935
def dashboard_import ( self , name , fileobj ) : data = self . _upload ( fileobj ) return self . raw_query ( 'dashboard' , 'importTab' , data = { 'filename' : data [ 'filename' ] , 'name' : name , } )
dashboard_import Dashboard_Name filename: Uploads a dashboard template to the current user's dashboard tabs.
58,936
def report_import ( self , name , filename ) : data = self . _upload ( filename ) return self . raw_query ( 'report' , 'import' , data = { 'filename' : data [ 'filename' ] , 'name' : name , } )
report_import Report_Name filename: Uploads a report template to the current user's reports.
58,937
def asset_create ( self , name , items , tag = '' , description = '' , atype = 'static' ) : data = { 'name' : name , 'description' : description , 'type' : atype , 'tags' : tag } if atype == 'static' : data [ 'definedIPs' ] = ',' . join ( items ) if atype == 'dns' : data [ 'type' ] = 'dnsname' data [ 'definedDNSNames' ] = ' ' . join ( items ) return self . raw_query ( 'asset' , 'add' , data = data )
asset_create name items tag description: Create a new asset list (static or DNS) with the defined information.
58,938
def asset_create_combo ( self , name , combo , tag = '' , description = '' ) : return self . raw_query ( 'asset' , 'add' , data = { 'name' : name , 'description' : description , 'type' : 'combination' , 'combinations' : combo , } )
asset_create_combo name combo tag description: Creates a new combination asset list. Operands can either be asset list IDs or a nested combination asset list.
58,939
def risk_rule ( self , rule_type , rule_value , port , proto , plugin_id , repo_ids , comment = '' , expires = '-1' , severity = None ) : data = { 'hostType' : rule_type , 'port' : port , 'comments' : comment , 'protocol' : proto , 'pluginID' : plugin_id , 'repIDs' : [ { 'id' : i } for i in repo_ids ] } if rule_type != 'all' : data [ 'hostValue' ] = rule_value if severity is None : data [ 'expires' ] = expires return self . raw_query ( 'acceptRiskRule' , 'add' , data = data ) else : sevlevels = { 'info' : 0 , 'low' : 1 , 'medium' : 2 , 'high' : 3 , 'critical' : 4 } data [ 'severity' ] = sevlevels [ severity ] return self . raw_query ( 'recastRiskRule' , 'add' , data = data )
risk_rule rule_type rule_value port proto plugin_id comment: Creates an accept-risk rule (or, when severity is given, a recast-risk rule) based on the information provided.
58,940
def group_add ( self , name , restrict , repos , lces = [ ] , assets = [ ] , queries = [ ] , policies = [ ] , dashboards = [ ] , credentials = [ ] , description = '' ) : return self . raw_query ( 'group' , 'add' , data = { 'lces' : [ { 'id' : i } for i in lces ] , 'assets' : [ { 'id' : i } for i in assets ] , 'queries' : [ { 'id' : i } for i in queries ] , 'policies' : [ { 'id' : i } for i in policies ] , 'dashboardTabs' : [ { 'id' : i } for i in dashboards ] , 'credentials' : [ { 'id' : i } for i in credentials ] , 'repositories' : [ { 'id' : i } for i in repos ] , 'definingAssets' : [ { 'id' : i } for i in restrict ] , 'name' : name , 'description' : description , 'users' : [ ] , 'context' : '' } )
group_add name restrict repos: Creates a new group with the given name, defining assets (restrict), and repositories.
58,941
def get_geo_info ( filename , band = 1 ) : sourceds = gdal . Open ( filename , GA_ReadOnly ) ndv = sourceds . GetRasterBand ( band ) . GetNoDataValue ( ) xsize = sourceds . RasterXSize ysize = sourceds . RasterYSize geot = sourceds . GetGeoTransform ( ) projection = osr . SpatialReference ( ) projection . ImportFromWkt ( sourceds . GetProjectionRef ( ) ) datatype = sourceds . GetRasterBand ( band ) . DataType datatype = gdal . GetDataTypeName ( datatype ) return ndv , xsize , ysize , geot , projection , datatype
Gets information from a Raster data set
58,942
def create_geotiff ( name , Array , driver , ndv , xsize , ysize , geot , projection , datatype , band = 1 ) : if isinstance ( datatype , np . int ) == False : if datatype . startswith ( 'gdal.GDT_' ) == False : datatype = eval ( 'gdal.GDT_' + datatype ) newfilename = name + '.tif' Array [ np . isnan ( Array ) ] = ndv DataSet = driver . Create ( newfilename , xsize , ysize , 1 , datatype ) DataSet . SetGeoTransform ( geot ) DataSet . SetProjection ( projection . ExportToWkt ( ) ) DataSet . GetRasterBand ( band ) . WriteArray ( Array ) DataSet . GetRasterBand ( band ) . SetNoDataValue ( ndv ) return newfilename
Creates a new GeoTIFF from an array
58,943
def load_tiff ( file ) : ndv , xsize , ysize , geot , projection , datatype = get_geo_info ( file ) data = gdalnumeric . LoadFile ( file ) data = np . ma . masked_array ( data , mask = data == ndv , fill_value = ndv ) return data
Load a GeoTIFF raster, keeping ndv values, using a masked array
58,944
def from_file ( filename , ** kwargs ) : ndv , xsize , ysize , geot , projection , datatype = get_geo_info ( filename , ** kwargs ) data = gdalnumeric . LoadFile ( filename , ** kwargs ) data = np . ma . masked_array ( data , mask = data == ndv , fill_value = ndv ) return GeoRaster ( data , geot , nodata_value = ndv , projection = projection , datatype = datatype )
Create a GeoRaster object from a file
58,945
def copy ( self ) : return GeoRaster ( self . raster . copy ( ) , self . geot , nodata_value = self . nodata_value , projection = self . projection , datatype = self . datatype )
Returns copy of itself
58,946
def clip ( self , shp , keep = False , * args , ** kwargs ) : df = pd . DataFrame ( zonal_stats ( shp , self . raster , nodata = self . nodata_value , all_touched = True , raster_out = True , affine = Affine . from_gdal ( * self . geot ) , geojson_out = keep , ) ) if keep : df [ 'GeoRaster' ] = df . properties . apply ( lambda x : GeoRaster ( x [ 'mini_raster_array' ] , Affine . to_gdal ( x [ 'mini_raster_affine' ] ) , nodata_value = x [ 'mini_raster_nodata' ] , projection = self . projection , datatype = self . datatype ) ) cols = list ( set ( [ i for i in df . properties [ 0 ] . keys ( ) ] ) . intersection ( set ( shp . columns ) ) ) df2 = pd . DataFrame ( [ df . properties . apply ( lambda x : x [ i ] ) for i in cols ] ) . T . merge ( df [ [ 'GeoRaster' ] ] , left_index = True , right_index = True , ) df2 . columns = cols + [ 'GeoRaster' ] df2 = df2 . merge ( df [ [ 'id' ] ] , left_index = True , right_index = True ) df2 . set_index ( 'id' , inplace = True ) return df2 else : df [ 'GeoRaster' ] = df . apply ( lambda x : GeoRaster ( x . mini_raster_array , Affine . to_gdal ( x . mini_raster_affine ) , nodata_value = x . mini_raster_nodata , projection = self . projection , datatype = self . datatype ) , axis = 1 ) return df [ 'GeoRaster' ] . values
Clip raster using shp, where shp is either a GeoPandas DataFrame, a shapefile, or some other geometry format used by python-raster-stats
58,947
def pysal_Gamma ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Gamma = pysal . Gamma ( rasterf , self . weights , ** kwargs )
Compute Gamma Index of Spatial Autocorrelation for GeoRaster
58,948
def pysal_Join_Counts ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Join_Counts = pysal . Join_Counts ( rasterf , self . weights , ** kwargs )
Compute join count statistics for GeoRaster
58,949
def pysal_Moran ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Moran = pysal . Moran ( rasterf , self . weights , ** kwargs )
Compute Moran's I measure of global spatial autocorrelation for GeoRaster
58,950
def pysal_Moran_Local ( self , ** kwargs ) : if self . weights is None : self . raster_weights ( ** kwargs ) rasterf = self . raster . flatten ( ) rasterf = rasterf [ rasterf . mask == False ] self . Moran_Local = pysal . Moran_Local ( rasterf , self . weights , ** kwargs ) for i in self . Moran_Local . __dict__ . keys ( ) : if ( isinstance ( getattr ( self . Moran_Local , i ) , np . ma . masked_array ) or ( isinstance ( getattr ( self . Moran_Local , i ) , np . ndarray ) ) and len ( getattr ( self . Moran_Local , i ) . shape ) == 1 ) : setattr ( self . Moran_Local , i , self . map_vector ( getattr ( self . Moran_Local , i ) ) )
Compute Local Moran's I measure of local spatial autocorrelation for GeoRaster
58,951
def mcp ( self , * args , ** kwargs ) : self . mcp_cost = graph . MCP_Geometric ( self . raster , * args , ** kwargs )
Set up the MCP_Geometric object from skimage for optimal travel-time computations
58,952
def notify ( self , method , params = None ) : log . debug ( 'Sending notification: %s %s' , method , params ) message = { 'jsonrpc' : JSONRPC_VERSION , 'method' : method , } if params is not None : message [ 'params' ] = params self . _consumer ( message )
Send a JSON RPC notification to the client .
58,953
def request ( self , method , params = None ) : msg_id = self . _id_generator ( ) log . debug ( 'Sending request with id %s: %s %s' , msg_id , method , params ) message = { 'jsonrpc' : JSONRPC_VERSION , 'id' : msg_id , 'method' : method , } if params is not None : message [ 'params' ] = params request_future = futures . Future ( ) request_future . add_done_callback ( self . _cancel_callback ( msg_id ) ) self . _server_request_futures [ msg_id ] = request_future self . _consumer ( message ) return request_future
Send a JSON RPC request to the client .
58,954
def _cancel_callback ( self , request_id ) : def callback ( future ) : if future . cancelled ( ) : self . notify ( CANCEL_METHOD , { 'id' : request_id } ) future . set_exception ( JsonRpcRequestCancelled ( ) ) return callback
Construct a cancellation callback for the given request ID .
58,955
def consume ( self , message ) : if 'jsonrpc' not in message or message [ 'jsonrpc' ] != JSONRPC_VERSION : log . warn ( "Unknown message type %s" , message ) return if 'id' not in message : log . debug ( "Handling notification from client %s" , message ) self . _handle_notification ( message [ 'method' ] , message . get ( 'params' ) ) elif 'method' not in message : log . debug ( "Handling response from client %s" , message ) self . _handle_response ( message [ 'id' ] , message . get ( 'result' ) , message . get ( 'error' ) ) else : try : log . debug ( "Handling request from client %s" , message ) self . _handle_request ( message [ 'id' ] , message [ 'method' ] , message . get ( 'params' ) ) except JsonRpcException as e : log . exception ( "Failed to handle request %s" , message [ 'id' ] ) self . _consumer ( { 'jsonrpc' : JSONRPC_VERSION , 'id' : message [ 'id' ] , 'error' : e . to_dict ( ) } ) except Exception : log . exception ( "Failed to handle request %s" , message [ 'id' ] ) self . _consumer ( { 'jsonrpc' : JSONRPC_VERSION , 'id' : message [ 'id' ] , 'error' : JsonRpcInternalError . of ( sys . exc_info ( ) ) . to_dict ( ) } )
Consume a JSON RPC message from the client.
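Dispatch hinges on which keys are present in the message; hedged examples of the three shapes (method names and payloads are illustrative):

# No 'id': a notification.
notification = {'jsonrpc': '2.0', 'method': 'textDocument/didSave',
                'params': {'textDocument': {'uri': 'file:///tmp/x.py'}}}
# 'id' but no 'method': a response to one of our own requests.
response = {'jsonrpc': '2.0', 'id': 7, 'result': {'ok': True}}
# Both 'id' and 'method': a request we must answer.
request = {'jsonrpc': '2.0', 'id': 8, 'method': 'workspace/symbol',
           'params': {'query': 'main'}}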
58,956
def _handle_notification ( self , method , params ) : if method == CANCEL_METHOD : self . _handle_cancel_notification ( params [ 'id' ] ) return try : handler = self . _dispatcher [ method ] except KeyError : log . warn ( "Ignoring notification for unknown method %s" , method ) return try : handler_result = handler ( params ) except Exception : log . exception ( "Failed to handle notification %s: %s" , method , params ) return if callable ( handler_result ) : log . debug ( "Executing async notification handler %s" , handler_result ) notification_future = self . _executor_service . submit ( handler_result ) notification_future . add_done_callback ( self . _notification_callback ( method , params ) )
Handle a notification from the client .
58,957
def _notification_callback ( method , params ) : def callback ( future ) : try : future . result ( ) log . debug ( "Successfully handled async notification %s %s" , method , params ) except Exception : log . exception ( "Failed to handle async notification %s %s" , method , params ) return callback
Construct a callback to log the outcome of the async notification handler for the given method and params.
58,958
def _handle_cancel_notification ( self , msg_id ) : request_future = self . _client_request_futures . pop ( msg_id , None ) if not request_future : log . warn ( "Received cancel notification for unknown message id %s" , msg_id ) return if request_future . cancel ( ) : log . debug ( "Cancelled request with id %s" , msg_id )
Handle a cancel notification from the client .
58,959
def _handle_request ( self , msg_id , method , params ) : try : handler = self . _dispatcher [ method ] except KeyError : raise JsonRpcMethodNotFound . of ( method ) handler_result = handler ( params ) if callable ( handler_result ) : log . debug ( "Executing async request handler %s" , handler_result ) request_future = self . _executor_service . submit ( handler_result ) self . _client_request_futures [ msg_id ] = request_future request_future . add_done_callback ( self . _request_callback ( msg_id ) ) else : log . debug ( "Got result from synchronous request handler: %s" , handler_result ) self . _consumer ( { 'jsonrpc' : JSONRPC_VERSION , 'id' : msg_id , 'result' : handler_result } )
Handle a request from the client .
58,960
def _request_callback ( self , request_id ) : def callback ( future ) : self . _client_request_futures . pop ( request_id , None ) if future . cancelled ( ) : future . set_exception ( JsonRpcRequestCancelled ( ) ) message = { 'jsonrpc' : JSONRPC_VERSION , 'id' : request_id , } try : message [ 'result' ] = future . result ( ) except JsonRpcException as e : log . exception ( "Failed to handle request %s" , request_id ) message [ 'error' ] = e . to_dict ( ) except Exception : log . exception ( "Failed to handle request %s" , request_id ) message [ 'error' ] = JsonRpcInternalError . of ( sys . exc_info ( ) ) . to_dict ( ) self . _consumer ( message ) return callback
Construct a request callback for the given request ID .
58,961
def _handle_response ( self , msg_id , result = None , error = None ) : request_future = self . _server_request_futures . pop ( msg_id , None ) if not request_future : log . warn ( "Received response to unknown message id %s" , msg_id ) return if error is not None : log . debug ( "Received error response to message %s: %s" , msg_id , error ) request_future . set_exception ( JsonRpcException . from_dict ( error ) ) log . debug ( "Received result for message %s: %s" , msg_id , result ) request_future . set_result ( result )
Handle a response from the client .
58,962
def listen ( self , message_consumer ) : while not self . _rfile . closed : request_str = self . _read_message ( ) if request_str is None : break try : message_consumer ( json . loads ( request_str . decode ( 'utf-8' ) ) ) except ValueError : log . exception ( "Failed to parse JSON message %s" , request_str ) continue
Blocking call to listen for messages on the rfile .
58,963
def _read_message ( self ) : line = self . _rfile . readline ( ) if not line : return None content_length = self . _content_length ( line ) while line and line . strip ( ) : line = self . _rfile . readline ( ) if not line : return None return self . _rfile . read ( content_length )
Reads the contents of a message .
58,964
def _content_length ( line ) : if line . startswith ( b'Content-Length: ' ) : _ , value = line . split ( b'Content-Length: ' ) value = value . strip ( ) try : return int ( value ) except ValueError : raise ValueError ( "Invalid Content-Length header: {}" . format ( value ) ) return None
Extract the content length from an input line.
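A standalone sketch of the header parse, fed a typical framing line (restated from the entry above):

def content_length(line):
    if line.startswith(b'Content-Length: '):
        _, value = line.split(b'Content-Length: ')
        try:
            return int(value.strip())
        except ValueError:
            raise ValueError("Invalid Content-Length header: {}".format(value))
    return None

print(content_length(b'Content-Length: 42\r\n'))  # 42
print(content_length(b'Content-Type: application/json\r\n'))  # None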
58,965
def hostapi_info ( index = None ) : if index is None : return ( hostapi_info ( i ) for i in range ( _pa . Pa_GetHostApiCount ( ) ) ) else : info = _pa . Pa_GetHostApiInfo ( index ) if not info : raise RuntimeError ( "Invalid host API" ) assert info . structVersion == 1 return { 'name' : ffi . string ( info . name ) . decode ( errors = 'ignore' ) , 'default_input_device' : info . defaultInputDevice , 'default_output_device' : info . defaultOutputDevice }
Return a generator with information about each host API .
58,966
def device_info ( index = None ) : if index is None : return ( device_info ( i ) for i in range ( _pa . Pa_GetDeviceCount ( ) ) ) else : info = _pa . Pa_GetDeviceInfo ( index ) if not info : raise RuntimeError ( "Invalid device" ) assert info . structVersion == 2 if 'DirectSound' in hostapi_info ( info . hostApi ) [ 'name' ] : enc = 'mbcs' else : enc = 'utf-8' return { 'name' : ffi . string ( info . name ) . decode ( encoding = enc , errors = 'ignore' ) , 'hostapi' : info . hostApi , 'max_input_channels' : info . maxInputChannels , 'max_output_channels' : info . maxOutputChannels , 'default_low_input_latency' : info . defaultLowInputLatency , 'default_low_output_latency' : info . defaultLowOutputLatency , 'default_high_input_latency' : info . defaultHighInputLatency , 'default_high_output_latency' : info . defaultHighOutputLatency , 'default_samplerate' : info . defaultSampleRate }
Return a generator with information about each device .
58,967
def _get_stream_parameters ( kind , device , channels , dtype , latency , samplerate ) : if device is None : if kind == 'input' : device = _pa . Pa_GetDefaultInputDevice ( ) elif kind == 'output' : device = _pa . Pa_GetDefaultOutputDevice ( ) info = device_info ( device ) if channels is None : channels = info [ 'max_' + kind + '_channels' ] dtype = np . dtype ( dtype ) try : sample_format = _np2pa [ dtype ] except KeyError : raise ValueError ( "Invalid " + kind + " sample format" ) if samplerate is None : samplerate = info [ 'default_samplerate' ] parameters = ffi . new ( "PaStreamParameters*" , ( device , channels , sample_format , latency , ffi . NULL ) ) return parameters , dtype , samplerate
Generate PaStreamParameters struct .
58,968
def _frombuffer ( ptr , frames , channels , dtype ) : framesize = channels * dtype . itemsize data = np . frombuffer ( ffi . buffer ( ptr , frames * framesize ) , dtype = dtype ) data . shape = - 1 , channels return data
Create NumPy array from a pointer to some memory .
58,969
def start ( self ) : err = _pa . Pa_StartStream ( self . _stream ) if err == _pa . paStreamIsNotStopped : return self . _handle_error ( err )
Commence audio processing .
58,970
def stop ( self ) : err = _pa . Pa_StopStream ( self . _stream ) if err == _pa . paStreamIsStopped : return self . _handle_error ( err )
Terminate audio processing .
58,971
def abort ( self ) : err = _pa . Pa_AbortStream ( self . _stream ) if err == _pa . paStreamIsStopped : return self . _handle_error ( err )
Terminate audio processing immediately .
58,972
def read ( self , frames , raw = False ) : channels , _ = _split ( self . channels ) dtype , _ = _split ( self . dtype ) data = ffi . new ( "signed char[]" , channels * dtype . itemsize * frames ) self . _handle_error ( _pa . Pa_ReadStream ( self . _stream , data , frames ) ) if not raw : data = np . frombuffer ( ffi . buffer ( data ) , dtype = dtype ) data . shape = frames , channels return data
Read samples from an input stream .
58,973
def write ( self , data ) : frames = len ( data ) _ , channels = _split ( self . channels ) _ , dtype = _split ( self . dtype ) if ( not isinstance ( data , np . ndarray ) or data . dtype != dtype ) : data = np . array ( data , dtype = dtype ) if len ( data . shape ) == 1 : data = np . tile ( data , ( channels , 1 ) ) . T if data . shape [ 1 ] > channels : data = data [ : , : channels ] if data . shape < ( frames , channels ) : tmp = data data = np . zeros ( ( frames , channels ) , dtype = dtype ) data [ : tmp . shape [ 0 ] , : tmp . shape [ 1 ] ] = tmp data = data . ravel ( ) . tostring ( ) err = _pa . Pa_WriteStream ( self . _stream , data , frames ) self . _handle_error ( err )
Write samples to an output stream .
58,974
def _handle_shell ( self , cfg_file , * args , ** options ) : args = ( "--interactive" , ) + args return supervisorctl . main ( ( "-c" , cfg_file ) + args )
Command supervisor shell runs the interactive command shell.
58,975
def _handle_getconfig ( self , cfg_file , * args , ** options ) : if args : raise CommandError ( "supervisor getconfig takes no arguments" ) print cfg_file . read ( ) return 0
Command supervisor getconfig prints merged config to stdout .
58,976
def _handle_autoreload ( self , cfg_file , * args , ** options ) : if args : raise CommandError ( "supervisor autoreload takes no arguments" ) live_dirs = self . _find_live_code_dirs ( ) reload_progs = self . _get_autoreload_programs ( cfg_file ) def autoreloader ( ) : if os . fork ( ) == 0 : sys . exit ( self . handle ( "restart" , * reload_progs , ** options ) ) handler = CallbackModifiedHandler ( callback = autoreloader , repeat_delay = 1 , patterns = AUTORELOAD_PATTERNS , ignore_patterns = AUTORELOAD_IGNORE , ignore_directories = True ) from watchdog . observers import Observer from watchdog . observers . polling import PollingObserver observer = None for ObserverCls in ( Observer , PollingObserver ) : observer = ObserverCls ( ) try : for live_dir in set ( live_dirs ) : observer . schedule ( handler , live_dir , True ) break except Exception : print >> sys . stderr , "COULD NOT WATCH FILESYSTEM USING" print >> sys . stderr , "OBSERVER CLASS: " , ObserverCls traceback . print_exc ( ) observer . start ( ) observer . stop ( ) if observer is None : print >> sys . stderr , "COULD NOT WATCH FILESYSTEM" return 1 observer . start ( ) try : while True : time . sleep ( 1 ) except KeyboardInterrupt : observer . stop ( ) observer . join ( ) return 0
Command supervisor autoreload watches for code changes .
58,977
def _get_autoreload_programs ( self , cfg_file ) : cfg = RawConfigParser ( ) cfg . readfp ( cfg_file ) reload_progs = [ ] for section in cfg . sections ( ) : if section . startswith ( "program:" ) : try : if cfg . getboolean ( section , "autoreload" ) : reload_progs . append ( section . split ( ":" , 1 ) [ 1 ] ) except NoOptionError : pass return reload_progs
Get the set of programs to auto-reload when code changes.
58,978
def _find_live_code_dirs ( self ) : live_dirs = [ ] for mod in sys . modules . values ( ) : try : dirnm = os . path . dirname ( mod . __file__ ) except AttributeError : continue dirnm = os . path . realpath ( os . path . abspath ( dirnm ) ) if not dirnm . endswith ( os . sep ) : dirnm += os . sep if not os . path . isdir ( dirnm ) : continue for dirnm2 in live_dirs : if dirnm . startswith ( dirnm2 ) : break else : live_dirs = [ dirnm2 for dirnm2 in live_dirs if not dirnm2 . startswith ( dirnm ) ] live_dirs . append ( dirnm ) return live_dirs
Find all directories in which we might have live python code .
58,979
def render_config ( data , ctx ) : djsupervisor_tags . current_context = ctx data = "{% load djsupervisor_tags %}" + data t = template . Template ( data ) c = template . Context ( ctx ) return t . render ( c ) . encode ( "ascii" )
Render the given config data using Django's template system.
58,980
def get_config_from_options ( ** options ) : data = [ ] data . append ( "[supervisord]\n" ) if options . get ( "daemonize" , False ) : data . append ( "nodaemon=false\n" ) else : data . append ( "nodaemon=true\n" ) if options . get ( "pidfile" , None ) : data . append ( "pidfile=%s\n" % ( options [ "pidfile" ] , ) ) if options . get ( "logfile" , None ) : data . append ( "logfile=%s\n" % ( options [ "logfile" ] , ) ) for progname in options . get ( "launch" , None ) or [ ] : data . append ( "[program:%s]\nautostart=true\n" % ( progname , ) ) for progname in options . get ( "nolaunch" , None ) or [ ] : data . append ( "[program:%s]\nautostart=false\n" % ( progname , ) ) for progname in options . get ( "include" , None ) or [ ] : data . append ( "[program:%s]\nexclude=false\n" % ( progname , ) ) for progname in options . get ( "exclude" , None ) or [ ] : data . append ( "[program:%s]\nexclude=true\n" % ( progname , ) ) if options . get ( "autoreload" , None ) : data . append ( "[program:autoreload]\nexclude=false\nautostart=true\n" ) data . append ( "[program:__defaults__]\nautoreload=false\n" ) for progname in options [ "autoreload" ] : data . append ( "[program:%s]\nautoreload=true\n" % ( progname , ) ) if options . get ( "noreload" , False ) : data . append ( "[program:autoreload]\nexclude=true\n" ) return "" . join ( data )
Get config file fragment reflecting command-line options.
58,981
def guess_project_dir ( ) : projname = settings . SETTINGS_MODULE . split ( "." , 1 ) [ 0 ] projmod = import_module ( projname ) projdir = os . path . dirname ( projmod . __file__ ) if os . path . isfile ( os . path . join ( projdir , "manage.py" ) ) : return projdir projdir = os . path . abspath ( os . path . join ( projdir , os . path . pardir ) ) if os . path . isfile ( os . path . join ( projdir , "manage.py" ) ) : return projdir msg = "Unable to determine the Django project directory;" " use --project-dir to specify it" raise RuntimeError ( msg )
Find the top-level Django project directory.
58,982
def set_if_missing ( cfg , section , option , value ) : try : cfg . get ( section , option ) except NoSectionError : cfg . add_section ( section ) cfg . set ( section , option , value ) except NoOptionError : cfg . set ( section , option , value )
If the given option is missing, set it to the given value.
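A runnable sketch against Python 3's configparser; the section and option names are made up:

from configparser import RawConfigParser, NoSectionError, NoOptionError

def set_if_missing(cfg, section, option, value):
    # Same logic as the entry above.
    try:
        cfg.get(section, option)
    except NoSectionError:
        cfg.add_section(section)
        cfg.set(section, option, value)
    except NoOptionError:
        cfg.set(section, option, value)

cfg = RawConfigParser()
set_if_missing(cfg, 'program:web', 'autostart', 'true')
set_if_missing(cfg, 'program:web', 'autostart', 'false')  # already set; stays 'true'
print(cfg.get('program:web', 'autostart'))  # true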
58,983
def rerender_options ( options ) : args = [ ] for name , value in options . iteritems ( ) : name = name . replace ( "_" , "-" ) if value is None : pass elif isinstance ( value , bool ) : if value : args . append ( "--%s" % ( name , ) ) elif isinstance ( value , list ) : for item in value : args . append ( "--%s=%s" % ( name , item ) ) else : args . append ( "--%s=%s" % ( name , value ) ) return " " . join ( args )
Helper function to re-render command-line options.
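A Python 3 rendering of the same helper (the entry above uses the Python 2 iteritems), with a small demonstration:

def rerender_options(options):
    args = []
    for name, value in options.items():
        name = name.replace('_', '-')
        if value is None:
            continue
        if isinstance(value, bool):
            if value:
                args.append('--%s' % name)
        elif isinstance(value, list):
            args.extend('--%s=%s' % (name, item) for item in value)
        else:
            args.append('--%s=%s' % (name, value))
    return ' '.join(args)

print(rerender_options({'daemonize': True, 'pidfile': '/tmp/supervisord.pid'}))
# --daemonize --pidfile=/tmp/supervisord.pid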
58,984
def login ( self , email = None , password = None , user = None ) : if user is not None : data = { 'login' : user , 'password' : password } elif email is not None : data = { 'email' : email , 'password' : password } else : raise ValueError ( 'Neither username nor email provided to login' ) self . headers = { 'connection' : 'close' } response = self . post ( '/session' , ** data ) self . token = response [ 'private_token' ] self . headers = { 'PRIVATE-TOKEN' : self . token , 'connection' : 'close' } return response
Logs the user in and sets up the header with the private token
58,985
def getuser ( self , user_id ) : request = requests . get ( '{0}/{1}' . format ( self . users_url , user_id ) , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 200 : return request . json ( ) else : return False
Get info for a user identified by id
58,986
def deleteuser ( self , user_id ) : deleted = self . delete_user ( user_id ) if deleted is False : return False else : return True
Deletes a user. Available only for administrators. This is an idempotent function: calling it for a non-existent user id still returns a 200 OK status code. The JSON response differs depending on whether the user was actually deleted; if so, the user is returned, otherwise not.
58,987
def currentuser ( self ) : request = requests . get ( '{0}/api/v3/user' . format ( self . host ) , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return request . json ( )
Returns the current user's parameters. The current user is linked to the secret token.
58,988
def edituser ( self , user_id , ** kwargs ) : data = { } if kwargs : data . update ( kwargs ) request = requests . put ( '{0}/{1}' . format ( self . users_url , user_id ) , headers = self . headers , data = data , timeout = self . timeout , verify = self . verify_ssl , auth = self . auth ) if request . status_code == 200 : return request . json ( ) else : return False
Edits a user's data.
58,989
def getsshkeys ( self ) : request = requests . get ( self . keys_url , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 200 : return request . json ( ) else : return False
Gets all the ssh keys for the current user
58,990
def addsshkey ( self , title , key ) : data = { 'title' : title , 'key' : key } request = requests . post ( self . keys_url , headers = self . headers , data = data , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 201 : return True else : return False
Add a new ssh key for the current user
58,991
def addsshkeyuser ( self , user_id , title , key ) : data = { 'title' : title , 'key' : key } request = requests . post ( '{0}/{1}/keys' . format ( self . users_url , user_id ) , headers = self . headers , data = data , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 201 : return True else : return False
Add a new ssh key for the user identified by id
58,992
def deletesshkey ( self , key_id ) : request = requests . delete ( '{0}/{1}' . format ( self . keys_url , key_id ) , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . content == b'null' : return False else : return True
Deletes an ssh key, identified by id, for the current user
58,993
def get ( self , uri , default_response = None , ** kwargs ) : url = self . api_url + uri response = requests . get ( url , params = kwargs , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return self . success_or_raise ( response , default_response = default_response )
Call GET on the Gitlab server
58,994
def post ( self , uri , default_response = None , ** kwargs ) : url = self . api_url + uri response = requests . post ( url , headers = self . headers , data = kwargs , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return self . success_or_raise ( response , default_response = default_response )
Call POST on the Gitlab server
58,995
def delete ( self , uri , default_response = None ) : url = self . api_url + uri response = requests . delete ( url , headers = self . headers , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) return self . success_or_raise ( response , default_response = default_response )
Call DELETE on the Gitlab server
58,996
def success_or_raise ( self , response , default_response = None ) : if self . suppress_http_error and not response . ok : return False response_json = default_response if response_json is None : response_json = { } response . raise_for_status ( ) try : response_json = response . json ( ) except ValueError : pass return response_json
Check if the request was successful or raise an HTTPError
58,997
def getall ( fn , page = None , * args , ** kwargs ) : if not page : page = 1 while True : results = fn ( * args , page = page , ** kwargs ) if not results : break for x in results : yield x page += 1
Auto-iterate over the paginated results of various methods of the API. Pass the GitLabAPI method as the first argument, followed by the other parameters as normal. Include page to determine the first page to poll. Remaining kwargs are passed on to the called method, including per_page.
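A self-contained demonstration, with getall restated and a fake paginated endpoint standing in for a GitLabAPI method:

def getall(fn, page=None, *args, **kwargs):
    # Restated from the entry above.
    if not page:
        page = 1
    while True:
        results = fn(*args, page=page, **kwargs)
        if not results:
            break
        for x in results:
            yield x
        page += 1

def fake_endpoint(page=1, per_page=2):
    # Hypothetical stand-in: two pages of results, then an empty page.
    pages = {1: ['a', 'b'], 2: ['c']}
    return pages.get(page, [])

print(list(getall(fake_endpoint, per_page=2)))  # ['a', 'b', 'c']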
58,998
def setsudo ( self , user = None ) : if user is None : try : self . headers . pop ( 'SUDO' ) except KeyError : pass else : self . headers [ 'SUDO' ] = user
Sets the SUDO header so subsequent API calls are performed as the provided user
58,999
def createproject ( self , name , ** kwargs ) : data = { 'name' : name } if kwargs : data . update ( kwargs ) request = requests . post ( self . projects_url , headers = self . headers , data = data , verify = self . verify_ssl , auth = self . auth , timeout = self . timeout ) if request . status_code == 201 : return request . json ( ) elif request . status_code == 403 : if 'Your own projects limit is 0' in request . text : print ( request . text ) return False else : return False
Creates a new project owned by the authenticated user .