idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
57,100
# Initialize the RouteNetworkLayer's properties: fetch the resource's JSON from
# self._url and copy every recognized key into a "_"-prefixed private attribute.
# Keys that have no matching public attribute are reported to stdout instead of
# being stored. NOTE(review): assumes self._get returns a dict — confirm.
def __init ( self ) : params = { "f" : "json" } json_dict = self . _get ( url = self . _url , param_dict = params , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port ) attributes = [ attr for attr in dir ( self ) if not attr . startswith ( '__' ) and not attr . startswith ( '_' ) ] for k , v in json_dict . items ( ) : if k in attributes : setattr ( self , "_" + k , json_dict [ k ] ) else : print ( k , " - attribute not implemented in RouteNetworkLayer." ) del k , v
initializes all the properties
57,101
def download(self, itemID, savePath):
    """Download an item from the server to a local folder.

    Creates ``savePath`` when it does not yet exist, then streams the
    item's ``/download`` resource into it via ``self._get``.
    """
    if not os.path.isdir(savePath):
        os.makedirs(savePath)
    url = self._url + "/%s/download" % itemID
    params = {}
    # params is always empty today; the querystring hook is kept so extra
    # parameters can be appended later without reshaping the request.
    if len(params.keys()):
        url = url + "?%s" % urlencode(params)
    return self._get(url=url,
                     param_dict=params,
                     out_folder=savePath,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
downloads an item to local disk
57,102
def reports(self):
    """Return a UsageReport object for every report known to the server.

    Lazily initializes the metrics list on first access.
    """
    if self._metrics is None:
        self.__init()
    self._reports = []
    for metric in self._metrics:
        report_url = self._url + "/%s" % six.moves.urllib.parse.quote_plus(metric['reportname'])
        self._reports.append(UsageReport(url=report_url,
                                         securityHandler=self._securityHandler,
                                         proxy_url=self._proxy_url,
                                         proxy_port=self._proxy_port,
                                         initialize=True))
    return self._reports
returns a list of reports on the server
57,103
def editUsageReportSettings(self, samplingInterval, enabled=True, maxHistory=0):
    """Update the site-wide usage-report settings.

    POSTs the new settings to the ``/settings/edit`` endpoint.

    Args:
        samplingInterval: interval (minutes) between statistics samples.
        enabled: whether usage reporting is active.
        maxHistory: days of history to retain (0 = no limit).
    """
    payload = {
        "f": "json",
        "maxHistory": maxHistory,
        "enabled": enabled,
        "samplingInterval": samplingInterval,
    }
    edit_url = self._url + "/settings/edit"
    return self._post(url=edit_url,
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The usage reports settings are applied to the entire site . A POST request updates the usage reports settings .
57,104
# Create a new usage report by POSTing its JSON representation to /add.
# 'queries' may be a single dict or a list of dicts; 'to'/'from' are only sent
# for since == "custom". Re-initializes this object afterwards so cached state
# reflects the new report. NOTE(review): when since == "custom" but
# fromValue/toValue are None, nulls are sent — confirm the server accepts this.
def createUsageReport ( self , reportname , queries , metadata , since = "LAST_DAY" , fromValue = None , toValue = None , aggregationInterval = None ) : url = self . _url + "/add" params = { "f" : "json" , "usagereport" : { "reportname" : reportname , "since" : since , "metadata" : metadata } } if isinstance ( queries , dict ) : params [ "usagereport" ] [ "queries" ] = [ queries ] elif isinstance ( queries , list ) : params [ "usagereport" ] [ "queries" ] = queries if aggregationInterval is not None : params [ "usagereport" ] [ 'aggregationInterval' ] = aggregationInterval if since . lower ( ) == "custom" : params [ "usagereport" ] [ 'to' ] = toValue params [ "usagereport" ] [ 'from' ] = fromValue res = self . _post ( url = url , param_dict = params , securityHandler = self . _securityHandler , proxy_port = self . _proxy_port , proxy_url = self . _proxy_url ) self . __init ( ) return res
Creates a new usage report . A usage report is created by submitting a JSON representation of the usage report to this operation .
57,105
def edit(self):
    """Submit the complete usage-report JSON to the /edit endpoint.

    The full current state of the report (name, queries, time window,
    aggregation interval, metadata) is serialized and POSTed; the report
    name itself cannot be changed by this operation.
    """
    report_state = {
        "reportname": self.reportname,
        "queries": self._queries,
        "since": self.since,
        "metadata": self._metadata,
        "to": self._to,
        "from": self._from,
        "aggregationInterval": self._aggregationInterval,
    }
    params = {"f": "json", "usagereport": json.dumps(report_state)}
    return self._post(url=self._url + "/edit",
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Edits the usage report . To edit a usage report you need to submit the complete JSON representation of the usage report which includes updates to the usage report properties . The name of the report cannot be changed when editing the usage report .
57,106
# Initialize the GeoData service's properties from its JSON resource.
# "versions" and "replicas" entries are wrapped in lazy Version / Replica
# objects (initialize=False); every other recognized key is stored on the
# matching "_"-prefixed attribute. Unrecognized keys are reported to stdout.
def __init ( self ) : params = { "f" : "json" , } json_dict = self . _get ( self . _url , params , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port ) self . _json_dict = json_dict self . _json = json . dumps ( self . _json_dict ) attributes = [ attr for attr in dir ( self ) if not attr . startswith ( '__' ) and not attr . startswith ( '_' ) ] for k , v in json_dict . items ( ) : if k in attributes : if k == "versions" and json_dict [ k ] : self . _versions = [ ] for version in v : self . _versions . append ( Version ( url = self . _url + "/versions/%s" % version , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port , initialize = False ) ) elif k == "replicas" and json_dict [ k ] : self . _replicas = [ ] for version in v : self . _replicas . append ( Replica ( url = self . _url + "/replicas/%s" % version , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port , initialize = False ) ) else : setattr ( self , "_" + k , v ) else : print ( k , " - attribute not implemented for GeoData Service" )
initializes the properties
57,107
# Lazily build and cache a summary of this service's replicas.
# NOTE(review): the loop writes the same two keys ("replicaName"/"replicaID")
# into one shared dict on every iteration, so only the *last* replica survives
# — for multi-replica services this likely should be a list of dicts. Fixing
# it would change the return shape for callers, so it is only flagged here.
def replicasResource ( self ) : if self . _replicasResource is None : self . _replicasResource = { } for replica in self . replicas : self . _replicasResource [ "replicaName" ] = replica . name self . _replicasResource [ "replicaID" ] = replica . guid return self . _replicasResource
returns a list of replicas
57,108
# Collect every service object visible on the admin services page.
# Fetches the root /services listing, then walks each non-default folder in
# "foldersDetail", fetching the folder listing and delegating each "services"
# payload to self._checkservice, which appends to self._services.
# NOTE(review): default folders (isDefault == True) are intentionally skipped
# because their services appear in the root "services" key — confirm.
def services ( self ) : self . _services = [ ] params = { "f" : "json" } if not self . _url . endswith ( '/services' ) : uURL = self . _url + "/services" else : uURL = self . _url res = self . _get ( url = uURL , param_dict = params , securityHandler = self . _securityHandler , proxy_port = self . _proxy_port , proxy_url = self . _proxy_url ) for k , v in res . items ( ) : if k == "foldersDetail" : for item in v : if 'isDefault' in item and item [ 'isDefault' ] == False : fURL = self . _url + "/services/" + item [ 'folderName' ] resFolder = self . _get ( url = fURL , param_dict = params , securityHandler = self . _securityHandler , proxy_port = self . _proxy_port , proxy_url = self . _proxy_url ) for k1 , v1 in resFolder . items ( ) : if k1 == "services" : self . _checkservice ( k1 , v1 , fURL ) elif k == "services" : self . _checkservice ( k , v , uURL ) return self . _services
returns all the service objects in the admin service's page
57,109
def refresh(self, serviceDefinition=True):
    """Refresh the service, clearing the web-server cache for it.

    Args:
        serviceDefinition: when True, the server also re-reads the
            service definition as part of the refresh.

    Returns:
        The JSON response from the refresh operation.
    """
    # BUG FIX: the refresh endpoint URL was built here but the POST was
    # previously sent to self._url, so the refresh operation never ran.
    url = self._url + "/MapServer/refresh"
    params = {"f": "json",
              "serviceDefinition": serviceDefinition}
    res = self._post(url=url,
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
    # re-read this object's properties, which the refresh may have changed
    self.__init()
    return res
The refresh operation refreshes a service which clears the web server cache for the service .
57,110
def editTileService(self, serviceDefinition=None, minScale=None,
                    maxScale=None, sourceItemId=None,
                    exportTilesAllowed=False, maxExportTileCount=100000):
    """Update a tile service's properties via the /edit endpoint.

    Only parameters that are not None are sent; minScale/maxScale are
    coerced to float and maxExportTileCount to int before posting.

    Returns:
        The JSON response from the edit operation.
    """
    params = {"f": "json"}
    if serviceDefinition is not None:
        params["serviceDefinition"] = serviceDefinition
    if minScale is not None:
        params['minScale'] = float(minScale)
    if maxScale is not None:
        params['maxScale'] = float(maxScale)
    if sourceItemId is not None:
        params["sourceItemId"] = sourceItemId
    if exportTilesAllowed is not None:
        params["exportTilesAllowed"] = exportTilesAllowed
    if maxExportTileCount is not None:
        params["maxExportTileCount"] = int(maxExportTileCount)
    url = self._url + "/edit"
    # CONSISTENCY FIX: every sibling method routes the proxy through
    # self._proxy_url / self._proxy_port; reading them off the security
    # handler here broke when the handler lacked those attributes.
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This post operation updates a Tile Service's properties
57,111
def refresh(self):
    """Refresh this service and re-read its properties.

    Issues a GET against the /refresh endpoint, then re-initializes the
    object so cached attributes reflect the refreshed state.
    """
    refresh_url = self._url + "/refresh"
    res = self._get(url=refresh_url,
                    param_dict={"f": "json"},
                    securityHandler=self._securityHandler,
                    proxy_port=self._proxy_port,
                    proxy_url=self._proxy_url)
    self.__init()
    return res
refreshes a service
57,112
def addToDefinition(self, json_dict):
    """Add a definition property to a hosted feature service.

    POSTs the serialized definition to /addToDefinition synchronously
    and refreshes this object before returning the server's response.
    """
    payload = {
        "f": "json",
        "addToDefinition": json.dumps(json_dict),
        "async": False,
    }
    res = self._post(url=self._url + "/addToDefinition",
                     param_dict=payload,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
    self.refresh()
    return res
The addToDefinition operation supports adding a definition property to a hosted feature service . The result of this operation is a response indicating success or failure with error code and description .
57,113
# Update definition properties of a hosted feature service via the
# /updateDefinition endpoint. An OrderedDict is built (or passed through when
# the caller already supplies one) so the JSON key order is deterministic:
# known top-level keys and the nested editorTrackingInfo keys are copied
# explicitly first, then any remaining keys are appended as-is. The final
# payload is serialized with compact separators and POSTed synchronously;
# the object is refreshed before the response is returned.
# NOTE(review): a None json_dict serializes as "null" — confirm callers
# always pass a dict.
def updateDefinition ( self , json_dict ) : definition = None if json_dict is not None : if isinstance ( json_dict , collections . OrderedDict ) == True : definition = json_dict else : definition = collections . OrderedDict ( ) if 'hasStaticData' in json_dict : definition [ 'hasStaticData' ] = json_dict [ 'hasStaticData' ] if 'allowGeometryUpdates' in json_dict : definition [ 'allowGeometryUpdates' ] = json_dict [ 'allowGeometryUpdates' ] if 'capabilities' in json_dict : definition [ 'capabilities' ] = json_dict [ 'capabilities' ] if 'editorTrackingInfo' in json_dict : definition [ 'editorTrackingInfo' ] = collections . OrderedDict ( ) if 'enableEditorTracking' in json_dict [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ 'enableEditorTracking' ] = json_dict [ 'editorTrackingInfo' ] [ 'enableEditorTracking' ] if 'enableOwnershipAccessControl' in json_dict [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ 'enableOwnershipAccessControl' ] = json_dict [ 'editorTrackingInfo' ] [ 'enableOwnershipAccessControl' ] if 'allowOthersToUpdate' in json_dict [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ 'allowOthersToUpdate' ] = json_dict [ 'editorTrackingInfo' ] [ 'allowOthersToUpdate' ] if 'allowOthersToDelete' in json_dict [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ 'allowOthersToDelete' ] = json_dict [ 'editorTrackingInfo' ] [ 'allowOthersToDelete' ] if 'allowOthersToQuery' in json_dict [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ 'allowOthersToQuery' ] = json_dict [ 'editorTrackingInfo' ] [ 'allowOthersToQuery' ] if isinstance ( json_dict [ 'editorTrackingInfo' ] , dict ) : for k , v in json_dict [ 'editorTrackingInfo' ] . items ( ) : if k not in definition [ 'editorTrackingInfo' ] : definition [ 'editorTrackingInfo' ] [ k ] = v if isinstance ( json_dict , dict ) : for k , v in json_dict . 
items ( ) : if k not in definition : definition [ k ] = v params = { "f" : "json" , "updateDefinition" : json . dumps ( obj = definition , separators = ( ',' , ':' ) ) , "async" : False } uURL = self . _url + "/updateDefinition" res = self . _post ( url = uURL , param_dict = params , securityHandler = self . _securityHandler , proxy_port = self . _proxy_port , proxy_url = self . _proxy_url ) self . refresh ( ) return res
The updateDefinition operation supports updating a definition property in a hosted feature service . The result of this operation is a response indicating success or failure with error code and description .
57,114
def calc_resp(password_hash, server_challenge):
    """Generate the 24-byte LM response for an NTLM Type-2 challenge.

    The 16-byte password hash is null-padded to 21 bytes and split into
    three 7-byte DES keys; each key encrypts the first 8 bytes of the
    server challenge and the three ciphertexts are concatenated.
    """
    padded = password_hash + b'\0' * (21 - len(password_hash))
    challenge = server_challenge[0:8]
    response = b''
    for start in (0, 7, 14):
        response = response + des.DES(padded[start:start + 7]).encrypt(challenge)
    return response
calc_resp generates the LM response given a 16 - byte password hash and the challenge from the Type - 2 message .
57,115
def create_LM_hashed_password_v1(passwd):
    """Create the LanManager hashed password.

    If ``passwd`` is already an "LMHASH:NTHASH" hex pair, the LM half is
    decoded and returned directly. Otherwise the password is upper-cased,
    null-padded (and truncated) to exactly 14 characters, split into two
    7-byte DES keys, and each key encrypts the LM magic constant.
    """
    if re.match(r'^[\w]{32}:[\w]{32}$', passwd):
        return binascii.unhexlify(passwd.split(':')[0])
    passwd = passwd.upper()
    lm_pw = passwd + '\0' * (14 - len(passwd))
    # BUG FIX: this previously re-truncated the *unpadded* password
    # (lm_pw = passwd[0:14]), discarding the null padding just computed,
    # so passwords shorter than 14 characters produced short DES keys.
    lm_pw = lm_pw[0:14]
    magic_str = b"KGS!@#$%"
    res = b''
    dobj = des.DES(lm_pw[0:7])
    res = res + dobj.encrypt(magic_str)
    dobj = des.DES(lm_pw[7:14])
    res = res + dobj.encrypt(magic_str)
    return res
create LanManager hashed password
57,116
def _readcsv(self, path_to_csv):
    """Read a comma-delimited CSV file into a NumPy structured array.

    The first row supplies the field names; per-column dtypes are
    inferred automatically (dtype=None).
    """
    table = np.genfromtxt(path_to_csv,
                          delimiter=',',
                          dtype=None,
                          names=True)
    return table
reads a csv column
57,117
def queryDataCollectionByName(self, countryName):
    """Return the data-collection codes available for a country name.

    Filters the cached lookup table on its 'Countries' field and returns
    the first field of each matching row.

    Returns:
        list of codes (possibly empty), or None when the lookup table
        cannot be filtered (e.g. missing/malformed 'Countries' field).
    """
    var = self._dataCollectionCodes
    try:
        return [row[0] for row in var[var['Countries'] == countryName]]
    # FIX: the bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; only genuine lookup errors should yield None.
    except Exception:
        return None
returns a list of available data collections for a given country name .
57,118
def __geometryToDict(self, geom):
    """Convert a geometry object (or a list of them) to dictionary form.

    Dicts pass through untouched; Point and Polygon objects are reduced
    to their REST JSON geometry payloads; lists are converted
    element-wise. Any other type yields None implicitly.
    """
    if isinstance(geom, dict):
        return geom
    elif isinstance(geom, Point):
        point = geom.asDictionary
        return {"geometry": {"x": point['x'], "y": point['y']}}
    elif isinstance(geom, Polygon):
        polygon = geom.asDictionary
        return {"geometry": {"rings": polygon['rings'],
                             'spatialReference': polygon['spatialReference']}}
    elif isinstance(geom, list):
        return [self.__geometryToDict(item) for item in geom]
converts a geometry object to a dictionary
57,119
def lookUpReportsByCountry(self, countryName):
    """Fetch the list of available reports for a country by name.

    Raises:
        Exception: when the country name cannot be resolved to a
            two-digit country code.
    """
    code = self.findCountryTwoDigitCode(countryName)
    if code is None:
        raise Exception("Invalid country name.")
    report_url = self._base_url + self._url_list_reports + "/%s" % code
    return self._post(url=report_url,
                      param_dict={"f": "json"},
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
looks up a country by its name. Inputs: countryName - name of the country for which to get the reports list.
57,120
# Create a GeoEnrichment report (PDF/XLSX) for one or more study areas and
# download it next to out_file_path. Single study areas are wrapped in a list
# and converted to REST geometry dicts before the request.
# NOTE(review): when format is None the local variable is set to "pdf" but
# params['format'] is never populated, so the server default applies — confirm
# this is intended. Only 'pdf' and 'xlsx' are accepted otherwise.
def createReport ( self , out_file_path , studyAreas , report = None , format = "PDF" , reportFields = None , studyAreasOptions = None , useData = None , inSR = 4326 , ) : url = self . _base_url + self . _url_create_report if isinstance ( studyAreas , list ) == False : studyAreas = [ studyAreas ] studyAreas = self . __geometryToDict ( studyAreas ) params = { "f" : "bin" , "studyAreas" : studyAreas , "inSR" : inSR , } if not report is None : params [ 'report' ] = report if format is None : format = "pdf" elif format . lower ( ) in [ 'pdf' , 'xlsx' ] : params [ 'format' ] = format . lower ( ) else : raise AttributeError ( "Invalid format value." ) if not reportFields is None : params [ 'reportFields' ] = reportFields if not studyAreasOptions is None : params [ 'studyAreasOptions' ] = studyAreasOptions if not useData is None : params [ 'useData' ] = useData result = self . _get ( url = url , param_dict = params , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port , out_folder = os . path . dirname ( out_file_path ) ) return result
The GeoEnrichment Create Report method uses the concept of a study area to define the location of the point or area that you want to enrich with generated reports . This method allows you to create many types of high - quality reports for a variety of use cases describing the input area . If a point is used as a study area the service will create a 1 - mile ring buffer around the point to collect and append enrichment data . Optionally you can create a buffer ring or drive - time service area around the points to prepare PDF or Excel reports for the study areas .
57,121
def getVariables(self, sourceCountry, optionalCountryDataset=None,
                 searchText=None):
    """Search a country's data collections for matching variables.

    Args:
        sourceCountry: two-digit country code identifying the source.
        optionalCountryDataset: restrict the search to one dataset.
        searchText: keyword(s) to match against variable metadata.
    """
    # (the flattened original carried a stray raw-docstring 'r' artifact
    # before the first statement; it had no runtime effect)
    url = self._base_url + self._url_getVariables
    params = {"f": "json", "sourceCountry": sourceCountry}
    if searchText is not None:
        params["searchText"] = searchText
    if optionalCountryDataset is not None:
        params['optionalCountryDataset'] = optionalCountryDataset
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The GeoEnrichment GetVariables helper method allows you to search the data collections for variables that contain specific keywords.
57,122
def folders(self):
    """Return all server folders, guaranteeing the root "/" is present."""
    if self._folders is None:
        # lazily populate the folder list from the server on first access
        self.__init()
    current = self._folders
    if "/" not in current:
        current.append("/")
    return current
returns a list of all folders
57,123
def services(self):
    """Return AGSService objects for every service in the current folder."""
    self._services = []
    listing = self._get(url=self._currentURL,
                        param_dict={"f": "json"},
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port)
    if "services" in listing:
        for entry in listing['services']:
            service_url = self._currentURL + "/%s.%s" % (entry['serviceName'],
                                                         entry['type'])
            self._services.append(AGSService(url=service_url,
                                             securityHandler=self._securityHandler,
                                             proxy_url=self._proxy_url,
                                             proxy_port=self._proxy_port))
    return self._services
returns the services in the current folder
57,124
def createService(self, service):
    """Create a new GIS service in this folder.

    Args:
        service: the service definition, either already-serialized JSON
            (str) or a dict that will be serialized here.
    """
    params = {"f": "json"}
    if isinstance(service, str):
        params['service'] = service
    elif isinstance(service, dict):
        params['service'] = json.dumps(service)
    return self._post(url=self._url + "/createService",
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Creates a new GIS service in the folder . A service is created by submitting a JSON representation of the service to this operation .
57,125
def exists(self, folderName, serviceName=None, serviceType=None):
    """Check whether a folder or service exists on the server.

    Supply only folderName to test a folder; add serviceName and
    serviceType (with folderName=None for the root) to test a service.
    """
    payload = {
        "f": "json",
        "folderName": folderName,
        "serviceName": serviceName,
        "type": serviceType,
    }
    return self._post(url=self._url + "/exists",
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation allows you to check whether a folder or a service exists . To test if a folder exists supply only a folderName . To test if a service exists in a root folder supply both serviceName and serviceType with folderName = None . To test if a service exists in a folder supply all three parameters .
57,126
# Populate this AGSService's properties from its admin JSON resource.
# "extensions" entries are wrapped in Extension objects; every other
# recognized key is stored on the matching "_"-prefixed attribute, and
# unrecognized keys are reported to stdout.
def __init ( self ) : params = { "f" : "json" } json_dict = self . _get ( url = self . _currentURL , param_dict = params , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port ) self . _json = json . dumps ( json_dict ) self . _json_dict = json_dict attributes = [ attr for attr in dir ( self ) if not attr . startswith ( '__' ) and not attr . startswith ( '_' ) ] for k , v in json_dict . items ( ) : if k . lower ( ) == "extensions" : self . _extensions = [ ] for ext in v : self . _extensions . append ( Extension . fromJSON ( ext ) ) del ext elif k in attributes : setattr ( self , "_" + k , json_dict [ k ] ) else : print ( k , " - attribute not implemented in manageags.AGSService." ) del k del v
populates server admin information
57,127
def serviceManifest(self, fileType="json"):
    """Download the service manifest describing the service's resources.

    Args:
        fileType: 'json' returns the downloaded payload as-is; 'xml'
            parses it into an ElementTree. Other values return None.
    """
    url = self._url + "/iteminfo/manifest/manifest.%s" % fileType
    payload = self._get(url=url,
                        param_dict={},
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port,
                        out_folder=tempfile.gettempdir(),
                        file_name=os.path.basename(url))
    if fileType == 'json':
        return payload
    if fileType == 'xml':
        return ET.ElementTree(ET.fromstring(payload))
The service manifest resource documents the data and other resources that define the service origins and power the service . This resource will tell you underlying databases and their location along with other supplementary files that make up the service .
57,128
def startDataStoreMachine(self, dataStoreItemName, machineName):
    """Start the database instance on a Data Store machine."""
    start_url = self._url + "/items/enterpriseDatabases/%s/machines/%s/start" % (
        dataStoreItemName, machineName)
    return self._post(url=start_url,
                      param_dict={"f": "json"},
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Starts the database instance running on the Data Store machine .
57,129
def unregisterDataItem(self, path):
    """Forcibly unregister a previously registered data-store item.

    Args:
        path: the item path as registered with the server's data store.
    """
    payload = {
        "f": "json",
        "itempath": path,
        # always force removal, matching the original behavior
        "force": "true",
    }
    return self._post(self._url + "/unregisterItem",
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Unregisters a data item that has been previously registered with the server's data store.
57,130
def validateDataStore(self, dataStoreName, machineName):
    """Run a health check on an ArcGIS Data Store machine."""
    validate_url = self._url + "/items/enterpriseDatabases/%s/machines/%s/validate" % (
        dataStoreName, machineName)
    return self._post(url=validate_url,
                      param_dict={"f": "json"},
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Checks the status of ArcGIS Data Store and provides a health check response .
57,131
def layers(self):
    """Return the globe service layers, each with a live layer object.

    Every cached layer-info dict gains an 'object' key holding a
    GlobeServiceLayer bound to that layer's URL.
    """
    if self._layers is None:
        self.__init()
    enriched = []
    for info in self._layers:
        info['object'] = GlobeServiceLayer(
            url=self._url + "/%s" % info['id'],
            securityHandler=self._securityHandler,
            proxy_port=self._proxy_port,
            proxy_url=self._proxy_url)
        enriched.append(info)
    return enriched
gets the globe service layers
57,132
def loadFeatures(self, path_to_fc):
    """Load a feature class's features into this object's value."""
    from ..common.spatial import featureclass_to_json
    parsed = json.loads(featureclass_to_json(path_to_fc))
    self.value = parsed
loads a feature class features to the object
57,133
def fromFeatureClass(fc, paramName):
    """Build a GPFeatureRecordSetLayer from a feature class on disk."""
    from ..common.spatial import featureclass_to_json
    record_layer = GPFeatureRecordSetLayer()
    record_layer.value = json.loads(featureclass_to_json(fc))
    record_layer.paramName = paramName
    return record_layer
returns a GPFeatureRecordSetLayer object from a feature class
57,134
def asDictionary(self):
    """Dictionary (REST JSON) representation of this simple renderer."""
    return {
        "type": "simple",
        "symbol": self._symbol.asDictionary,
        "label": self._label,
        "description": self._description,
        "rotationType": self._rotationType,
        "rotationExpression": self._rotationExpression,
    }
provides a dictionary representation of the object
57,135
def searchDiagrams(self, whereClause=None, relatedObjects=None,
                   relatedSchematicObjects=None):
    """Search a schematic service for diagrams.

    Only the filters actually provided (truthy) are sent; the result is
    an array of Schematic Diagram Information Objects.
    """
    params = {"f": "json"}
    optional = (("where", whereClause),
                ("relatedObjects", relatedObjects),
                ("relatedSchematicObjects", relatedSchematicObjects))
    for key, val in optional:
        if val:
            params[key] = val
    return self._get(url=self._url + "/searchDiagrams",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The Schematic Search Diagrams operation is performed on the schematic service resource . The result of this operation is an array of Schematic Diagram Information Object .
57,136
def _validateurl(self, url):
    """Normalize a server URL to its /rest/services root.

    Strips any trailing admin/manager/rest (or services) segments from
    the path, defaults an empty path to "arcgis", caches the matching
    admin endpoint on self._adminUrl, and returns the REST services URL.
    """
    parsed = urlparse(url)
    path = parsed.path.strip("/")
    if not path:
        path = "arcgis"
    else:
        parts = path.split("/")
        url_types = ("admin", "manager", "rest")
        if any(marker in parts for marker in url_types):
            # drop trailing segments up to and including the marker
            while parts.pop() not in url_types:
                pass
        elif "services" in parts:
            # NOTE: substring membership test, preserved from the
            # original — pops until the popped segment is a substring
            # of the literal "services"
            while parts.pop() not in "services":
                pass
        path = "/".join(parts)
    self._adminUrl = "%s://%s/%s/admin" % (parsed.scheme, parsed.netloc, path)
    return "%s://%s/%s/rest/services" % (parsed.scheme, parsed.netloc, path)
assembles the server url
57,137
def admin(self):
    """AGSAdministration handle for this server's administrative API.

    Raises:
        Exception: when no security handler is configured, since the
            admin API always requires authentication.
    """
    if self._securityHandler is None:
        raise Exception("Cannot connect to adminstrative server without authentication")
    from ..manageags import AGSAdministration
    return AGSAdministration(url=self._adminUrl,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port,
                             initialize=False)
points to the administrative side of ArcGIS Server
57,138
def addUser(self, username, password, firstname, lastname, email, role):
    """Queue a user on the invitation list (no request is sent here)."""
    invite = {
        "username": username,
        "password": password,
        "firstname": firstname,
        "lastname": lastname,
        "fullname": "%s %s" % (firstname, lastname),
        "email": email,
        "role": role,
    }
    self._invites.append(invite)
adds a user to the invitation list
57,139
def removeByIndex(self, index):
    """Remove the invitation at the given list position.

    Out-of-range (or negative) indices are silently ignored, matching
    the original permissive contract.
    """
    # BUG FIX: the old code (a) rejected the last valid position because
    # it tested index < len(self._invites) - 1, and (b) called
    # list.remove(index), which deletes by *value*, not by position.
    if 0 <= index < len(self._invites):
        del self._invites[index]
removes a user from the invitation list by position
57,140
def fromDictionary(value):
    """Build a PortalParameters object from a plain dict.

    Each key is stored on the new object as a "_"-prefixed attribute.

    Raises:
        AttributeError: when ``value`` is not a dict.
    """
    if not isinstance(value, dict):
        raise AttributeError("Invalid input.")
    pp = PortalParameters()
    for key, val in value.items():
        setattr(pp, "_%s" % key, val)
    return pp
creates the portal properties object from a dictionary
57,141
def value(self):
    """Return the non-None allowed parameters as a dictionary.

    Iterates the allowed-key whitelist and collects each "_"-prefixed
    backing attribute that holds a value.
    """
    result = {}
    for key in self.__allowed_keys:
        stored = getattr(self, "_" + key)
        if stored is not None:
            result[key] = stored
    return result
returns the values as a dictionary
57,142
def tile_fonts(self, fontstack, stack_range, out_folder=None):
    """Download a glyph range for a font stack in PBF format.

    The output folder defaults to the system temp directory.
    """
    url = "{url}/resources/fonts/{fontstack}/{stack_range}.pbf".format(
        url=self._url, fontstack=fontstack, stack_range=stack_range)
    if out_folder is None:
        out_folder = tempfile.gettempdir()
    # NOTE(review): proxy_url is read from self._proxy_host here (it is
    # self._proxy_url elsewhere in the file); tile_sprite does the same —
    # confirm the attribute exists on this class.
    return self._get(url=url,
                     param_dict={},
                     out_folder=out_folder,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_host)
This resource returns glyphs in PBF format . The template url for this fonts resource is represented in Vector Tile Style resource .
57,143
def tile_sprite(self, out_format="sprite.json", out_folder=None):
    """Download the sprite image/metadata resource for the tile style.

    The output folder defaults to the system temp directory.
    """
    url = "{url}/resources/sprites/{f}".format(url=self._url, f=out_format)
    if out_folder is None:
        out_folder = tempfile.gettempdir()
    # NOTE(review): proxy_url read from self._proxy_host, mirroring
    # tile_fonts — confirm the attribute exists on this class.
    return self._get(url=url,
                     param_dict={},
                     out_folder=out_folder,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_host)
This resource returns sprite image and metadata
57,144
def layers(self):
    """Layers of the feature service, fetched lazily on first access."""
    if self._layers is None:
        self.__init()
        self._getLayers()
    return self._layers
gets the layers for the feature service
57,145
def _getLayers(self):
    """Populate self._layers with FeatureLayer objects for this service."""
    json_dict = self._get(self._url,
                          {"f": "json"},
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    self._layers = []
    for info in json_dict.get('layers', []):
        layer_url = self._url + "/%s" % info['id']
        self._layers.append(layer.FeatureLayer(url=layer_url,
                                               securityHandler=self._securityHandler,
                                               proxy_port=self._proxy_port,
                                               proxy_url=self._proxy_url))
gets layers for the feature service
57,146
# Run the feature-service-level Query operation. Filters (layer definitions,
# geometry, time, output spatial reference) are only added when provided AND of
# the expected filter type. Unless IDs/count-only output was requested, the
# per-layer results are converted into FeatureSet objects; any other response
# shape is returned unchanged.
# NOTE(review): string responses are json.loads'ed and each layer re-dumped
# for FeatureSet.fromJSON — confirm fromJSON requires a JSON string.
def query ( self , layerDefsFilter = None , geometryFilter = None , timeFilter = None , returnGeometry = True , returnIdsOnly = False , returnCountOnly = False , returnZ = False , returnM = False , outSR = None ) : qurl = self . _url + "/query" params = { "f" : "json" , "returnGeometry" : returnGeometry , "returnIdsOnly" : returnIdsOnly , "returnCountOnly" : returnCountOnly , "returnZ" : returnZ , "returnM" : returnM } if not layerDefsFilter is None and isinstance ( layerDefsFilter , LayerDefinitionFilter ) : params [ 'layerDefs' ] = layerDefsFilter . filter if not geometryFilter is None and isinstance ( geometryFilter , GeometryFilter ) : gf = geometryFilter . filter params [ 'geometryType' ] = gf [ 'geometryType' ] params [ 'spatialRel' ] = gf [ 'spatialRel' ] params [ 'geometry' ] = gf [ 'geometry' ] params [ 'inSR' ] = gf [ 'inSR' ] if not outSR is None and isinstance ( outSR , SpatialReference ) : params [ 'outSR' ] = outSR . asDictionary if not timeFilter is None and isinstance ( timeFilter , TimeFilter ) : params [ 'time' ] = timeFilter . filter res = self . _post ( url = qurl , param_dict = params , securityHandler = self . _securityHandler , proxy_url = self . _proxy_url , proxy_port = self . _proxy_port ) if returnIdsOnly == False and returnCountOnly == False : if isinstance ( res , str ) : jd = json . loads ( res ) return [ FeatureSet . fromJSON ( json . dumps ( lyr ) ) for lyr in jd [ 'layers' ] ] elif isinstance ( res , dict ) : return [ FeatureSet . fromJSON ( json . dumps ( lyr ) ) for lyr in res [ 'layers' ] ] else : return res return res
The Query operation is performed on a feature service resource
57,147
def create_feature_layer(ds, sql, name="layer"):
    """Create an in-memory feature layer from a dataset and where clause.

    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    layer_result = arcpy.MakeFeatureLayer_management(in_features=ds,
                                                     out_layer=name,
                                                     where_clause=sql)
    return layer_result[0]
creates a feature layer object
57,148
def featureclass_to_json(fc):
    """Convert a feature class (or table) to its JSON representation.

    Tables have no geometry, so they are routed through
    recordset_to_json; feature classes use arcpy.FeatureSet.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    desc = arcpy.Describe(fc)
    if desc.dataType in ("Table", "TableView"):
        return recordset_to_json(table=fc)
    return arcpy.FeatureSet(fc).JSON
converts a feature class to JSON
57,149
def get_attachment_data(attachmentTable, sql,
                        nameField="ATT_NAME", blobField="DATA",
                        contentTypeField="CONTENT_TYPE",
                        rel_object_field="REL_OBJECTID"):
    """Extract attachment blobs from an attachment table to temp files.

    Each matching row's blob is written to %temp%/<name>; a dict with the
    attachment name, the temp file path, the content type, and the
    related ObjectID is returned per row.

    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    ret_rows = []
    with arcpy.da.SearchCursor(attachmentTable,
                               [nameField, blobField, contentTypeField,
                                rel_object_field],
                               where_clause=sql) as rows:
        for row in rows:
            temp_f = os.environ['temp'] + os.sep + row[0]
            # FIX: context manager guarantees the handle is closed (and
            # flushed) even if the blob write raises mid-loop; the old
            # code leaked the handle on error.
            with open(temp_f, 'wb') as writer:
                writer.write(row[1])
            ret_rows.append({
                "name": row[0],
                "blob": temp_f,
                "content": row[2],
                "rel_oid": row[3],
            })
    return ret_rows
gets all the data to pass to a feature service
57,150
def get_records_with_attachments(attachment_table,
                                 rel_object_field="REL_OBJECTID"):
    """Return the distinct related ObjectIDs (as strings) in a table.

    First-seen order is preserved, matching the original behavior.

    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    OIDs = []
    seen = set()  # PERF: O(1) membership instead of scanning OIDs per row
    with arcpy.da.SearchCursor(attachment_table, [rel_object_field]) as rows:
        for row in rows:
            oid = str(row[0])
            if oid not in seen:
                seen.add(oid)
                OIDs.append("%s" % oid)
    return OIDs
returns a list of ObjectIDs for rows in the attachment table
57,151
def get_OID_field(fs):
    """Return a feature set's ObjectID field name, or None if it has none.

    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    description = arcpy.Describe(fs)
    return description.OIDFieldName if description.hasOID else None
returns a featureset's object id field
57,152
# Merge a list of feature classes / tables into a single output dataset.
# Zero inputs -> None; one input -> CopyFeatures (geometry) or CopyRows
# (table), chosen by probing Describe for shapeFieldName; many inputs ->
# Merge. When cleanUp is True each input dataset is deleted after the merge.
# NOTE(review): the cleanUp == False and cleanUp == True arms duplicate the
# same branch structure; only the deletion loop differs.
def merge_feature_class ( merges , out_fc , cleanUp = True ) : if arcpyFound == False : raise Exception ( "ArcPy is required to use this function" ) if cleanUp == False : if len ( merges ) == 0 : return None elif len ( merges ) == 1 : desc = arcpy . Describe ( merges [ 0 ] ) if hasattr ( desc , 'shapeFieldName' ) : return arcpy . CopyFeatures_management ( merges [ 0 ] , out_fc ) [ 0 ] else : return arcpy . CopyRows_management ( merges [ 0 ] , out_fc ) [ 0 ] else : return arcpy . Merge_management ( inputs = merges , output = out_fc ) [ 0 ] else : if len ( merges ) == 0 : return None elif len ( merges ) == 1 : desc = arcpy . Describe ( merges [ 0 ] ) if hasattr ( desc , 'shapeFieldName' ) : merged = arcpy . CopyFeatures_management ( merges [ 0 ] , out_fc ) [ 0 ] else : merged = arcpy . CopyRows_management ( merges [ 0 ] , out_fc ) [ 0 ] else : merged = arcpy . Merge_management ( inputs = merges , output = out_fc ) [ 0 ] for m in merges : arcpy . Delete_management ( m ) del m return merged
merges featureclass into a single feature class
57,153
def insert_rows(fc, features, fields, includeOIDField=False, oidField=None):
    """Insert a list of feature objects into an existing feature class.

    Args:
        fc: target feature class path.
        features: feature objects exposing ``asRow``, ``fields`` and
            ``geometry``.
        fields: field names to transfer (mutated in place: "FSL_OID" and
            "SHAPE@" may be appended).
        includeOIDField: when True, preserve each source OID in a new
            "FSL_OID" long field.
        oidField: name of the source OID field (required when
            ``includeOIDField`` is True).
    Returns:
        The feature class path (whether or not rows were inserted).
    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    icur = None
    if includeOIDField:
        # Carry the original OIDs in a dedicated long field.
        arcpy.AddField_management(fc, "FSL_OID", "LONG")
        fields.append("FSL_OID")
    if len(features) > 0:
        fields.append("SHAPE@")
        workspace = os.path.dirname(fc)
        # Wrap the inserts in an edit session so they commit atomically.
        with arcpy.da.Editor(workspace) as edit:
            date_fields = getDateFields(fc)
            icur = arcpy.da.InsertCursor(fc, fields)
            for feat in features:
                row = [""] * len(fields)
                drow = feat.asRow[0]
                dfields = feat.fields
                for field in fields:
                    if field in dfields or (includeOIDField and field == "FSL_OID"):
                        if field in date_fields:
                            # Date values arrive serialized; convert back.
                            row[fields.index(field)] = toDateTime(drow[dfields.index(field)])
                        elif field == "FSL_OID":
                            row[fields.index("FSL_OID")] = drow[dfields.index(oidField)]
                        else:
                            row[fields.index(field)] = drow[dfields.index(field)]
                    del field
                row[fields.index("SHAPE@")] = feat.geometry
                icur.insertRow(row)
                del row
                del drow
                del dfields
                del feat
            del features
            icur = None
            del icur
            del fields
        return fc
    else:
        return fc
inserts rows based on a list features object
57,154
def create_feature_class(out_path, out_name, geom_type, wkid, fields, objectIdField):
    """Create a feature class in a folder or geodatabase.

    Args:
        out_path: destination workspace.
        out_name: feature class name.
        geom_type: service geometry type (resolved via lookUpGeometry).
        wkid: spatial reference well-known id.
        fields: list of field dicts with 'name' and 'type' keys.
        objectIdField: name of the OID field to skip (managed by arcpy).
    Returns:
        tuple: (feature class path, list of added field names).
    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    arcpy.env.overwriteOutput = True
    fc = arcpy.CreateFeatureclass_management(
        out_path=out_path,
        out_name=out_name,
        geometry_type=lookUpGeometry(geom_type),
        spatial_reference=arcpy.SpatialReference(wkid))[0]
    target = out_path + os.sep + out_name
    created = []
    for fld in fields:
        if fld['name'] == objectIdField:
            continue
        created.append(fld['name'])
        arcpy.AddField_management(target, fld['name'], lookUpFieldType(fld['type']))
    return fc, created
creates a feature class in a given gdb or folder
57,155
def download_arcrest():
    """Download the ArcREST master branch and repackage it as two zips.

    Fetches the GitHub master archive into arcpy's scratch folder,
    extracts it, then zips the ``arcrest`` and ``arcresthelper`` source
    packages separately.

    Returns:
        tuple(str, str): paths of (arcrest.zip, arcresthelper.zip).
    """
    arcrest_name = "arcrest.zip"
    arcresthelper_name = "arcresthelper.zip"
    url = "https://github.com/Esri/ArcREST/archive/master.zip"
    file_name = os.path.join(arcpy.env.scratchFolder, os.path.basename(url))
    scratch_folder = os.path.join(arcpy.env.scratchFolder, "temp34asdf3d")
    arcrest_zip = os.path.join(scratch_folder, arcrest_name)
    arcresthelper_zip = os.path.join(scratch_folder, arcresthelper_name)
    # urlretrieve moved in Python 3; pick the right module at runtime.
    if sys.version_info.major == 3:
        import urllib.request
        urllib.request.urlretrieve(url, file_name)
    else:
        import urllib
        urllib.urlretrieve(url, file_name)
    # Start from a clean scratch directory.
    if os.path.isdir(scratch_folder):
        shutil.rmtree(scratch_folder)
    os.makedirs(scratch_folder)
    zip_obj = zipfile.ZipFile(file_name, 'r')
    zip_obj.extractall(scratch_folder)
    zip_obj.close()
    del zip_obj
    # Re-zip just the arcrest package.
    zip_obj = zipfile.ZipFile(arcrest_zip, 'w')
    zipws(path=os.path.join(scratch_folder, "arcrest-master", "src", "arcrest"),
          zip=zip_obj, keep=True)
    zip_obj.close()
    del zip_obj
    # Re-zip just the arcresthelper package.
    zip_obj = zipfile.ZipFile(arcresthelper_zip, 'w')
    zipws(path=os.path.join(scratch_folder, "arcrest-master", "src", "arcresthelper"),
          zip=zip_obj, keep=True)
    zip_obj.close()
    del zip_obj
    shutil.rmtree(os.path.join(scratch_folder, "arcrest-master"))
    return arcrest_zip, arcresthelper_zip
downloads arcrest to disk
57,156
def handler(self):
    """Return (lazily building) the NTLM auth handler for this connection.

    Raises:
        Exception: when the python-ntlm package is not installed.
    """
    if not hasNTLM:
        raise Exception("Missing Ntlm python package.")
    if self._handler is None:
        mgr = request.HTTPPasswordMgrWithDefaultRealm()
        mgr.add_password(None, self._parsed_org_url,
                         self._login_username, self._password)
        self._handler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(mgr)
    return self._handler
gets the security handler for the class
57,157
def token(self):
    """Return an ArcGIS Server token obtained via the portal token handler."""
    portal_handler = self._portalTokenHandler
    return portal_handler.servertoken(serverURL=self._serverUrl,
                                      referer=self._referer)
gets the AGS server token
57,158
def token(self):
    """Return a valid OAuth token, regenerating it when missing or expired."""
    needs_refresh = (self._token is None or
                     datetime.datetime.now() >= self._token_expires_on)
    if needs_refresh:
        self._generateForOAuthSecurity(self._client_id,
                                       self._secret_id,
                                       self._token_url)
    return self._token
obtains a token from the site
57,159
def _generateForOAuthSecurity(self, client_id, secret_id, token_url=None):
    """Request an OAuth client-credentials token and cache it on the instance.

    On success, stores the token, its expiry and a success message; on
    failure, clears all cached token state and stores the raw response
    in ``self._message``.
    """
    if token_url is None:
        token_url = "https://www.arcgis.com/sharing/rest/oauth2/token"
    params = {
        "client_id": client_id,
        "client_secret": secret_id,
        "grant_type": "client_credentials",
        "f": "json",
    }
    token = self._post(url=token_url, param_dict=params,
                       securityHandler=None,
                       proxy_port=self._proxy_port,
                       proxy_url=self._proxy_url)
    if 'access_token' not in token:
        # Failure: reset cached state and keep the response for inspection.
        self._token = None
        self._expires_in = None
        self._token_created_on = None
        self._token_expires_on = None
        self._valid = False
        self._message = token
        return
    self._token = token['access_token']
    self._expires_in = token['expires_in']
    self._token_created_on = datetime.datetime.now()
    self._token_expires_on = self._token_created_on + datetime.timedelta(
        seconds=int(token['expires_in']))
    self._valid = True
    self._message = "Token Generated"
generates a token based on the OAuth security model
57,160
def referer_url(self, value):
    """Set the referer URL; a change invalidates the cached token."""
    if value == self._referer_url:
        return
    self._token = None
    self._referer_url = value
sets the referer url
57,161
def __getRefererUrl(self, url=None):
    """Derive the referer URL used for token generation.

    Calls the portal 'self' resource, then pins the referer to
    "arcgis.com" and clears the cached token so it gets regenerated
    with the new referer.

    Args:
        url: portal self URL; defaults to the public ArcGIS Online one.
    Returns:
        str: the referer URL (always "arcgis.com").
    """
    if url is None:
        url = "http://www.arcgis.com/sharing/rest/portals/self"
    params = {"f": "json",
              "token": self.token}
    # NOTE(review): the response is never used -- presumably this request
    # only serves to validate the current token; confirm before removing.
    val = self._get(url=url,
                    param_dict=params,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    self._referer_url = "arcgis.com"
    # Drop the cached token so the next access regenerates it.
    self._token = None
    return self._referer_url
gets the referer url for the token handler
57,162
def servertoken(self, serverURL, referer):
    """Return a server token for *serverURL*, regenerating when needed.

    A new token is requested when none is cached, the cached one has
    expired, or the target server changed since the last call.  On
    failure ``self._valid`` is False and ``self._message`` holds the
    error response.
    """
    if self._server_token is None or \
       self._server_token_expires_on is None or \
       datetime.datetime.now() >= self._server_token_expires_on or \
       self._server_url != serverURL:
        self._server_url = serverURL
        result = self._generateForServerTokenSecurity(serverURL=serverURL,
                                                      token=self.token,
                                                      tokenUrl=self._token_url,
                                                      referer=referer)
        if 'error' in result:
            self._valid = False
            self._message = result
        else:
            self._valid = True
            self._message = "Server Token Generated"
    return self._server_token
returns the server token for the server
57,163
def exportCertificate(self, certificate, folder):
    """Download an SSL certificate from the machine into *folder*.

    Args:
        certificate: certificate alias/identifier on the machine.
        folder: local directory the certificate is saved to.
    """
    endpoint = self._url + "/sslcertificates/%s/export" % certificate
    return self._get(url=endpoint,
                     param_dict={"f": "json"},
                     out_folder=folder)
gets the SSL Certificates for a given machine
57,164
def currentVersion(self):
    """Return the site's version, lazily loading it on first access."""
    if self._currentVersion is not None:
        return self._currentVersion
    # First access: fetch the resource JSON, which populates the cache.
    self.__init(self._url)
    return self._currentVersion
returns the current version of the site
57,165
def portals(self):
    """Give administration access to the organization's Portals resource."""
    return _portals.Portals(url="%s/portals" % self.root,
                            securityHandler=self._securityHandler,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port)
returns the Portals class that provides administration access into a given organization
57,166
def oauth2(self):
    """Return the oauth2 helper rooted at this portal's /oauth2 endpoint."""
    base = self._url
    url = base if base.endswith("/oauth2") else base + "/oauth2"
    return _oauth2.oauth2(oauth_url=url,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
returns the oauth2 class
57,167
def community(self):
    """Return the community root (user and group resources/operations)."""
    community_url = self._url + "/community"
    return _community.Community(url=community_url,
                                securityHandler=self._securityHandler,
                                proxy_url=self._proxy_url,
                                proxy_port=self._proxy_port)
The portal community root covers user and group resources and operations.
57,168
def content(self):
    """Return access to the site's content resources."""
    content_url = self._url + "/content"
    return _content.Content(url=content_url,
                            securityHandler=self._securityHandler,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port)
returns access into the site s content
57,169
def search(self, q, t=None, focus=None, bbox=None, start=1, num=10,
           sortField=None, sortOrder="asc", useSecurity=True):
    """Search for content items in the portal's index.

    Results only contain items the authenticated user may access; the
    index may lag behind recent content changes.

    Args:
        q: search query string (see the portal Search reference).
        t: optional item type filter.
        focus: optional search focus.
        bbox: optional bounding box filter.
        start: 1-based index of the first result.
        num: maximum number of results per page.
        sortField: optional field to sort on.
        sortOrder: "asc" or "desc".
        useSecurity: when True, restrict results and pass the token.
    Returns:
        dict: the JSON search response.
    """
    # Normalize the endpoint: self._url may or may not end in /rest.
    if self._url.endswith("/rest"):
        url = self._url + "/search"
    else:
        url = self._url + "/rest/search"
    params = {
        "f": "json",
        "q": q,
        "sortOrder": sortOrder,
        "num": num,
        "start": start,
        'restrict': useSecurity
    }
    if not focus is None:
        params['focus'] = focus
    if not t is None:
        params['t'] = t
    # Only token-based handlers embed the token in the query itself.
    if useSecurity and \
       self._securityHandler is not None and \
       self._securityHandler.method == "token":
        params["token"] = self._securityHandler.token
    if sortField is not None:
        params['sortField'] = sortField
    if bbox is not None:
        params['bbox'] = bbox
    return self._get(url=url,
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
This operation searches for content items in the portal . The searches are performed against a high performance index that indexes the most popular fields of an item . See the Search reference page for information on the fields and the syntax of the query . The search index is updated whenever users add update or delete content . There can be a lag between the time that the content is updated and the time when it s reflected in the search results . The results of a search only contain items that the user has permission to access .
57,170
def hostingServers(self):
    """Return administration objects for the site's hosting servers.

    For Portal-backed organizations this yields AGSAdministration
    objects; for ArcGIS Online it yields hostedservice Services objects.
    Returns None (after printing the error) when the portal URLs request
    failed, or an empty list when no publishing servers are found.
    """
    portals = self.portals
    portal = portals.portalSelf
    urls = portal.urls
    if 'error' in urls:
        print(urls)
        return
    services = []
    if urls != {}:
        if 'urls' in urls:
            if 'features' in urls['urls']:
                # Prefer the https endpoints when both are published.
                if 'https' in urls['urls']['features']:
                    for https in urls['urls']['features']['https']:
                        if portal.isPortal == True:
                            # Federated server: admin API hangs off the host.
                            url = "%s/admin" % https
                            services.append(AGSAdministration(url=url,
                                                              securityHandler=self._securityHandler,
                                                              proxy_url=self._proxy_url,
                                                              proxy_port=self._proxy_port))
                        else:
                            # AGOL hosted services admin endpoint.
                            url = "https://%s/%s/ArcGIS/rest/admin" % (https, portal.portalId)
                            services.append(Services(url=url,
                                                     securityHandler=self._securityHandler,
                                                     proxy_url=self._proxy_url,
                                                     proxy_port=self._proxy_port))
                elif 'http' in urls['urls']['features']:
                    for http in urls['urls']['features']['http']:
                        if (portal.isPortal == True):
                            url = "%s/admin" % http
                            services.append(AGSAdministration(url=url,
                                                              securityHandler=self._securityHandler,
                                                              proxy_url=self._proxy_url,
                                                              proxy_port=self._proxy_port,
                                                              initialize=True))
                        else:
                            url = "http://%s/%s/ArcGIS/rest/admin" % (http, portal.portalId)
                            services.append(Services(url=url,
                                                     securityHandler=self._securityHandler,
                                                     proxy_url=self._proxy_url,
                                                     proxy_port=self._proxy_port))
                else:
                    print("Publishing servers not found")
            else:
                print("Publishing servers not found")
        else:
            print("Publishing servers not found")
        return services
    else:
        # No URL info: fall back to the portal's registered servers.
        for server in portal.servers['servers']:
            url = server['adminUrl'] + "/admin"
            # Server name is reported on 6080; admin traffic uses 6443.
            sh = PortalServerSecurityHandler(tokenHandler=self._securityHandler,
                                             serverUrl=url,
                                             referer=server['name'].replace(":6080", ":6443"))
            services.append(AGSAdministration(url=url,
                                              securityHandler=sh,
                                              proxy_url=self._proxy_url,
                                              proxy_port=self._proxy_port,
                                              initialize=False))
        return services
Returns the objects to manage site s hosted services . It returns AGSAdministration object if the site is Portal and it returns a hostedservice . Services object if it is AGOL .
57,171
def add_codedValue(self, name, code):
    """Append a {name, code} entry to the domain's coded-value list."""
    if self._codedValues is None:
        self._codedValues = []
    entry = {"name": name, "code": code}
    self._codedValues.append(entry)
adds a value to the coded value list
57,172
def __init(self):
    """Fetch the resource JSON and map each key onto an attribute."""
    res = self._get(url=self._url,
                    param_dict={"f": "json"},
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    self._json_dict = res
    self._json_string = json.dumps(res)
    for key, val in res.items():
        setattr(self, key, val)
loads the json values
57,173
def areasAndLengths(self, polygons, lengthUnit, areaUnit, calculationType,):
    """Compute areas and perimeter lengths for the input polygons.

    Args:
        polygons: non-empty list of Polygon geometries; the spatial
            reference is taken from the first polygon.
        lengthUnit: unit for the returned lengths.
        areaUnit: unit for the returned areas.
        calculationType: e.g. planar/geodesic calculation mode.
    Returns:
        dict: the service response, or an error string when no polygons
        were provided.
    """
    url = self._url + "/areasAndLengths"
    params = {
        "f": "json",
        "lengthUnit": lengthUnit,
        # The REST API expects areaUnit wrapped in a JSON object.
        "areaUnit": {"areaUnit": areaUnit},
        "calculationType": calculationType
    }
    if isinstance(polygons, list) and len(polygons) > 0:
        p = polygons[0]
        if isinstance(p, Polygon):
            # Spatial reference comes from the first polygon.
            params['sr'] = p.spatialReference['wkid']
            params['polygons'] = [poly.asDictionary for poly in polygons]
        del p
    else:
        return "No polygons provided, please submit a list of polygon geometries"
    return self._get(url=url,
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The areasAndLengths operation is performed on a geometry service resource . This operation calculates areas and perimeter lengths for each polygon specified in the input array .
57,174
def __geometryToGeomTemplate(self, geometry):
    """Wrap one geometry in the {geometryType, geometry} template the
    geometry service expects.

    Raises:
        AttributeError: when *geometry* is not a supported geometry type.
    """
    # Order matters only for readability; the types are disjoint.
    type_names = (
        (Polyline, "esriGeometryPolyline"),
        (Polygon, "esriGeometryPolygon"),
        (Point, "esriGeometryPoint"),
        (MultiPoint, "esriGeometryMultipoint"),
        (Envelope, "esriGeometryEnvelope"),
    )
    for geom_cls, esri_name in type_names:
        if isinstance(geometry, geom_cls):
            return {"geometryType": esri_name,
                    "geometry": geometry.asDictionary}
    raise AttributeError("Invalid geometry type")
Converts a single geometry object to a geometry service geometry template value .
57,175
def __geomToStringArray(self, geometries, returnType="str"):
    """Convert geometry objects to dictionaries, returned either as a
    JSON string (default) or as a plain list.

    Unsupported geometry types are silently skipped.
    """
    converted = []
    for geom in geometries:
        if isinstance(geom, (Point, Polygon)):
            converted.append(geom.asDictionary)
        elif isinstance(geom, Polyline):
            converted.append({'paths': geom.asDictionary['paths']})
    if returnType == "list":
        return converted
    # "str" and any unknown returnType fall back to JSON text.
    return json.dumps(converted)
function to convert the geometries to strings
57,176
def autoComplete(self, polygons=None, polylines=None, sr=None):
    """Construct polygons filling the gaps between existing polygons and
    a set of polylines (the service's autoComplete operation).

    Args:
        polygons: list of polygon geometries (defaults to empty).
        polylines: list of polyline geometries (defaults to empty).
        sr: optional spatial reference for the inputs.
    Returns:
        dict: the service response.
    """
    # The previous defaults were mutable lists ([]), which are shared
    # across calls; None sentinels avoid that.  Callers passing [] see
    # identical behavior.
    if polygons is None:
        polygons = []
    if polylines is None:
        polylines = []
    url = self._url + "/autoComplete"
    params = {"f": "json"}
    if sr is not None:
        params['sr'] = sr
    params['polygons'] = self.__geomToStringArray(polygons)
    params['polylines'] = self.__geomToStringArray(polylines)
    return self._get(url, param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The autoComplete operation simplifies the process of constructing new polygons that are adjacent to other polygons . It constructs polygons that fill in the gaps between existing polygons and a set of polylines .
57,177
def buffer(self, geometries, inSR, distances, units, outSR=None,
           bufferSR=None, unionResults=True, geodesic=True):
    """Buffer the input geometries at the given distances.

    Args:
        geometries: non-empty list of Point, Polyline or Polygon
            geometries (all assumed to be of the same type as the first).
        inSR: spatial reference of the inputs.
        distances: one distance or a list of distances.
        units: unit of the distances.
        outSR: optional output spatial reference.
        bufferSR: optional spatial reference to buffer in.
        unionResults: when True, union the resulting buffers.
        geodesic: when True, use geodesic distance.
    Returns:
        dict: the service response, or None for an unsupported geometry
        type.
    """
    url = self._url + "/buffer"
    params = {
        "f": "json",
        "inSR": inSR,
        "geodesic": geodesic,
        "unionResults": unionResults
    }
    if isinstance(geometries, list) and len(geometries) > 0:
        # Geometry type is inferred from the first element.
        g = geometries[0]
        if isinstance(g, Polygon):
            params['geometries'] = {"geometryType": "esriGeometryPolygon",
                                    "geometries": self.__geomToStringArray(geometries, "list")}
        elif isinstance(g, Point):
            params['geometries'] = {"geometryType": "esriGeometryPoint",
                                    "geometries": self.__geomToStringArray(geometries, "list")}
        elif isinstance(g, Polyline):
            params['geometries'] = {"geometryType": "esriGeometryPolyline",
                                    "geometries": self.__geomToStringArray(geometries, "list")}
        else:
            # Unsupported geometry type.
            return None
    if isinstance(distances, list):
        distances = [str(d) for d in distances]
        params['distances'] = ",".join(distances)
    else:
        params['distances'] = str(distances)
    params['units'] = units
    if bufferSR is not None:
        params['bufferSR'] = bufferSR
    if outSR is not None:
        params['outSR'] = outSR
    return self._get(url,
                     param_dict=params,
                     proxy_port=self._proxy_port,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url)
The buffer operation is performed on a geometry service resource The result of this operation is buffered polygons at the specified distances for the input geometry array . Options are available to union buffers and to use geodesic distance .
57,178
def findTransformation(self, inSR, outSR, extentOfInterest=None, numOfResults=1):
    """List applicable geographic transformations between two spatial
    references (most applicable first); empty when none is needed.

    Args:
        inSR: input spatial reference.
        outSR: output spatial reference.
        extentOfInterest: optional Envelope narrowing the search.
        numOfResults: maximum number of transformations to return.
    """
    url = self._url + "/findTransformations"
    params = {"f": "json",
              "inSR": inSR,
              "outSR": outSR}
    if isinstance(numOfResults, int):
        params['numOfResults'] = numOfResults
    if isinstance(extentOfInterest, Envelope):
        params['extentOfInterest'] = extentOfInterest.asDictionary
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The findTransformations operation is performed on a geometry service resource . This operation returns a list of applicable geographic transformations you should use when projecting geometries from the input spatial reference to the output spatial reference . The transformations are in JSON format and are returned in order of most applicable to least applicable . Recall that a geographic transformation is not needed when the input and output spatial references have the same underlying geographic coordinate systems . In this case findTransformations returns an empty list . Every returned geographic transformation is a forward transformation meaning that it can be used as - is to project from the input spatial reference to the output spatial reference . In the case where a predefined transformation needs to be applied in the reverse direction it is returned as a forward composite transformation containing one transformation and a transformForward element with a value of false .
57,179
def fromGeoCoordinateString(self, sr, strings, conversionType, conversionMode=None):
    """Convert well-known coordinate strings into x,y coordinates.

    Args:
        sr: spatial reference of the output coordinates.
        strings: array of well-known strings to convert.
        conversionType: conversion type (e.g. MGRS, USNG, DD).
        conversionMode: optional mode for conversion types that take one.
    """
    params = {"f": "json",
              "sr": sr,
              "strings": strings,
              "conversionType": conversionType}
    if conversionMode is not None:
        params['conversionMode'] = conversionMode
    endpoint = self._url + "/fromGeoCoordinateString"
    return self._post(url=endpoint,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The fromGeoCoordinateString operation is performed on a geometry service resource . The operation converts an array of well - known strings into xy - coordinates based on the conversion type and spatial reference supplied by the user . An optional conversion mode parameter is available for some conversion types .
57,180
def toGeoCoordinateString(self, sr, coordinates, conversionType,
                          conversionMode="mgrsDefault", numOfDigits=None,
                          rounding=True, addSpaces=True):
    """Convert x,y coordinates into well-known coordinate strings.

    Optional parameters only apply to some conversion types; values
    supplied for inapplicable types are ignored by the service.

    Args:
        sr: spatial reference of the input coordinates.
        coordinates: array of [x, y] coordinates.
        conversionType: conversion type (e.g. MGRS, USNG, DD).
        conversionMode: mode for conversion types that take one.
        numOfDigits: optional precision.
        rounding: whether to round (where applicable).
        addSpaces: whether to insert spaces (where applicable).
    """
    params = {
        "f": "json",
        "sr": sr,
        "coordinates": coordinates,
        "conversionType": conversionType
    }
    url = self._url + "/toGeoCoordinateString"
    if not conversionMode is None:
        params['conversionMode'] = conversionMode
    if isinstance(numOfDigits, int):
        params['numOfDigits'] = numOfDigits
    # bool is a subclass of int, so the default True passes this check.
    if isinstance(rounding, int):
        params['rounding'] = rounding
    if isinstance(addSpaces, bool):
        params['addSpaces'] = addSpaces
    return self._post(url=url,
                      param_dict=params,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      securityHandler=self._securityHandler)
The toGeoCoordinateString operation is performed on a geometry service resource . The operation converts an array of xy - coordinates into well - known strings based on the conversion type and spatial reference supplied by the user . Optional parameters are available for some conversion types . Note that if an optional parameter is not applicable for a particular conversion type but a value is supplied for that parameter the value will be ignored .
57,181
def __init_url(self):
    """Resolve the analysis GP service URL from the portal's helperServices.

    Queries <portal>/portals/self and, when the relevant helper service
    is advertised, stores its URL in ``self._analysis_url`` and wraps it
    in a lazily-initialized GPService.
    """
    portals_self_url = "{}/portals/self".format(self._url)
    params = {"f": "json"}
    if not self._securityHandler is None:
        params['token'] = self._securityHandler.token
    res = self._get(url=portals_self_url,
                    param_dict=params,
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    if "helperServices" in res:
        helper_services = res.get("helperServices")
        # NOTE(review): the guard tests for "hydrology" but the URL is
        # read from the "elevation" entry -- one of the two keys looks
        # like a copy/paste mistake; confirm which service this class
        # wraps before changing either.
        if "hydrology" in helper_services:
            analysis_service = helper_services.get("elevation")
            if "url" in analysis_service:
                self._analysis_url = analysis_service.get("url")
    self._gpService = GPService(url=self._analysis_url,
                                securityHandler=self._securityHandler,
                                proxy_url=self._proxy_url,
                                proxy_port=self._proxy_port,
                                initialize=False)
loads the information into the class
57,182
def get_argument_parser(name=None, **kwargs):
    """Return the global ArgumentParser registered under *name*.

    On first use for a given name (or whenever extra kwargs are passed),
    the parser is (re)initialized with those kwargs.
    """
    if name is None:
        name = "default"
    must_init = bool(kwargs) or name not in _parsers
    if must_init:
        init_argument_parser(name, **kwargs)
    return _parsers[name]
Returns the global ArgumentParser instance with the given name . The 1st time this function is called a new ArgumentParser instance will be created for the given name and any args other than name will be passed on to the ArgumentParser constructor .
57,183
def parse(self, stream):
    """Parse key/value settings from a config stream.

    Blank lines, comments (#, ;), section headers ([...]) and YAML
    document markers (---) are skipped.  A bare key maps to "true";
    "[a, b]" values become lists.

    Returns:
        OrderedDict mapping keys to strings or lists of strings.
    Raises:
        ConfigFileParserException: for a line that matches neither form.
    """
    # Build the two line patterns once, outside the loop.
    ws = "\\s*"
    key_pat = "(?P<key>[^:=;#\s]+?)"
    value_pat = ws + "[:=\s]" + ws + "(?P<value>.+?)"
    comment_pat = ws + "(?P<comment>\\s[;#].*)?"
    key_only_re = re.compile("^" + key_pat + comment_pat + "$")
    key_value_re = re.compile("^" + key_pat + value_pat + comment_pat + "$")
    result = OrderedDict()
    for lineno, raw_line in enumerate(stream):
        text = raw_line.strip()
        if not text or text[0] in ("#", ";", "[") or text.startswith("---"):
            continue
        match = key_only_re.match(text)
        if match:
            # Bare keys act as boolean flags.
            result[match.group("key")] = "true"
            continue
        match = key_value_re.match(text)
        if match:
            val = match.group("value")
            if val.startswith("[") and val.endswith("]"):
                val = [elem.strip() for elem in val[1:-1].split(",")]
            result[match.group("key")] = val
            continue
        raise ConfigFileParserException("Unexpected line %s in %s: %s" % (
            lineno, getattr(stream, 'name', 'stream'), text))
    return result
Parses the keys + values from a config file .
57,184
def parse(self, stream):
    """Parse a YAML mapping from *stream*.

    Returns:
        OrderedDict whose values are lists (kept as-is) or strings
        (everything else is stringified).
    Raises:
        ConfigFileParserException: on YAML errors or a non-mapping root.
    """
    yaml = self._load_yaml()
    try:
        parsed_obj = yaml.safe_load(stream)
    except Exception as e:
        raise ConfigFileParserException("Couldn't parse config file: %s" % e)
    if not isinstance(parsed_obj, dict):
        raise ConfigFileParserException(
            "The config file doesn't appear to "
            "contain 'key: value' pairs (aka. a YAML mapping). "
            "yaml.load('%s') returned type '%s' instead of 'dict'." % (
                getattr(stream, 'name', 'stream'),
                type(parsed_obj).__name__))
    result = OrderedDict()
    for key, value in parsed_obj.items():
        result[key] = value if isinstance(value, list) else str(value)
    return result
Parses the keys and values from a config file .
57,185
def write_config_file(self, parsed_namespace, output_file_paths, exit_after=False):
    """Serialize the parsed settings and write them to each output path.

    Args:
        parsed_namespace: the Namespace produced by parsing.
        output_file_paths: list of config file paths to write.
        exit_after: when True, exit(0) after writing instead of printing.
    Raises:
        ValueError: if any output path cannot be opened for writing.
    """
    # Pre-flight: verify every destination is writable before doing the
    # work.  NOTE: opening with "w" truncates the file even in this check.
    for output_file_path in output_file_paths:
        try:
            with open(output_file_path, "w") as output_file:
                pass
        except IOError as e:
            raise ValueError("Couldn't open %s for writing: %s" % (
                output_file_path, e))
    if output_file_paths:
        # Gather settings from every source and serialize them once.
        config_items = self.get_items_for_config_file_output(
            self._source_to_settings, parsed_namespace)
        file_contents = self._config_file_parser.serialize(config_items)
        for output_file_path in output_file_paths:
            with open(output_file_path, "w") as output_file:
                output_file.write(file_contents)
        message = "Wrote config file to " + ", ".join(output_file_paths)
        if exit_after:
            self.exit(0, message)
        else:
            print(message)
Write the given settings to output files .
57,186
def convert_item_to_command_line_arg(self, action, key, value):
    """Translate one config-file/env-var setting into command-line args.

    Args:
        action: the matching argparse action, or None for unknown keys.
        key: the setting name.
        value: the setting value (str or list).
    Returns:
        list[str]: arguments to splice into the command line.
    Raises:
        ValueError: for an unsupported value type.
    """
    args = []
    if action is None:
        # Unknown key: synthesize an option string for it.
        command_line_key = \
            self.get_command_line_key_for_unknown_config_file_setting(key)
    else:
        command_line_key = action.option_strings[-1]
    if action is not None and isinstance(action, ACTION_TYPES_THAT_DONT_NEED_A_VALUE):
        # Flag-style options: the value only decides presence/absence.
        if value.lower() in ("true", "yes", "1"):
            args.append(command_line_key)
        elif value.lower() in ("false", "no", "0"):
            pass
        else:
            self.error("Unexpected value for %s: '%s'. Expecting 'true', "
                       "'false', 'yes', 'no', '1' or '0'" % (key, value))
    elif isinstance(value, list):
        if action is None or isinstance(action, argparse._AppendAction):
            # Append-style: repeat the option once per element.
            for list_elem in value:
                args.append(command_line_key)
                args.append(str(list_elem))
        elif (isinstance(action, argparse._StoreAction) and
              action.nargs in ('+', '*')) or (
                  isinstance(action.nargs, int) and action.nargs > 1):
            # nargs-style: one flag followed by all the elements.
            args.append(command_line_key)
            for list_elem in value:
                args.append(str(list_elem))
        else:
            self.error(("%s can't be set to a list '%s' unless its action type is changed "
                        "to 'append' or nargs is set to '*', '+', or > 1") % (key, value))
    elif isinstance(value, str):
        args.append(command_line_key)
        args.append(value)
    else:
        raise ValueError("Unexpected value type %s for value: %s" % (
            type(value), value))
    return args
Converts a config file or env var key + value to a list of commandline args to append to the commandline .
57,187
def get_possible_config_keys(self, action):
    """Return the config-file keys that may set this action's value.

    Only long options (prefixed with a doubled prefix char, e.g. "--")
    are settable; each yields both its bare name and the full option.
    Write-out-config-file actions are never settable from a config file.
    """
    if getattr(action, 'is_write_out_config_file_arg', None):
        return []
    keys = []
    for option in action.option_strings:
        is_long_option = any(option.startswith(c * 2) for c in self.prefix_chars)
        if is_long_option:
            keys.extend([option[2:], option])
    return keys
This method decides which actions can be set in a config file and what their keys will be . It returns a list of 0 or more config keys that can be used to set the given action s value in a config file .
57,188
def eval(lisp):
    """Evaluate plash "lisp": a list of [macro-name, arg...] string lists.

    Each inner list is dispatched to a registered macro; non-None string
    results are collected and joined with newlines.

    Raises:
        EvalError: malformed input, or a macro returning a non-string.
        MacroNotFoundError: unknown macro name.
        MacroError: a macro raised (unless PLASH_DEBUG is enabled, which
            re-raises the original exception for a full traceback).
    """
    macro_values = []
    if not isinstance(lisp, list):
        raise EvalError('eval root element must be a list')
    for item in lisp:
        if not isinstance(item, list):
            raise EvalError('must evaluate list of list')
        if not all(isinstance(i, str) for i in item):
            raise EvalError(
                'must evaluate list of list of strings. not a list of strings: {}'.format(item))
        name = item[0]
        args = item[1:]
        try:
            macro = state['macros'][name]
        except KeyError:
            raise MacroNotFoundError("macro {} not found".format(repr(name)))
        try:
            res = macro(*args)
        except Exception as exc:
            # With PLASH_DEBUG set, surface the original traceback.
            if os.getenv('PLASH_DEBUG', '').lower() in ('1', 'yes', 'true'):
                raise
            # Don't double-wrap an already-wrapped macro error.
            if isinstance(exc, MacroError):
                raise
            raise MacroError(macro, name, sys.exc_info())
        if not isinstance(res, str) and res is not None:
            raise EvalError(
                'eval macro must return string or None ({} returned {})'.format(name, type(res)))
        if res is not None:
            macro_values.append(res)
    return '\n'.join(macro_values)
plash lisp is one dimensional lisp .
57,189
def plash_map(*args):
    """Thin wrapper around ``plash map``.

    Returns:
        str: the command's output with trailing newlines stripped, or
        None when the command printed nothing (map not set).
    """
    from subprocess import check_output
    out = check_output(['plash', 'map'] + list(args))
    # check_output returns bytes; the previous `out == ''` comparison
    # could never be true in Python 3, so empty output was returned as
    # '' instead of None.
    if out == b'':
        return None
    return out.decode().strip('\n')
thin wrapper around plash map
57,190
def defpm(name, *lines):
    """Define a new package-manager macro registered under *name*.

    Each template in *lines* is formatted with the space-joined,
    shell-escaped package list and executed via the 'run' macro.
    """
    @register_macro(name, group='package managers')
    @shell_escape_args
    def package_manager(*packages):
        if not packages:
            return
        joined = ' '.join(packages)
        commands = [template.format(joined) for template in lines]
        return eval([['run'] + commands])
    package_manager.__doc__ = "install packages with {}".format(name)
define a new package manager
57,191
def layer(command=None, *args):
    """Hint the start of a new layer.

    With a command, each argument is run in its own layer.
    """
    if not command:
        return eval([['hint', 'layer']])
    script = [['layer']]
    for arg in args:
        script.append([command, arg])
        script.append(['layer'])
    return eval(script)
hints the start of a new layer
57,192
def import_env(*envs):
    """Yield NAME=value assignments copied from the host environment.

    A spec of the form "VAR:ALIAS" exports VAR's value under ALIAS.
    Unset variables are skipped; values are shell-quoted.
    """
    for spec in envs:
        name, sep, alias = spec.partition(':')
        if not sep:
            alias = name
        val = os.environ.get(name)
        if val is not None:
            yield '{}={}'.format(alias, shlex.quote(val))
import environment variables from host
57,193
def write_file(fname, *lines):
    """Yield shell commands that create *fname* and append each line to it."""
    yield 'touch {}'.format(fname)
    for content in lines:
        yield 'echo {} >> {}'.format(content, fname)
write lines to a file
57,194
def eval_file ( file ) : 'evaluate file content as expressions' fname = os . path . realpath ( os . path . expanduser ( file ) ) with open ( fname ) as f : inscript = f . read ( ) sh = run_write_read ( [ 'plash' , 'eval' ] , inscript . encode ( ) ) . decode ( ) if sh . endswith ( '\n' ) : return sh [ : - 1 ] return sh
evaluate file content as expressions
57,195
def eval_string ( stri ) : 'evaluate expressions passed as string' tokens = shlex . split ( stri ) return run_write_read ( [ 'plash' , 'eval' ] , '\n' . join ( tokens ) . encode ( ) ) . decode ( )
evaluate expressions passed as string
57,196
def eval_stdin ( ) : 'evaluate expressions read from stdin' cmd = [ 'plash' , 'eval' ] p = subprocess . Popen ( cmd , stdin = sys . stdin , stdout = sys . stdout ) exit = p . wait ( ) if exit : raise subprocess . CalledProcessError ( exit , cmd )
evaluate expressions read from stdin
57,197
def from_map ( map_key ) : 'use resolved map as image' image_id = subprocess . check_output ( [ 'plash' , 'map' , map_key ] ) . decode ( ) . strip ( '\n' ) if not image_id : raise MapDoesNotExist ( 'map {} not found' . format ( repr ( map_key ) ) ) return hint ( 'image' , image_id )
use resolved map as image
57,198
def fields(self):
    """Restrict serializer fields per the request's ?fields=/?omit= params.

    Only applies to the root serializer (or the child of a many= list
    root); nested serializers keep their full field set.
    """
    fields = super(DynamicFieldsMixin, self).fields
    # No context yet (e.g. during schema generation): nothing to filter.
    if not hasattr(self, '_context'):
        return fields
    is_root = self.root == self
    parent_is_list_root = self.parent == self.root and getattr(self.parent, 'many', False)
    # Only filter at the top level; leave nested serializers untouched.
    if not (is_root or parent_is_list_root):
        return fields
    try:
        request = self.context['request']
    except KeyError:
        conf = getattr(settings, 'DRF_DYNAMIC_FIELDS', {})
        # The warning can be silenced via settings.
        if not conf.get('SUPPRESS_CONTEXT_WARNING', False) is True:
            warnings.warn('Context does not have access to request. '
                          'See README for more information.')
        return fields
    # DRF requests expose query_params; fall back to Django's GET.
    params = getattr(request, 'query_params', getattr(request, 'GET', None))
    if params is None:
        warnings.warn('Request object does not contain query paramters')
    try:
        filter_fields = params.get('fields', None).split(',')
    except AttributeError:
        filter_fields = None
    try:
        omit_fields = params.get('omit', None).split(',')
    except AttributeError:
        omit_fields = []
    existing = set(fields.keys())
    if filter_fields is None:
        # No ?fields= param: everything is allowed.
        allowed = existing
    else:
        # filter(None, ...) drops empty strings from e.g. "a,,b".
        allowed = set(filter(None, filter_fields))
    omitted = set(filter(None, omit_fields))
    # omit always wins over fields.
    for field in existing:
        if field not in allowed:
            fields.pop(field, None)
        if field in omitted:
            fields.pop(field, None)
    return fields
Filters the fields according to the fields query parameter .
57,199
def setup_admin_on_rest_handlers(admin, admin_handler):
    """Register the admin-on-rest routes and static assets on *admin*'s router."""
    router = admin.router
    handler = admin_handler
    router.add_route('GET', '', handler.index_page, name='admin.index')
    router.add_route('POST', '/token', handler.token, name='admin.token')
    router.add_static('/static', path=str(PROJ_ROOT / 'static'),
                      name='admin.static')
    router.add_route('DELETE', '/logout', handler.logout, name='admin.logout')
Initialize routes .