idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
56,900
def getUsersEnterpriseGroups(self, username, searchFilter, maxCount=100):
    """Lists the groups assigned to a user account in the configured
    enterprise group store.

    Args:
        username: account whose enterprise groups are listed.
        searchFilter: filter string to narrow down the search results.
        maxCount: maximum number of groups to return (default 100).
    """
    payload = {
        "f": "json",
        "username": username,
        "filter": searchFilter,
        "maxCount": maxCount,
    }
    endpoint = "{}/Groups/getEnterpriseGroupsForUser".format(self._url)
    return self._get(url=endpoint,
                     param_dict=payload,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
This operation lists the groups assigned to a user account in the configured enterprise group store . You can use the filter parameter to narrow down the search results .
56,901
def refreshGroupMembership(self, groups):
    """Forces a refresh of the enterprise group memberships for the given
    groups; the portal normally refreshes these only on login or on the
    configured periodic schedule.
    """
    endpoint = self._url + "/groups/refreshMembership"
    return self._post(url=endpoint,
                      param_dict={"f": "json", "groups": groups},
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation iterates over every enterprise account configured in the portal and determines if the user account is a part of the input enterprise group . If there are any change in memberships the database and the indexes are updated for each group . While portal automatically refreshes the memberships during a user login and during a periodic refresh configured through the Update Identity Store operation this operation allows an administrator to force a refresh .
56,902
def searchEnterpriseGroups(self, searchFilter="", maxCount=100):
    """Searches groups in the configured enterprise group store.

    Args:
        searchFilter: filter string to narrow the search (default "").
        maxCount: maximum number of results (default 100).
    """
    payload = {"f": "json", "filter": searchFilter, "maxCount": maxCount}
    return self._post(url="{}/groups/searchEnterpriseGroups".format(self._url),
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation searches groups in the configured enterprise group store . You can narrow down the search using the search filter parameter .
56,903
def SSLCertificates(self):
    """Lists the SSL certificates registered with the portal."""
    payload = {"f": "json"}
    return self._post(url=self._url + "/SSLCertificate",
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Lists certificates .
56,904
def updateAppInfo(self, appInfo):
    """Updates the OAuth-specific properties associated with an application.

    Use the Get App Info operation to obtain the existing OAuth properties
    that can be edited.
    """
    endpoint = "{}/oauth/updateAppInfo".format(self._url)
    return self._post(url=endpoint,
                      param_dict={"f": "json", "appInfo": appInfo},
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation allows you to update the OAuth - specific properties associated with an application . Use the Get App Info operation to obtain the existing OAuth properties that can be edited .
56,905
def updateEnterpriseUser(self, username, idpUsername):
    """Updates the idpUsername for an enterprise user in the portal.

    Used when migrating accounts from web-tier authentication to SAML
    authentication.
    """
    payload = {
        "f": "json",
        "username": username,
        "idpUsername": idpUsername,
    }
    return self._post(url=self._url + "/users/updateEnterpriseUser",
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation allows an administrator to update the idpUsername for an enterprise user in the portal . This is used when migrating from accounts used with web - tier authentication to SAML authentication .
56,906
def updateIdenityStore(self, userPassword, user, userFullnameAttribute,
                       ldapURLForUsers, userEmailAttribute,
                       usernameAttribute, isPasswordEncrypted=False,
                       caseSensitive=True):
    """Changes the identity provider configuration of the portal, e.g. to
    connect to an enterprise identity store such as Windows Active
    Directory or LDAP.

    NOTE(review): the method name misspells "Identity"; it is kept as-is
    for backward compatibility with existing callers.
    """
    payload = {
        "f": "json",
        "userPassword": userPassword,
        "isPasswordEncrypted": isPasswordEncrypted,
        "user": user,
        "userFullnameAttribute": userFullnameAttribute,
        "ldapURLForUsers": ldapURLForUsers,
        "userEmailAttribute": userEmailAttribute,
        "usernameAttribute": usernameAttribute,
        "caseSensitive": caseSensitive,
    }
    return self._post(url=self._url + "/config/updateIdentityStore",
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_port=self._proxy_port,
                      proxy_url=self._proxy_url)
You can use this operation to change the identity provider configuration in your portal . When Portal for ArcGIS is first installed it supports token - based authentication using the built - in identity store for accounts . To configure your portal to connect to your enterprise authentication mechanism it must be configured to use an enterprise identity store such as Windows Active Directory or LDAP .
56,907
def editDirectory(self, directoryName, physicalPath, description):
    """Changes the physical path and description of a directory.

    Useful when moving a directory from a local path to a network share;
    the API does not copy the existing content to the new path.
    """
    endpoint = self._url + "/directories/%s/edit" % directoryName
    payload = {
        "f": "json",
        "physicalPath": physicalPath,
        "description": description,
    }
    return self._post(url=endpoint,
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_port=self._proxy_port,
                      proxy_url=self._proxy_url)
The edit operation on a directory can be used to change the physical path and description properties of the directory . This is useful when changing the location of a directory from a local path to a network share . However the API does not copy your content and data from the old path to the new path . This has to be done independently by the system administrator .
56,908
def releaseLicense(self, username):
    """Releases a checked-out ArcGIS Pro license for the given account,
    freeing the outstanding license so the user can check out a new one.
    """
    payload = {"username": username, "f": "json"}
    return self._post(url="{}/licenses/releaseLicense".format(self._url),
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
If a user checks out an ArcGIS Pro license for offline or disconnected use this operation releases the license for the specified account . A license can only be used with a single device running ArcGIS Pro . To check in the license a valid access token and refresh token is required . If the refresh token for the device is lost damaged corrupted or formatted the user will not be able to check in the license . This prevents the user from logging in to ArcGIS Pro from any other device . As an administrator you can release the license . This frees the outstanding license and allows the user to check out a new license or use ArcGIS Pro in a connected environment .
56,909
def removeAllEntitlements(self, appId):
    """Removes all entitlements from the portal for the specified product
    and revokes all entitlements assigned to users for it. License
    assignments are retained on disk for possible later re-configuration.
    """
    endpoint = self._url + "/licenses/removeAllEntitlements"
    return self._post(url=endpoint,
                      param_dict={"f": "json", "appId": appId},
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation removes all entitlements from the portal for ArcGIS Pro or additional products such as Navigator for ArcGIS and revokes all entitlements assigned to users for the specified product . The portal is no longer a licensing portal for that product . License assignments are retained on disk . Therefore if you decide to configure this portal as a licensing portal for the product again in the future all licensing assignments will be available in the website .
56,910
def updateLanguages(self, languages):
    """Changes which languages will have content displayed in portal
    search results.

    Bug fix: the original read ``url = self._url = "/languages/update"``,
    a chained *assignment* that clobbered the instance's base URL and
    posted to a bare relative path. It now appends the operation path to
    the base URL without mutating ``self._url``.
    """
    url = self._url + "/languages/update"
    params = {"f": "json", "languages": languages}
    return self._post(url=url,
                      param_dict=params,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
You can use this operation to change which languages will have content displayed in portal search results .
56,911
def updateLicenseManager(self, licenseManagerInfo):
    """Changes the ArcGIS License Server Administrator connection
    information for the portal; also used to register a backup license
    manager (the portal restarts automatically afterwards).
    """
    payload = {"f": "json", "licenseManagerInfo": licenseManagerInfo}
    return self._post(url=self._url + "/licenses/updateLicenseManager",
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
ArcGIS License Server Administrator works with your portal and enforces licenses for ArcGIS Pro . This operation allows you to change the license server connection information for your portal . When you import entitlements into portal using the Import Entitlements operation a license server is automatically configured for you . If your license server changes after the entitlements have been imported you only need to change the license server connection information . You can register a backup license manager for high availability of your licensing portal . When configuring a backup license manager you need to make sure that the backup license manager has been authorized with the same organizational entitlements . After configuring the backup license manager Portal for ArcGIS is restarted automatically . When the restart completes the portal is configured with the backup license server you specified .
56,912
def updateIndexConfiguration(self, indexerHost="localhost",
                             indexerPort=7199):
    """Changes the connection information for the indexing service
    (by default Portal runs the indexer on port 7199).

    NOTE(review): this issues a GET, matching the original code, even
    though it is an update operation — left unchanged.
    """
    payload = {
        "f": "json",
        "indexerHost": indexerHost,
        "indexerPort": indexerPort,
    }
    return self._get(url="{}/indexer/update".format(self._url),
                     param_dict=payload,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
You can use this operation to change the connection information for the indexing service . By default Portal for ArcGIS runs an indexing service that runs on port 7199 . If you want the sharing API to refer to the indexing service on another instance you need to provide the host and port parameters .
56,913
def exportSite(self, location):
    """Exports the portal site configuration to the location you specify."""
    return self._post(url=self._url + "/exportSite",
                      param_dict={"location": location, "f": "json"})
This operation exports the portal site configuration to a location you specify .
56,914
def importSite(self, location):
    """Imports a portal site configuration from the location you specify."""
    return self._post(url=self._url + "/importSite",
                      param_dict={"location": location, "f": "json"})
This operation imports the portal site configuration from a location you specify .
56,915
def joinSite(self, machineAdminUrl, username, password):
    """Connects this portal machine to an existing site.

    The supplied account must have administrative privileges on the site.
    """
    payload = {
        "machineAdminUrl": machineAdminUrl,
        "username": username,
        "password": password,
        "f": "json",
    }
    return self._post(url=self._url + "/joinSite", param_dict=payload)
The joinSite operation connects a portal machine to an existing site . You must provide an account with administrative privileges to the site for the operation to be successful .
56,916
def unregisterMachine(self, machineName):
    """Unregisters a portal machine from a portal site; only valid when
    two machines participate in the site.
    """
    endpoint = "{}/machines/unregister".format(self._url)
    return self._post(url=endpoint,
                      param_dict={"f": "json", "machineName": machineName},
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
This operation unregisters a portal machine from a portal site . The operation can only be performed when there are two machines participating in a portal site .
56,917
def federation(self):
    """Returns the class that controls federation."""
    return _Federation(url=self._url + "/federation",
                       securityHandler=self._securityHandler,
                       proxy_url=self._proxy_url,
                       proxy_port=self._proxy_port)
returns the class that controls federation
56,918
def system(self):
    """Creates a reference to the System operations for Portal."""
    return _System(url=self._url + "/system",
                   securityHandler=self._securityHandler,
                   proxy_url=self._proxy_url,
                   proxy_port=self._proxy_port)
Creates a reference to the System operations for Portal
56,919
def security(self):
    """Creates a reference to the Security operations for Portal."""
    return _Security(url=self._url + "/security",
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
Creates a reference to the Security operations for Portal
56,920
def logs(self):
    """Returns the portal's log information object."""
    return _log(url=self._url + "/logs",
                securityHandler=self._securityHandler,
                proxy_url=self._proxy_url,
                proxy_port=self._proxy_port)
returns the portals log information
56,921
def search(self, q=None, per_page=None, page=None, bbox=None,
           sort_by="relevance", sort_order="asc"):
    """Searches the open-data site and returns the dataset results.

    Args:
        q: optional search query string.
        per_page: optional page size.
        page: optional page number.
        bbox: optional bounding-box filter.
        sort_by: sort field (default "relevance").
        sort_order: "asc" or "desc" (default "asc").

    Bug fixes relative to the original:
      * the default ``sort_by`` was misspelled "relavance"; corrected to
        "relevance".
      * ``sort_by`` was written into the parameter dict twice; it is now
        added once.
    """
    param_dict = {"f": "json", "sort_by": sort_by}
    # Only forward the optional parameters the caller actually supplied.
    optional = {
        "q": q,
        "per_page": per_page,
        "page": page,
        "bbox": bbox,
        "sort_order": sort_order,
    }
    for key, value in optional.items():
        if value is not None:
            param_dict[key] = value
    return self._get(url=self._url + "/datasets.json",
                     param_dict=param_dict,
                     securityHandler=self._securityHandler,
                     additional_headers=[],
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
searches the opendata site and returns the dataset results
56,922
def getDataset(self, itemId):
    """Gets a dataset class for the given item id."""
    base = self._url
    if "datasets" not in base.lower():
        base = base + "/datasets"
    return OpenDataItem(url=base,
                        itemId=itemId,
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port)
gets a dataset class
56,923
def __init(self):
    """Loads the item's JSON properties from the service and mirrors the
    entries of the 'data' payload as instance attributes.

    Bug fix: the original called ``setattr(self, "data", json_dict['data'])``
    *before* checking ``'data' in json_dict``, raising KeyError whenever the
    response carried no 'data' key; the attribute copy is now guarded.
    """
    url = "%s/%s.json" % (self._url, self._itemId)
    json_dict = self._get(url, {"f": "json"},
                          securityHandler=self._securityHandler,
                          proxy_port=self._proxy_port,
                          proxy_url=self._proxy_url)
    self._json_dict = json_dict
    self._json = json.dumps(json_dict)
    if 'data' in json_dict:
        setattr(self, "data", json_dict['data'])
        for k, v in json_dict['data'].items():
            setattr(self, k, v)
gets the properties for the site
56,924
def export(self, outFormat="shp", outFolder=None):
    """Exports a dataset to a local file in the requested format.

    Args:
        outFormat: one of 'shp', 'kml', 'geojson', 'csv' (default 'shp').
        outFolder: optional destination folder for the download.

    Bug fix: the original called ``self.time.sleep(7)``; instances do not
    appear to carry a ``time`` attribute (assumed — TODO confirm), so the
    standard-library ``time`` module is used instead.

    NOTE(review): while the service reports a 'status' key the call retries
    recursively with no retry limit — preserved from the original.
    """
    import time
    export_formats = {'shp': ".zip", 'kml': '.kml',
                      'geojson': ".geojson", 'csv': '.csv'}
    url = "%s/%s%s" % (self._url, self._itemId, export_formats[outFormat])
    results = self._get(url=url,
                        securityHandler=self._securityHandler,
                        out_folder=outFolder)
    if 'status' in results:
        time.sleep(7)  # give the server time to finish generating the export
        results = self.export(outFormat=outFormat, outFolder=outFolder)
    return results
exports a dataset to a local file in the requested output format
56,925
def error(self):
    """Gets the error, lazily running the name-mangled ``__init`` loader
    first if no error has been recorded yet.
    """
    if self._error is None:
        loader = getattr(self, "_" + self.__class__.__name__ + "__init", None)
        if callable(loader):
            try:
                loader()
            except Exception:
                # best-effort: a failed load leaves _error untouched
                pass
    return self._error
gets the error
56,926
def _2(self):
    """Python 2.x version of formatting the multipart/form-data body.

    Builds the request body from ``self.form_fields`` and ``self.files``
    into ``self.form_data``.

    Bug fix: the file-part header hard-coded the literal filename
    "(unknown)" while still passing ``filename`` to ``format`` (the
    placeholder had been lost); the header now emits the actual file name
    via ``{filename}``.
    """
    boundary = self.boundary
    buf = StringIO()
    for key, value in self.form_fields:
        buf.write('--%s\r\n' % boundary)
        buf.write('Content-Disposition: form-data; name="%s"' % key)
        buf.write('\r\n\r\n%s\r\n' % value)
    for key, filename, mimetype, filepath in self.files:
        if os.path.isfile(filepath):
            buf.write('--{boundary}\r\n'
                      'Content-Disposition: form-data; name="{key}"; '
                      'filename="{filename}"\r\n'
                      'Content-Type: {content_type}\r\n\r\n'.format(
                          boundary=boundary, key=key, filename=filename,
                          content_type=mimetype))
            with open(filepath, "rb") as f:
                shutil.copyfileobj(f, buf)
            buf.write('\r\n')
    buf.write('--' + boundary + '--\r\n\r\n')
    self.form_data = buf.getvalue()
python 2 . x version of formatting body data
56,927
def _3(self):
    """Python 3 version of formatting the multipart/form-data body.

    Text parts go through a TextIOWrapper over the byte buffer; file
    contents are copied raw into the underlying BytesIO.

    Bug fix: as in ``_2``, the file-part header hard-coded the literal
    filename "(unknown)" while passing ``filename`` to ``format``; the
    ``{filename}`` placeholder is restored.
    """
    boundary = self.boundary
    buf = BytesIO()
    textwriter = io.TextIOWrapper(buf, 'utf8', newline='',
                                  write_through=True)
    for key, value in self.form_fields:
        textwriter.write('--{boundary}\r\n'
                         'Content-Disposition: form-data; name="{key}"\r\n\r\n'
                         '{value}\r\n'.format(boundary=boundary, key=key,
                                              value=value))
    for key, filename, mimetype, filepath in self.files:
        if os.path.isfile(filepath):
            textwriter.write('--{boundary}\r\n'
                             'Content-Disposition: form-data; name="{key}"; '
                             'filename="{filename}"\r\n'
                             'Content-Type: {content_type}\r\n\r\n'.format(
                                 boundary=boundary, key=key,
                                 filename=filename, content_type=mimetype))
            with open(filepath, "rb") as f:
                shutil.copyfileobj(f, buf)
            textwriter.write('\r\n')
    textwriter.write('--{}--\r\n\r\n'.format(boundary))
    self.form_data = buf.getvalue()
python 3 method
56,928
def _get_file_name(self, contentDisposition, url, ext=".unknown"):
    """Gets the file name from the Content-Disposition header, falling back
    to the URL basename, then to a random name.

    Fixes relative to the original:
      * ``uuid.uuid4().get_hex()`` does not exist (UUID exposes ``hex`` as
        an attribute), so the fallback path always raised AttributeError.
      * the fallback name was formatted as ``"%s.%s"`` which, combined with
        a default extension that already starts with '.', produced a
        double dot; the dot is now added only when missing.
      * an empty findall() result no longer raises IndexError.
      * the PY2/PY3 branches were token-for-token identical and are merged.
    """
    if contentDisposition is not None:
        pattern = re.compile(r'filename[^;=\n]*=(([\'"]).*?\2|[^;\n]*)')
        matches = pattern.findall(contentDisposition.strip().replace('"', ''))
        if matches:
            return matches[0][0]
    if os.path.basename(url).find('.') > -1:
        return os.path.basename(url)
    if not ext.startswith('.'):
        ext = '.' + ext
    return uuid.uuid4().hex + ext
gets the file name from the header or url if possible
56,929
def _mainType(self, resp):
    """Gets the main MIME type from the response object, using the
    header API appropriate to the running Python major version.
    """
    headers = resp.headers
    if self.PY2:
        return headers.maintype
    if self.PY3:
        return headers.get_content_maintype()
    return None
gets the main type from the response object
56,930
def _chunk(self, response, size=4096):
    """Downloads a web response in pieces, transparently inflating
    gzip-encoded bodies.

    Yields:
        successive byte chunks of (decoded) response data.

    Bug fix: the original executed ``del data`` after the gzip loop, which
    raised NameError when the response body was empty (``data`` was never
    bound); the superfluous ``del`` is removed.
    """
    method = response.headers.get("content-encoding")
    if method == "gzip":
        # 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
        d = zlib.decompressobj(16 + zlib.MAX_WBITS)
        b = response.read(size)
        while b:
            yield d.decompress(b)
            b = response.read(size)
    else:
        while True:
            chunk = response.read(size)
            if not chunk:
                break
            yield chunk
downloads a web response in pieces
56,931
def _asString ( self , value ) : if sys . version_info [ 0 ] == 3 : if isinstance ( value , str ) : return value elif isinstance ( value , bytes ) : return value . decode ( 'utf-8' ) elif sys . version_info [ 0 ] == 2 : return value . encode ( 'ascii' )
converts the value as a string
56,932
def machines(self):
    """Gets a reference to the machines object, or None when the site's
    resources do not include machines.
    """
    if self._resources is None:
        self.__init()
    if "machines" not in self._resources:
        return None
    return _machines.Machines(self._url + "/machines",
                              securityHandler=self._securityHandler,
                              initialize=False,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port)
gets a reference to the machines object
56,933
def data(self):
    """Returns the reference to the data functions as a class, or None
    when unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "data" not in self._resources:
        return None
    return _data.Data(url=self._url + "/data",
                      securityHandler=self._securityHandler,
                      initialize=True,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
returns the reference to the data functions as a class
56,934
def info(self):
    """A read-only resource that returns meta information about the
    server.
    """
    if self._resources is None:
        self.__init()
    return _info.Info(url="{}/info".format(self._url),
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      initialize=True)
A read - only resource that returns meta information about the server
56,935
def clusters(self):
    """Returns the clusters functions if supported in resources, else
    None.
    """
    if self._resources is None:
        self.__init()
    if "clusters" not in self._resources:
        return None
    return _clusters.Cluster(url=self._url + "/clusters",
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port,
                             initialize=True)
returns the clusters functions if supported in resources
56,936
def services(self):
    """Gets the services object providing the ArcGIS Server admin
    information about services and folders, or None when unsupported.
    """
    if self._resources is None:
        self.__init()
    if "services" not in self._resources:
        return None
    return _services.Services(url=self._url + "/services",
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port,
                              initialize=True)
Gets the services object which will provide the ArcGIS Server s admin information about services and folders .
56,937
def usagereports(self):
    """Gets the object providing the ArcGIS Server admin information
    about usage reports, or None when unsupported.
    """
    if self._resources is None:
        self.__init()
    if "usagereports" not in self._resources:
        return None
    return _usagereports.UsageReports(url=self._url + "/usagereports",
                                      securityHandler=self._securityHandler,
                                      proxy_url=self._proxy_url,
                                      proxy_port=self._proxy_port,
                                      initialize=True)
Gets the services object which will provide the ArcGIS Server s admin information about the usagereports .
56,938
def kml(self):
    """Returns the KML functions for the server."""
    return _kml.KML(url="{}/kml".format(self._url),
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port,
                    initialize=True)
returns the kml functions for server
56,939
def logs(self):
    """Returns an object to work with the site logs, or None when
    unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "logs" not in self._resources:
        return None
    return _logs.Log(url=self._url + "/logs",
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port,
                     initialize=True)
returns an object to work with the site logs
56,940
def mode(self):
    """Returns an object to work with the site mode, or None when
    unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "mode" not in self._resources:
        return None
    return _mode.Mode(url=self._url + "/mode",
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      initialize=True)
returns an object to work with the site mode
56,941
def security(self):
    """Returns an object to work with the site security, or None when
    unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "security" not in self._resources:
        return None
    return _security.Security(url=self._url + "/security",
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url,
                              proxy_port=self._proxy_port,
                              initialize=True)
returns an object to work with the site security
56,942
def system(self):
    """Returns an object to work with the site system, or None when
    unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "system" not in self._resources:
        return None
    return _system.System(url=self._url + "/system",
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port,
                          initialize=True)
returns an object to work with the site system
56,943
def uploads(self):
    """Returns an object to work with the site uploads, or None when
    unsupported by the site's resources.
    """
    if self._resources is None:
        self.__init()
    if "uploads" not in self._resources:
        return None
    return _uploads.Uploads(url=self._url + "/uploads",
                            securityHandler=self._securityHandler,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port,
                            initialize=True)
returns an object to work with the site uploads
56,944
def getGroupIDs(self, groupNames, communityInfo=None):
    """Retrieves the group IDs whose titles match the given name(s),
    case-insensitively.

    Args:
        groupNames: a group title or a list of group titles.
        communityInfo: optional community info dict; defaults to
            ``self.communitySelf``.

    Returns:
        list of matching group id values.

    Bug fixes:
      * on Python 3, ``map(str.upper, groupNames)`` returned a one-shot
        iterator, so every membership test after the first match silently
        failed; the names are now materialized into a set.
      * a single string argument previously used substring matching
        (``title in groupNames``); it now matches the whole title,
        consistent with the list form.
    """
    if communityInfo is None:
        communityInfo = self.communitySelf
    if isinstance(groupNames, list):
        wanted = {str(name).upper() for name in groupNames}
    else:
        wanted = {groupNames.upper()}
    group_ids = []
    for gp in communityInfo.get('groups', []):
        if str(gp['title']).upper() in wanted:
            group_ids.append(gp['id'])
    return group_ids
This function retrieves the group IDs
56,945
def groups(self):
    """Returns the group object.

    NOTE(review): the ``initalize`` keyword is misspelled in the Groups
    constructor's API and is passed through verbatim.
    """
    return Groups(url="%s/groups" % self.root,
                  securityHandler=self._securityHandler,
                  proxy_url=self._proxy_url,
                  proxy_port=self._proxy_port,
                  initalize=False)
returns the group object
56,946
def group(self, groupId):
    """Gets a group based on its ID."""
    return Group(url="%s/%s" % (self.root, groupId),
                 securityHandler=self._securityHandler,
                 proxy_url=self._proxy_url,
                 proxy_port=self._proxy_port,
                 initalize=False)
gets a group based on its ID
56,947
def invite(self, users, role, expiration=1440):
    """Invites users to join the group, creating user invitations that
    they can accept or decline. Available only to authenticated users.

    Args:
        users: the users to invite.
        role: the role the invited users will receive.
        expiration: invitation lifetime in minutes (default 1440).
    """
    payload = {
        "f": "json",
        "users": users,
        "role": role,
        "expiration": expiration,
    }
    return self._post(url=self._url + "/invite",
                      securityHandler=self._securityHandler,
                      param_dict=payload,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
A group administrator can invite users to join their group using the Invite to Group operation . This creates a new user invitation which the users accept or decline . The role of the user and the invitation expiration date can be set in the invitation . A notification is created for the user indicating that they were invited to join the group . Available only to authenticated users .
56,948
def applications(self):
    """Returns all the pending applications to join the group."""
    res = self._get(url=self._url + "/applications",
                    param_dict={"f": "json"},
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    return [
        self.Application(url="%s/%s" % (self._url, app['username']),
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
        for app in res.get('applications', [])
    ]
returns all the group applications to join
56,949
def search(self, q, start=1, num=10, sortField="username",
           sortOrder="asc"):
    """Searches for users in the portal; results only contain users the
    calling user has permission to see.
    """
    payload = {
        "f": "json",
        "q": q,
        "start": start,
        "num": num,
        "sortField": sortField,
        "sortOrder": sortOrder,
    }
    return self._get(url=self._url,
                     param_dict=payload,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The User Search operation searches for users in the portal . The search index is updated whenever users are created updated or deleted . There can be a lag between the time that the user is updated and the time when it s reflected in the search results . The results only contain users that the calling user has permissions to see . Users can control this visibility by changing the access property of their user .
56,950
def __getUsername(self):
    """Tries to determine the signed-in user name from the security
    handler, falling back to querying the portal's self resource.
    """
    handler = self._securityHandler
    if handler is not None and handler._username is not None:
        return handler._username
    from .administration import Administration
    if handler is not None and getattr(handler, "org_url", None) is not None:
        admin_url = handler.org_url
    else:
        # derive the portal root from this resource's own URL
        admin_url = self._url.lower().split('/content/')[0]
    user = Administration(url=admin_url,
                          securityHandler=handler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port).portals.portalSelf.user
    return user['username']
tries to parse the user name from various objects
56,951
def user(self, username=None):
    """A user resource that represents a registered user in the portal;
    defaults to the currently signed-in user.
    """
    if username is None:
        username = self.__getUsername()
    return User(url=self.root + "/%s" % urlparse.quote(username),
                securityHandler=self._securityHandler,
                proxy_url=self._proxy_url,
                proxy_port=self._proxy_port,
                initialize=False)
A user resource that represents a registered user in the portal .
56,952
def userContent(self):
    """Allows access to the individual user's content — the items owned
    by the current user — by swapping the community segment of the URL
    for the content segment.
    """
    marker = "/community/"
    start = self._url.lower().find(marker)
    content_url = self._url.replace(
        self._url[start:start + len(marker)], '/content/')
    from ._content import User as UserContent
    return UserContent(url=content_url,
                       securityHandler=self._securityHandler,
                       proxy_url=self._proxy_url,
                       proxy_port=self._proxy_port)
allows access into the individual user s content to get at the items owned by the current user
56,953
def invitations(self):
    """Returns a class to access the current user's invitations."""
    return Invitations(url="%s/invitations" % self.root,
                       securityHandler=self._securityHandler,
                       proxy_url=self._proxy_url,
                       proxy_port=self._proxy_port)
returns a class to access the current user s invitations
56,954
def notifications(self):
    """The notifications available for the given user — events needing
    the user's attention such as group-join applications or accepted
    memberships.

    (Removed an unused local ``params`` dict that was built but never
    passed anywhere.)
    """
    url = "%s/notifications" % self.root
    return Notifications(url=url,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
The notifications that are available for the given user . Notifications are events that need the user s attention - application for joining a group administered by the user acceptance of a group membership application and so on . A notification is initially marked as new . The user can mark it as read or delete the notification .
56,955
def resetPassword(self, email=True):
    """Resets a user's password; the new password is randomly generated
    and emailed by the system.
    """
    payload = {"f": "json", "email": email}
    return self._post(url=self.root + "/reset",
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
resets a users password for an account . The password will be randomly generated and emailed by the system .
56,956
def expirePassword(self, hours="now"):
    """Sets a time when a user must reset their password.

    Args:
        hours: "now" (expire immediately, -1), "never" (0), or a number
            of hours from now.

    Bug fixes:
      * the original compared the freshly initialized ``expiration``
        variable (always -1) instead of ``hours``, so "never" could never
        produce 0 and any string collapsed to -1.
      * ``isinstance(expiration, (int, long))`` referenced the
        Python 2-only ``long`` name; numeric detection now tests
        ``hours`` directly against int/float.
    """
    if isinstance(hours, str):
        if hours == "never":
            expiration = 0
        else:
            # "now" or any unrecognized string expires immediately
            expiration = -1
    elif isinstance(hours, (int, float)):
        dt = datetime.now() + timedelta(hours=hours)
        expiration = local_time_to_online(dt=dt)
    else:
        expiration = -1
    params = {"f": "json", "expiration": expiration}
    url = "%s/expirePassword" % self.root
    return self._post(url=url,
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
sets a time when a user must reset their password
56,957
def userInvitations(self):
    """Gets all user invitations as Invitation objects."""
    self.__init()
    return [
        self.Invitation(url="%s/%s" % (self.root, entry['id']),
                        securityHandler=self._securityHandler,
                        proxy_url=self._proxy_url,
                        proxy_port=self._proxy_port,
                        initialize=True)
        for entry in self._userInvitations if "id" in entry
    ]
gets all user invitations
56,958
def notifications(self):
    """Gets the user's notifications as Notification objects."""
    self.__init()
    return [
        self.Notification(url="%s/%s" % (self.root, entry['id']),
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
        for entry in self._notifications if "id" in entry
    ]
gets the user s notifications
56,959
def add(self, statisticType, onStatisticField, outStatisticFieldName=None):
    """Adds a statistics entry to the filter.

    Args:
        statisticType: the statistic operation (e.g. sum, avg).
        onStatisticField: the field the statistic is computed on.
        outStatisticFieldName: optional output field name; omitted from
            the entry when None.
    """
    entry = {
        "statisticType": statisticType,
        "onStatisticField": onStatisticField,
    }
    if outStatisticFieldName is not None:
        entry["outStatisticFieldName"] = outStatisticFieldName
    self._array.append(entry)
Adds the statistics group to the filter .
56,960
def addFilter(self, layer_id, where=None, outFields="*"):
    """Adds a layer definition filter built from the filter template;
    duplicate filters are not appended twice.
    """
    from copy import deepcopy
    entry = deepcopy(self._filterTemplate)
    entry['layerId'] = layer_id
    entry['outFields'] = outFields
    if where is not None:
        entry['where'] = where
    if entry not in self._filter:
        self._filter.append(entry)
adds a layer definition filter
56,961
def removeFilter(self, filter_index):
    """Removes a layer filter based on its position in the filter list.

    Bug fix: the original did ``self._filter.remove(self._filter[i])``,
    which deletes the *first equal* entry rather than the entry at the
    given index (observable when the list holds equal dicts); ``pop``
    removes exactly the indexed entry.
    """
    self._filter.pop(filter_index)
removes a layer filter based on position in filter list
56,962
def geometry(self, geometry):
    """Sets the geometry value.

    Accepts either an AbstractGeometry instance (stored directly with its
    own type tag) or, when arcpy is available, an arcpy geometry
    (Polygon / Point / Polyline / Multipoint) which is wrapped into the
    corresponding common geometry class.

    Raises:
        AttributeError: when the value is neither an AbstractGeometry nor
            a recognized arcpy geometry, or when arcpy is not installed.
    """
    if isinstance(geometry, AbstractGeometry):
        # already one of this package's geometry wrappers — store as-is
        self._geomObject = geometry
        self._geomType = geometry.type
    elif arcpyFound:
        # Try to extract a spatial reference: prefer the WKID factory
        # code, otherwise fall back to the WKT string export.
        wkid = None
        wkt = None
        if (hasattr(geometry, 'spatialReference') and
                geometry.spatialReference is not None):
            if (hasattr(geometry.spatialReference, 'factoryCode') and
                    geometry.spatialReference.factoryCode is not None):
                wkid = geometry.spatialReference.factoryCode
            else:
                wkt = geometry.spatialReference.exportToString()
        # Wrap the arcpy geometry in the matching common geometry class
        # and record the esri geometry type string.
        if isinstance(geometry, arcpy.Polygon):
            self._geomObject = Polygon(geometry, wkid=wkid, wkt=wkt)
            self._geomType = "esriGeometryPolygon"
        elif isinstance(geometry, arcpy.Point):
            self._geomObject = Point(geometry, wkid=wkid, wkt=wkt)
            self._geomType = "esriGeometryPoint"
        elif isinstance(geometry, arcpy.Polyline):
            self._geomObject = Polyline(geometry, wkid=wkid, wkt=wkt)
            self._geomType = "esriGeometryPolyline"
        elif isinstance(geometry, arcpy.Multipoint):
            self._geomObject = MultiPoint(geometry, wkid=wkid, wkt=wkt)
            self._geomType = "esriGeometryMultipoint"
        else:
            raise AttributeError(
                "geometry must be a common.Geometry or arcpy.Geometry type.")
    else:
        # arcpy is unavailable, so no other geometry kind can be coerced
        raise AttributeError(
            "geometry must be a common.Geometry or arcpy.Geometry type.")
sets the geometry value
56,963
def layers(self):
    """Gets the service layers, attaching a lazily-initialized
    MobileServiceLayer under each layer dict's 'object' key.

    Fix: removed the dead local list (``lyrs``) the original built but
    never used — the layer dicts in ``self._layers`` are what is
    mutated and returned.
    """
    if self._layers is None:
        self.__init()
    for lyr in self._layers:
        url = self._url + "/%s" % lyr['id']
        lyr['object'] = MobileServiceLayer(
            url=url,
            securityHandler=self._securityHandler,
            proxy_url=self._proxy_url,
            proxy_port=self._proxy_port,
            initialize=False)
    return self._layers
gets the service layers
56,964
def clusters(self):
    """Returns a Cluster object for each cluster on the server.

    Fix: the lazy-load guard was inverted (``is not None``), which
    skipped initialization on first access and re-initialized on every
    later call.  Now the raw cluster dicts are fetched once, wrapped in
    Cluster objects, and cached.
    """
    if self._clusters is None:
        self.__init()
        wrapped = []
        for c in self._clusters:
            url = self._url + "/%s" % c['clusterName']
            wrapped.append(Cluster(url=url,
                                   securityHandler=self._securityHandler,
                                   proxy_url=self._proxy_url,
                                   proxy_port=self._proxy_port,
                                   initialize=True))
        self._clusters = wrapped
    return self._clusters
returns the cluster object for each server
56,965
def editProtocol(self, clusterProtocolObj):
    """Updates the cluster protocol; the cluster is restarted with the
    updated protocol configuration.

    Fixes: replaces the ``if ...: pass / else: raise`` anti-pattern with
    a guard clause, and corrects the 'ClusterProtocal' typo in the error
    message.

    Raises AttributeError when the input is not a ClusterProtocol.
    """
    if not isinstance(clusterProtocolObj, ClusterProtocol):
        raise AttributeError("Invalid Input, must be a ClusterProtocol Object")
    url = self._url + "/editProtocol"
    params = {
        "f": "json",
        "tcpClusterPort": str(clusterProtocolObj.value['tcpClusterPort'])
    }
    return self._post(url=url, param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Updates the Cluster Protocol . This will cause the cluster to be restarted with updated protocol configuration .
56,966
def parameters(self):
    """Returns the GP task's default parameters, converting each raw
    ``defaultValue`` dict into its matching GP* wrapper object.

    A dispatch table replaces the original long if/elif chain; dataTypes
    without a converter are left untouched, exactly as before.
    """
    if self._parameters is None:
        self.__init()
    converters = {
        "GPFeatureRecordSetLayer": GPFeatureRecordSetLayer,
        "GPString": GPString,
        "GPLong": GPLong,
        "GPDouble": GPDouble,
        "GPDate": GPDate,
        "GPBoolean": GPBoolean,
        "GPDataFile": GPDataFile,
        "GPLinearUnit": GPLinearUnit,
        "GPMultiValue": GPMultiValue,
        "GPRasterData": GPRasterData,
        "GPRasterDataLayer": GPRasterDataLayer,
        "GPRecordSet": GPRecordSet,
    }
    for param in self._parameters:
        if not isinstance(param['defaultValue'], BaseGPObject):
            cls = converters.get(param['dataType'])
            if cls is not None:
                param['defaultValue'] = cls.fromJSON(json.dumps(param))
    return self._parameters
returns the default parameters
56,967
def getJob(self, jobID):
    """Returns a GPJob wrapper for the given job ID, from which the
    job's status or results can be read."""
    job_url = "%s/jobs/%s" % (self._url, jobID)
    return GPJob(url=job_url,
                 securityHandler=self._securityHandler,
                 proxy_port=self._proxy_port,
                 proxy_url=self._proxy_url)
returns the results or status of a job
56,968
def executeTask(self, inputs, outSR=None, processSR=None,
                returnZ=False, returnM=False, f="json", method="POST"):
    """Performs the synchronous GP execute operation.

    Fixes: the ``f`` argument was being overwritten by a second
    ``params = {"f": "json"}`` assignment, and the process spatial
    reference was sent under the misspelled key 'end:processSR' instead
    of 'env:processSR'.

    Each BaseGPObject in *inputs* contributes one request parameter;
    non-GP inputs are ignored, as before.
    """
    url = self._url + "/execute"
    params = {"f": f}
    if outSR is not None:
        params['env:outSR'] = outSR
    if processSR is not None:
        params['env:processSR'] = processSR
    params['returnZ'] = returnZ
    params['returnM'] = returnM
    for p in inputs:
        if isinstance(p, BaseGPObject):
            params[p.paramName] = p.value
    if method.lower() == "post":
        return self._post(url=url, param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    return self._get(url=url, param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
performs the execute task method
56,969
def _get_json(self, urlpart):
    """Gets the raw JSON result dictionary for a sub-resource of this
    task.

    Fix: the proxy port was read from ``self.proxy_port``, which does
    not match the ``self._proxy_port`` attribute used everywhere else
    in this class, and would raise AttributeError at call time.
    """
    url = self._url + "/%s" % urlpart
    params = {"f": "json"}
    return self._get(url=url, param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
gets the result object dictionary
56,970
def results(self):
    """Returns the job results, converting each raw parameter dict into
    its GP* wrapper object.

    The original check order is preserved: the exact
    GPFeatureRecordSetLayer match first, then any dataType containing
    'gpmultivalue' (case-insensitive substring), then the exact-name
    converters.  The original's exact "GPMultiValue" branch was
    unreachable (shadowed by the substring check) and is dropped.
    """
    self.__init()
    exact = {
        "GPString": GPString,
        "GPLong": GPLong,
        "GPDouble": GPDouble,
        "GPDate": GPDate,
        "GPBoolean": GPBoolean,
        "GPDataFile": GPDataFile,
        "GPLinearUnit": GPLinearUnit,
        "GPRasterData": GPRasterData,
        "GPRasterDataLayer": GPRasterDataLayer,
        "GPRecordSet": GPRecordSet,
    }
    for k, v in self._results.items():
        param = self._get_json(v['paramUrl'])
        raw = json.dumps(param)
        dt = param['dataType']
        if dt == "GPFeatureRecordSetLayer":
            self._results[k] = GPFeatureRecordSetLayer.fromJSON(raw)
        elif 'gpmultivalue' in dt.lower():
            self._results[k] = GPMultiValue.fromJSON(raw)
        elif dt in exact:
            self._results[k] = exact[dt].fromJSON(raw)
    return self._results
returns the results
56,971
def getParameterValue(self, parameterName):
    """Gets a single result parameter's value by name, loading the
    results on first access."""
    if self._results is None:
        self.__init()
    return self._results[parameterName]
gets a parameter value
56,972
def parentLayer(self):
    """Returns the FeatureService that owns this layer, built lazily
    from the parent path of this layer's URL and cached."""
    if self._parentLayer is None:
        from ..agol.services import FeatureService
        self.__init()
        parent_url = os.path.dirname(self._url)
        self._parentLayer = FeatureService(
            url=parent_url,
            securityHandler=self._securityHandler,
            proxy_url=self._proxy_url,
            proxy_port=self._proxy_port)
    return self._parentLayer
returns information about the parent
56,973
def _chunks(self, l, n):
    """Yield n successive chunks from list *l*.

    NOTE: sorts *l* in place first (side effect preserved from the
    original implementation).
    """
    l.sort()
    per = int(1.0 * len(l) / n + 0.5)
    for i in range(n - 1):
        start = i * per
        yield l[start:start + per]
    # Final chunk: everything from the last boundary to the end.
    yield l[(n - 1) * per:]
Yield n successive chunks from a list l .
56,974
def calculate(self, where, calcExpression, sqlFormat="standard"):
    """Runs the layer's ``calculate`` operation, updating field values
    for features matching *where* using SQL expressions or scalar
    values.  Requires the layer's ``supportsCalculate`` capability;
    shape and system fields (ObjectId, GlobalId) cannot be calculated.
    """
    url = self._url + "/calculate"
    params = {"f": "json", "where": where}
    # A single expression dict is wrapped in a list for the REST API.
    if isinstance(calcExpression, dict):
        params["calcExpression"] = json.dumps([calcExpression],
                                              default=_date_handler)
    elif isinstance(calcExpression, list):
        params["calcExpression"] = json.dumps(calcExpression,
                                              default=_date_handler)
    # Unknown sqlFormat values fall back to "standard".
    params['sqlFormat'] = (sqlFormat.lower()
                           if sqlFormat.lower() in ('native', 'standard')
                           else "standard")
    return self._post(url=url, param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_port=self._proxy_port,
                      proxy_url=self._proxy_url)
The calculate operation is performed on a feature service layer resource . It updates the values of one or more fields in an existing feature service layer based on SQL expressions or scalar values . The calculate operation can only be used if the supportsCalculate property of the layer is true . Neither the Shape field nor system fields can be updated using calculate . System fields include ObjectId and GlobalId . See Calculate a field for more information on supported expressions
56,975
def validateSQL(self, sql, sqlType="where"):
    """Validates an SQL-92 expression or WHERE clause against this
    layer before it is used elsewhere (e.g. in a calculate call); also
    checks table/field names and guards against SQL injection on the
    server side.

    Raises Exception for an unsupported *sqlType*.
    """
    if sqlType.lower() not in ('where', 'expression', 'statement'):
        raise Exception("Invalid Input for sqlType: %s" % sqlType)
    params = {"f": "json", "sql": sql, "sqlType": sqlType}
    return self._post(url=self._url + "/validateSQL",
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The validateSQL operation validates an SQL - 92 expression or WHERE clause . The validateSQL operation ensures that an SQL - 92 expression such as one written by a user through a user interface is correct before performing another operation that uses the expression . For example validateSQL can be used to validate information that is subsequently passed in as part of the where parameter of the calculate operation . validateSQL also prevents SQL injection . In addition all table and field names used in the SQL expression or WHERE clause are validated to ensure they are valid tables and fields .
56,976
def asDictionary(self):
    """Converts the map-layer source to a dictionary; gdbVersion is
    included only when set to a non-empty value."""
    result = {"type": self._type, "mapLayerId": self._mapLayerId}
    if self._gdbVersion is not None and self._gdbVersion != "":
        result['gdbVersion'] = self._gdbVersion
    return result
converts the object to a dictionary
56,977
def asDictionary(self):
    """Returns the data-layer source as a dictionary; the optional
    fields list is included only when present."""
    result = {"type": "dataLayer", "dataSource": self._dataSource}
    if self._fields is not None:
        result['fields'] = self._fields
    return result
returns the value as a dictionary
56,978
def dataSource(self, value):
    """Sets the datasource object; rejects anything that is not a
    DataSource."""
    if not isinstance(value, DataSource):
        raise TypeError("value must be a DataSource object")
    self._dataSource = value
sets the datasource object
56,979
def fields(self, value):
    """Sets the fields list.

    Fix: uses ``isinstance`` instead of the original
    ``type(value) is list`` so list subclasses are accepted too
    (backward-compatible generalization).

    Raises TypeError for non-list input.
    """
    if not isinstance(value, list):
        raise TypeError("Input must be a list")
    self._fields = value
sets the fields variable
56,980
def addCodedValue(self, name, code):
    """Adds a {name, code} coded value to the domain, skipping exact
    duplicates."""
    entry = {"name": name, "code": code}
    if entry not in self._codedValues:
        self._codedValues.append(entry)
adds a coded value to the domain
56,981
def removeCodedValue(self, name):
    """Removes the first coded value with the given name.

    Returns True on success, False when no entry matches.
    """
    for entry in self._codedValues:
        if entry['name'] == name:
            # Safe despite iterating: we return immediately after the
            # removal, so the iterator is never advanced again.
            self._codedValues.remove(entry)
            return True
    return False
removes a codedValue by name
56,982
def value(self):
    """Gets the range domain as a dictionary."""
    return {
        "type": self._type,
        "name": self._name,
        "range": [self._rangeMin, self._rangeMax],
    }
gets the value as a dictionary
56,983
def exportImage(self, bbox, imageSR, bboxSR, size=[400, 400], time=None,
                format="jpgpng", pixelType="UNKNOWN", noData=None,
                noDataInterpretation="esriNoDataMatchAny",
                interpolation=None, compression=None, compressionQuality=75,
                bandIds=None, moasiacRule=None, renderingRule="", f="json",
                saveFolder=None, saveFile=None):
    """Exports an image from the image service.

    Returns the exported-image JSON descriptor (f="json") or the
    downloaded image/kmz file (f="image"/"kmz"); unknown response
    formats return None.  Invalid option values are silently omitted
    from the request, matching the original behavior.

    Fix: ``noDataInterpretation`` was validated against the list of
    allowed *interpolation* methods instead of the allowed noData
    interpretation values, so "esriNoDataMatchAll" was always dropped.

    NOTE(review): the ``moasiacRule`` parameter name is a long-standing
    typo in the public interface and is kept for compatibility.
    """
    url = self._url + "/exportImage"
    allowed_formats = ["jpgpng", "png", "png8", "png24", "jpg", "bmp",
                       "gif", "tiff", "png32"]
    allowed_pixel_types = ["C128", "C64", "F32", "F64", "S16", "S32",
                           "S8", "U1", "U16", "U2", "U32", "U4", "U8",
                           "UNKNOWN"]
    allowed_nodata_interp = ["esriNoDataMatchAny", "esriNoDataMatchAll"]
    allowed_interpolation = ["RSP_BilinearInterpolation",
                             "RSP_CubicConvolution", "RSP_Majority",
                             "RSP_NearestNeighbor"]
    allowed_compression = ["JPEG", "LZ77"]
    params = {
        "bbox": bbox,
        "imageSR": imageSR,
        "bboxSR": bboxSR,
        "size": "%s %s" % (size[0], size[1]),
        "pixelType": pixelType,
        "compressionQuality": compressionQuality,
    }
    if isinstance(moasiacRule, MosaicRuleObject):
        params["moasiacRule"] = moasiacRule.value
    if format in allowed_formats:
        params['format'] = format
    if isinstance(time, datetime.datetime):
        params['time'] = local_time_to_online(time)
    if (interpolation is not None and
            interpolation in allowed_interpolation and
            isinstance(interpolation, str)):
        params['interpolation'] = interpolation
    if pixelType is not None and pixelType in allowed_pixel_types:
        params['pixelType'] = pixelType
    # BUG FIX: validate against the noData-interpretation whitelist.
    if noDataInterpretation in allowed_nodata_interp:
        params['noDataInterpretation'] = noDataInterpretation
    if noData is not None:
        params['noData'] = noData
    if compression is not None and compression in allowed_compression:
        params['compression'] = compression
    if bandIds is not None and isinstance(bandIds, list):
        params['bandIds'] = ",".join(bandIds)
    if renderingRule is not None:
        params['renderingRule'] = renderingRule
    params["f"] = f
    if f == "json":
        return self._get(url=url, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
    elif f in ("image", "kmz"):
        return self._get(url=url, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port,
                         out_folder=saveFolder, file_name=saveFile)
The exportImage operation is performed on an image service resource The result of this operation is an image resource . This resource provides information about the exported image such as its URL extent width and height . In addition to the usual response formats of HTML and JSON you can also request the image format while performing this operation . When you perform an export with the image format the server responds by directly streaming the image bytes to the client . With this approach you don t get any information associated with the exported image other than the image itself .
56,984
def measure(self, fromGeometry, toGeometry, measureOperation,
            geometryType="esriGeometryPoint", pixelSize=None,
            mosaicRule=None, linearUnit=None, angularUnit=None,
            areaUnit=None):
    """Measures distance, direction, area, perimeter or height on the
    image service between two geometries."""
    params = {
        "f": "json",
        "fromGeometry": fromGeometry,
        "toGeometry": toGeometry,
        "geometryType": geometryType,
        "measureOperation": measureOperation,
    }
    # Optional request parameters are sent only when supplied.
    for key, val in (("pixelSize", pixelSize),
                     ("mosaicRule", mosaicRule),
                     ("linearUnit", linearUnit),
                     ("angularUnit", angularUnit),
                     ("areaUnit", areaUnit)):
        if val is not None:
            params[key] = val
    return self._get(url=self._url + "/measure", param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The measure operation is performed on an image service resource . It lets a user measure distance direction area perimeter and height from an image service . The result of this operation includes the name of the raster dataset being used sensor name and measured values .
56,985
def computeStatisticsHistograms(self, geometry, geometryType,
                                mosaicRule=None, renderingRule=None,
                                pixelSize=None):
    """Computes statistics and histograms for the given extent on an
    image service (mosaic datasets and raster datasets)."""
    params = {"f": "json",
              "geometry": geometry,
              "geometryType": geometryType}
    # Optional request parameters are sent only when supplied.
    for key, val in (("mosaicRule", mosaicRule),
                     ("renderingRule", renderingRule),
                     ("pixelSize", pixelSize)):
        if val is not None:
            params[key] = val
    return self._get(url=self._url + "/computeStatisticsHistograms",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The computeStatisticsHistograms operation is performed on an image service resource . This operation is supported by any image service published with mosaic datasets or a raster dataset . The result of this operation contains both statistics and histograms computed from the given extent .
56,986
def uploadByParts(self, registerID, filePath, commit=True):
    """Uploads *filePath* to a registered upload item in ~1 MB parts.

    The file is memory-mapped and sliced into temporary part files that
    are POSTed one at a time, then the upload is committed (merged) and
    the commit response returned.

    NOTE(review): the ``commit`` flag is accepted but never checked —
    the upload is always committed; confirm whether the documented
    manual-commit path was intended.  Temp files go under
    os.environ['TEMP'], which typically exists only on Windows — verify
    on other platforms.
    """
    url = self._url + "/%s/uploadPart" % registerID
    params = {"f": "json"}
    with open(filePath, 'rb') as f:
        mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
        size = 1000000  # part size in bytes (~1 MB)
        # Whole parts, plus one extra part for any trailing remainder.
        steps = int(os.fstat(f.fileno()).st_size / size)
        if os.fstat(f.fileno()).st_size % size > 0:
            steps += 1
        for i in range(steps):
            files = {}
            tempFile = os.path.join(os.environ['TEMP'], "split.part%s" % i)
            if os.path.isfile(tempFile):
                os.remove(tempFile)
            # Copy the next slice of the mapped file into the part file.
            with open(tempFile, 'wb') as writer:
                writer.write(mm.read(size))
                writer.flush()
                writer.close()
            del writer
            files['file'] = tempFile
            params['partNum'] = i + 1  # part numbers are 1-based
            res = self._post(url=url,
                             param_dict=params,
                             files=files,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
            os.remove(tempFile)
            del files
        del mm
    return self.commit(registerID)
loads the data by small parts . If commit is set to true then parts will be merged together . If commit is false the function will return the registerID so a manual commit can occur .
56,987
def uploads(self):
    """Returns the Uploads helper for this service, or None when sync
    is not enabled."""
    if self.syncEnabled == True:
        upload_url = self._url + "/uploads"
        return Uploads(url=upload_url,
                       securityHandler=self._securityHandler,
                       proxy_url=self._proxy_url,
                       proxy_port=self._proxy_port)
    return None
returns the class to perform the upload function . it will only return the uploads class if syncEnabled is True .
56,988
def administration(self):
    """Returns the AdminFeatureService for this service — the same
    endpoint with "admin/" spliced in immediately after "/rest/".

    Fix: the original computed ``adminURL`` but then passed the plain
    service ``url`` to AdminFeatureService, so the back-end admin
    object pointed at the non-admin endpoint.
    """
    url = self._url
    res = search("/rest/", url).span()
    adminURL = "%sadmin/%s" % (url[:res[1]], url[res[1]:])
    return AdminFeatureService(url=adminURL,
                               securityHandler=self._securityHandler,
                               proxy_url=self._proxy_url,
                               proxy_port=self._proxy_port,
                               initialize=False)
returns the hostservice object to manage the back - end functions
56,989
def replicas(self):
    """Returns all replicas registered with this feature service."""
    return self._get(self._url + "/replicas",
                     {"f": "json"},
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
returns all the replicas for a feature service
56,990
def createReplica(self, replicaName, layers, layerQueries=None,
                  geometryFilter=None, replicaSR=None,
                  transportType="esriTransportTypeUrl",
                  returnAttachments=False,
                  returnAttachmentsDatabyURL=False, async=False,
                  attachmentsSyncDirection="none", syncModel="none",
                  dataFormat="json", replicaOptions=None, wait=False,
                  out_path=None):
    """Creates a replica of this feature service (requires the Sync
    capability, or "Extract" in the service capabilities).

    Returns None when neither sync nor Extract is supported; otherwise
    the createReplica JSON response, the async job's final status, or
    the downloaded replica file when *out_path* is an existing
    directory.  Raises Exception for an unsupported *dataFormat*.

    NOTE(review): the ``async`` parameter name is a reserved word from
    Python 3.7 onward — this is Python 2 era code; renaming it would
    break callers passing it as a keyword argument.
    """
    if self.syncEnabled == False and "Extract" not in self.capabilities:
        return None
    url = self._url + "/createReplica"
    dataformat = ["filegdb", "json", "sqlite", "shapefile"]
    params = {"f": "json",
              "replicaName": replicaName,
              "returnAttachments": returnAttachments,
              "returnAttachmentsDatabyURL": returnAttachmentsDatabyURL,
              "attachmentsSyncDirection": attachmentsSyncDirection,
              "async": async,
              "syncModel": syncModel,
              "layers": layers}
    if dataFormat.lower() in dataformat:
        params['dataFormat'] = dataFormat.lower()
    else:
        raise Exception("Invalid dataFormat")
    # Optional request parameters.
    if layerQueries is not None:
        params['layerQueries'] = layerQueries
    if geometryFilter is not None and isinstance(geometryFilter, GeometryFilter):
        params.update(geometryFilter.filter)
    if replicaSR is not None:
        params['replicaSR'] = replicaSR
    if replicaOptions is not None:
        params['replicaOptions'] = replicaOptions
    if transportType is not None:
        params['transportType'] = transportType
    if async:
        if wait:
            # Submit the job, then poll its status URL until it either
            # completes or fails.
            exportJob = self._post(url=url,
                                   param_dict=params,
                                   securityHandler=self._securityHandler,
                                   proxy_url=self._proxy_url,
                                   proxy_port=self._proxy_port)
            status = self.replicaStatus(url=exportJob['statusUrl'])
            while status['status'].lower() != "completed":
                status = self.replicaStatus(url=exportJob['statusUrl'])
                if status['status'].lower() == "failed":
                    return status
            res = status
        else:
            res = self._post(url=url,
                             param_dict=params,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port)
    else:
        res = self._post(url=url,
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    if out_path is not None and os.path.isdir(out_path):
        # The download URL key varies with server version/response type.
        dlURL = None
        if 'resultUrl' in res:
            dlURL = res["resultUrl"]
        elif 'responseUrl' in res:
            dlURL = res["responseUrl"]
        elif 'URL' in res:
            dlURL = res["URL"]
        if dlURL is not None:
            return self._get(url=dlURL,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port,
                             out_folder=out_path)
        else:
            return res
    elif res is not None:
        return res
    return None
The createReplica operation is performed on a feature service resource . This operation creates the replica between the feature service and a client based on a client - supplied replica definition . It requires the Sync capability . See Sync overview for more information on sync . The response for createReplica includes replicaID server generation number and data similar to the response from the feature service query operation . The createReplica operation returns a response of type esriReplicaResponseTypeData as the response has data for the layers in the replica . If the operation is called to register existing data by using replicaOptions the response type will be esriReplicaResponseTypeInfo and the response will not contain data for the layers in the replica .
56,991
def replicaStatus(self, url):
    """Gets the status of an asynchronous replica export job from its
    status URL."""
    status_url = url + "/status"
    return self._get(url=status_url,
                     param_dict={"f": "json"},
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
gets the replica status when exported async set to True
56,992
def listAttachments(self, oid):
    """Lists attachments for the feature with the given OBJECTID."""
    return self._get(self._url + "/%s/attachments" % oid,
                     {"f": "json"},
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_url)
list attachments for a given OBJECT ID
56,993
def getAttachment(self, oid, attachment_id, out_folder=None):
    """Downloads one attachment of a feature.

    Returns the download result, or None when the feature has no
    attachment with the given id.
    """
    attachments = self.listAttachments(oid=oid)
    if "attachmentInfos" not in attachments:
        return None
    for info in attachments['attachmentInfos']:
        if "id" in info and info['id'] == attachment_id:
            url = self._url + "/%s/attachments/%s" % (oid, attachment_id)
            return self._get(url=url,
                             param_dict={"f": 'json'},
                             securityHandler=self._securityHandler,
                             out_folder=out_folder,
                             file_name=info['name'])
    return None
downloads a feature s attachment .
56,994
def create_fc_template(self, out_path, out_name):
    """Creates a local feature-class template matching this layer's
    schema (fields, geometry type and parent spatial reference)."""
    wkid = self.parentLayer.spatialReference['wkid']
    return create_feature_class(out_path,
                                out_name,
                                self.geometryType,
                                wkid,
                                self.fields,
                                self.objectIdField)
creates a featureclass template on local disk
56,995
def create_feature_template(self):
    """Creates an empty Feature template with one blank attribute per
    editable field; OID and GlobalID fields are excluded, and the
    geometry slot is left empty.

    Fix: removed a stray no-op ``self._globalIdField`` expression from
    the original loop body.
    """
    attributes = {
        fld['name']: ''
        for fld in self.fields
        if fld['name'] not in (self._objectIdField, self._globalIdField)
    }
    return Feature({'attributes': attributes, 'geometry': ''})
creates a feature template
56,996
def spatialReference(self):
    """Returns the geometry's spatial reference: {"wkt": ...} when only
    well-known text is available, otherwise {"wkid": ...}."""
    if self._wkid is None and self._wkt is not None:
        return {"wkt": self._wkt}
    return {"wkid": self._wkid}
returns the geometry spatial reference
56,997
def asJSON(self):
    """Returns the geometry serialized as JSON, caching the result
    after the first call."""
    if self._json is None:
        self._json = json.dumps(self.asDictionary,
                                default=_date_handler)
    return self._json
returns a geometry as JSON
56,998
def asArcPyObject(self):
    """Returns the Point as an ESRI arcpy.Point object; raises
    Exception when arcpy is unavailable."""
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    return arcpy.AsShape(self.asDictionary, True)
returns the Point as an ESRI arcpy . Point object
56,999
def X(self, value):
    """Sets the X coordinate; non-numeric, non-None values are silently
    ignored (the original contract).

    Fix: Python 3 compatibility — the original referenced the Python 2
    ``long`` type and ``types.NoneType``, which no longer exist on
    Python 3.  ``int`` subsumes ``long`` and the None case is tested
    directly.
    """
    if value is None or isinstance(value, (int, float)):
        self._x = value
sets the X coordinate