"""Administrative access to hosted ArcGIS services (catalog, map and feature services)."""
import json
from .._abstract.abstract import BaseAGOLClass, BaseSecurityHandler
from ..security import security
import collections
########################################################################
class Services(BaseAGOLClass):
    """
    The administration resource is the root node and initial entry point
    into a Spatial Data Server administrative interface. This resource
    represents a catalog of data sources and services published on the
    host.
    The current version and type of the server is also returned in the
    response. The value of the version is a number such that its value
    at a future release is guaranteed to be greater than its value at a
    previous release.
    Inputs:
       url - url to service admin site: http://<web server hostname>/arcgis/rest/admin
       securityHandler - AGOL/Portal
    """
    _url = None
    _currentVersion = None
    _resources = None
    _serverType = None
    _proxy_port = None
    _proxy_url = None
    _securityHandler = None
    _services = None
    _folders = None
    _description = None
    _folderName = None
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 initialize=False,
                 proxy_url=None,
                 proxy_port=None):
        """Constructor

        Inputs:
           url - admin site url
           securityHandler - a BaseSecurityHandler subclass instance
           initialize - if True, load the service metadata immediately
           proxy_url/proxy_port - optional proxy settings
        Raises:
           AttributeError - when securityHandler is not a supported handler
        """
        self._url = url
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
        if not isinstance(securityHandler, BaseSecurityHandler):
            raise AttributeError("Admin only supports AGOL, ArcGIS, Portal, NTLM, LDAP, PKI and OAuth security handlers")
        # Portal handlers are exchanged for a server-scoped handler when the
        # handler can produce one; any other handler is used as-is.
        if getattr(securityHandler, 'is_portal', False) and \
           hasattr(securityHandler, 'portalServerHandler'):
            self._securityHandler = securityHandler.portalServerHandler(serverUrl=url)
        else:
            self._securityHandler = securityHandler
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """ initializes the service by loading its JSON description """
        params = {"f" : "json"}
        json_dict = self._get(self._url, params,
                              proxy_port=self._proxy_port,
                              securityHandler=self._securityHandler,
                              proxy_url=self._proxy_url)
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        # copy each returned key into the matching private attribute
        for k,v in json_dict.items():
            if k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented in hostedservice.Services.")
        del k, v
    #----------------------------------------------------------------------
    @property
    def folders(self):
        """returns the service folders"""
        if self._folders is None:
            self.__init()
        return self._folders
    #----------------------------------------------------------------------
    @property
    def description(self):
        """returns the description property"""
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def folderName(self):
        """returns the folder name"""
        if self._folderName is None:
            self.__init()
        return self._folderName
    #----------------------------------------------------------------------
    @property
    def securityHandler(self):
        """ gets the security handler """
        return self._securityHandler
    #----------------------------------------------------------------------
    @securityHandler.setter
    def securityHandler(self, value):
        """ sets the security handler (AGOL token or OAuth only; anything
        else is silently ignored, matching the original behavior) """
        if isinstance(value, BaseSecurityHandler):
            if isinstance(value, (security.AGOLTokenSecurityHandler,
                                  security.OAuthSecurityHandler)):
                self._securityHandler = value
    #----------------------------------------------------------------------
    @property
    def currentVersion(self):
        """ returns the software's current version """
        if self._currentVersion is None:
            self.__init()
        return self._currentVersion
    #----------------------------------------------------------------------
    @property
    def resources(self):
        """ list of all resources on the AGOL site """
        if self._resources is None:
            self.__init()
        return self._resources
    #----------------------------------------------------------------------
    @property
    def serverType(self):
        """ returns the server type """
        if self._serverType is None:
            self.__init()
        return self._serverType
    #----------------------------------------------------------------------
    @property
    def services(self):
        """ returns all the service objects in the admin service's page """
        self._services = []
        params = {"f": "json"}
        if not self._url.endswith('/services'):
            uURL = self._url + "/services"
        else:
            uURL = self._url
        res = self._get(url=uURL, param_dict=params,
                        securityHandler=self._securityHandler,
                        proxy_port=self._proxy_port,
                        proxy_url=self._proxy_url)
        for k, v in res.items():
            if k == "foldersDetail":
                # walk every non-default folder and collect its services too
                for item in v:
                    if 'isDefault' in item and item['isDefault'] == False:
                        fURL = self._url + "/services/" + item['folderName']
                        resFolder = self._get(url=fURL, param_dict=params,
                                              securityHandler=self._securityHandler,
                                              proxy_port=self._proxy_port,
                                              proxy_url=self._proxy_url)
                        for k1, v1 in resFolder.items():
                            if k1 == "services":
                                self._checkservice(k1, v1, fURL)
            elif k == "services":
                self._checkservice(k, v, uURL)
        return self._services
    #----------------------------------------------------------------------
    def _checkservice(self, k, v, url):
        """Appends an Admin*Service object for every map/feature service
        described in v.

        Bug fix: the original used `if 'name' in item == True`, which Python
        chains as `('name' in item) and (item == True)` -- always False, so
        the 'serviceName'/'adminServiceInfo' fallbacks were dead code and
        `name` could be unbound when 'name' was absent.
        """
        for item in v:
            if 'adminServiceInfo' in item:
                item = item['adminServiceInfo']
            stype = item.get('type')
            # prefer 'name', fall back to 'serviceName' (original intent)
            name = item.get('name', item.get('serviceName'))
            if name is None:
                continue
            if stype == 'MapServer':
                self._services.append(
                    AdminMapService(url=url + r"/%s.%s" % (name, stype),
                                    securityHandler=self._securityHandler,
                                    proxy_url=self._proxy_url,
                                    proxy_port=self._proxy_port,
                                    initialize=False)
                )
            elif stype == 'FeatureServer':
                surl = url + r"/%s/%s" % (name, stype)
                self._services.append(
                    AdminFeatureService(url=surl,
                                        securityHandler=self._securityHandler,
                                        initialize=False,
                                        proxy_url=self._proxy_url,
                                        proxy_port=self._proxy_port))
########################################################################
class AdminMapService(BaseAGOLClass):
    """
    A map service offers access to map and layer content.
    The REST API administrative map service resource represents a map
    service. This resource provides basic information about the map,
    including the layers that it contains, whether the map is cached or
    not, its spatial reference, initial and full extents, etc... The
    administrative map service resource maintains a set of operations
    that manage the state and contents of the service.
    """
    _securityHandler = None
    _url = None
    _initialExtent = None
    _currentJob = None
    _lodInfos = None
    _id = None
    _size = None
    _tileInfo = None
    _jobStatus = None
    _access = None
    _cacheExecutionStatus = None
    _type = None
    _status = None
    _jobs = None
    _sourceType = None
    _fullExtent = None
    _minScale = None
    _count = None
    _maxExportTilesCount = None
    _name = None
    _created = None
    _maxScale = None
    _modified = None
    _serverId = None
    _exportTilesAllowed = None
    _urlService = None
    _readonly = None
    _resampling = None
    _json = None
    _json_dict = None
    #----------------------------------------------------------------------
    def __init__(self, url,
                 securityHandler,
                 initialize=False,
                 proxy_url=None,
                 proxy_port=None):
        """Constructor

        Inputs:
           url - admin url of the map service
           securityHandler - a BaseSecurityHandler subclass instance
           initialize - if True, load the service metadata immediately
           proxy_url/proxy_port - optional proxy settings
        Raises:
           AttributeError - when securityHandler is not a supported handler
        """
        self._url = url
        if not isinstance(securityHandler, BaseSecurityHandler):
            raise AttributeError("Admin only supports AGOL, ArcGIS, Portal, NTLM, LDAP, PKI and OAuth security handlers")
        # Portal handlers are exchanged for a server-scoped handler when the
        # handler can produce one; any other handler is used as-is.
        if getattr(securityHandler, 'is_portal', False) and \
           hasattr(securityHandler, 'portalServerHandler'):
            self._securityHandler = securityHandler.portalServerHandler(serverUrl=url)
        else:
            self._securityHandler = securityHandler
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """ initializes the service by loading its JSON description """
        params = {
            "f" : "json",
        }
        json_dict = self._get(self._url, params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._json = json.dumps(self._json_dict)
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k == "url":
                # 'url' collides with this object's own url; stored separately
                self._urlService = v
            elif k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented. Please log an support request.")
        del k, v
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns the key/value pair of the raw JSON"""
        if self._json_dict is None:
            self.__init()
        for k,v in self._json_dict.items():
            yield [k,v]
    #----------------------------------------------------------------------
    def __str__(self):
        """returns raw json from url query"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    @property
    def readonly(self):
        """returns the readonly property"""
        if self._readonly is None:
            self.__init()
        return self._readonly
    #----------------------------------------------------------------------
    @property
    def resampling(self):
        """returns the resampling property"""
        if self._resampling is None:
            self.__init()
        return self._resampling
    #----------------------------------------------------------------------
    @property
    def currentJob(self):
        """gets the currentJob"""
        if self._currentJob is None:
            self.__init()
        return self._currentJob
    #----------------------------------------------------------------------
    @property
    def lodInfos(self):
        """gets the lodInfos"""
        if self._lodInfos is None:
            self.__init()
        return self._lodInfos
    #----------------------------------------------------------------------
    @property
    def id(self):
        """gets the id"""
        if self._id is None:
            self.__init()
        return self._id
    #----------------------------------------------------------------------
    @property
    def size(self):
        """gets the size"""
        if self._size is None:
            self.__init()
        return self._size
    #----------------------------------------------------------------------
    @property
    def tileInfo(self):
        """gets the tileInfo"""
        if self._tileInfo is None:
            self.__init()
        return self._tileInfo
    #----------------------------------------------------------------------
    @property
    def jobStatus(self):
        """gets the jobStatus"""
        if self._jobStatus is None:
            self.__init()
        return self._jobStatus
    #----------------------------------------------------------------------
    @property
    def access(self):
        """gets the access"""
        if self._access is None:
            self.__init()
        return self._access
    #----------------------------------------------------------------------
    @property
    def cacheExecutionStatus(self):
        """gets the cacheExecutionStatus"""
        if self._cacheExecutionStatus is None:
            self.__init()
        return self._cacheExecutionStatus
    #----------------------------------------------------------------------
    @property
    def type(self):
        """gets the type"""
        if self._type is None:
            self.__init()
        return self._type
    #----------------------------------------------------------------------
    @property
    def jobs(self):
        """gets the jobs"""
        if self._jobs is None:
            self.__init()
        return self._jobs
    #----------------------------------------------------------------------
    @property
    def sourceType(self):
        """gets the sourceType"""
        if self._sourceType is None:
            self.__init()
        return self._sourceType
    #----------------------------------------------------------------------
    @property
    def fullExtent(self):
        """gets the fullExtent"""
        if self._fullExtent is None:
            self.__init()
        return self._fullExtent
    #----------------------------------------------------------------------
    @property
    def minScale(self):
        """gets the minScale"""
        if self._minScale is None:
            self.__init()
        return self._minScale
    #----------------------------------------------------------------------
    @property
    def count(self):
        """gets the count"""
        if self._count is None:
            self.__init()
        return self._count
    #----------------------------------------------------------------------
    @property
    def maxExportTilesCount(self):
        """gets the maxExportTilesCount"""
        if self._maxExportTilesCount is None:
            self.__init()
        return self._maxExportTilesCount
    #----------------------------------------------------------------------
    @property
    def name(self):
        """gets the name"""
        if self._name is None:
            self.__init()
        return self._name
    #----------------------------------------------------------------------
    @property
    def created(self):
        """gets the created"""
        if self._created is None:
            self.__init()
        return self._created
    #----------------------------------------------------------------------
    @property
    def urlService(self):
        """gets the service's own 'url' value from the JSON response"""
        if self._urlService is None:
            self.__init()
        return self._urlService
    #----------------------------------------------------------------------
    @property
    def maxScale(self):
        """gets the maxScale"""
        if self._maxScale is None:
            self.__init()
        return self._maxScale
    #----------------------------------------------------------------------
    @property
    def modified(self):
        """gets the modified"""
        if self._modified is None:
            self.__init()
        return self._modified
    #----------------------------------------------------------------------
    @property
    def serverId(self):
        """gets the serverId"""
        if self._serverId is None:
            self.__init()
        return self._serverId
    #----------------------------------------------------------------------
    @property
    def exportTilesAllowed(self):
        """gets the exportTilesAllowed"""
        if self._exportTilesAllowed is None:
            self.__init()
        return self._exportTilesAllowed
    #----------------------------------------------------------------------
    @property
    def initialExtent(self):
        """gets the initialExtent"""
        if self._initialExtent is None:
            self.__init()
        return self._initialExtent
    #----------------------------------------------------------------------
    @property
    def status(self):
        """ returns the service status """
        if self._status is None:
            self.__init()
        return self._status
    #----------------------------------------------------------------------
    @property
    def securityHandler(self):
        """ returns the current security handler """
        return self._securityHandler
    #----------------------------------------------------------------------
    @securityHandler.setter
    def securityHandler(self, value):
        """ sets the security handler """
        if isinstance(value, security.AGOLTokenSecurityHandler):
            self._securityHandler = value
        else:
            raise AttributeError("This object only accepts security.AGOLTokenSecurityHandler")
    #----------------------------------------------------------------------
    def refresh(self, serviceDefinition=True):
        """
        The refresh operation refreshes a service, which clears the web
        server cache for the service.

        Inputs:
           serviceDefinition - refresh the service definition as well
        Output:
           JSON response as dictionary
        """
        # Bug fix: the request was previously posted to self._url, ignoring
        # the refresh endpoint entirely (and the old suffix
        # "/MapServer/refresh" doubled the service type already present in
        # self._url).  Post to the /refresh child resource, matching
        # AdminFeatureService.refresh.
        url = self._url + "/refresh"
        params = {
            "f" : "json",
            "serviceDefinition" : serviceDefinition
        }
        res = self._post(url=url,
                         param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
        # reload cached metadata after the server-side refresh
        self.__init()
        return res
    #----------------------------------------------------------------------
    def cancelJob(self, jobId):
        """
        The cancel job operation supports cancelling a job while update
        tiles is running on a hosted map service. The result of this
        operation is a response indicating success or failure with error
        code and description.
        Inputs:
           jobId - jobId to cancel
        """
        url = self._url + "/jobs/%s/cancel" % jobId
        params = {
            "f" : "json"
        }
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def jobStatistics(self, jobId):
        """
        Returns the statistics for a given tile-caching job on a hosted
        map service.
        Inputs:
           jobId - identifier of the job to report on
        Output:
           JSON response as dictionary
        """
        url = self._url + "/jobs/%s" % jobId
        params = {
            "f" : "json"
        }
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
    #----------------------------------------------------------------------
    def editTileService(self,
                        serviceDefinition=None,
                        minScale=None,
                        maxScale=None,
                        sourceItemId=None,
                        exportTilesAllowed=False,
                        maxExportTileCount=100000):
        """
        This post operation updates a Tile Service's properties
        Inputs:
           serviceDefinition - updates a service definition
           minScale - sets the services minimum scale for caching
           maxScale - sets the service's maximum scale for caching
           sourceItemId - The Source Item ID is the GeoWarehouse Item ID of the map service
           exportTilesAllowed - sets the value to let users export tiles
           maxExportTileCount - sets the maximum amount of tiles to be exported
                                from a single call.
        """
        params = {
            "f" : "json",
        }
        if serviceDefinition is not None:
            params["serviceDefinition"] = serviceDefinition
        if minScale is not None:
            params['minScale'] = float(minScale)
        if maxScale is not None:
            params['maxScale'] = float(maxScale)
        if sourceItemId is not None:
            params["sourceItemId"] = sourceItemId
        if exportTilesAllowed is not None:
            params["exportTilesAllowed"] = exportTilesAllowed
        if maxExportTileCount is not None:
            params["maxExportTileCount"] = int(maxExportTileCount)
        url = self._url + "/edit"
        # Consistency fix: every other request in this class routes through
        # self._proxy_url/self._proxy_port; the handler is not guaranteed to
        # expose proxy attributes.
        return self._post(url=url,
                          param_dict=params,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
########################################################################
class AdminFeatureService(BaseAGOLClass):
    """
    A feature service can contain datasets (e.g. tables, views) with and
    without a spatial column. Datasets with a spatial column are
    considered layers and without a spatial column are considered
    tables. A feature service allows clients to query and edit feature
    geometry and attributes.
    This resource provides basic information about the feature service
    including the feature layers and tables that it contains, the
    service description, etc. The administrative feature service
    resource maintains a set of operations that manage the state and
    contents of the service. Note, query and edit operations are not
    available via the administrative resource.
    """
    _url = None
    _xssPreventionInfo = None
    _size = None
    _adminServiceInfo = None
    _initialExtent = None
    _copyrightText = None
    _layers = None
    _tables = None
    _enableZDefaults = None
    _syncCapabilities = None
    _capabilities = None
    _currentVersion = None
    _hasVersionedData = None
    _units = None
    _supportedQueryFormats = None
    _maxRecordCount = None
    _allowGeometryUpdates = None
    _description = None
    _hasStaticData = None
    _fullExtent = None
    _serviceDescription = None
    _editorTrackingInfo = None
    _supportsDisconnectedEditing = None
    _spatialReference = None
    _syncEnabled = None
    _dict = None
    _json = None
    _json_dict = None
    _error = None
    _serviceItemId = None
    _supportsApplyEditsWithGlobalIds = None
    #----------------------------------------------------------------------
    def __init__(self, url,
                 securityHandler,
                 initialize=False,
                 proxy_url=None,
                 proxy_port=None):
        """Constructor

        Inputs:
           url - feature service url (a rest/services url is rewritten to
                 its rest/admin/services equivalent)
           securityHandler - a BaseSecurityHandler subclass instance
           initialize - if True, load the service metadata immediately
           proxy_url/proxy_port - optional proxy settings
        Raises:
           AttributeError - when securityHandler is not a supported handler
        """
        if url is None:
            return
        # hosted services expose their admin API under rest/admin/services
        if 'rest/services' in url:
            url = url.replace('rest/services', 'rest/admin/services')
        self._url = url
        if isinstance(securityHandler, BaseSecurityHandler):
            # portal handlers are swapped for a server-scoped handler when
            # the handler can produce one; otherwise used as-is
            if hasattr(securityHandler, 'is_portal'):
                if securityHandler.is_portal:
                    if hasattr(securityHandler, 'portalServerHandler'):
                        self._securityHandler = securityHandler.portalServerHandler(serverUrl=url)
                    else:
                        self._securityHandler = securityHandler
                else:
                    self._securityHandler = securityHandler
            else:
                self._securityHandler = securityHandler
        else:
            raise AttributeError("Admin only supports AGOL, ArcGIS, Portal, NTLM, LDAP, PKI and OAuth security handlers")
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __init(self):
        """ initializes the service by loading its JSON description and
        building AdminFeatureServiceLayer objects for layers/tables """
        params = {
            "f" : "json",
        }
        json_dict = self._get(self._url, params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json_dict = json_dict
        self._dict = json_dict
        self._json = json.dumps(self._dict)
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and \
                      not attr.startswith('_')]
        for k,v in json_dict.items():
            if k == "layers":
                self._layers = []
                for lyr in v:
                    fl = AdminFeatureServiceLayer(url=self._url + "/%s" % lyr['id'],
                                                  securityHandler=self._securityHandler,
                                                  proxy_port=self._proxy_port,
                                                  proxy_url=self._proxy_url)
                    fl.loadAttributes(json_dict = lyr)
                    self._layers.append(fl)
                    del fl
                del lyr
            elif k == "tables":
                # tables are wrapped in the same layer class as layers
                self._tables = []
                for lyr in v:
                    fl = AdminFeatureServiceLayer(url=self._url + "/%s" % lyr['id'],
                                                  securityHandler=self._securityHandler,
                                                  proxy_port=self._proxy_port,
                                                  proxy_url=self._proxy_url)
                    fl.loadAttributes(json_dict = lyr)
                    self._tables.append(fl)
                    del fl
                del lyr
            elif k in attributes:
                setattr(self, "_"+ k, json_dict[k])
            else:
                print( k, " - attribute not implemented in AdminFeatureService.")
    #----------------------------------------------------------------------
    @property
    def supportsApplyEditsWithGlobalIds(self):
        '''gets the property value for supportsApplyEditsWithGlobalIds'''
        if self._supportsApplyEditsWithGlobalIds is None:
            self.__init()
        return self._supportsApplyEditsWithGlobalIds
    #----------------------------------------------------------------------
    @property
    def serviceItemId(self):
        '''gets the property value for serviceItemId'''
        if self._serviceItemId is None:
            self.__init()
        return self._serviceItemId
    #----------------------------------------------------------------------
    @property
    def error(self):
        """gets the error message"""
        if self._error is None:
            self.__init()
        return self._error
    #----------------------------------------------------------------------
    @property
    def securityHandler(self):
        """ returns the security handler """
        return self._securityHandler
    #----------------------------------------------------------------------
    @securityHandler.setter
    def securityHandler(self, value):
        """ sets the security handler; non-handler values are silently
        ignored, and unsupported handler types raise AttributeError """
        if isinstance(value, BaseSecurityHandler):
            if isinstance(value, (security.AGOLTokenSecurityHandler,security.PortalTokenSecurityHandler,security.ArcGISTokenSecurityHandler)):
                self._securityHandler = value
            else:
                raise AttributeError("Admin only supports security.AGOLTokenSecurityHandler")
    #----------------------------------------------------------------------
    def __str__(self):
        """return object as a raw JSON string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """returns the key/value pair of the raw JSON"""
        if self._json_dict is None:
            self.__init()
        for k,v in self._json_dict.items():
            yield [k,v]
    #----------------------------------------------------------------------
    @property
    def status(self):
        """ returns the service status (always re-fetched, never cached) """
        uURL = self._url + "/status"
        params = {
            "f" : "json"
        }
        return self._get(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
    #----------------------------------------------------------------------
    def refresh(self):
        """ refreshes a service and reloads the cached metadata """
        # NOTE(review): refresh is issued as a GET here while
        # AdminMapService posts its refresh -- confirm the server accepts GET
        params = {"f": "json"}
        uURL = self._url + "/refresh"
        res = self._get(url=uURL, param_dict=params,
                        securityHandler=self._securityHandler,
                        proxy_port=self._proxy_port,
                        proxy_url=self._proxy_url)
        self.__init()
        return res
    #----------------------------------------------------------------------
    @property
    def xssPreventionInfo(self):
        """returns the xssPreventionInfo information """
        if self._xssPreventionInfo is None:
            self.__init()
        return self._xssPreventionInfo
    #----------------------------------------------------------------------
    @property
    def size(self):
        """returns the size parameter"""
        if self._size is None:
            self.__init()
        return self._size
    #----------------------------------------------------------------------
    @property
    def maxRecordCount(self):
        """returns the max record count"""
        if self._maxRecordCount is None:
            self.__init()
        return self._maxRecordCount
    #----------------------------------------------------------------------
    @property
    def supportedQueryFormats(self):
        """returns the supported query formats"""
        if self._supportedQueryFormats is None:
            self.__init()
        return self._supportedQueryFormats
    #----------------------------------------------------------------------
    @property
    def capabilities(self):
        """ returns a list of capabilities """
        if self._capabilities is None:
            self.__init()
        return self._capabilities
    #----------------------------------------------------------------------
    @property
    def description(self):
        """ returns the service description """
        if self._description is None:
            self.__init()
        return self._description
    #----------------------------------------------------------------------
    @property
    def copyrightText(self):
        """ returns the copyright text """
        if self._copyrightText is None:
            self.__init()
        return self._copyrightText
    #----------------------------------------------------------------------
    @property
    def spatialReference(self):
        """ returns the spatial reference """
        if self._spatialReference is None:
            self.__init()
        return self._spatialReference
    #----------------------------------------------------------------------
    @property
    def initialExtent(self):
        """ returns the initial extent of the feature service """
        if self._initialExtent is None:
            self.__init()
        return self._initialExtent
    #----------------------------------------------------------------------
    @property
    def fullExtent(self):
        """ returns the full extent of the feature service """
        if self._fullExtent is None:
            self.__init()
        return self._fullExtent
    #----------------------------------------------------------------------
    @property
    def allowGeometryUpdates(self):
        """ informs the user if the data allows geometry updates """
        if self._allowGeometryUpdates is None:
            self.__init()
        return self._allowGeometryUpdates
    #----------------------------------------------------------------------
    @property
    def units(self):
        """ returns the measurement unit """
        if self._units is None:
            self.__init()
        return self._units
    #----------------------------------------------------------------------
    @property
    def syncEnabled(self):
        """ informs the user if sync of data can be performed """
        if self._syncEnabled is None:
            self.__init()
        return self._syncEnabled
    #----------------------------------------------------------------------
    @property
    def syncCapabilities(self):
        """ type of sync that can be performed """
        if self._syncCapabilities is None:
            self.__init()
        return self._syncCapabilities
    #----------------------------------------------------------------------
    @property
    def editorTrackingInfo(self):
        """returns the editor tracking settings"""
        if self._editorTrackingInfo is None:
            self.__init()
        return self._editorTrackingInfo
    #----------------------------------------------------------------------
    @property
    def hasStaticData(self):
        """returns whether the service data is static"""
        if self._hasStaticData is None:
            self.__init()
        return self._hasStaticData
    #----------------------------------------------------------------------
    @property
    def currentVersion(self):
        """ returns the feature service current version """
        if self._currentVersion is None:
            self.__init()
        return self._currentVersion
    #----------------------------------------------------------------------
    @property
    def serviceDescription(self):
        """ returns the serviceDescription of the feature service """
        if self._serviceDescription is None:
            self.__init()
        return self._serviceDescription
    #----------------------------------------------------------------------
    @property
    def hasVersionedData(self):
        """ returns boolean for versioned data """
        if self._hasVersionedData is None:
            self.__init()
        return self._hasVersionedData
    #----------------------------------------------------------------------
    @property
    def supportsDisconnectedEditing(self):
        """ returns whether disconnected editing is supported """
        if self._supportsDisconnectedEditing is None:
            self.__init()
        return self._supportsDisconnectedEditing
    #----------------------------------------------------------------------
    @property
    def adminServiceInfo(self):
        """ returns the admin service information"""
        if self._adminServiceInfo is None:
            self.__init()
        return self._adminServiceInfo
    #----------------------------------------------------------------------
    @property
    def layers(self):
        """ returns the layers for a service """
        if self._layers is None:
            self.__init()
        return self._layers
    #----------------------------------------------------------------------
    @property
    def tables(self):
        """ returns the tables for a service """
        if self._tables is None:
            self.__init()
        return self._tables
    #----------------------------------------------------------------------
    @property
    def enableZDefaults(self):
        """ returns the z-value default settings for a service """
        if self._enableZDefaults is None:
            self.__init()
        return self._enableZDefaults
    #----------------------------------------------------------------------
    @property
    def asDictionary(self):
        """ returns the feature service as a dictionary object """
        if self._dict is None:
            self.__init()
        return self._dict
    #----------------------------------------------------------------------
    @property
    def url(self):
        """ returns the service url (None if never set) """
        if self._url is None:
            return None
        return self._url
    #----------------------------------------------------------------------
    @property
    def json(self):
        """ returns the service properties; note this returns a dictionary
        (same as asDictionary), not a JSON string """
        if self._dict is None:
            self.__init()
        return self._dict
    #----------------------------------------------------------------------
    def addToDefinition(self, json_dict):
        """
        The addToDefinition operation supports adding a definition
        property to a hosted feature service. The result of this
        operation is a response indicating success or failure with error
        code and description.
        This function will allow users to change add additional values
        to an already published service.
        Input:
          json_dict - part to add to host service.  The part format can
                      be derived from the asDictionary property.  For
                      layer level modifications, run updates on each
                      individual feature service layer object.
        Output:
          JSON message as dictionary
        """
        params = {
            "f" : "json",
            "addToDefinition" : json.dumps(json_dict),
            "async" : False
        }
        uURL = self._url + "/addToDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
    #----------------------------------------------------------------------
    def updateDefinition(self, json_dict):
        """
        The updateDefinition operation supports updating a definition
        property in a hosted feature service. The result of this
        operation is a response indicating success or failure with error
        code and description.
        Input:
          json_dict - part to add to host service.  The part format can
                      be derived from the asDictionary property.  For
                      layer level modifications, run updates on each
                      individual feature service layer object.
        Output:
          JSON Message as dictionary
        """
        definition = None
        if json_dict is not None:
            if isinstance(json_dict,collections.OrderedDict) == True:
                # an OrderedDict is trusted to already be in submission order
                definition = json_dict
            else:
                # rebuild the definition in a deliberate key order; the
                # well-known keys go first, then everything else is appended
                definition = collections.OrderedDict()
                if 'hasStaticData' in json_dict:
                    definition['hasStaticData'] = json_dict['hasStaticData']
                if 'allowGeometryUpdates' in json_dict:
                    definition['allowGeometryUpdates'] = json_dict['allowGeometryUpdates']
                if 'capabilities' in json_dict:
                    definition['capabilities'] = json_dict['capabilities']
                if 'editorTrackingInfo' in json_dict:
                    definition['editorTrackingInfo'] = collections.OrderedDict()
                    if 'enableEditorTracking' in json_dict['editorTrackingInfo']:
                        definition['editorTrackingInfo']['enableEditorTracking'] = json_dict['editorTrackingInfo']['enableEditorTracking']
                    if 'enableOwnershipAccessControl' in json_dict['editorTrackingInfo']:
                        definition['editorTrackingInfo']['enableOwnershipAccessControl'] = json_dict['editorTrackingInfo']['enableOwnershipAccessControl']
                    if 'allowOthersToUpdate' in json_dict['editorTrackingInfo']:
                        definition['editorTrackingInfo']['allowOthersToUpdate'] = json_dict['editorTrackingInfo']['allowOthersToUpdate']
                    if 'allowOthersToDelete' in json_dict['editorTrackingInfo']:
                        definition['editorTrackingInfo']['allowOthersToDelete'] = json_dict['editorTrackingInfo']['allowOthersToDelete']
                    if 'allowOthersToQuery' in json_dict['editorTrackingInfo']:
                        definition['editorTrackingInfo']['allowOthersToQuery'] = json_dict['editorTrackingInfo']['allowOthersToQuery']
                    if isinstance(json_dict['editorTrackingInfo'],dict):
                        for k,v in json_dict['editorTrackingInfo'].items():
                            if k not in definition['editorTrackingInfo']:
                                definition['editorTrackingInfo'][k] = v
                if isinstance(json_dict,dict):
                    for k,v in json_dict.items():
                        if k not in definition:
                            definition[k] = v
        params = {
            "f" : "json",
            "updateDefinition" : json.dumps(obj=definition,separators=(',', ':')),
            "async" : False
        }
        uURL = self._url + "/updateDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
    #----------------------------------------------------------------------
    def deleteFromDefinition(self, json_dict):
        """
        The deleteFromDefinition operation supports deleting a
        definition property from a hosted feature service. The result of
        this operation is a response indicating success or failure with
        error code and description.
        See: http://resources.arcgis.com/en/help/arcgis-rest-api/index.html#/Delete_From_Definition_Feature_Service/02r30000021w000000/
        for additional information on this function.
        Input:
          json_dict - part to add to host service.  The part format can
                      be derived from the asDictionary property.  For
                      layer level modifications, run updates on each
                      individual feature service layer object.  Only
                      include the items you want to remove from the
                      FeatureService or layer.
        Output:
          JSON Message as dictionary
        """
        params = {
            "f" : "json",
            "deleteFromDefinition" : json.dumps(json_dict),
            "async" : False
        }
        uURL = self._url + "/deleteFromDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
########################################################################
class AdminFeatureServiceLayer(BaseAGOLClass):
    """Administrative view of one feature layer or table in a feature
    service.

    The layer resource represents a single feature layer or a non
    spatial table in a feature service.  A feature layer is a table or
    view with at least one spatial column.

    For tables, it provides basic information about the table such as
    its id, name, fields, types and templates.  For feature layers, in
    addition to the table information above, it provides information
    such as its geometry type, min and max scales, and spatial
    reference.

    Each type includes information about the type such as the type id,
    name, and definition expression.  Sub-types also include a default
    symbol and a list of feature templates.  Each feature template
    includes a template name, description and a prototypical feature.

    The property supportsRollbackOnFailures will be true to indicate the
    support for transactional edits.  The property maxRecordCount
    returns the maximum number of records that will be returned at once
    for a query.  The property capabilities returns Query, Create,
    Delete, Update, and Editing capabilities; Editing will be included
    if Create, Delete or Update is enabled for a Feature Service.

    Note: query and edit operations are not available on a layer in the
    administrative view.
    """
    # -- connection / request state -----------------------------------
    _url = None
    _securityHandler = None
    _proxy_url = None
    _proxy_port = None
    # -- raw response caches (populated by __init) --------------------
    _json = None
    _json_dict = None
    _error = None
    # -- layer properties reported by the service (lazily loaded) -----
    # NOTE: the original declared most of these twice; the duplicate
    # declarations were removed.
    _editFieldsInfo = None
    _drawingInfo = None
    _typeIdField = None
    _advancedQueryCapabilities = None
    _supportsRollbackOnFailureParameter = None
    _globalIdField = None
    _supportsAdvancedQueries = None
    _id = None
    _relationships = None
    _capabilities = None
    _indexes = None
    _currentVersion = None
    _geometryType = None
    _hasStaticData = None
    _type = None
    _supportedQueryFormats = None
    _isDataVersioned = None
    _allowGeometryUpdates = None
    _description = None
    _defaultVisibility = None
    _extent = None
    _objectIdField = None
    _htmlPopupType = None
    _types = None
    _hasM = None
    _displayField = None
    _name = None
    _templates = None
    _supportsStatistics = None
    _hasAttachments = None
    _fields = None
    _maxScale = None
    _copyrightText = None
    _hasZ = None
    _maxRecordCount = None
    _minScale = None
    _definitionExpression = None
    _parentLayer = None
    _subLayers = None
    _effectiveMinScale = None
    _effectiveMaxScale = None
    _timeInfo = None
    _canModifyLayer = None
    _hasLabels = None
    _canScaleSymbols = None
    _ownershipBasedAccessControlForFeatures = None
    _adminLayerInfo = None
    _supportsAttachmentsByUploadId = None
    _editingInfo = None
    _supportsCalculate = None
    _supportsValidateSql = None
    _supportsCoordinatesQuantization = None
    _syncCanReturnChanges = None
    _dateFieldsTimeReference = None
    _enableZDefaults = None
    _ogcGeometryType = None
    _exceedsLimitFactor = None
    _useStandardizedQueries = None
    _definitionQuery = None
    _zDefault = None
    _supportsApplyEditsWithGlobalIds = None
    _supportsValidateSQL = None
    _serviceItemId = None
    _standardMaxRecordCount = None
    _tileMaxRecordCount = None
    _maxRecordCountFactor = None
    #----------------------------------------------------------------------
    def __init__(self,
                 url,
                 securityHandler,
                 initialize=False,
                 proxy_url=None,
                 proxy_port=None):
        """Constructor

        Inputs:
           url - admin URL of the feature service layer
           securityHandler - AGOL/Portal/ArcGIS security handler
           initialize - if True, load the layer properties immediately
           proxy_url - optional proxy host
           proxy_port - optional proxy port
        """
        self._url = url
        self._proxy_url = proxy_url
        self._proxy_port = proxy_port
        if not isinstance(securityHandler, BaseSecurityHandler):
            raise AttributeError("Admin only supports AGOL, ArcGIS, Portal, NTLM, LDAP, PKI and OAuth security handlers")
        # A Portal handler is swapped for a server-scoped handler so
        # requests to this federated server carry a valid token;
        # otherwise the handler is used as given.
        if getattr(securityHandler, 'is_portal', False) and \
           hasattr(securityHandler, 'portalServerHandler'):
            self._securityHandler = securityHandler.portalServerHandler(serverUrl=url)
        else:
            self._securityHandler = securityHandler
        if initialize:
            self.__init()
    #----------------------------------------------------------------------
    def __str__(self):
        """returns the raw JSON response as a string"""
        if self._json is None:
            self.__init()
        return self._json
    #----------------------------------------------------------------------
    def __iter__(self):
        """yields [key, value] pairs of the raw JSON response"""
        if self._json_dict is None:
            self.__init()
        for k, v in self._json_dict.items():
            yield [k, v]
    #----------------------------------------------------------------------
    @property
    def supportsValidateSQL(self):
        """returns the cached supportsValidateSQL flag (not lazily loaded)"""
        # original docstring wrongly said "returns the current security
        # handler" (copy/paste error)
        return self._supportsValidateSQL
    #----------------------------------------------------------------------
    @property
    def serviceItemId(self):
        """returns the cached serviceItemId value (not lazily loaded)"""
        return self._serviceItemId
    #----------------------------------------------------------------------
    @property
    def securityHandler(self):
        """ returns the current security handler """
        return self._securityHandler
    #----------------------------------------------------------------------
    @securityHandler.setter
    def securityHandler(self, value):
        """ sets the security handler """
        if isinstance(value, (security.AGOLTokenSecurityHandler,
                              security.PortalTokenSecurityHandler,
                              security.ArcGISTokenSecurityHandler)):
            self._securityHandler = value
        else:
            raise AttributeError("This object only accepts security.AGOLTokenSecurityHandler")
    #----------------------------------------------------------------------
    def __init(self):
        """loads the layer's admin resource and caches its properties"""
        params = {
            "f" : "json",
        }
        json_dict = self._get(self._url, params,
                              securityHandler=self._securityHandler,
                              proxy_port=self._proxy_port,
                              proxy_url=self._proxy_url)
        self._json = json.dumps(json_dict)
        self._json_dict = json_dict
        self.loadAttributes(json_dict=json_dict)
    #----------------------------------------------------------------------
    def loadAttributes(self, json_dict):
        """copies values from json_dict onto the matching private
        attributes; unknown keys are reported but otherwise ignored"""
        attributes = [attr for attr in dir(self)
                      if not attr.startswith('__') and
                      not attr.startswith('_')]
        for k, v in json_dict.items():
            if k in attributes:
                setattr(self, "_" + k, json_dict[k])
            else:
                print( k, " - attribute not implemented AdminFeatureServiceLayer.")
    #----------------------------------------------------------------------
    def refresh(self):
        """ refreshes a service and re-caches the layer properties """
        params = {"f": "json"}
        uURL = self._url + "/refresh"
        res = self._get(url=uURL, param_dict=params,
                        securityHandler=self._securityHandler,
                        proxy_port=self._proxy_port,
                        proxy_url=self._proxy_url)
        self.__init()
        return res
    #----------------------------------------------------------------------
    def _lazyProperty(attrName):
        """class-body helper: builds a read-only property that loads the
        service once (via __init) if the cached value is still None.

        Replaces ~60 hand-written, identical property bodies from the
        original.  `self.__init()` below is name-mangled to
        _AdminFeatureServiceLayer__init because this code is textually
        inside the class body.
        """
        def _get(self):
            if getattr(self, attrName) is None:
                self.__init()
            return getattr(self, attrName)
        _get.__doc__ = "gets the property value for %s" % attrName.lstrip('_')
        return property(_get)
    # lazily-loaded service properties (one per cached attribute)
    standardMaxRecordCount = _lazyProperty('_standardMaxRecordCount')
    tileMaxRecordCount = _lazyProperty('_tileMaxRecordCount')
    maxRecordCountFactor = _lazyProperty('_maxRecordCountFactor')
    supportsApplyEditsWithGlobalIds = _lazyProperty('_supportsApplyEditsWithGlobalIds')
    supportsValidateSql = _lazyProperty('_supportsValidateSql')
    error = _lazyProperty('_error')
    supportsCoordinatesQuantization = _lazyProperty('_supportsCoordinatesQuantization')
    editFieldsInfo = _lazyProperty('_editFieldsInfo')
    advancedQueryCapabilities = _lazyProperty('_advancedQueryCapabilities')
    supportsRollbackOnFailureParameter = _lazyProperty('_supportsRollbackOnFailureParameter')
    hasStaticData = _lazyProperty('_hasStaticData')
    indexes = _lazyProperty('_indexes')
    templates = _lazyProperty('_templates')
    allowGeometryUpdates = _lazyProperty('_allowGeometryUpdates')
    globalIdField = _lazyProperty('_globalIdField')
    objectIdField = _lazyProperty('_objectIdField')
    currentVersion = _lazyProperty('_currentVersion')
    id = _lazyProperty('_id')
    name = _lazyProperty('_name')
    type = _lazyProperty('_type')
    description = _lazyProperty('_description')
    definitionExpression = _lazyProperty('_definitionExpression')
    geometryType = _lazyProperty('_geometryType')
    hasZ = _lazyProperty('_hasZ')
    hasM = _lazyProperty('_hasM')
    copyrightText = _lazyProperty('_copyrightText')
    parentLayer = _lazyProperty('_parentLayer')
    subLayers = _lazyProperty('_subLayers')
    minScale = _lazyProperty('_minScale')
    maxScale = _lazyProperty('_maxScale')
    effectiveMinScale = _lazyProperty('_effectiveMinScale')
    effectiveMaxScale = _lazyProperty('_effectiveMaxScale')
    defaultVisibility = _lazyProperty('_defaultVisibility')
    extent = _lazyProperty('_extent')
    timeInfo = _lazyProperty('_timeInfo')
    drawingInfo = _lazyProperty('_drawingInfo')
    hasAttachments = _lazyProperty('_hasAttachments')
    htmlPopupType = _lazyProperty('_htmlPopupType')
    displayField = _lazyProperty('_displayField')
    typeIdField = _lazyProperty('_typeIdField')
    fields = _lazyProperty('_fields')
    types = _lazyProperty('_types')
    relationships = _lazyProperty('_relationships')
    canModifyLayer = _lazyProperty('_canModifyLayer')
    supportsStatistics = _lazyProperty('_supportsStatistics')
    supportsAdvancedQueries = _lazyProperty('_supportsAdvancedQueries')
    hasLabels = _lazyProperty('_hasLabels')
    canScaleSymbols = _lazyProperty('_canScaleSymbols')
    capabilities = _lazyProperty('_capabilities')
    supportedQueryFormats = _lazyProperty('_supportedQueryFormats')
    isDataVersioned = _lazyProperty('_isDataVersioned')
    supportsCalculate = _lazyProperty('_supportsCalculate')
    editingInfo = _lazyProperty('_editingInfo')
    supportsAttachmentsByUploadId = _lazyProperty('_supportsAttachmentsByUploadId')
    ownershipBasedAccessControlForFeatures = _lazyProperty('_ownershipBasedAccessControlForFeatures')
    useStandardizedQueries = _lazyProperty('_useStandardizedQueries')
    adminLayerInfo = _lazyProperty('_adminLayerInfo')
    syncCanReturnChanges = _lazyProperty('_syncCanReturnChanges')
    dateFieldsTimeReference = _lazyProperty('_dateFieldsTimeReference')
    enableZDefaults = _lazyProperty('_enableZDefaults')
    ogcGeometryType = _lazyProperty('_ogcGeometryType')
    exceedsLimitFactor = _lazyProperty('_exceedsLimitFactor')
    definitionQuery = _lazyProperty('_definitionQuery')
    # BUG FIX: the original zDefault property read `self.zDefault`
    # instead of `self._zDefault`, recursing infinitely on access.
    zDefault = _lazyProperty('_zDefault')
    del _lazyProperty  # class-creation helper; not part of the API
    #----------------------------------------------------------------------
    @property
    def maxRecordCount(self):
        """maximum records returned per query; defaults to 1000 when the
        service does not report a value"""
        if self._maxRecordCount is None:
            self.__init()
        if self._maxRecordCount is None:
            self._maxRecordCount = 1000
        return self._maxRecordCount
    #----------------------------------------------------------------------
    def addToDefinition(self, json_dict):
        """
        The addToDefinition operation supports adding a definition
        property to a hosted feature service. The result of this
        operation is a response indicating success or failure with error
        code and description.
        This function will allow users to add additional values
        to an already published service.

        Input:
          json_dict - part to add to host service. The part format can
                      be derived from the asDictionary property. For
                      layer level modifications, run updates on each
                      individual feature service layer object.
        Output:
          JSON message as dictionary
        """
        params = {
            "f" : "json",
            "addToDefinition" : json.dumps(json_dict),
            #"async" : False
        }
        uURL = self._url + "/addToDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
    #----------------------------------------------------------------------
    def updateDefinition(self, json_dict):
        """
        The updateDefinition operation supports updating a definition
        property in a hosted feature service. The result of this
        operation is a response indicating success or failure with error
        code and description.

        Input:
          json_dict - part to update on the host service. The part
                      format can be derived from the asDictionary
                      property. For layer level modifications, run
                      updates on each individual feature service layer
                      object.
        Output:
          JSON Message as dictionary
        """
        params = {
            "f" : "json",
            "updateDefinition" : json.dumps(json_dict),
            "async" : False
        }
        uURL = self._url + "/updateDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
    #----------------------------------------------------------------------
    def deleteFromDefinition(self, json_dict):
        """
        The deleteFromDefinition operation supports deleting a
        definition property from a hosted feature service. The result of
        this operation is a response indicating success or failure with
        error code and description.
        See: http://resources.arcgis.com/en/help/arcgis-rest-api/index.html#/Delete_From_Definition_Feature_Service/02r30000021w000000/
        for additional information on this function.

        Input:
          json_dict - part to delete from the host service. The part
                      format can be derived from the asDictionary
                      property. For layer level modifications, run
                      updates on each individual feature service layer
                      object. Only include the items you want to remove
                      from the FeatureService or layer.
        Output:
          JSON Message as dictionary
        """
        params = {
            "f" : "json",
            "deleteFromDefinition" : json.dumps(json_dict)
        }
        uURL = self._url + "/deleteFromDefinition"
        res = self._post(url=uURL, param_dict=params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
        self.refresh()
        return res
| {
"content_hash": "b98762d8b62aaa5b53d6564764ce59d4",
"timestamp": "",
"source": "github",
"line_count": 1904,
"max_line_length": 154,
"avg_line_length": 39.14915966386555,
"alnum_prop": 0.46377783740273676,
"repo_name": "DShokes/ArcREST",
"id": "8ed24f48568d3fd9772c0a932cda19b2062be8c6",
"size": "74540",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/arcrest/hostedservice/service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "48383"
},
{
"name": "Python",
"bytes": "2174554"
}
],
"symlink_target": ""
} |
""" Set of classes and methods specific to Philips scanning environments
"""
from __future__ import print_function
from __future__ import division
import os
from os.path import join
import sys
import time
import re
import fnmatch
import glob
import json
import logging
from threading import Thread
from queue import Queue
import numpy as np
import nibabel as nib
import zmq
class Philips_DirStructure():
    """ Finding the names and paths of series directories in a Philips scanning
    environment

    In Philips environments, using the XTC module for exporting data in
    real-time, data will be exported to a shared directory that is accessible
    from a remote workstation (running Pyneal Scanner). We'll refer to this
    shared directory as the session directory, and the `sessionDir` variable
    will refer to the full path of that directory.

    Within the `sessionDir` each new series will be assigned its own
    subdirectory, which we'll refer to as the series dir (`seriesDir`). The
    series dirs will be numbered sequentially in order of creation, starting
    with '0000'.

    Each `seriesDir` will contain Par/Rec file pairs for each volume in the
    series. These file pairs will appear in real-time as the scan progresses.

    This class contains methods to retrieve the most recently modified series
    directories, as well as monitor for the creation of new series directories,
    and stores attributes referring to the directory structure for the current
    scanning session.
    """
    def __init__(self, scannerSettings):
        """ Initialize the class

        Parameters
        ----------
        scannerSettings : object
            class attributes represent all of the settings unique to the
            current scanning environment (many of them read from
            `scannerConfig.yaml`)

        See Also
        --------
        general_utils.ScannerSettings
        """
        # the shared export dir must be configured; without it there is
        # nothing to monitor, so exit rather than limp along
        if 'scannerSessionDir' in scannerSettings.allSettings:
            self.sessionDir = scannerSettings.allSettings['scannerSessionDir']
        else:
            print('No scannerSessionDir found in scannerConfig file')
            sys.exit()

    def print_currentSeries(self):
        """ Find all of the series dirs in given sessionDir, and print them
        all, along with time since last modification, and directory size
        """
        # find the sessionDir, if not already found
        if self.sessionDir is None:
            # NOTE(review): findSessionDir is not defined on this class;
            # __init__ appears to guarantee sessionDir is set, so this
            # branch looks unreachable -- confirm before relying on it
            self.findSessionDir()

        # get a list of all series dirs in the sessionDir
        seriesDirs = self._findAllSubdirs(self.sessionDir)

        if seriesDirs is not None:
            # sort based on modification time
            seriesDirs = sorted(seriesDirs, key=lambda x: x[1])

            # print directory info to the screen
            print('Existing Series Dirs: ')

            currentTime = int(time.time())
            for thisPath, mTime in seriesDirs:
                # os.path.basename is portable; the original
                # split('/')[-1] broke on Windows-style paths
                dirName = os.path.basename(thisPath)

                # calculate & format directory size
                dirSize = sum([os.path.getsize(join(thisPath, f))
                               for f in os.listdir(thisPath)])
                if dirSize < 1000:
                    size_string = '{:5.1f} bytes'.format(dirSize)
                elif 1000 <= dirSize < 1000000:
                    size_string = '{:5.1f} kB'.format(dirSize / 1000)
                elif 1000000 <= dirSize:
                    size_string = '{:5.1f} MB'.format(dirSize / 1000000)

                # calculate time (in mins/secs) since it was modified
                # (renamed from m, s to avoid shadowing the loop vars)
                timeElapsed = currentTime - mTime
                mins, secs = divmod(timeElapsed, 60)
                time_string = '{} min, {} s ago'.format(int(mins), int(secs))

                print('    {}\t{}\t{}'.format(dirName, size_string, time_string))
            print('\n')

    def _findAllSubdirs(self, parentDir):
        """ Return a list of all subdirectories within the specified
        parentDir, along with the modification time for each

        Parameters
        ----------
        parentDir : string
            full path to the parent directory you want to search

        Returns
        -------
        subDirs : list or None
            each item in `subDirs` is itself a list containing 2-items for each
            subdirectory in the `parentDir`. Each nested list will contain the
            path to the subdirectory and the last modification time for that
            directory. Thus, `subDirs` is structured like:
                [[subDir_path, subDir_modTime]]
            Returns None when `parentDir` has no subdirectories.
        """
        subDirs = [join(parentDir, d) for d in os.listdir(parentDir)
                   if os.path.isdir(join(parentDir, d))]
        if not subDirs:
            return None

        # pair each directory path with its last modification time
        return [[path, os.stat(path).st_mtime] for path in subDirs]

    def waitForSeriesDir(self, interval=.1):
        """ Listen for the creation of a new series directory.

        Once a scan starts, a new series directory will be created
        in the `sessionDir`. By the time this function is called, this
        class should already have the `sessionDir` defined.

        Parameters
        ----------
        interval : float, optional
            time, in seconds, to wait between polling for a new directory

        Returns
        -------
        seriesDir : string
            full path to the newly created directory
        """
        startTime = int(time.time())    # tag the start time

        keepWaiting = True
        while keepWaiting:
            # series dirs are named with leading '0's (e.g. '0000'), so
            # only directories starting with '0' are candidates
            childDirs = glob.glob(join(self.sessionDir, '0*'))

            # a dir modified after we started waiting is the new series
            for thisDir in childDirs:
                if os.path.getmtime(thisDir) > startTime:
                    seriesDir = thisDir
                    keepWaiting = False
                    break

            # pause before searching directories again
            time.sleep(interval)

        # return the found series directory
        return seriesDir

    def get_seriesDirs(self):
        """ Build a list that contains the directory names of all of the
        series directories currently in the `sessionDir`. Set the class
        attribute for `seriesDirs`

        Returns
        -------
        seriesDirs : list or None
            list of all series directories (directory names ONLY) found within
            the current `sessionDir`; None if there are no subdirectories
        """
        # get a list of all sub dirs in the sessionDir
        subDirs = self._findAllSubdirs(self.sessionDir)

        if subDirs is not None:
            # keep just the directory name from each [path, mtime] pair
            self.seriesDirs = [os.path.basename(d[0]) for d in subDirs]
        else:
            self.seriesDirs = None

        return self.seriesDirs
class Philips_BuildNifti():
    """ Tools to build a 3D or 4D Nifti image from all of the par/rec file
    pairs for a given series.

    Input is a path to a series directory containing par/rec file pairs for
    each volume in the series. Image parameters, like voxel spacing and
    dimensions, are obtained automatically from the file headers.

    End result is a Nifti1 formatted 3D (anat) or 4D (func) file in RAS+
    orientation.
    """
    def __init__(self, seriesDir):
        """ Initialize class, and set/obtain basic class attributes like file
        paths and scan parameters.

        Parameters
        ----------
        seriesDir : string
            full path to the directory containing the raw par/rec file pairs
            for each volume in the series
        """
        # initialize attributes
        self.seriesDir = seriesDir
        self.niftiImage = None

        # make a list of all of the par files in this dir (case-insensitive
        # so both '.par' and '.PAR' are picked up)
        par_pattern = re.compile(fnmatch.translate('*.par'), re.IGNORECASE)
        parFiles = [x for x in os.listdir(self.seriesDir) if par_pattern.match(x)]

        # figure out what type of image this is, 4d or 3d
        self.scanType = self._determineScanType(parFiles[0])

        # build nifti image
        if self.scanType == 'anat':
            self.niftiImage = self.buildAnat(parFiles)
        elif self.scanType == 'func':
            self.niftiImage = self.buildFunc(parFiles)

    def buildAnat(self, parFiles):
        """ Build a 3D structural/anatomical Nifti image from the par/rec file.

        On Philips scanners, the entire anat image is assumed to be contained
        in a single par/rec file pair. Open the par file, extract the image
        data, realign to RAS+ and build a nifti object.

        Parameters
        ----------
        parFiles : list
            list containing the file names (file names ONLY, no path) of all
            par files in the series directory. If this is an anatomical image
            there should only be a single file in the list

        Returns
        -------
        anatImage_RAS : Nifti1Image
            nifti-1 formatted image of the 3D anatomical data, oriented in
            RAS+

        See Also
        --------
        nibabel.nifti1.Nifti1Image()
        """
        # should only be a single parFile in the list
        anatImage = nib.load(join(self.seriesDir, parFiles[0]), strict_sort=True)

        # convert to RAS+
        anatImage_RAS = nib.as_closest_canonical(anatImage)
        print('Nifti image dims: {}'.format(anatImage_RAS.shape))
        return anatImage_RAS

    def buildFunc(self, parFiles):
        """ Build a 4D functional image from a list of par files.

        Given a list of `parFiles`, build a 4D functional image from them. For
        Philips scanners, there should be a par header file and corresponding
        rec image file for each volume in the series. This function will read
        each header/image pair and construct a 4D nifti object. The 4D nifti
        object contains a voxel array ordered like RAS+ as well as the affine
        transformation to map between vox and mm space.

        Parameters
        ----------
        parFiles : list
            list containing the file names (file names ONLY, no path) of all
            par files to be used in constructing the final nifti image

        Returns
        -------
        funcImage : Nifti1Image
            nifti-1 formatted image of the 4D functional data, oriented in
            RAS+

        See Also
        --------
        nibabel.nifti1.Nifti1Image()
        """
        imageMatrix = None
        affine = None
        TR = None

        ### Loop over all of the par files
        nVols = len(parFiles)
        for par_fname in parFiles:
            # build full path to this par file
            par_fname = join(self.seriesDir, par_fname)

            # make sure there is a corresponding .rec file
            if not os.path.isfile(par_fname.replace('.par', '.rec')):
                # BUGFIX: the filename was previously passed as a second
                # argument to print() instead of being substituted into the
                # '{}' placeholder
                print('No REC file found to match par: {}'.format(par_fname))

            ### Build the 3d voxel array for this volume and reorder to RAS+
            # nibabel will load the par/rec, but there can be multiple images
            # (mag, phase, etc...) concatenated into the 4th dimension. Loading
            # with the strict_sort option (I think) will make sure the first
            # image is the data we want. Extract this data, then reorder the
            # voxel array to RAS+
            thisVol = nib.load(par_fname, strict_sort=True)

            # get the vol index (0-based index) from the acq_nr field of the
            # header (1-based index)
            volIdx = int(thisVol.header.general_info['acq_nr']) - 1

            # set TR (identical for every volume, so read it once)
            if TR is None:
                TR = thisVol.header.general_info['repetition_time'][0]

            # convert to RAS+
            thisVol_RAS = nib.as_closest_canonical(thisVol)

            # construct the imageMatrix if it hasn't been made yet
            if imageMatrix is None:
                imageMatrix = np.zeros(shape=(thisVol_RAS.shape[0],
                                              thisVol_RAS.shape[1],
                                              thisVol_RAS.shape[2],
                                              nVols), dtype=np.uint16)

            # construct the affine if it isn't made yet
            if affine is None:
                affine = thisVol_RAS.affine

            # Add this data to the image matrix (first image along the 4th dim
            # is the magnitude data we want)
            imageMatrix[:, :, :, volIdx] = thisVol_RAS.get_fdata()[:, :, :, 0].astype('uint16')

        ### Build a Nifti object
        funcImage = nib.Nifti1Image(imageMatrix, affine=affine)

        # add the correct TR to the header (zoom along the 4th dim is the TR)
        pixDims = np.array(funcImage.header.get_zooms())
        pixDims[3] = TR
        funcImage.header.set_zooms(pixDims)
        return funcImage

    def _determineScanType(self, parFile):
        """ Figure out what type of scan this is, anat or func.

        This tool will determine the scan type from a given par file.
        Possible scan types are either single 3D volume (anat), or a 4D
        dataset built up of 2D slices (func). The scan type is determined by
        reading the `scan_mode` variable within the par header.

        Parameters
        ----------
        parFile : string
            file name (not full path) of par file from the current session
            that you would like to open to read the imaging parameters from

        Returns
        -------
        scanType : string
            either 'anat' or 'func' depending on scan mode stored in the
            par header
        """
        # read the parfile
        par = nib.load(join(self.seriesDir, parFile), strict_sort=True)
        if par.header.general_info['scan_mode'] == '3D':
            scanType = 'anat'
        elif par.header.general_info['scan_mode'] == '2D':
            scanType = 'func'
        else:
            # fall back to 'func' when the header value is unrecognized
            print('Header is missing a value for the "scan_mode" flag. Assuming FUNC image')
            scanType = 'func'
            if scanType not in ['anat', 'func']:
                print('invalid scanType: {}'.format(scanType))
                sys.exit()
        return scanType

    def get_scanType(self):
        """ Return the scan type ('anat' or 'func') """
        return self.scanType

    def get_niftiImage(self):
        """ Return the constructed Nifti Image """
        return self.niftiImage

    def write_nifti(self, output_path):
        """ Write the nifti file to disk.

        Parameters
        ----------
        output_path : string
            full path, including filename, you want to use to save the nifti
            image
        """
        nib.save(self.niftiImage, output_path)
        print('Image saved at: {}'.format(output_path))
class Philips_monitorSeriesDir(Thread):
    """ Class to monitor for new par/rec images to appear in the seriesDir.

    This class will run independently in a separate thread, monitoring a
    specified directory for the appearance of new par files. Each new par
    header file that appears will be added to the Queue for further processing
    """
    def __init__(self, seriesDir, parQ, interval=.2):
        """ Initialize the class, and set basic class attributes.

        Parameters
        ----------
        seriesDir : string
            full path to the series directory where new par/rec files will
            appear
        parQ : object
            instance of python queue class to hold new par files before they
            have been processed. This class will add items to that queue.
        interval : float, optional
            time, in seconds, to wait before repolling the seriesDir to check
            for any new files
        """
        # start the thread upon completion
        Thread.__init__(self)

        # set up logger
        self.logger = logging.getLogger(__name__)

        # initialize class parameters
        self.interval = interval        # interval for polling for new files
        self.seriesDir = seriesDir      # full path to series directory
        self.parQ = parQ                # queue to store par header files
        self.alive = True               # thread status
        self.numParsAdded = 0           # counter of par files queued so far
        self.queued_par_files = set()   # full paths already added to the queue
        # case-insensitive match for '*.par' file names
        self.par_pattern = re.compile(fnmatch.translate('*.par'), re.IGNORECASE)

    def run(self):
        # poll the series directory until stop() is called
        while self.alive:
            # create a set of all par files (as full paths) currently in the
            # series dir
            currentPars = set([join(self.seriesDir, x)
                               for x in os.listdir(self.seriesDir)
                               if self.par_pattern.match(x)])

            # grab only the ones that haven't already been added to the queue
            newPars = [f for f in currentPars if f not in self.queued_par_files]

            # loop over each of the new par files, add each to queue.
            # BUGFIX: entries in newPars are already full paths; joining them
            # with seriesDir again (as the previous version did) would corrupt
            # the path whenever seriesDir is a relative path.
            for par_fname in newPars:
                try:
                    self.parQ.put(par_fname)
                # BUGFIX: narrowed from a bare `except:`, which would also
                # swallow SystemExit/KeyboardInterrupt
                except Exception:
                    self.logger.error('failed on: {}'.format(par_fname))
                    print(sys.exc_info())
                    sys.exit()
            if len(newPars) > 0:
                self.logger.debug('Put {} new par files on the queue'.format(len(newPars)))
                self.numParsAdded += len(newPars)

            # now update the set of pars added to the queue
            self.queued_par_files.update(set(newPars))

            # pause
            time.sleep(self.interval)

    def get_numParsAdded(self):
        """ Return the cumulative number of par files added to the queue thus far """
        return self.numParsAdded

    def stop(self):
        """ Set the `alive` flag to False, stopping thread """
        self.alive = False
class Philips_processVolume(Thread):
    """ Class to process each par header file in the queue.

    This class will run in a separate thread. While running, it will pull
    'tasks' off of the queue and process each one. Processing each task
    involves reading the par header file and the corresponding rec binary
    file, extracting the voxel data and relevant header information,
    reordering it to RAS+, and then sending the volume and header out over
    the pynealSocket
    """
    def __init__(self, parQ, pynealSocket, interval=.2):
        """ Initialize the class.

        Parameters
        ----------
        parQ : object
            instance of python queue class that will store the par header file
            names. This class will pull items from that queue.
        pynealSocket : object
            instance of ZMQ style socket that will be used to communicate with
            Pyneal. This class will use this socket to send image data and
            headers to Pyneal during the real-time scan.
            See also: general_utils.create_pynealSocket()
        interval : float, optional
            time, in seconds, to wait before repolling the queue to see if
            there are any new file names to process
        """
        # start the thread upon creation
        Thread.__init__(self)

        # set up logger
        self.logger = logging.getLogger(__name__)

        # initialize class parameters
        self.parQ = parQ
        self.interval = interval        # interval between polling queue for new files
        self.alive = True
        self.pynealSocket = pynealSocket
        self.totalProcessed = 0         # counter for total number of volumes processed

    def run(self):
        self.logger.debug('Philips_processVolume started')

        # keep pulling tasks off the queue until stop() is called
        while self.alive:
            # if there are any par files in the queue, process them
            if not self.parQ.empty():
                numParsInQueue = self.parQ.qsize()

                # loop through all par files currently in queue & process
                for m in range(numParsInQueue):
                    # retrieve file name from queue
                    par_fname = self.parQ.get(True, 2)

                    # ensure the file has copied completely: keep re-checking
                    # until the size is non-zero and stops growing
                    file_size = 0
                    while True:
                        file_info = os.stat(par_fname)
                        if file_info.st_size == 0 or file_info.st_size > file_size:
                            file_size = file_info.st_size
                            # BUGFIX: pause briefly before the next stat call.
                            # Without this, two back-to-back stats of a file
                            # mid-copy would report the same size and the loop
                            # would exit on an incomplete file (it also pegged
                            # a CPU core while waiting).
                            time.sleep(.01)
                        else:
                            break

                    # process this par file
                    self.processParFile(par_fname)

                    # complete this task, thereby clearing it from the queue
                    self.parQ.task_done()

                # log how many were processed
                self.totalProcessed += numParsInQueue
                self.logger.debug('Processed {} tasks from the queue ({} total)'.format(numParsInQueue, self.totalProcessed))

            # pause for a bit
            time.sleep(self.interval)

    def processParFile(self, par_fname):
        """ Process a given par header file.

        Read the par header file and corresponding rec image file. Read in as
        a nifti object that will provide the 3D voxel array for this volume.
        Reorder to RAS+, and then send to the pynealSocket.

        Parameters
        ----------
        par_fname : string
            full path to the par file that you want to process
        """
        # make sure the corresponding rec file exists
        while not os.path.isfile(par_fname.replace('.par', '.rec')):
            time.sleep(.01)

        ### Build the 3D voxel array and reorder to RAS+
        # nibabel will load the par/rec, but there can be multiple images (mag,
        # phase, etc...) concatenated into the 4th dimension. Loading with the
        # strict_sort option (I think) will make sure the first image is the
        # data we want. Extract this data, then reorder the voxel array to RAS+
        thisVol = nib.load(par_fname, strict_sort=True)

        # get the volume index from the acq_nr field of the header (1-based index)
        volIdx = int(thisVol.header.general_info['acq_nr']) - 1
        self.logger.info('Volume {} processing'.format(volIdx))

        # convert to RAS+
        thisVol_RAS = nib.as_closest_canonical(thisVol)

        # grab the data for the first volume along the 4th dimension
        # and store as contiguous array (required for ZMQ)
        thisVol_RAS_data = np.ascontiguousarray(thisVol_RAS.get_fdata()[:, :, :, 0].astype('uint16'))

        ### Create a header with metadata info
        volHeader = {
            'volIdx': volIdx,
            'dtype': str(thisVol_RAS_data.dtype),
            'shape': thisVol_RAS_data.shape,
            'affine': json.dumps(thisVol_RAS.affine.tolist()),
            'TR': str(thisVol.header.general_info['repetition_time'][0])
        }

        ### Send the voxel array and header to the pynealSocket
        self.sendVolToPynealSocket(volHeader, thisVol_RAS_data)

    def sendVolToPynealSocket(self, volHeader, voxelArray):
        """ Send the volume data to Pyneal.

        Send the image data and header information for the specified volume to
        Pyneal via the `pynealSocket`.

        Parameters
        ----------
        volHeader : dict
            key:value pairs for all of the relevant metadata for this volume
        voxelArray : numpy array
            3D numpy array of voxel data from the volume, reoriented to RAS+
        """
        self.logger.debug('TO pynealSocket: vol {}'.format(volHeader['volIdx']))

        ### Send data out the socket, listen for response
        self.pynealSocket.send_json(volHeader, zmq.SNDMORE)  # header as json
        self.pynealSocket.send(voxelArray, flags=0, copy=False, track=False)
        pynealSocketResponse = self.pynealSocket.recv_string()

        # log the success
        self.logger.debug('FROM pynealSocket: {}'.format(pynealSocketResponse))

        # check if that was the last volume, and if so, stop
        if 'STOP' in pynealSocketResponse:
            self.stop()

    def stop(self):
        """ Set the `alive` flag to False, stopping the thread """
        self.alive = False
def Philips_launch_rtfMRI(scannerSettings, scannerDirs):
    """ Launch a real-time session in a Philips environment.

    This method should be called from pynealScanner.py before starting the
    scanner. Once called, it takes care of:
        - monitoring the sessionDir for a new series directory to appear (and
          then returning the name of the new series dir)
        - setting up the socket connection to send volume data over
        - creating a Queue to store newly arriving PAR/REC files
        - starting a separate thread to monitor the new seriesDir
        - starting a separate thread to process PAR/RECs that are in the Queue

    Parameters
    ----------
    scannerSettings : object
        class attributes represent all of the settings unique to the
        current scanning environment (many of them read from
        `scannerConfig.yaml`). Returned from `general_utils.ScannerSettings()`
    scannerDirs : object
        instance of `GE_utils.GE_DirStructure`. Has attributes for the
        relevant paths for the current session. `scannerDirs` is one of the
        variables returned by running `general_utils.initializeSession()`

    See Also
    --------
    general_utils.ScannerSettings()
    general_utils.initializeSession()
    """
    # the logger is assumed to have already been created and customized by
    # pynealScanner.py
    logger = logging.getLogger(__name__)

    ### Set up the pyneal socket -- this is what we'll use to send data
    # (e.g. header, volume pixel data) to remote connections
    host = scannerSettings.get_pynealSocketHost()
    port = scannerSettings.get_pynealSocketPort()
    logger.debug('Scanner Socket Host: {}'.format(host))
    logger.debug('Scanner Socket Port: {}'.format(port))

    from .general_utils import create_pynealSocket
    pynealSocket = create_pynealSocket(host, port)
    logger.debug('Created pynealSocket')

    # handshake: keep sending a greeting until the remote echoes it back
    logger.info('Connecting to pynealSocket...')
    handshake = 'hello from pyneal_scanner '
    connected = False
    while not connected:
        pynealSocket.send_string(handshake)
        connected = pynealSocket.recv_string() == handshake
    logger.info('pynealSocket connected')

    ### Block until the scanner creates a new series directory
    logger.info('Waiting for new seriesDir...')
    seriesDir = scannerDirs.waitForSeriesDir()
    logger.info('New Series Directory: {}'.format(seriesDir))

    ### Start worker threads: one watches the series directory for new
    # par/rec files, the other reformats queued volumes and streams them to
    # pyneal over the socket. The queue links the two.
    parQ = Queue()

    scanWatcher = Philips_monitorSeriesDir(seriesDir, parQ)
    scanWatcher.start()

    volumeProcessor = Philips_processVolume(parQ, pynealSocket)
    volumeProcessor.start()
| {
"content_hash": "0f7a23933586d1ee4f89db5e90fc4fcc",
"timestamp": "",
"source": "github",
"line_count": 734,
"max_line_length": 125,
"avg_line_length": 37.98228882833787,
"alnum_prop": 0.6087736288963018,
"repo_name": "jeffmacinnes/pyneal",
"id": "a70ba415577f025bc8e86647e96375747a6f5248",
"size": "27879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyneal_scanner/utils/Philips_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7821"
},
{
"name": "HTML",
"bytes": "2219"
},
{
"name": "JavaScript",
"bytes": "23040"
},
{
"name": "MATLAB",
"bytes": "286017"
},
{
"name": "Python",
"bytes": "423023"
},
{
"name": "Shell",
"bytes": "1522"
}
],
"symlink_target": ""
} |
from ncclient.operations.third_party.juniper.rpc import *
import json
import unittest
from mock import patch
from ncclient import manager
import ncclient.manager
import ncclient.transport
from ncclient.xml_ import *
from ncclient.operations import RaiseMode
from xml.etree import ElementTree
class TestRPC(unittest.TestCase):
    """Unit tests for the Juniper-specific third-party RPC operations.

    Every test patches out the SSH transport and ``RPC._request`` so the
    element tree handed to the device can be inspected without any network
    traffic.
    """

    def _make(self, op_cls):
        # build an operation instance backed by a (mocked) junos session
        device_handler = manager.make_device_handler({'name': 'junos'})
        session = ncclient.transport.SSHSession(device_handler)
        return op_cls(session, device_handler, raise_mode=RaiseMode.ALL)

    def _sent(self, mock_request):
        # the element passed to RPC._request on its first invocation
        return mock_request.call_args_list[0][0][0]

    def _check_tag_attrib(self, mock_request, expected):
        # compare tag and attributes of the sent element against `expected`
        sent = self._sent(mock_request)
        self.assertEqual(sent.tag, expected.tag)
        self.assertEqual(sent.attrib, expected.attrib)

    def _check_serialized(self, mock_request, expected):
        # compare full serialized XML of the sent element against `expected`
        self.assertEqual(
            ElementTree.tostring(self._sent(mock_request)),
            ElementTree.tostring(expected))

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_command(self, mock_request, mock_session):
        obj = self._make(Command)
        cmd = 'show system users'
        fmt = 'text'
        obj.request(command=cmd, format=fmt)
        expected = new_ele('command', {'format': fmt})
        expected.text = cmd
        sent = self._sent(mock_request)
        self.assertEqual(sent.tag, expected.tag)
        self.assertEqual(sent.text, expected.text)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_getconf(self, mock_request, mock_session):
        obj = self._make(GetConfiguration)
        root_filter = new_ele('filter')
        config_filter = sub_ele(root_filter, 'configuration')
        system_filter = sub_ele(config_filter, 'system')
        obj.request(format='xml', filter=system_filter)
        expected = new_ele('get-configuration', {'format': 'xml'})
        expected.append(system_filter)
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_loadconf_xml(self, mock_request, mock_session):
        obj = self._make(LoadConfiguration)
        root_config = new_ele('configuration')
        system_config = sub_ele(root_config, 'system')
        location_config = sub_ele(system_config, 'location')
        sub_ele(location_config, 'floor').text = "7"
        obj.request(format='xml', config=root_config)
        expected = new_ele('load-configuration', {'format': 'xml', 'action': 'merge'})
        expected.append(root_config)
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_loadconf_json(self, mock_request, mock_session):
        obj = self._make(LoadConfiguration)
        location = '{ "configuration": { "system": { "location": { "floor": "7" }}}}'
        # round-trip through the json module, mirroring real usage
        config = json.dumps(json.loads(location))
        obj.request(format='json', action='merge', config=config)
        expected = new_ele('load-configuration', {'format': 'json', 'action': 'merge'})
        sub_ele(expected, 'configuration-json').text = config
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_loadconf_set(self, mock_request, mock_session):
        obj = self._make(LoadConfiguration)
        config = 'set system location floor 7'
        obj.request(format='text', action='set', config=config)
        expected = new_ele('load-configuration', {'format': 'text', 'action': 'set'})
        sub_ele(expected, 'configuration-set').text = config
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_loadconf_text(self, mock_request, mock_session):
        obj = self._make(LoadConfiguration)
        config = 'system { location floor 7; }'
        obj.request(format='text', action='merge', config=config)
        expected = new_ele('load-configuration', {'format': 'text', 'action': 'merge'})
        sub_ele(expected, 'configuration-text').text = config
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_loadconf_list(self, mock_request, mock_session):
        obj = self._make(LoadConfiguration)
        config = ['set system location floor 7', 'set system location rack 3']
        obj.request(format='text', action='set', config=config)
        expected = new_ele('load-configuration', {'format': 'text', 'action': 'set'})
        sub_ele(expected, 'configuration-set').text = '\n'.join(config)
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_compare_conf(self, mock_request, mock_session):
        obj = self._make(CompareConfiguration)
        obj.request(rollback=2)
        expected = new_ele(
            'get-configuration', {'compare': 'rollback', 'rollback': str(2)})
        self._check_tag_attrib(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_execute_rpc(self, mock_request, mock_session):
        obj = self._make(ExecuteRpc)
        rpc = new_ele('get-software-information')
        obj.request(rpc)
        mock_request.assert_called_once_with(rpc)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_execute_rpc_str(self, mock_request, mock_session):
        obj = self._make(ExecuteRpc)
        rpc = 'get-software-information'
        obj.request(rpc)
        self.assertTrue(isinstance(rpc, str))

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_reboot(self, mock_request, mock_session):
        obj = self._make(Reboot)
        obj.request()
        self.assertEqual(self._sent(mock_request).tag, new_ele('request-reboot').tag)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    def test_halt(self, mock_request, mock_session):
        obj = self._make(Halt)
        obj.request()
        self.assertEqual(self._sent(mock_request).tag, new_ele('request-halt').tag)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._assert')
    def test_commit_confirmed(self, mock_assert, mock_request, mock_session):
        obj = self._make(Commit)
        obj.request(confirmed=True, comment="message", timeout="50")
        expected = new_ele("commit")
        sub_ele(expected, "confirmed")
        sub_ele(expected, "confirm-timeout").text = "50"
        sub_ele(expected, "log").text = "message"
        self._check_serialized(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._assert')
    def test_commit(self, mock_assert, mock_request, mock_session):
        obj = self._make(Commit)
        obj.request()
        self._check_serialized(mock_request, new_ele("commit"))

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._assert')
    def test_commit_at_time(self, mock_assert, mock_request, mock_session):
        obj = self._make(Commit)
        obj.request(at_time="1111-11-11 00:00:00", synchronize=True)
        expected = new_ele("commit")
        sub_ele(expected, "at-time").text = "1111-11-11 00:00:00"
        sub_ele(expected, "synchronize")
        self._check_serialized(mock_request, expected)

    @patch('ncclient.transport.SSHSession')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._request')
    @patch('ncclient.operations.third_party.juniper.rpc.RPC._assert')
    def test_commit_confirmed_at_time(
            self, mock_assert, mock_request, mock_session):
        obj = self._make(Commit)
        # a confirmed commit scheduled for a future time is invalid
        self.assertRaises(NCClientError,
                          obj.request, at_time="1111-11-11 00:00:00", synchronize=True,
                          confirmed=True)
| {
"content_hash": "a4f884b254e029377593fcdfb550f9b4",
"timestamp": "",
"source": "github",
"line_count": 258,
"max_line_length": 85,
"avg_line_length": 48.7906976744186,
"alnum_prop": 0.6545916746107404,
"repo_name": "GIC-de/ncclient",
"id": "03e6acede086efbf15e2881f0d207e608708437b",
"size": "12588",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit/operations/third_party/juniper/test_rpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "197205"
}
],
"symlink_target": ""
} |
"""
Multi-class / multi-label utility function
==========================================
"""
from collections.abc import Sequence
from itertools import chain
import warnings
from scipy.sparse import issparse
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
import numpy as np
from .validation import check_array, _assert_all_finite
def _unique_multiclass(y):
if hasattr(y, "__array__"):
return np.unique(np.asarray(y))
else:
return set(y)
def _unique_indicator(y):
    # For a label-indicator matrix the "labels" are simply the column
    # indices 0..n_columns-1.
    n_columns = check_array(
        y, input_name="y", accept_sparse=["csr", "csc", "coo"]
    ).shape[1]
    return np.arange(n_columns)
# Dispatch table mapping a target type (as returned by type_of_target) to the
# helper that extracts its unique labels; used by unique_labels below.
_FN_UNIQUE_LABELS = {
    "binary": _unique_multiclass,
    "multiclass": _unique_multiclass,
    "multilabel-indicator": _unique_indicator,
}
def unique_labels(*ys):
    """Extract an ordered array of unique labels.

    We don't allow:
        - mix of multilabel and multiclass (single label) targets
        - mix of label indicator matrix and anything else,
          because there are no explicit labels)
        - mix of label indicator matrices of different sizes
        - mix of string and integer labels

    At the moment, we also don't allow "multiclass-multioutput" input type.

    Parameters
    ----------
    *ys : array-likes

    Returns
    -------
    out : ndarray of shape (n_unique_labels,)
        An ordered array of unique labels.

    Examples
    --------
    >>> from sklearn.utils.multiclass import unique_labels
    >>> unique_labels([3, 5, 5, 5, 7, 7])
    array([3, 5, 7])
    >>> unique_labels([1, 2, 3, 4], [2, 2, 3, 4])
    array([1, 2, 3, 4])
    >>> unique_labels([1, 2, 10], [5, 11])
    array([ 1,  2,  5, 10, 11])
    """
    if not ys:
        raise ValueError("No argument has been passed.")

    # Determine one consistent label format across all targets; a mix of
    # binary and multiclass targets collapses to multiclass.
    ys_types = {type_of_target(x) for x in ys}
    if ys_types == {"binary", "multiclass"}:
        ys_types = {"multiclass"}
    if len(ys_types) > 1:
        raise ValueError("Mix type of y not allowed, got types %s" % ys_types)
    label_type = ys_types.pop()

    # Indicator matrices must all have the same number of columns.
    if label_type == "multilabel-indicator":
        n_columns = {
            check_array(y, accept_sparse=["csr", "csc", "coo"]).shape[1] for y in ys
        }
        if len(n_columns) > 1:
            raise ValueError(
                "Multi-label binary indicator input with different numbers of labels"
            )

    # Get the unique set of labels via the per-type extraction helper.
    _unique_labels = _FN_UNIQUE_LABELS.get(label_type, None)
    if not _unique_labels:
        raise ValueError("Unknown label type: %s" % repr(ys))
    ys_labels = set(chain.from_iterable(_unique_labels(y) for y in ys))

    # Disallow mixing string labels with numeric ones.
    if len({isinstance(label, str) for label in ys_labels}) > 1:
        raise ValueError("Mix of label input types (string and number)")

    return np.array(sorted(ys_labels))
def _is_integral_float(y):
return y.dtype.kind == "f" and np.all(y.astype(int) == y)
def is_multilabel(y):
    """Check if ``y`` is in a multilabel format.

    Parameters
    ----------
    y : ndarray of shape (n_samples,)
        Target values.

    Returns
    -------
    out : bool
        Return ``True``, if ``y`` is in a multilabel format, else ```False``.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.utils.multiclass import is_multilabel
    >>> is_multilabel([0, 1, 0, 1])
    False
    >>> is_multilabel([[1], [0, 2], []])
    False
    >>> is_multilabel(np.array([[1, 0], [0, 0]]))
    True
    >>> is_multilabel(np.array([[1], [0], [0]]))
    False
    >>> is_multilabel(np.array([[1, 0, 0]]))
    True
    """
    if hasattr(y, "__array__") or isinstance(y, Sequence):
        # COMPAT: numpy >= 1.25 moved VisibleDeprecationWarning into
        # np.exceptions and numpy >= 2.0 removed the top-level alias, so the
        # previous direct attribute access raised AttributeError there.
        try:
            visible_deprecation_warning = np.VisibleDeprecationWarning
        except AttributeError:
            visible_deprecation_warning = np.exceptions.VisibleDeprecationWarning
        # Ragged input: older numpy emits VisibleDeprecationWarning when it
        # infers dtype=object (NEP 34); numpy >= 2.0 raises ValueError
        # instead, so catch both and retry with an explicit object dtype.
        with warnings.catch_warnings():
            warnings.simplefilter("error", visible_deprecation_warning)
            try:
                y = np.asarray(y)
            except (visible_deprecation_warning, ValueError):
                # dtype=object should be provided explicitly for ragged arrays,
                # see NEP 34
                y = np.array(y, dtype=object)

    # multilabel data must be a 2d array with more than one column
    if not (hasattr(y, "shape") and y.ndim == 2 and y.shape[1] > 1):
        return False

    if issparse(y):
        if isinstance(y, (dok_matrix, lil_matrix)):
            y = y.tocsr()
        # either all-zero, or a single stored value of integral type
        return (
            len(y.data) == 0
            or np.unique(y.data).size == 1
            and (
                y.dtype.kind in "biu"
                or _is_integral_float(np.unique(y.data))  # bool, int, uint
            )
        )
    else:
        labels = np.unique(y)
        # at most two distinct values, all of integral type
        return len(labels) < 3 and (
            y.dtype.kind in "biu" or _is_integral_float(labels)  # bool, int, uint
        )
def check_classification_targets(y):
    """Ensure that target y is of a non-regression type.

    Only the following target types (as defined in type_of_target) are
    allowed:
        'binary', 'multiclass', 'multiclass-multioutput',
        'multilabel-indicator', 'multilabel-sequences'

    Parameters
    ----------
    y : array-like
    """
    allowed_types = (
        "binary",
        "multiclass",
        "multiclass-multioutput",
        "multilabel-indicator",
        "multilabel-sequences",
    )
    y_type = type_of_target(y, input_name="y")
    if y_type not in allowed_types:
        raise ValueError("Unknown label type: %r" % y_type)
def type_of_target(y, input_name=""):
    """Determine the type of data indicated by the target.

    Note that this type is the most specific type that can be inferred.
    For example:

    * ``binary`` is more specific but compatible with ``multiclass``.
    * ``multiclass`` of integers is more specific but compatible with
      ``continuous``.
    * ``multilabel-indicator`` is more specific but compatible with
      ``multiclass-multioutput``.

    Parameters
    ----------
    y : array-like
    input_name : str, default=""
        The data name used to construct the error message.

        .. versionadded:: 1.1.0

    Returns
    -------
    target_type : str
        One of:

        * 'continuous': `y` is an array-like of floats that are not all
          integers, and is 1d or a column vector.
        * 'continuous-multioutput': `y` is a 2d array of floats that are
          not all integers, and both dimensions are of size > 1.
        * 'binary': `y` contains <= 2 discrete values and is 1d or a column
          vector.
        * 'multiclass': `y` contains more than two discrete values, is not a
          sequence of sequences, and is 1d or a column vector.
        * 'multiclass-multioutput': `y` is a 2d array that contains more
          than two discrete values, is not a sequence of sequences, and both
          dimensions are of size > 1.
        * 'multilabel-indicator': `y` is a label indicator matrix, an array
          of two dimensions with at least two columns, and at most 2 unique
          values.
        * 'unknown': `y` is array-like but none of the above, such as a 3d
          array, sequence of sequences, or an array of non-sequence objects.

    Examples
    --------
    >>> from sklearn.utils.multiclass import type_of_target
    >>> import numpy as np
    >>> type_of_target([0.1, 0.6])
    'continuous'
    >>> type_of_target([1, -1, -1, 1])
    'binary'
    >>> type_of_target(['a', 'b', 'a'])
    'binary'
    >>> type_of_target([1.0, 2.0])
    'binary'
    >>> type_of_target([1, 0, 2])
    'multiclass'
    >>> type_of_target([1.0, 0.0, 3.0])
    'multiclass'
    >>> type_of_target(['a', 'b', 'c'])
    'multiclass'
    >>> type_of_target(np.array([[1, 2], [3, 1]]))
    'multiclass-multioutput'
    >>> type_of_target([[1, 2]])
    'multilabel-indicator'
    >>> type_of_target(np.array([[1.5, 2.0], [3.0, 1.6]]))
    'continuous-multioutput'
    >>> type_of_target(np.array([[0, 1], [1, 1]]))
    'multilabel-indicator'
    """
    # Accept sequences, sparse matrices and anything array-convertible;
    # reject plain strings even though they are Sequences.
    valid = (
        isinstance(y, Sequence) or issparse(y) or hasattr(y, "__array__")
    ) and not isinstance(y, str)
    if not valid:
        raise ValueError(
            "Expected array-like (array or non-string sequence), got %r" % y
        )
    sparse_pandas = y.__class__.__name__ in ["SparseSeries", "SparseArray"]
    if sparse_pandas:
        raise ValueError("y cannot be class 'SparseSeries' or 'SparseArray'")
    # Checked first because an indicator matrix would otherwise fall through
    # to the multioutput branches below.
    if is_multilabel(y):
        return "multilabel-indicator"
    # DeprecationWarning will be replaced by ValueError, see NEP 34
    # https://numpy.org/neps/nep-0034-infer-dtype-is-object.html
    with warnings.catch_warnings():
        warnings.simplefilter("error", np.VisibleDeprecationWarning)
        try:
            y = np.asarray(y)
        except np.VisibleDeprecationWarning:
            # dtype=object should be provided explicitly for ragged arrays,
            # see NEP 34
            y = np.asarray(y, dtype=object)
    # The old sequence of sequences format
    try:
        if (
            not hasattr(y[0], "__array__")
            and isinstance(y[0], Sequence)
            and not isinstance(y[0], str)
        ):
            raise ValueError(
                "You appear to be using a legacy multi-label data"
                " representation. Sequence of sequences are no"
                " longer supported; use a binary array or sparse"
                " matrix instead - the MultiLabelBinarizer"
                " transformer can convert to this format."
            )
    except IndexError:
        # Empty input: fall through to the shape-based checks below.
        pass
    # Invalid inputs
    if y.ndim > 2 or (y.dtype == object and len(y) and not isinstance(y.flat[0], str)):
        return "unknown"  # [[[1, 2]]] or [obj_1] and not ["label_1"]
    if y.ndim == 2 and y.shape[1] == 0:
        return "unknown"  # [[]]
    if y.ndim == 2 and y.shape[1] > 1:
        suffix = "-multioutput"  # [[1, 2], [1, 2]]
    else:
        suffix = ""  # [1, 2, 3] or [[1], [2], [3]]
    # check float and contains non-integer float values
    if y.dtype.kind == "f" and np.any(y != y.astype(int)):
        # [.1, .2, 3] or [[.1, .2, 3]] or [[1., .2]] and not [1., 2., 3.]
        _assert_all_finite(y, input_name=input_name)
        return "continuous" + suffix
    if (len(np.unique(y)) > 2) or (y.ndim >= 2 and len(y[0]) > 1):
        return "multiclass" + suffix  # [1, 2, 3] or [[1., 2., 3]] or [[1, 2]]
    else:
        return "binary"  # [1, 2] or [["a"], ["b"]]
def _check_partial_fit_first_call(clf, classes=None):
    """Private helper function for factorizing common classes param logic.

    Estimators that implement the ``partial_fit`` API need to be provided
    with the list of possible classes at the first call to partial_fit.
    Subsequent calls to partial_fit should check that ``classes`` is still
    consistent with a previous value of ``clf.classes_`` when provided.

    Returns True if it detects that this was the first call to
    ``partial_fit`` on ``clf``; in that case the ``classes_`` attribute is
    also set on ``clf``.
    """
    previous_classes = getattr(clf, "classes_", None)
    if previous_classes is None and classes is None:
        raise ValueError("classes must be passed on the first call to partial_fit.")
    if classes is not None:
        if previous_classes is None:
            # This is the first call to partial_fit: remember the label set.
            clf.classes_ = unique_labels(classes)
            return True
        if not np.array_equal(previous_classes, unique_labels(classes)):
            raise ValueError(
                "`classes=%r` is not the same as on last call "
                "to partial_fit, was: %r" % (classes, clf.classes_)
            )
    # classes is None and clf.classes_ has already previously been set:
    # nothing to do.
    return False
def class_distribution(y, sample_weight=None):
    """Compute class priors from multioutput-multiclass target data.

    Parameters
    ----------
    y : {array-like, sparse matrix} of size (n_samples, n_outputs)
        The labels for each example.
    sample_weight : array-like of shape (n_samples,), default=None
        Sample weights.

    Returns
    -------
    classes : list of size n_outputs of ndarray of size (n_classes,)
        List of classes for each column.
    n_classes : list of int of size n_outputs
        Number of classes in each column.
    class_prior : list of size n_outputs of ndarray of size (n_classes,)
        Class distribution of each column.
    """
    classes = []
    n_classes = []
    class_prior = []
    n_samples, n_outputs = y.shape
    if sample_weight is not None:
        sample_weight = np.asarray(sample_weight)
    if issparse(y):
        # CSC gives cheap per-column access via ``indptr`` slices; only
        # explicitly-stored (non-zero) entries appear in ``y.data``, so
        # implicit zeros must be accounted for separately below.
        y = y.tocsc()
        y_nnz = np.diff(y.indptr)
        for k in range(n_outputs):
            # Row indices of the stored entries in column k.
            col_nonzero = y.indices[y.indptr[k] : y.indptr[k + 1]]
            # separate sample weights for zero and non-zero elements
            if sample_weight is not None:
                nz_samp_weight = sample_weight[col_nonzero]
                zeros_samp_weight_sum = np.sum(sample_weight) - np.sum(nz_samp_weight)
            else:
                # Unweighted: each implicit zero counts 1.
                nz_samp_weight = None
                zeros_samp_weight_sum = y.shape[0] - y_nnz[k]
            classes_k, y_k = np.unique(
                y.data[y.indptr[k] : y.indptr[k + 1]], return_inverse=True
            )
            class_prior_k = np.bincount(y_k, weights=nz_samp_weight)
            # An explicit zero was found, combine its weight with the weight
            # of the implicit zeros
            if 0 in classes_k:
                class_prior_k[classes_k == 0] += zeros_samp_weight_sum
            # If there is an implicit zero and it is not in classes and
            # class_prior, make an entry for it
            if 0 not in classes_k and y_nnz[k] < y.shape[0]:
                classes_k = np.insert(classes_k, 0, 0)
                class_prior_k = np.insert(class_prior_k, 0, zeros_samp_weight_sum)
            classes.append(classes_k)
            n_classes.append(classes_k.shape[0])
            # Normalize counts/weights to a probability distribution.
            class_prior.append(class_prior_k / class_prior_k.sum())
    else:
        for k in range(n_outputs):
            classes_k, y_k = np.unique(y[:, k], return_inverse=True)
            classes.append(classes_k)
            n_classes.append(classes_k.shape[0])
            class_prior_k = np.bincount(y_k, weights=sample_weight)
            class_prior.append(class_prior_k / class_prior_k.sum())
    return (classes, n_classes, class_prior)
def _ovr_decision_function(predictions, confidences, n_classes):
    """Compute a continuous, tie-breaking OvR decision function from OvO.

    It is important to include a continuous value, not only votes,
    to make computing AUC or calibration meaningful.

    Parameters
    ----------
    predictions : array-like of shape (n_samples, n_classifiers)
        Predicted classes for each binary classifier.
    confidences : array-like of shape (n_samples, n_classifiers)
        Decision functions or predicted probabilities for positive class
        for each binary classifier.
    n_classes : int
        Number of classes. n_classifiers must be
        ``n_classes * (n_classes - 1 ) / 2``.
    """
    n_samples = predictions.shape[0]
    votes = np.zeros((n_samples, n_classes))
    sum_of_confidences = np.zeros((n_samples, n_classes))
    # Walk the OvO classifiers in (first, second) order with first < second;
    # ``pair`` indexes the classifier trained on that pair of classes.
    pair = 0
    for first in range(n_classes):
        for second in range(first + 1, n_classes):
            conf = confidences[:, pair]
            sum_of_confidences[:, first] -= conf
            sum_of_confidences[:, second] += conf
            outcome = predictions[:, pair]
            votes[outcome == 0, first] += 1
            votes[outcome == 1, second] += 1
            pair += 1
    # Monotonically map the summed confidences into (-1/3, 1/3) and add them
    # to the votes.  The transformation is f: x -> x / (3 * (|x| + 1)); using
    # 1/3 rather than 1/2 guarantees the perturbation can never reach the
    # limits, so confidences only break ties and never flip a decision made
    # on a difference of one full vote.
    tie_breaker = sum_of_confidences / (3 * (np.abs(sum_of_confidences) + 1))
    return votes + tie_breaker
| {
"content_hash": "ecb43e6b6073966c56dd62f25ca2d054",
"timestamp": "",
"source": "github",
"line_count": 489,
"max_line_length": 88,
"avg_line_length": 33.40286298568507,
"alnum_prop": 0.5817313579037591,
"repo_name": "sergeyf/scikit-learn",
"id": "4e5981042f2777c50026bc96e91999fa1bc0729e",
"size": "16412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sklearn/utils/multiclass.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "42335"
},
{
"name": "C++",
"bytes": "147316"
},
{
"name": "Cython",
"bytes": "718114"
},
{
"name": "Makefile",
"bytes": "1711"
},
{
"name": "Python",
"bytes": "9906683"
},
{
"name": "Shell",
"bytes": "49565"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the rst2pdf "issue 169" regression-test
# project; exercises the rst2pdf.pdfbuilder extension with minimal settings.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['rst2pdf.pdfbuilder']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Issue 169'
copyright = u'2009, RA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'test'
# The full version, including alpha/beta/rc tags.
release = 'test'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Issue169doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'Issue169.tex', u'Issue 169 Documentation',
   u'RA', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# -- Options for PDF output --------------------------------------------------
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author).
pdf_documents = [
    ('index', u'MyProject', u'My Project', u'Author Name'),
]
# A comma-separated list of custom stylesheets. Example:
pdf_stylesheets = ['borland']
# Create a compressed PDF
# Use True/False or 1/0
# Example: compressed=True
#pdf_compressed=False
# A colon-separated list of folders to search for fonts. Example:
# pdf_font_path=['/usr/share/fonts', '/usr/share/texmf-dist/fonts/']
# Language to be used for hyphenation support
pdf_language="en_US"
# If false, no index is generated.
pdf_use_index = False
# If false, no modindex is generated.
pdf_use_modindex = False
# If false, no coverpage is generated.
pdf_use_coverpage = False
pdf_verbosity=0
pdf_invariant = True
| {
"content_hash": "78f76ca72583cd013c42c028a46bf4db",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 80,
"avg_line_length": 31.49767441860465,
"alnum_prop": 0.7058476077968104,
"repo_name": "openpolis/rst2pdf-patched-docutils-0.8",
"id": "53f01159efc80f8b19725f9245af842d92e082f7",
"size": "7192",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "rst2pdf/tests/input/sphinx-issue169/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1113952"
},
{
"name": "Shell",
"bytes": "37941"
}
],
"symlink_target": ""
} |
from src.utility import HMCClientLogger,HTTPClient,HmcHeaders
from src.generated_src import UOM
import xml.etree.ElementTree as etree
# Module-level logger for this module.
log = HMCClientLogger.HMCClientLogger(__name__)
# REST resource root and content type for SRIOV Ethernet logical port requests.
ROOT = "LogicalPartition"
CONTENT_TYPE = "application/vnd.ibm.powervm.uom+xml;type=SRIOVEthernetLogicalPort"
SCHEMA_VER = "V1_3_0"
# NOTE(review): hard-coded adapter/physical-port ids used for every created
# port — presumably sample defaults; confirm they match the target system.
ADAPTER_ID = 1
PHYSICALPORT_ID = 2
class CreateSRIOVLogicalPort(object):
    """Builds and submits the HMC REST request that creates an SRIOV
    Ethernet logical port on a logical partition."""
    def __init__(self):
        """
        Initializes the REST resource root and content-type for this request.
        """
        self.root = ROOT
        self.content_type = CONTENT_TYPE
    def create_sriov_logicalport(self, ip, logicalpartition_uuid, x_api_session):
        """
        Creates an SRIOV Logical Port for a given LogicalPartition.

        Args:
            ip : ip address of hmc
            logicalpartition_uuid : UUID of the partition the LogicalPort is
                to be created on
            x_api_session : session to be used

        On success prints a confirmation; on failure logs the <Message>
        element extracted from the error response.
        """
        log.log_debug("starting SRIOV LogicalPort creation")
        header_object = HmcHeaders.HmcHeaders("web")
        # Namespace used to locate <Message> elements in an error response.
        ns = header_object.ns["xmlns"]
        # Build the request payload from the generated UOM schema objects.
        sriov_logical_port_object = UOM.SRIOVEthernetLogicalPort()
        sriov_logical_port_object.AdapterID = ADAPTER_ID
        sriov_logical_port_object.PhysicalPortID = PHYSICALPORT_ID
        sriov_logical_port_object.schemaVersion = SCHEMA_VER
        xml = sriov_logical_port_object.toxml()
        http_object = HTTPClient.HTTPClient("uom", ip, self.root, self.content_type, x_api_session)
        http_object.HTTPPut(xml, append = logicalpartition_uuid+"/SRIOVEthernetLogicalPort")
        log.log_debug("response of SRIOV logical port creation %s"%(http_object.response))
        # response_b is the success flag set by HTTPPut — TODO confirm.
        if http_object.response_b:
            print("SRIOV Logical Port created successfully")
        else :
            root = etree.fromstring(http_object.response.content)
            error = root.findall(".//{%s}Message"%(ns))[0]
            log.log_error(error.text)
| {
"content_hash": "8f3f8f0c1a25ba12b8e11f68e64c83de",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 99,
"avg_line_length": 42.266666666666666,
"alnum_prop": 0.6703470031545742,
"repo_name": "PowerHMC/HmcRestClient",
"id": "d046d16d2d352664957513ef5c5f0e1306e1c8f8",
"size": "2536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/logical_partition/sriov_logical_port/CreateSRIOVLogicalPort.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13780311"
}
],
"symlink_target": ""
} |
import re
from pip import __version__
from pip.commands.show import search_packages_info
def test_show(script):
    """
    End to end check of the plain ``pip show`` output format.
    """
    result = script.pip('show', 'pip')
    out = result.stdout.split('\n')
    assert len(out) == 6
    assert out[0] == '---', out[0]
    assert out[1] == 'Name: pip', out[1]
    assert out[2] == 'Version: %s' % __version__, out[2]
    assert out[3].startswith('Location: '), out[3]
    assert out[4] == 'Requires: '
def test_show_with_files_not_found(script):
    """
    Test for show command with installed files listing enabled and
    installed-files.txt not found.
    """
    result = script.pip('show', '-f', 'pip')
    lines = result.stdout.split('\n')
    assert len(lines) == 8
    assert lines[0] == '---', lines[0]
    assert lines[1] == 'Name: pip', lines[1]
    assert lines[2] == 'Version: %s' % __version__, lines[2]
    assert lines[3].startswith('Location: '), lines[3]
    assert lines[4] == 'Requires: '
    # Fixed: the failure messages previously echoed lines[4]/lines[5]
    # instead of the line actually being asserted on.
    assert lines[5] == 'Files:', lines[5]
    assert lines[6] == 'Cannot locate installed-files.txt', lines[6]
def test_show_with_all_files(script):
    """
    Listing all installed files via ``pip show --files``.
    """
    # The install is needed for its side effect only; its result is unused.
    script.pip('install', 'initools==0.2')
    result = script.pip('show', '--files', 'initools')
    assert re.search(r"Files:\n( .+\n)+", result.stdout)
def test_missing_argument(script):
    """
    Running the show command without arguments reports an error.
    """
    outcome = script.pip('show')
    assert 'ERROR: Please provide a package name or names.' in outcome.stdout
def test_find_package_not_found():
    """
    Querying info about a nonexistent package yields no results.
    """
    infos = search_packages_info(['abcd3'])
    assert not list(infos)
def test_search_any_case():
    """
    Package lookup should be case-insensitive.
    """
    matches = list(search_packages_info(['PIP']))
    assert len(matches) == 1
    assert matches[0]['name'] == 'pip'
def test_more_than_one_package():
    """
    Info can be requested for several packages at once.
    """
    infos = list(search_packages_info(['Pip', 'pytest', 'Virtualenv']))
    assert len(infos) == 3
| {
"content_hash": "eaa9a93ff3f0357a3d8cd01229bd4674",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 76,
"avg_line_length": 27.696202531645568,
"alnum_prop": 0.606489945155393,
"repo_name": "minrk/pip",
"id": "cf68e0404faa306bbd3e0a9166ea1109b89d249d",
"size": "2188",
"binary": false,
"copies": "6",
"ref": "refs/heads/develop",
"path": "tests/functional/test_show.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1176781"
},
{
"name": "Shell",
"bytes": "4534"
}
],
"symlink_target": ""
} |
import numpy as np
import math
from libcore import Img
from libcore import RestructuringMethod
def main():
    """Load the sample image and apply a rotated affine transform to it."""
    image = Img.load_image('../gletscher.jpg')
    # Rotate by 45 degrees.
    rotation = Img.get_2d_rotation_matrix(np.radians(45))
    shift = np.array([50, 600])
    RestructuringMethod.affine_transform(image, rotation, shift)


if __name__ == "__main__": main()
"content_hash": "d0afd4b2af16f49eccb97b46a550d060",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 64,
"avg_line_length": 21.043478260869566,
"alnum_prop": 0.5764462809917356,
"repo_name": "StefanoD/ComputerVision",
"id": "8a61a9f2810498ff3dcfe2f21d96662145ee8a7b",
"size": "484",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Uebung/Uebung1/src/exercise_a.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Matlab",
"bytes": "5661"
},
{
"name": "Python",
"bytes": "94858"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
from vodkamartiniqa.views.helpers import get_answers
from vodkamartiniqa.views.helpers import get_questions
import json
#from django.core.serializers.json import DjangoJSONEncoder
# TODO get answers via ajax with start and end
def get_answers_ajax(request, question_id, start=0, end=8):
    """Return a slice of a question's answers as a JSON list.

    Used for ajax pagination: ``start``/``end`` bound the slice of answers
    fetched via ``get_answers``.  Each answer is flattened to a plain dict
    so it can be serialized with the stdlib ``json`` module.
    """
    answers = []
    # ``answer`` rather than ``object``: don't shadow the builtin.
    for answer in get_answers(question_id, start, end):
        answers.append({
            'id': answer.id,
            'answer': answer.answer,
            'user': answer.user.username,
            'user_picture': answer.user.drupaluser.picture,
            'votes_up': answer.votes_up,
            'votes_down': answer.votes_down,
            'posted_by_expert': answer.posted_by_expert,
            # e.g. "January  1, 2012" (%e pads the day with a space)
            'submit_date': answer.submit_date.strftime("%B %e, %Y"),
        })
    return HttpResponse(json.dumps(answers), mimetype='application/json')
    # To pass datetime objects straight to json, use DjangoJSONEncoder:
    # return HttpResponse(json.dumps(answers, cls=DjangoJSONEncoder), mimetype='application/json')
| {
"content_hash": "9801b405749037740c3f3ff722c0bddb",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 97,
"avg_line_length": 48,
"alnum_prop": 0.5969551282051282,
"repo_name": "alexisbellido/django-vodkamartini-qa",
"id": "a26bfe2236101ab242d44880d2f367ae452e9f10",
"size": "1248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vodkamartiniqa/views/answers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "146082"
}
],
"symlink_target": ""
} |
# Django settings used only by the test suite; several values below are
# tuned for test speed (fast password hashers, in-memory file storage,
# dummy thumbnails) rather than production use.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'test_db.sqlite'
    }
}
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.contrib.messages.context_processors.messages',
)
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'bootstrap3',
    'sorl.thumbnail',
    'millionmilestogether',
)
ROOT_URLCONF = 'tests.urls'
# Using these instead of the default setting speeds test runs up by
# about 75%.
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
    'django.contrib.auth.hashers.SHA1PasswordHasher',
)
STATIC_URL = '/fakestatictrees/'
MEDIA_ROOT = 'tests/testmedia/'
TIME_ZONE = 'Europe/London'
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Not a real secret: this settings module is only used for local test runs.
SECRET_KEY = 'thisbagismadefromrecycledmaterial'
LOGIN_URL = '/auth/login/'
# Using these instead of the default setting speeds test runs up by
# about 10%.
DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage'
THUMBNAIL_DEBUG = False
THUMBNAIL_DUMMY = True
TEMPLATE_DEBUG = True
AUTH_USER_MODEL = 'millionmilestogether.RegisteredUser'
| {
"content_hash": "2e7c471d475232b909ee9101a2077193",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 67,
"avg_line_length": 27.253521126760564,
"alnum_prop": 0.7250645994832041,
"repo_name": "dominicrodger/millionmilestogether",
"id": "ae91c48b80461f6d1aefb2d9b2a7f69282a947dc",
"size": "1935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5430"
},
{
"name": "JavaScript",
"bytes": "5691"
},
{
"name": "Makefile",
"bytes": "8249"
},
{
"name": "Python",
"bytes": "194236"
},
{
"name": "Shell",
"bytes": "6467"
}
],
"symlink_target": ""
} |
"""Redirect middleware for Django CMS."""
from urlparse import urlparse
from cms_redirects.models import CMSRedirect
from django import http
from django.conf import settings
class RedirectMiddleware(object):
    """Middleware that answers 404s with stored CMSRedirect responses."""
    def get_possible_paths(self, parsed_path):
        """Return the candidate old paths to look up for a request path."""
        path = parsed_path.path
        candidates = [path]
        # Redirects are sometimes defined without the trailing slash even
        # when settings.APPEND_SLASH is True, so also try that form.
        if settings.APPEND_SLASH and path.endswith('/'):
            candidates.append(path[:-1])
        return candidates
    def get_query(self, parsed_path):
        """Return the request's query string, or '' when there is none."""
        return parsed_path.query or ''
    def get_cms_redirect(self, possible_paths):
        """Return the latest redirect matching any candidate path, or None."""
        try:
            return CMSRedirect.objects.filter(
                site__id__exact=settings.SITE_ID,
                old_path__in=possible_paths
            ).latest('pk')
        except CMSRedirect.DoesNotExist:
            return None
    def get_cms_redirect_response_class(self, redirect):
        """Return the redirect response class matching the stored code."""
        if int(redirect.response_code) == 302:
            return http.HttpResponseRedirect
        return http.HttpResponsePermanentRedirect
    def cms_redirect(self, redirect, query):
        """Build the HTTP response for a matched redirect."""
        # A redirect with neither a page nor a new path means "gone".
        if not redirect.page and not redirect.new_path:
            return http.HttpResponseGone()
        response_class = self.get_cms_redirect_response_class(redirect)
        if redirect.page:
            suffix = '?' + query if query else query
            redirect_to = '%s%s' % (redirect.page.get_absolute_url(), suffix)
        else:
            # Preserve the query string, appending with '&' when the stored
            # target already carries its own query.
            if query:
                sep = '&' if '?' in redirect.new_path else '?'
                query = sep + query
            redirect_to = '%s%s' % (redirect.new_path, query)
        return response_class(redirect_to)
    def process_exception(self, request, exception):
        """On Http404, look up a matching redirect and serve it if found."""
        if not isinstance(exception, http.Http404):
            return
        parsed_path = urlparse(request.get_full_path())
        redirect = self.get_cms_redirect(self.get_possible_paths(parsed_path))
        if redirect:
            return self.cms_redirect(redirect, self.get_query(parsed_path))
| {
"content_hash": "b495fab25f686d6023775de78f40a580",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 76,
"avg_line_length": 34.55421686746988,
"alnum_prop": 0.606694560669456,
"repo_name": "hzlf/django-cms-redirects",
"id": "5a23cc4946d8358e133fdd8332871acc2b22e3ac",
"size": "2868",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cms_redirects/middleware.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "82"
},
{
"name": "Python",
"bytes": "43015"
}
],
"symlink_target": ""
} |
import logging
import os
import platform
import requests
import socket
import fcntl
import struct
# Module-level logger shared by the address helper functions below.
logger = logging.getLogger(__name__)
def address_by_route():
    """Return the local address used to route to 8.8.8.8, via ``ip route``.

    Shells out to ``/sbin/ip`` (Linux-only) and takes the last field of the
    first output line.  NOTE(review): which field comes last varies between
    iproute2 versions — confirm this yields the ``src`` address on the
    target systems.
    """
    logger.debug("Finding address by querying local routing table")
    addr = os.popen("/sbin/ip route get 8.8.8.8 | awk '{print $NF;exit}'").read().strip()
    logger.debug("Address found: {}".format(addr))
    return addr
def address_by_query():
    """Return this host's public IP as reported by the ipify web service.

    Requires outbound network access; any ``requests`` exception propagates
    to the caller.
    """
    logger.debug("Finding address by querying remote service")
    addr = requests.get('https://api.ipify.org').text
    logger.debug("Address found: {}".format(addr))
    return addr
def address_by_hostname():
    """Return the local hostname as reported by :func:`platform.node`."""
    logger.debug("Finding address by using local hostname")
    hostname = platform.node()
    logger.debug("Address found: {}".format(hostname))
    return hostname
def address_by_interface(ifname):
    """Returns the IP address of the given interface name, e.g. 'eth0'

    Linux-only: issues the SIOCGIFADDR ioctl against a throwaway UDP
    socket's file descriptor (no packet is ever sent).

    Parameters
    ----------
    ifname : str
        Name of the interface whose address is to be returned. Required.

    Taken from this Stack Overflow answer: https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-of-eth0-in-python#24196955
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        # The interface name is truncated to 15 chars and packed into a
        # 256-byte buffer standing in for struct ifreq.
        struct.pack('256s', bytes(ifname[:15], 'utf-8'))
    )[20:24])  # bytes 20:24 of the returned buffer hold the IPv4 address
| {
"content_hash": "dfbd8d8ec8ef5593108b981dc2294d68",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 143,
"avg_line_length": 28.770833333333332,
"alnum_prop": 0.6734250543084721,
"repo_name": "swift-lang/swift-e-lab",
"id": "8caa00b1f51733bdc256a090cef49f1bceb3f0fe",
"size": "1381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "parsl/addresses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "59197"
},
{
"name": "Python",
"bytes": "104539"
},
{
"name": "Shell",
"bytes": "1283"
}
],
"symlink_target": ""
} |
"""Miscellaneous utility functions for use with Swift."""
from __future__ import print_function
import errno
import fcntl
import grp
import hmac
import json
import operator
import os
import pwd
import re
import sys
import time
import uuid
import functools
import email.parser
from hashlib import md5, sha1
from random import random, shuffle
from contextlib import contextmanager, closing
import ctypes
import ctypes.util
from optparse import OptionParser
from tempfile import mkstemp, NamedTemporaryFile
import glob
import itertools
import stat
import datetime
import eventlet
import eventlet.semaphore
from eventlet import GreenPool, sleep, Timeout, tpool, greenthread, \
greenio, event
from eventlet.green import socket, threading
import eventlet.queue
import netifaces
import codecs
utf8_decoder = codecs.getdecoder('utf-8')
utf8_encoder = codecs.getencoder('utf-8')
import six
from six.moves import cPickle as pickle
from six.moves.configparser import (ConfigParser, NoSectionError,
NoOptionError, RawConfigParser)
from six.moves import range
from six.moves.urllib.parse import ParseResult
from six.moves.urllib.parse import quote as _quote
from six.moves.urllib.parse import urlparse as stdlib_urlparse
from swift import gettext_ as _
import swift.common.exceptions
from swift.common.http import is_success, is_redirection, HTTP_NOT_FOUND, \
HTTP_PRECONDITION_FAILED, HTTP_REQUESTED_RANGE_NOT_SATISFIABLE
# Grab the *un-green-patched* stdlib queue/threading modules for code that
# must use real OS-level primitives (module renamed in py3).
if six.PY3:
    stdlib_queue = eventlet.patcher.original('queue')
else:
    stdlib_queue = eventlet.patcher.original('Queue')
stdlib_threading = eventlet.patcher.original('threading')
# logging doesn't import patched as cleanly as one would like
from logging.handlers import SysLogHandler
import logging
# Point logging's locking machinery at eventlet's green threading so log
# calls cooperate with the green scheduler instead of blocking it.
logging.thread = eventlet.green.thread
logging.threading = eventlet.green.threading
logging._lock = logging.threading.RLock()
# setup notice level logging (between INFO=20 and WARNING=30)
NOTICE = 25
logging.addLevelName(NOTICE, 'NOTICE')
SysLogHandler.priority_map['NOTICE'] = 'notice'
# These are lazily pulled from libc elsewhere
_sys_fallocate = None
_posix_fadvise = None
_libc_socket = None
_libc_bind = None
_libc_accept = None
# If set to non-zero, fallocate routines will fail based on free space
# available being at or below this amount, in bytes.
FALLOCATE_RESERVE = 0
# Used by hash_path to offer a bit more security when generating hashes for
# paths. It simply appends this value to all paths; guessing the hash a path
# will end up with would also require knowing this suffix.
HASH_PATH_SUFFIX = ''
HASH_PATH_PREFIX = ''
# Cluster-wide configuration read by validate_hash_conf() below.
SWIFT_CONF_FILE = '/etc/swift/swift.conf'
# These constants are Linux-specific, and Python doesn't seem to know
# about them. We ask anyway just in case that ever gets fixed.
#
# The values were copied from the Linux 3.0 kernel headers.
AF_ALG = getattr(socket, 'AF_ALG', 38)
F_SETPIPE_SZ = getattr(fcntl, 'F_SETPIPE_SZ', 1031)
class InvalidHashPathConfigError(ValueError):
    """Raised when swift.conf provides neither a hash path suffix nor prefix."""

    def __str__(self):
        return ("[swift-hash]: both swift_hash_path_suffix and "
                "swift_hash_path_prefix are missing from %s"
                % SWIFT_CONF_FILE)
def validate_hash_conf():
    """
    Populate the module-level HASH_PATH_SUFFIX / HASH_PATH_PREFIX globals
    from the [swift-hash] section of SWIFT_CONF_FILE, if not already set.

    :raises InvalidHashPathConfigError: if neither value could be loaded
    """
    global HASH_PATH_SUFFIX
    global HASH_PATH_PREFIX
    if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
        hash_conf = ConfigParser()
        # read() returns the list of files successfully parsed; an absent
        # config file leaves both globals empty and triggers the error below.
        if hash_conf.read(SWIFT_CONF_FILE):
            try:
                HASH_PATH_SUFFIX = hash_conf.get('swift-hash',
                                                 'swift_hash_path_suffix')
            except (NoSectionError, NoOptionError):
                pass
            try:
                HASH_PATH_PREFIX = hash_conf.get('swift-hash',
                                                 'swift_hash_path_prefix')
            except (NoSectionError, NoOptionError):
                pass
        if not HASH_PATH_SUFFIX and not HASH_PATH_PREFIX:
            raise InvalidHashPathConfigError()
# Best-effort validation at import time: keep the module importable even
# when swift.conf is absent.
try:
    validate_hash_conf()
except InvalidHashPathConfigError:
    # could get monkey patched or lazy loaded
    pass
def get_hmac(request_method, path, expires, key):
    """
    Returns the hexdigest string of the HMAC-SHA1 (RFC 2104) for
    the request.

    :param request_method: Request method to allow.
    :param path: The path to the resource to allow access to.
    :param expires: Unix timestamp as an int for when the URL
                    expires.
    :param key: HMAC shared secret; may be text or bytes.

    :returns: hexdigest str of the HMAC-SHA1 for the request.
    """
    message = '%s\n%s\n%s' % (request_method, expires, path)
    # hmac.new() requires bytes for both key and message on Python 3;
    # encode text inputs so callers can keep passing native strings.
    if not isinstance(message, bytes):
        message = message.encode('utf8')
    if not isinstance(key, bytes):
        key = key.encode('utf8')
    return hmac.new(key, message, sha1).hexdigest()
# Used by get_swift_info and register_swift_info to store information about
# the swift cluster.
_swift_info = {}
# Admin-only registry; surfaced by get_swift_info only when admin=True.
_swift_admin_info = {}
def get_swift_info(admin=False, disallowed_sections=None):
    """
    Returns information about the swift cluster that has been previously
    registered with the register_swift_info call.

    :param admin: boolean value, if True will additionally return an 'admin'
                  section with information previously registered as admin
                  info.
    :param disallowed_sections: list of section names to be withheld from the
                                information returned.
    :returns: dictionary of information about the swift cluster.
    """
    disallowed_sections = disallowed_sections or []
    info = dict(_swift_info)
    for section in disallowed_sections:
        # Walk dotted section names down the nested dict, then drop the
        # final key; missing intermediate keys make this a no-op.
        pending = None
        node = info
        for part in section.split('.'):
            if pending:
                node = node.get(pending, {})
                if not isinstance(node, dict):
                    node = {}
                    break
            pending = part
        node.pop(pending, None)
    if admin:
        admin_info = dict(_swift_admin_info)
        admin_info['disallowed_sections'] = list(disallowed_sections)
        info['admin'] = admin_info
    return info
def register_swift_info(name='swift', admin=False, **kwargs):
    """
    Registers information about the swift cluster to be retrieved with calls
    to get_swift_info.

    NOTE: Do not use "." in the param: name or any keys in kwargs. "." is used
    in the disallowed_sections to remove unwanted keys from /info.

    :param name: string, the section name to place the information under.
    :param admin: boolean, if True, information will be registered to an
                  admin section which can optionally be withheld when
                  requesting the information.
    :param kwargs: key value arguments representing the information to be
                   added.
    :raises ValueError: if name or any of the keys in kwargs has "." in it
    """
    if name in ('admin', 'disallowed_sections'):
        raise ValueError('\'{0}\' is reserved name.'.format(name))
    dict_to_use = _swift_admin_info if admin else _swift_info
    if name not in dict_to_use:
        if "." in name:
            raise ValueError('Cannot use "." in a swift_info key: %s' % name)
        dict_to_use[name] = {}
    for key, val in kwargs.items():
        if "." in key:
            raise ValueError('Cannot use "." in a swift_info key: %s' % key)
        dict_to_use[name][key] = val
def backward(f, blocksize=4096):
    """
    A generator returning lines from a file starting with the last line,
    then the second last line, etc. i.e., it reads lines backwards.
    Stops when the first line (if any) is read.
    This is useful when searching for recent activity in very
    large files.

    :param f: file object to read (opened in binary mode - lines are
              split on b'\\n' - and seekable)
    :param blocksize: no of characters to go backwards at each block
    """
    f.seek(0, os.SEEK_END)
    if f.tell() == 0:
        # Empty file: nothing to yield.
        return
    last_row = b''
    while f.tell() != 0:
        try:
            f.seek(-blocksize, os.SEEK_CUR)
        except IOError:
            # Fewer than blocksize bytes remain before position 0;
            # shrink the step to exactly what's left.
            blocksize = f.tell()
            f.seek(-blocksize, os.SEEK_CUR)
        block = f.read(blocksize)
        # Rewind over what we just read so the next iteration steps
        # backwards from the start of this block.
        f.seek(-blocksize, os.SEEK_CUR)
        rows = block.split(b'\n')
        # The block's trailing partial line joins the head of the
        # previously processed (later-in-file) block.
        rows[-1] = rows[-1] + last_row
        while rows:
            last_row = rows.pop(-1)
            # Keep the first popped row (possibly partial) for the next
            # block; yield only complete, non-empty rows.
            if rows and last_row:
                yield last_row
    # Finally emit the first line of the file.
    yield last_row
# Used when reading config values: strings (compared lower-cased by
# config_true_value) that count as "true".
TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y'))
def config_true_value(value):
    """
    Returns True if the value is either True or a string in TRUE_VALUES.
    Returns False otherwise.
    """
    if value is True:
        return True
    return isinstance(value, six.string_types) and value.lower() in TRUE_VALUES
def config_auto_int_value(value, default):
    """
    Returns default if value is None or 'auto'.
    Returns value as an int or raises ValueError otherwise.
    """
    is_auto = value is None or (
        isinstance(value, six.string_types) and value.lower() == 'auto')
    if is_auto:
        return default
    try:
        return int(value)
    except (TypeError, ValueError):
        raise ValueError('Config option must be an integer or the '
                         'string "auto", not "%s".' % value)
def append_underscore(prefix):
    """Return *prefix* guaranteed to end in '_' (empty input is unchanged)."""
    if prefix:
        return prefix if prefix.endswith('_') else prefix + '_'
    return prefix
def config_read_reseller_options(conf, defaults):
    """
    Read reseller_prefix option and associated options from configuration

    Reads the reseller_prefix option, then reads options that may be
    associated with a specific reseller prefix. Reads options such that an
    option without a prefix applies to all reseller prefixes unless an option
    has an explicit prefix.

    :param conf: the configuration
    :param defaults: a dict of default values. The key is the option
                     name. The value is either an array of strings or a string
    :return: tuple of an array of reseller prefixes and a dict of option values
    """
    reseller_prefixes = []
    for candidate in conf.get('reseller_prefix', 'AUTH').split(','):
        candidate = candidate.strip()
        if not candidate:
            continue
        # The literal "''" in the config means the empty prefix.
        if candidate == "''":
            candidate = ''
        candidate = append_underscore(candidate)
        if candidate not in reseller_prefixes:
            reseller_prefixes.append(candidate)
    if not reseller_prefixes:
        reseller_prefixes.append('')

    # Get prefix-using config options
    associated_options = {}
    for prefix in reseller_prefixes:
        options = dict(defaults)
        # Unprefixed options apply to every prefix; explicitly prefixed
        # options then override them.
        options.update(config_read_prefixed_options(conf, '', defaults))
        prefix_name = prefix or "''"
        options.update(
            config_read_prefixed_options(conf, prefix_name, defaults))
        associated_options[prefix] = options
    return reseller_prefixes, associated_options
def config_read_prefixed_options(conf, prefix_name, defaults):
    """
    Read prefixed options from configuration

    :param conf: the configuration
    :param prefix_name: the prefix (including, if needed, an underscore)
    :param defaults: a dict of default values. The dict supplies the
                     option name and type (string or comma separated string)
    :return: a dict containing the options
    """
    params = {}
    for option_name, default in defaults.items():
        value = conf.get('%s%s' % (prefix_name, option_name))
        if not value:
            continue
        if isinstance(default, list):
            # Comma-separated list option: lower-case and strip each entry.
            params[option_name] = [
                entry.strip() for entry in value.lower().split(',')]
        else:
            params[option_name] = value.strip()
    return params
def noop_libc_function(*args):
    """Stand-in for a libc function that could not be loaded; ignores all
    arguments and returns 0 (libc success)."""
    return 0
def validate_configuration():
    """Exit the process with an error message if the swift hash path
    configuration is invalid (used by CLI entry points)."""
    try:
        validate_hash_conf()
    except InvalidHashPathConfigError as e:
        sys.exit("Error: %s" % e)
def load_libc_function(func_name, log_error=True,
                       fail_if_missing=False):
    """
    Attempt to find the function in libc, otherwise return a no-op func.

    :param func_name: name of the function to pull from libc.
    :param log_error: log an error when a function can't be found
    :param fail_if_missing: raise an exception when a function can't be found.
                            Default behavior is to return a no-op function.
    """
    libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
    try:
        return getattr(libc, func_name)
    except AttributeError:
        if fail_if_missing:
            raise
        if log_error:
            logging.warn(_("Unable to locate %s in libc. Leaving as a "
                           "no-op."), func_name)
        return noop_libc_function
def generate_trans_id(trans_id_suffix):
    """Return a new transaction id: 'tx', 21 random hex chars, '-', the
    current time as 10 hex digits, then the quoted suffix."""
    uid_part = uuid.uuid4().hex[:21]
    return 'tx%s-%010x%s' % (uid_part, time.time(), quote(trans_id_suffix))
def get_policy_index(req_headers, res_headers):
    """
    Returns the appropriate index of the storage policy for the request from
    a proxy server

    :param req_headers: dict of the request headers.
    :param res_headers: dict of the response headers.

    :returns: string index of storage policy, or None
    """
    header = 'X-Backend-Storage-Policy-Index'
    # The response header wins when present; otherwise fall back to the
    # request header.
    if header in res_headers:
        policy_index = res_headers[header]
    else:
        policy_index = req_headers.get(header)
    if policy_index is None:
        return None
    return str(policy_index)
def get_log_line(req, res, trans_time, additional_info):
    """
    Make a line for logging that matches the documented log line format
    for backend servers.

    :param req: the request.
    :param res: the response.
    :param trans_time: the time the request took to complete, a float.
    :param additional_info: a string to log at the end of the line

    :returns: a properly formatted line for logging.
    """
    policy_index = get_policy_index(req.headers, res.headers)
    # Timestamp is always rendered in UTC (+0000); missing fields are
    # logged as '-'.
    return '%s - - [%s] "%s %s" %s %s "%s" "%s" "%s" %.4f "%s" %d %s' % (
        req.remote_addr,
        time.strftime('%d/%b/%Y:%H:%M:%S +0000', time.gmtime()),
        req.method, req.path, res.status.split()[0],
        res.content_length or '-', req.referer or '-',
        req.headers.get('x-trans-id', '-'),
        req.user_agent or '-', trans_time, additional_info or '-',
        os.getpid(), policy_index or '-')
def get_trans_id_time(trans_id):
    """Extract the integer timestamp embedded in a transaction id generated
    by generate_trans_id(), or None if *trans_id* is not in that format."""
    if len(trans_id) < 34 or not trans_id.startswith('tx') \
            or trans_id[23] != '-':
        return None
    try:
        # Characters 24:34 are the creation time as 10 hex digits.
        return int(trans_id[24:34], 16)
    except ValueError:
        return None
class FileLikeIter(object):
    """
    Wraps an iterable of string/bytes chunks to behave as a read-only
    file-like object.
    """

    def __init__(self, iterable):
        """
        Wraps an iterable to behave as a file-like object.
        """
        self.iterator = iter(iterable)
        self.buf = None  # holds data read from the iterator but not consumed
        self.closed = False

    def __iter__(self):
        return self

    def next(self):
        """
        next(x) -> the next value, or raise StopIteration
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')
        if self.buf:
            rv = self.buf
            self.buf = None
            return rv
        else:
            return next(self.iterator)

    # Bug fix: Python 3's iterator protocol looks up __next__(), not next();
    # without this alias the object is not iterable on py3.
    __next__ = next

    def read(self, size=-1):
        """
        read([size]) -> read at most size bytes, returned as a string.

        If the size argument is negative or omitted, read until EOF is reached.
        Notice that when in non-blocking mode, less data than what was
        requested may be returned, even if no size parameter was given.
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')
        if size < 0:
            return ''.join(self)
        elif not size:
            chunk = ''
        elif self.buf:
            chunk = self.buf
            self.buf = None
        else:
            try:
                chunk = next(self.iterator)
            except StopIteration:
                return ''
        # Return at most `size` bytes; stash the remainder for later reads.
        if len(chunk) > size:
            self.buf = chunk[size:]
            chunk = chunk[:size]
        return chunk

    def readline(self, size=-1):
        """
        readline([size]) -> next line from the file, as a string.

        Retain newline. A non-negative size argument limits the maximum
        number of bytes to return (an incomplete line may be returned then).
        Return an empty string at EOF.
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')
        data = ''
        while '\n' not in data and (size < 0 or len(data) < size):
            if size < 0:
                chunk = self.read(1024)
            else:
                chunk = self.read(size - len(data))
            if not chunk:
                break
            data += chunk
        if '\n' in data:
            # Keep everything after the first newline buffered.
            data, sep, rest = data.partition('\n')
            data += sep
            if self.buf:
                self.buf = rest + self.buf
            else:
                self.buf = rest
        return data

    def readlines(self, sizehint=-1):
        """
        readlines([size]) -> list of strings, each a line from the file.

        Call readline() repeatedly and return a list of the lines so read.
        The optional size argument, if given, is an approximate bound on the
        total number of bytes in the lines returned.
        """
        if self.closed:
            raise ValueError('I/O operation on closed file')
        lines = []
        while True:
            line = self.readline(sizehint)
            if not line:
                break
            lines.append(line)
            if sizehint >= 0:
                sizehint -= len(line)
                if sizehint <= 0:
                    break
        return lines

    def close(self):
        """
        close() -> None or (perhaps) an integer. Close the file.

        Sets data attribute .closed to True. A closed file cannot be used for
        further I/O operations. close() may be called more than once without
        error. Some kinds of file objects (for example, opened by popen())
        may return an exit status upon closing.
        """
        self.iterator = None
        self.closed = True
class FallocateWrapper(object):
    """Callable wrapper around libc's fallocate()/posix_fallocate(),
    honoring the module-level FALLOCATE_RESERVE free-space floor."""
    def __init__(self, noop=False):
        # :param noop: if True, never touch libc; calls become no-ops.
        if noop:
            self.func_name = 'posix_fallocate'
            self.fallocate = noop_libc_function
            return
        # fallocate is preferred because we need the on-disk size to match
        # the allocated size. Older versions of sqlite require that the
        # two sizes match. However, fallocate is Linux only.
        for func in ('fallocate', 'posix_fallocate'):
            self.func_name = func
            self.fallocate = load_libc_function(func, log_error=False)
            if self.fallocate is not noop_libc_function:
                break
        if self.fallocate is noop_libc_function:
            logging.warn(_("Unable to locate fallocate, posix_fallocate in "
                           "libc. Leaving as a no-op."))
    def __call__(self, fd, mode, offset, length):
        """The length parameter must be a ctypes.c_uint64."""
        if FALLOCATE_RESERVE > 0:
            # Refuse the allocation if it would leave less free space on
            # the filesystem than the configured reserve.
            st = os.fstatvfs(fd)
            free = st.f_frsize * st.f_bavail - length.value
            if free <= FALLOCATE_RESERVE:
                raise OSError('FALLOCATE_RESERVE fail %s <= %s' % (
                    free, FALLOCATE_RESERVE))
        # posix_fallocate() takes no mode argument.
        args = {
            'fallocate': (fd, mode, offset, length),
            'posix_fallocate': (fd, offset, length)
        }
        return self.fallocate(*args[self.func_name])
def disable_fallocate():
    """Globally replace the lazy fallocate wrapper with a no-op version,
    disabling pre-allocation for subsequent fallocate() calls."""
    global _sys_fallocate
    _sys_fallocate = FallocateWrapper(noop=True)
def fallocate(fd, size):
    """
    Pre-allocate disk space for a file.

    :param fd: file descriptor
    :param size: size to allocate (in bytes); negative values are treated
                 as zero
    """
    global _sys_fallocate
    if _sys_fallocate is None:
        # Resolve the libc wrapper lazily on first use.
        _sys_fallocate = FallocateWrapper()
    if size < 0:
        size = 0
    # 1 means "FALLOC_FL_KEEP_SIZE", which means it pre-allocates invisibly
    ret = _sys_fallocate(fd, 1, 0, ctypes.c_uint64(size))
    err = ctypes.get_errno()
    # Tolerate "not supported/implemented" errors; raise real failures
    # such as insufficient space.
    if ret and err not in (0, errno.ENOSYS, errno.EOPNOTSUPP,
                           errno.EINVAL):
        raise OSError(err, 'Unable to fallocate(%s)' % size)
def fsync(fd):
    """
    Sync modified file data and metadata to disk.

    :param fd: file descriptor
    """
    if hasattr(fcntl, 'F_FULLSYNC'):
        # NOTE(review): F_FULLSYNC appears to be the platform-specific
        # (e.g. OS X) full-flush fcntl; preferred over os.fsync() when
        # available - confirm against the platform headers.
        try:
            fcntl.fcntl(fd, fcntl.F_FULLSYNC)
        except IOError as e:
            raise OSError(e.errno, 'Unable to F_FULLSYNC(%s)' % fd)
    else:
        os.fsync(fd)
def fdatasync(fd):
    """
    Sync modified file data to disk.

    :param fd: file descriptor
    """
    if hasattr(os, 'fdatasync'):
        os.fdatasync(fd)
    else:
        # Platforms without os.fdatasync() get a full fsync instead.
        fsync(fd)
def fsync_dir(dirpath):
    """
    Sync directory entries to disk.

    :param dirpath: Path to the directory to be synced.
    """
    dirfd = None
    try:
        dirfd = os.open(dirpath, os.O_DIRECTORY | os.O_RDONLY)
        fsync(dirfd)
    except OSError as err:
        if err.errno == errno.ENOTDIR:
            # Raise error if someone calls fsync_dir on a non-directory
            raise
        logging.warn(_("Unable to perform fsync() on directory %s: %s"),
                     dirpath, os.strerror(err.errno))
    finally:
        # Bug fix: the original tested `if dirfd:`, which would leak a
        # (theoretically possible) descriptor of 0; compare against None.
        if dirfd is not None:
            os.close(dirfd)
def drop_buffer_cache(fd, offset, length):
    """
    Drop 'buffer' cache for the given range of the given file.

    :param fd: file descriptor
    :param offset: start offset
    :param length: length
    """
    global _posix_fadvise
    if _posix_fadvise is None:
        # Resolve posix_fadvise64 from libc lazily on first use.
        _posix_fadvise = load_libc_function('posix_fadvise64')
    # 4 means "POSIX_FADV_DONTNEED"
    ret = _posix_fadvise(fd, ctypes.c_uint64(offset),
                         ctypes.c_uint64(length), 4)
    if ret != 0:
        # Purely advisory: failure is logged, never raised.
        logging.warn("posix_fadvise64(%(fd)s, %(offset)s, %(length)s, 4) "
                     "-> %(ret)s", {'fd': fd, 'offset': offset,
                                    'length': length, 'ret': ret})
# Fixed-width (16 chars, 5 decimal places) float form used for X-Timestamp.
NORMAL_FORMAT = "%016.05f"
# Normal form plus a 16-hex-digit offset vector.
INTERNAL_FORMAT = NORMAL_FORMAT + '_%016x'
# Largest offset representable in the 16-hex-digit field.
MAX_OFFSET = (16 ** 16) - 1
# Timestamp resolution: ten microseconds.
PRECISION = 1e-5
# Setting this to True will cause the internal format to always display
# extended digits - even when the value is equivalent to the normalized form.
# This isn't ideal during an upgrade when some servers might not understand
# the new time format - but flipping it to True works great for testing.
FORCE_INTERNAL = False # or True
class Timestamp(object):
    """
    Internal Representation of Swift Time.

    The normalized form of the X-Timestamp header looks like a float
    with a fixed width to ensure stable string sorting - normalized
    timestamps look like "1402464677.04188"

    To support overwrites of existing data without modifying the original
    timestamp but still maintain consistency a second internal offset vector
    is append to the normalized timestamp form which compares and sorts
    greater than the fixed width float format but less than a newer timestamp.
    The internalized format of timestamps looks like
    "1402464677.04188_0000000000000000" - the portion after the underscore is
    the offset and is a formatted hexadecimal integer.

    The internalized form is not exposed to clients in responses from
    Swift. Normal client operations will not create a timestamp with an
    offset.

    The Timestamp class in common.utils supports internalized and
    normalized formatting of timestamps and also comparison of timestamp
    values. When the offset value of a Timestamp is 0 - it's considered
    insignificant and need not be represented in the string format; to
    support backwards compatibility during a Swift upgrade the
    internalized and normalized form of a Timestamp with an
    insignificant offset are identical. When a timestamp includes an
    offset it will always be represented in the internalized form, but
    is still excluded from the normalized form. Timestamps with an
    equivalent timestamp portion (the float part) will compare and order
    by their offset. Timestamps with a greater timestamp portion will
    always compare and order greater than a Timestamp with a lesser
    timestamp regardless of it's offset. String comparison and ordering
    is guaranteed for the internalized string format, and is backwards
    compatible for normalized timestamps which do not include an offset.
    """

    def __init__(self, timestamp, offset=0, delta=0):
        """
        Create a new Timestamp.

        :param timestamp: time in seconds since the Epoch, may be any of:

            * a float or integer
            * normalized/internalized string
            * another instance of this class (offset is preserved)

        :param offset: the second internal offset vector, an int
        :param delta: deca-microsecond difference from the base timestamp
                      param, an int
        """
        if isinstance(timestamp, six.string_types):
            parts = timestamp.split('_', 1)
            self.timestamp = float(parts.pop(0))
            if parts:
                self.offset = int(parts[0], 16)
            else:
                self.offset = 0
        else:
            self.timestamp = float(timestamp)
            self.offset = getattr(timestamp, 'offset', 0)
        # increment offset
        if offset >= 0:
            self.offset += offset
        else:
            raise ValueError('offset must be non-negative')
        if self.offset > MAX_OFFSET:
            raise ValueError('offset must be smaller than %d' % MAX_OFFSET)
        self.raw = int(round(self.timestamp / PRECISION))
        # add delta
        if delta:
            self.raw = self.raw + delta
            if self.raw <= 0:
                raise ValueError(
                    'delta must be greater than %d' % (-1 * self.raw))
            self.timestamp = float(self.raw * PRECISION)

    def __repr__(self):
        return INTERNAL_FORMAT % (self.timestamp, self.offset)

    def __str__(self):
        # Deliberate: callers must choose .normal or .internal explicitly.
        raise TypeError('You must specify which string format is required')

    def __float__(self):
        return self.timestamp

    def __int__(self):
        return int(self.timestamp)

    def __nonzero__(self):
        return bool(self.timestamp or self.offset)

    def __bool__(self):
        return self.__nonzero__()

    @property
    def normal(self):
        return NORMAL_FORMAT % self.timestamp

    @property
    def internal(self):
        if self.offset or FORCE_INTERNAL:
            return INTERNAL_FORMAT % (self.timestamp, self.offset)
        else:
            return self.normal

    @property
    def isoformat(self):
        isoformat = datetime.datetime.utcfromtimestamp(
            float(self.normal)).isoformat()
        # python isoformat() doesn't include msecs when zero
        if len(isoformat) < len("1970-01-01T00:00:00.000000"):
            isoformat += ".000000"
        return isoformat

    def __eq__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal == other.internal

    def __ne__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal != other.internal

    def __cmp__(self, other):
        # Python 2 only; Python 3 ignores __cmp__ and uses the rich
        # comparison methods below.
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return cmp(self.internal, other.internal)

    # Bug fix: on Python 3 the class previously had no ordering (sorted()
    # raised TypeError) and, because __eq__ is defined, no hash. The
    # internalized string format is documented above as sort-stable, so
    # comparisons delegate to it.
    def __lt__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal < other.internal

    def __le__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal <= other.internal

    def __gt__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal > other.internal

    def __ge__(self, other):
        if not isinstance(other, Timestamp):
            other = Timestamp(other)
        return self.internal >= other.internal

    def __hash__(self):
        # Consistent with __eq__, which compares internalized strings.
        return hash(self.internal)
def normalize_timestamp(timestamp):
    """
    Format a timestamp (string or numeric) into a standardized
    xxxxxxxxxx.xxxxx (10.5) format.

    Note that timestamps using values greater than or equal to November 20th,
    2286 at 17:46 UTC will use 11 digits to represent the number of
    seconds.

    :param timestamp: unix timestamp
    :returns: normalized timestamp as a string
    """
    # Timestamp accepts floats, ints, normalized/internalized strings and
    # other Timestamp instances.
    return Timestamp(timestamp).normal
# Naive-UTC reference point used to turn datetimes into Unix timestamps.
EPOCH = datetime.datetime(1970, 1, 1)
def last_modified_date_to_timestamp(last_modified_date_str):
    """
    Convert a last modified date (like you'd get from a container listing,
    e.g. 2014-02-28T23:22:36.698390) to a float.
    """
    parsed = datetime.datetime.strptime(last_modified_date_str,
                                        '%Y-%m-%dT%H:%M:%S.%f')
    delta = parsed - EPOCH
    # TODO(sam): after we no longer support py2.6, this expression can
    # simplify to Timestamp(delta.total_seconds()).
    #
    # This calculation is based on Python 2.7's Modules/datetimemodule.c,
    # function delta_to_microseconds(), but written in Python.
    seconds = delta.days * 86400 + delta.seconds
    seconds += delta.microseconds / 1000000.0
    return Timestamp(seconds)
def normalize_delete_at_timestamp(timestamp):
    """
    Format a timestamp (string or numeric) into a standardized
    xxxxxxxxxx (10) format.

    Note that timestamps less than 0000000000 are raised to
    0000000000 and values greater than November 20th, 2286 at
    17:46:39 UTC will be capped at that date and time, resulting in
    no return value exceeding 9999999999.

    This cap is because the expirer is already working through a
    sorted list of strings that were all a length of 10. Adding
    another digit would mess up the sort and cause the expirer to
    break from processing early. By 2286, this problem will need to
    be fixed, probably by creating an additional .expiring_objects
    account to work from with 11 (or more) digit container names.

    :param timestamp: unix timestamp
    :returns: normalized timestamp as a string
    """
    value = float(timestamp)
    # Clamp into the 10-digit range described above.
    if value < 0:
        value = 0
    elif value > 9999999999:
        value = 9999999999
    return '%010d' % value
def mkdirs(path):
    """
    Ensures the path is a directory or makes it if not. Errors if the path
    exists but is a file or on permissions failure.

    :param path: path to create
    """
    if os.path.isdir(path):
        return
    try:
        os.makedirs(path)
    except OSError as err:
        # A concurrent creator is fine; anything else propagates.
        if err.errno == errno.EEXIST and os.path.isdir(path):
            return
        raise
def makedirs_count(path, count=0):
    """
    Same as os.makedirs() except that this method returns the number of
    new directories that had to be created.

    Also, this does not raise an error if target directory already exists.
    This behaviour is similar to Python 3.x's os.makedirs() called with
    exist_ok=True. Also similar to swift.common.utils.mkdirs()

    https://hg.python.org/cpython/file/v3.4.2/Lib/os.py#l212
    """
    head, tail = os.path.split(path)
    if not tail:
        head, tail = os.path.split(head)
    if head and tail and not os.path.exists(head):
        # Create missing ancestors first, accumulating their count.
        count = makedirs_count(head, count)
        if tail == os.path.curdir:
            return
    try:
        os.mkdir(path)
        count += 1
    except OSError as e:
        # EEXIST may also be raised if path exists as a file
        # Do not let that pass.
        if e.errno != errno.EEXIST or not os.path.isdir(path):
            raise
    return count
def renamer(old, new, fsync=True):
    """
    Attempt to fix / hide race conditions like empty object directories
    being removed by backend processes during uploads, by retrying.

    The containing directory of 'new' and of all newly created directories are
    fsync'd by default. This _will_ come at a performance penalty. In cases
    where these additional fsyncs are not necessary, it is expected that the
    caller of renamer() turn it off explicitly.

    :param old: old path to be renamed
    :param new: new path to be renamed to
    :param fsync: fsync on containing directory of new and also all
                  the newly created directories.
    """
    dirpath = os.path.dirname(new)
    try:
        count = makedirs_count(dirpath)
        os.rename(old, new)
    except OSError:
        # Retry once: the destination directory may have been removed
        # between makedirs_count() and rename() by another process.
        count = makedirs_count(dirpath)
        os.rename(old, new)
    if fsync:
        # If count=0, no new directories were created. But we still need to
        # fsync leaf dir after os.rename().
        # If count>0, starting from leaf dir, fsync parent dirs of all
        # directories created by makedirs_count()
        for i in range(0, count + 1):
            fsync_dir(dirpath)
            dirpath = os.path.dirname(dirpath)
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
    """
    Validate and split the given HTTP request path.

    **Examples**::

        ['a'] = split_path('/a')
        ['a', None] = split_path('/a', 1, 2)
        ['a', 'c'] = split_path('/a/c', 1, 2)
        ['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)

    :param path: HTTP Request path to be split
    :param minsegs: Minimum number of segments to be extracted
    :param maxsegs: Maximum number of segments to be extracted
    :param rest_with_last: If True, trailing data will be returned as part
                           of last segment. If False, and there is
                           trailing data, raises ValueError.
    :returns: list of segments with a length of maxsegs (non-existent
              segments will return as None)
    :raises: ValueError if given an invalid path
    """
    if not maxsegs:
        maxsegs = minsegs
    if minsegs > maxsegs:
        raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
    if rest_with_last:
        segs = path.split('/', maxsegs)
        minsegs += 1
        maxsegs += 1
        count = len(segs)
        # The leading '/' yields an empty first element; the remaining
        # mandatory segments must be non-empty.
        invalid = (segs[0] or count < minsegs or count > maxsegs or
                   '' in segs[1:minsegs])
    else:
        minsegs += 1
        maxsegs += 1
        segs = path.split('/', maxsegs)
        count = len(segs)
        # One extra split is allowed only when it is the empty string left
        # by a trailing '/'.
        invalid = (segs[0] or count < minsegs or count > maxsegs + 1 or
                   '' in segs[1:minsegs] or
                   (count == maxsegs + 1 and segs[maxsegs]))
    if invalid:
        raise ValueError('Invalid path: %s' % quote(path))
    segs = segs[1:maxsegs]
    segs.extend([None] * (maxsegs - 1 - len(segs)))
    return segs
def validate_device_partition(device, partition):
    """
    Validate that a device and a partition are valid and won't lead to
    directory traversal when used.

    :param device: device to validate
    :param partition: partition to validate
    :raises: ValueError if given an invalid device or partition
    """
    # Reject empty names, path separators and the relative-path components
    # '.'/'..' - any of these could escape the intended directory.
    if not device or '/' in device or device in ('.', '..'):
        raise ValueError('Invalid device: %s' % quote(device or ''))
    if not partition or '/' in partition or partition in ('.', '..'):
        raise ValueError('Invalid partition: %s' % quote(partition or ''))
class RateLimitedIterator(object):
    """
    Wrap an iterator to only yield elements at a rate of N per second.

    :param iterable: iterable to wrap
    :param elements_per_second: the rate at which to yield elements
    :param limit_after: rate limiting kicks in only after yielding
                        this many elements; default is 0 (rate limit
                        immediately)
    """

    def __init__(self, iterable, elements_per_second, limit_after=0):
        self.iterator = iter(iterable)
        self.elements_per_second = elements_per_second
        self.limit_after = limit_after
        self.running_time = 0

    def __iter__(self):
        return self

    def next(self):
        if self.limit_after > 0:
            self.limit_after -= 1
        else:
            # Sleep as needed to hold the configured rate.
            self.running_time = ratelimit_sleep(self.running_time,
                                                self.elements_per_second)
        return next(self.iterator)

    # Bug fix: Python 3's iterator protocol looks up __next__(), not next().
    __next__ = next
class GreenthreadSafeIterator(object):
    """
    Wrap an iterator to ensure that only one greenthread is inside its next()
    method at a time.

    This is useful if an iterator's next() method may perform network IO, as
    that may trigger a greenthread context switch (aka trampoline), which can
    give another greenthread a chance to call next(). At that point, you get
    an error like "ValueError: generator already executing". By wrapping calls
    to next() with a mutex, we avoid that error.
    """

    def __init__(self, unsafe_iterable):
        self.unsafe_iter = iter(unsafe_iterable)
        self.semaphore = eventlet.semaphore.Semaphore(value=1)

    def __iter__(self):
        return self

    def next(self):
        with self.semaphore:
            return next(self.unsafe_iter)

    # Bug fix: Python 3's iterator protocol looks up __next__(), not next().
    __next__ = next
class NullLogger(object):
    """A no-op logger for eventlet wsgi."""

    def write(self, *args):
        """Accept any arguments and discard them ("logs" to nowhere)."""
        return None
class LoggerFileObject(object):
    """
    File-like object that redirects writes to a logger's error level,
    typically installed over stdout/stderr. All read operations raise
    IOError(EBADF) since there is nothing to read.
    """

    def __init__(self, logger, log_type='STDOUT'):
        self.logger = logger
        self.log_type = log_type  # label prepended to every logged line

    def write(self, value):
        value = value.strip()
        if value:
            if 'Connection reset by peer' in value:
                # Collapse noisy peer-reset tracebacks to a single line.
                self.logger.error(
                    _('%s: Connection reset by peer'), self.log_type)
            else:
                self.logger.error(_('%s: %s'), self.log_type, value)

    def writelines(self, values):
        self.logger.error(_('%s: %s'), self.log_type, '#012'.join(values))

    def close(self):
        pass

    def flush(self):
        pass

    def __iter__(self):
        return self

    def next(self):
        raise IOError(errno.EBADF, 'Bad file descriptor')

    # Bug fix: Python 3's iterator protocol looks up __next__(), not next();
    # without the alias, iterating would raise TypeError instead of the
    # intended IOError.
    __next__ = next

    def read(self, size=-1):
        raise IOError(errno.EBADF, 'Bad file descriptor')

    def readline(self, size=-1):
        raise IOError(errno.EBADF, 'Bad file descriptor')

    def tell(self):
        return 0

    def xreadlines(self):
        return self
class StatsdClient(object):
    """
    Minimal statsd metrics emitter: each metric is sent as one UDP
    datagram on a freshly opened socket.
    """

    def __init__(self, host, port, base_prefix='', tail_prefix='',
                 default_sample_rate=1, sample_rate_factor=1, logger=None):
        self._host = host
        self._port = port
        self._base_prefix = base_prefix
        self.set_prefix(tail_prefix)
        self._default_sample_rate = default_sample_rate
        self._sample_rate_factor = sample_rate_factor
        self._target = (self._host, self._port)
        # Attribute so tests may substitute a deterministic generator.
        self.random = random
        self.logger = logger

    def set_prefix(self, new_prefix):
        # Combine base and tail prefixes into one dotted prefix that always
        # ends in '.' when non-empty.
        if new_prefix and self._base_prefix:
            self._prefix = '.'.join([self._base_prefix, new_prefix, ''])
        elif new_prefix:
            self._prefix = new_prefix + '.'
        elif self._base_prefix:
            self._prefix = self._base_prefix + '.'
        else:
            self._prefix = ''

    def _send(self, m_name, m_value, m_type, sample_rate):
        if sample_rate is None:
            sample_rate = self._default_sample_rate
        sample_rate = sample_rate * self._sample_rate_factor
        parts = ['%s%s:%s' % (self._prefix, m_name, m_value), m_type]
        if sample_rate < 1:
            # Probabilistically drop the sample; annotate sent ones with
            # the rate so the server can scale counts back up.
            if self.random() < sample_rate:
                parts.append('@%s' % (sample_rate,))
            else:
                return
        payload = '|'.join(parts)
        if not isinstance(payload, bytes):
            # Bug fix: socket.sendto() requires a bytes-like object on
            # Python 3; sending the joined str raised TypeError.
            payload = payload.encode('utf-8')
        # Ideally, we'd cache a sending socket in self, but that
        # results in a socket getting shared by multiple green threads.
        with closing(self._open_socket()) as sock:
            try:
                return sock.sendto(payload, self._target)
            except IOError as err:
                if self.logger:
                    self.logger.warn(
                        'Error sending UDP message to %r: %s',
                        self._target, err)

    def _open_socket(self):
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def update_stats(self, m_name, m_value, sample_rate=None):
        return self._send(m_name, m_value, 'c', sample_rate)

    def increment(self, metric, sample_rate=None):
        return self.update_stats(metric, 1, sample_rate)

    def decrement(self, metric, sample_rate=None):
        return self.update_stats(metric, -1, sample_rate)

    def timing(self, metric, timing_ms, sample_rate=None):
        return self._send(metric, timing_ms, 'ms', sample_rate)

    def timing_since(self, metric, orig_time, sample_rate=None):
        return self.timing(metric, (time.time() - orig_time) * 1000,
                           sample_rate)

    def transfer_rate(self, metric, elapsed_time, byte_xfer, sample_rate=None):
        # Report elapsed milliseconds normalized per kilobyte transferred.
        if byte_xfer:
            return self.timing(metric,
                               elapsed_time * 1000 / byte_xfer * 1000,
                               sample_rate)
def server_handled_successfully(status_int):
    """
    True for successful responses *or* error codes that are not Swift's fault,
    False otherwise. For example, 500 is definitely the server's fault, but
    412 is an error code (4xx are all errors) that is due to a header the
    client sent.
    If one is tracking error rates to monitor server health, one would be
    advised to use a function like this one, lest a client cause a flurry of
    404s or 416s and make a spurious spike in your errors graph.
    """
    if is_success(status_int) or is_redirection(status_int):
        return True
    # These 4xx codes are triggered by client requests, not server trouble.
    client_fault_codes = (HTTP_NOT_FOUND,
                          HTTP_PRECONDITION_FAILED,
                          HTTP_REQUESTED_RANGE_NOT_SATISFIABLE)
    return status_int in client_fault_codes
def timing_stats(**dec_kwargs):
    """
    Returns a decorator that logs timing events or errors for public methods in
    swift's wsgi server controllers, based on response code.

    Successful responses (per server_handled_successfully) are recorded as
    '<method>.timing'; the rest as '<method>.errors.timing'.  dec_kwargs are
    passed through to logger.timing_since (e.g. sample_rate).
    """
    def decorating_func(func):
        # func.__name__ exists on both Python 2 and 3; func.func_name (used
        # previously) is Python-2-only and breaks under py3.
        method = func.__name__

        @functools.wraps(func)
        def _timing_stats(ctrl, *args, **kwargs):
            start_time = time.time()
            resp = func(ctrl, *args, **kwargs)
            if server_handled_successfully(resp.status_int):
                ctrl.logger.timing_since(method + '.timing',
                                         start_time, **dec_kwargs)
            else:
                ctrl.logger.timing_since(method + '.errors.timing',
                                         start_time, **dec_kwargs)
            return resp
        return _timing_stats
    return decorating_func
# double inheritance to support property with setter
# (mixing in object makes @property setters work even where LoggerAdapter
# is an old-style class)
class LogAdapter(logging.LoggerAdapter, object):
    """
    A Logger like object which performs some reformatting on calls to
    :meth:`exception`. Can be used to store a threadlocal transaction id and
    client ip.
    """
    # shared across all LogAdapter instances; the stored values are
    # per-thread (threading.local), so each green/OS thread sees its own
    # txn_id / client_ip
    _cls_thread_local = threading.local()
    def __init__(self, logger, server):
        logging.LoggerAdapter.__init__(self, logger, {})
        self.server = server
        # alias so callers may use either .warn() or .warning()
        setattr(self, 'warn', self.warning)
    @property
    def txn_id(self):
        # returns None when no transaction id has been set on this thread
        if hasattr(self._cls_thread_local, 'txn_id'):
            return self._cls_thread_local.txn_id
    @txn_id.setter
    def txn_id(self, value):
        self._cls_thread_local.txn_id = value
    @property
    def client_ip(self):
        # returns None when no client ip has been set on this thread
        if hasattr(self._cls_thread_local, 'client_ip'):
            return self._cls_thread_local.client_ip
    @client_ip.setter
    def client_ip(self, value):
        self._cls_thread_local.client_ip = value
    @property
    def thread_locals(self):
        # snapshot of (txn_id, client_ip), e.g. for handing off to another
        # thread via the setter below
        return (self.txn_id, self.client_ip)
    @thread_locals.setter
    def thread_locals(self, value):
        self.txn_id, self.client_ip = value
    def getEffectiveLevel(self):
        # delegate to the wrapped logger (LoggerAdapter itself does not
        # expose this on all Python versions this code targets)
        return self.logger.getEffectiveLevel()
    def process(self, msg, kwargs):
        """
        Add extra info to message
        """
        # 'extra' feeds SwiftLogFormatter's %(server)s / txn_id / client_ip
        kwargs['extra'] = {'server': self.server, 'txn_id': self.txn_id,
                           'client_ip': self.client_ip}
        return msg, kwargs
    def notice(self, msg, *args, **kwargs):
        """
        Convenience function for syslog priority LOG_NOTICE. The python
        logging lvl is set to 25, just above info. SysLogHandler is
        monkey patched to map this log lvl to the LOG_NOTICE syslog
        priority.
        """
        self.log(NOTICE, msg, *args, **kwargs)
    def _exception(self, msg, *args, **kwargs):
        # the unmodified LoggerAdapter.exception (logs a full traceback)
        logging.LoggerAdapter.exception(self, msg, *args, **kwargs)
    def exception(self, msg, *args, **kwargs):
        # Collapse "expected" environmental errors (disk full, connection
        # trouble, timeouts) into one-line error messages; anything else
        # falls through to _exception() for a full traceback.
        _junk, exc, _junk = sys.exc_info()
        call = self.error
        emsg = ''
        if isinstance(exc, (OSError, socket.error)):
            if exc.errno in (errno.EIO, errno.ENOSPC):
                emsg = str(exc)
            elif exc.errno == errno.ECONNREFUSED:
                emsg = _('Connection refused')
            elif exc.errno == errno.EHOSTUNREACH:
                emsg = _('Host unreachable')
            elif exc.errno == errno.ETIMEDOUT:
                emsg = _('Connection timeout')
            else:
                call = self._exception
        elif isinstance(exc, eventlet.Timeout):
            emsg = exc.__class__.__name__
            if hasattr(exc, 'seconds'):
                emsg += ' (%ss)' % exc.seconds
            if isinstance(exc, swift.common.exceptions.MessageTimeout):
                if exc.msg:
                    emsg += ' %s' % exc.msg
        else:
            call = self._exception
        call('%s: %s' % (msg, emsg), *args, **kwargs)
    def set_statsd_prefix(self, prefix):
        """
        The StatsD client prefix defaults to the "name" of the logger. This
        method may override that default with a specific value. Currently used
        in the proxy-server to differentiate the Account, Container, and Object
        controllers.
        """
        if self.logger.statsd_client:
            self.logger.statsd_client.set_prefix(prefix)
    def statsd_delegate(statsd_func_name):
        """
        Factory to create methods which delegate to methods on
        self.logger.statsd_client (an instance of StatsdClient). The
        created methods conditionally delegate to a method whose name is given
        in 'statsd_func_name'. The created delegate methods are a no-op when
        StatsD logging is not configured.
        :param statsd_func_name: the name of a method on StatsdClient.
        """
        # NOTE: runs at class-definition time; statsd_delegate itself never
        # becomes a bound method -- only the wrapped functions below do.
        func = getattr(StatsdClient, statsd_func_name)
        @functools.wraps(func)
        def wrapped(self, *a, **kw):
            # no-op unless get_logger() attached a statsd_client
            if getattr(self.logger, 'statsd_client'):
                return func(self.logger.statsd_client, *a, **kw)
        return wrapped
    update_stats = statsd_delegate('update_stats')
    increment = statsd_delegate('increment')
    decrement = statsd_delegate('decrement')
    timing = statsd_delegate('timing')
    timing_since = statsd_delegate('timing_since')
    transfer_rate = statsd_delegate('transfer_rate')
class SwiftLogFormatter(logging.Formatter):
    """
    Custom logging.Formatter will append txn_id to a log message if the
    record has one and the message does not. Optionally it can shorten
    overly long log lines.
    """
    def __init__(self, fmt=None, datefmt=None, max_line_length=0):
        # max_line_length == 0 disables truncation entirely
        logging.Formatter.__init__(self, fmt=fmt, datefmt=datefmt)
        self.max_line_length = max_line_length
    def format(self, record):
        if not hasattr(record, 'server'):
            # Catch log messages that were not initiated by swift
            # (for example, the keystone auth middleware)
            record.server = record.name
        # Included from Python's logging.Formatter and then altered slightly to
        # replace \n with #012
        record.message = record.getMessage()
        if self._fmt.find('%(asctime)') >= 0:
            record.asctime = self.formatTime(record, self.datefmt)
        msg = (self._fmt % record.__dict__).replace('\n', '#012')
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(
                    record.exc_info).replace('\n', '#012')
        if record.exc_text:
            if not msg.endswith('#012'):
                msg = msg + '#012'
            msg = msg + record.exc_text
        # Tag non-INFO messages with the txn_id / client_ip from the
        # LogAdapter, but only when not already present in the message.
        if (hasattr(record, 'txn_id') and record.txn_id and
                record.levelno != logging.INFO and
                record.txn_id not in msg):
            msg = "%s (txn: %s)" % (msg, record.txn_id)
        if (hasattr(record, 'client_ip') and record.client_ip and
                record.levelno != logging.INFO and
                record.client_ip not in msg):
            msg = "%s (client_ip: %s)" % (msg, record.client_ip)
        if self.max_line_length > 0 and len(msg) > self.max_line_length:
            if self.max_line_length < 7:
                # too short to fit " ... " plus content: hard truncate
                msg = msg[:self.max_line_length]
            else:
                # keep the head and tail, elide the middle
                approxhalf = (self.max_line_length - 5) // 2
                msg = msg[:approxhalf] + " ... " + msg[-approxhalf:]
        return msg
def get_logger(conf, name=None, log_to_console=False, log_route=None,
               fmt="%(server)s: %(message)s"):
    """
    Get the current system logger using config settings.
    **Log config and defaults**::
        log_facility = LOG_LOCAL0
        log_level = INFO
        log_name = swift
        log_max_line_length = 0
        log_udp_host = (disabled)
        log_udp_port = logging.handlers.SYSLOG_UDP_PORT
        log_address = /dev/log
        log_statsd_host = (disabled)
        log_statsd_port = 8125
        log_statsd_default_sample_rate = 1.0
        log_statsd_sample_rate_factor = 1.0
        log_statsd_metric_prefix = (empty-string)
    :param conf: Configuration dict to read settings from
    :param name: Name of the logger
    :param log_to_console: Add handler which writes to console on stderr
    :param log_route: Route for the logging, not emitted to the log, just used
                      to separate logging configurations
    :param fmt: Override log format
    :returns: a LogAdapter wrapping the configured logging.Logger
    """
    if not conf:
        conf = {}
    if name is None:
        name = conf.get('log_name', 'swift')
    if not log_route:
        log_route = name
    logger = logging.getLogger(log_route)
    logger.propagate = False
    # all new handlers will get the same formatter
    formatter = SwiftLogFormatter(
        fmt=fmt, max_line_length=int(conf.get('log_max_line_length', 0)))
    # get_logger will only ever add one SysLog Handler to a logger
    # (per-logger state is kept on the function object so repeated calls
    # for the same route replace -- not stack -- handlers)
    if not hasattr(get_logger, 'handler4logger'):
        get_logger.handler4logger = {}
    if logger in get_logger.handler4logger:
        logger.removeHandler(get_logger.handler4logger[logger])
    # facility for this logger will be set by last call wins
    facility = getattr(SysLogHandler, conf.get('log_facility', 'LOG_LOCAL0'),
                       SysLogHandler.LOG_LOCAL0)
    udp_host = conf.get('log_udp_host')
    if udp_host:
        # syslog over UDP to a remote collector
        udp_port = int(conf.get('log_udp_port',
                                logging.handlers.SYSLOG_UDP_PORT))
        handler = SysLogHandler(address=(udp_host, udp_port),
                                facility=facility)
    else:
        log_address = conf.get('log_address', '/dev/log')
        try:
            handler = SysLogHandler(address=log_address, facility=facility)
        except socket.error as e:
            # Either /dev/log isn't a UNIX socket or it does not exist at all
            if e.errno not in [errno.ENOTSOCK, errno.ENOENT]:
                raise e
            # fall back to SysLogHandler's default (UDP to localhost)
            handler = SysLogHandler(facility=facility)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    get_logger.handler4logger[logger] = handler
    # setup console logging
    if log_to_console or hasattr(get_logger, 'console_handler4logger'):
        # remove pre-existing console handler for this logger
        if not hasattr(get_logger, 'console_handler4logger'):
            get_logger.console_handler4logger = {}
        if logger in get_logger.console_handler4logger:
            logger.removeHandler(get_logger.console_handler4logger[logger])
        # sys.__stderr__ so capture_stdio() redirection doesn't loop back
        console_handler = logging.StreamHandler(sys.__stderr__)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
        get_logger.console_handler4logger[logger] = console_handler
    # set the level for the logger
    logger.setLevel(
        getattr(logging, conf.get('log_level', 'INFO').upper(), logging.INFO))
    # Setup logger with a StatsD client if so configured
    statsd_host = conf.get('log_statsd_host')
    if statsd_host:
        statsd_port = int(conf.get('log_statsd_port', 8125))
        base_prefix = conf.get('log_statsd_metric_prefix', '')
        default_sample_rate = float(conf.get(
            'log_statsd_default_sample_rate', 1))
        sample_rate_factor = float(conf.get(
            'log_statsd_sample_rate_factor', 1))
        statsd_client = StatsdClient(statsd_host, statsd_port, base_prefix,
                                     name, default_sample_rate,
                                     sample_rate_factor, logger=logger)
        logger.statsd_client = statsd_client
    else:
        # attribute is always present so LogAdapter delegates can test it
        logger.statsd_client = None
    adapted_logger = LogAdapter(logger, name)
    other_handlers = conf.get('log_custom_handlers', None)
    if other_handlers:
        # comma-separated list of 'module.path.function' hook names
        log_custom_handlers = [s.strip() for s in other_handlers.split(',')
                               if s.strip()]
        for hook in log_custom_handlers:
            try:
                mod, fnc = hook.rsplit('.', 1)
                logger_hook = getattr(__import__(mod, fromlist=[fnc]), fnc)
                logger_hook(conf, name, log_to_console, log_route, fmt,
                            logger, adapted_logger)
            except (AttributeError, ImportError):
                print('Error calling custom handler [%s]' % hook,
                      file=sys.stderr)
            except ValueError:
                print('Invalid custom handler format [%s]' % hook,
                      file=sys.stderr)
    return adapted_logger
def get_hub():
    """
    Checks whether poll is available and falls back
    on select if it isn't.
    Note about epoll:
    Review: https://review.openstack.org/#/c/18806/
    There was a problem where once out of every 30 quadrillion
    connections, a coroutine wouldn't wake up when the client
    closed its end. Epoll was not reporting the event or it was
    getting swallowed somewhere. Then when that file descriptor
    was re-used, eventlet would freak right out because it still
    thought it was waiting for activity from it in some other coro.
    """
    try:
        import select
    except ImportError:
        return None
    # 'selects' is eventlet's name for its select()-based hub
    return "poll" if hasattr(select, "poll") else "selects"
def drop_privileges(user, call_setsid=True):
    """
    Sets the userid/groupid of the current process, get session leader, etc.
    :param user: User name to change privileges to
    :param call_setsid: if True, also become a session leader (best effort)
    """
    if os.geteuid() == 0:
        # only root can change supplementary groups; order matters:
        # groups first, then gid, then uid (dropping uid last, or we'd
        # no longer have permission to do the rest)
        groups = [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
        os.setgroups(groups)
    user = pwd.getpwnam(user)
    os.setgid(user[3])
    os.setuid(user[2])
    os.environ['HOME'] = user[5]
    if call_setsid:
        try:
            os.setsid()
        except OSError:
            # already a session leader -- fine
            pass
    os.chdir('/')   # in case you need to rmdir on where you started the daemon
    os.umask(0o22)  # ensure files are created with the correct privileges
def capture_stdio(logger, **kwargs):
    """
    Log unhandled exceptions, close stdio, capture stdout and stderr.
    param logger: Logger object to use
    :keyword capture_stdout: replace sys.stdout with a LoggerFileObject
                             (default True)
    :keyword capture_stderr: replace sys.stderr with a LoggerFileObject
                             (default True)
    """
    # log uncaught exceptions
    sys.excepthook = lambda * exc_info: \
        logger.critical(_('UNCAUGHT EXCEPTION'), exc_info=exc_info)
    # collect stdio file desc not in use for logging
    stdio_files = [sys.stdin, sys.stdout, sys.stderr]
    console_fds = [h.stream.fileno() for _junk, h in getattr(
        get_logger, 'console_handler4logger', {}).items()]
    stdio_files = [f for f in stdio_files if f.fileno() not in console_fds]
    with open(os.devnull, 'r+b') as nullfile:
        # close stdio (excludes fds open for logging)
        for f in stdio_files:
            # some platforms throw an error when attempting an stdin flush
            try:
                f.flush()
            except IOError:
                pass
            try:
                # point the fd at /dev/null so writes to it are discarded
                os.dup2(nullfile.fileno(), f.fileno())
            except OSError:
                pass
    # redirect stdio
    if kwargs.pop('capture_stdout', True):
        sys.stdout = LoggerFileObject(logger)
    if kwargs.pop('capture_stderr', True):
        sys.stderr = LoggerFileObject(logger, 'STDERR')
def parse_options(parser=None, once=False, test_args=None):
    """
    Parse standard swift server/daemon options with optparse.OptionParser.
    :param parser: OptionParser to use. If not sent one will be created.
    :param once: Boolean indicating the "once" option is available
    :param test_args: Override sys.argv; used in testing
    :returns : Tuple of (config, options); config is an absolute path to the
               config file, options is the parser options as a dictionary.
    :raises SystemExit: First arg (CONFIG) is required, file must exist
    """
    def _usage_exit(message):
        # print usage plus the specific complaint, then bail out
        parser.print_usage()
        print(message)
        sys.exit(1)

    if not parser:
        parser = OptionParser(usage="%prog CONFIG [options]")
    parser.add_option("-v", "--verbose", default=False, action="store_true",
                      help="log to console")
    if once:
        parser.add_option("-o", "--once", default=False, action="store_true",
                          help="only run one pass of daemon")
    # if test_args is None, optparse will use sys.argv[:1]
    options, args = parser.parse_args(args=test_args)
    if not args:
        _usage_exit(_("Error: missing config path argument"))
    config = os.path.abspath(args.pop(0))
    if not os.path.exists(config):
        _usage_exit(_("Error: unable to locate %s") % config)
    # a leftover positional naming a known option flips that option on;
    # anything else is collected verbatim under 'extra_args'
    extra_args = []
    for arg in args:
        if arg in vars(options):
            setattr(options, arg, True)
        else:
            extra_args.append(arg)
    options = vars(options)
    if extra_args:
        options['extra_args'] = extra_args
    return config, options
def expand_ipv6(address):
    """
    Normalize an IPv6 address to its canonical textual form by round-tripping
    it through its packed binary representation.
    :param address: a string indicating valid ipv6 address
    :returns: the canonical string form of the same address
    """
    binary_form = socket.inet_pton(socket.AF_INET6, address)
    return socket.inet_ntop(socket.AF_INET6, binary_form)
def whataremyips(bind_ip=None):
    """
    Get "our" IP addresses ("us" being the set of services configured by
    one `*.conf` file). If our REST listens on a specific address, return it.
    Otherwise, if listen on '0.0.0.0' or '::' return all addresses, including
    the loopback.
    :param str bind_ip: Optional bind_ip from a config file; may be IP address
                        or hostname.
    :returns: list of Strings of ip addresses
    """
    if bind_ip:
        # A concrete (non-wildcard) numeric bind address means we answer
        # only there; a hostname falls through to full enumeration.
        try:
            addrinfo = socket.getaddrinfo(
                bind_ip, None, 0, socket.SOCK_STREAM, 0,
                socket.AI_NUMERICHOST)
            sockaddr = addrinfo[0][4]
            if sockaddr[0] not in ('0.0.0.0', '::'):
                return [bind_ip]
        except socket.gaierror:
            pass
    found = []
    for interface in netifaces.interfaces():
        try:
            for family, addr_list in netifaces.ifaddresses(interface).items():
                if family not in (netifaces.AF_INET, netifaces.AF_INET6):
                    continue
                for entry in addr_list:
                    addr = entry['addr']
                    if family == netifaces.AF_INET6:
                        # strip any %scope-id suffix before normalizing
                        addr = expand_ipv6(addr.split('%')[0])
                    found.append(addr)
        except ValueError:
            pass
    return found
def storage_directory(datadir, partition, name_hash):
    """
    Build the on-disk directory path for a hashed entity:
    <datadir>/<partition>/<last 3 hash chars>/<full hash>.
    :param datadir: Base data directory
    :param partition: Partition
    :param name_hash: Account, container or object name hash
    :returns: Storage directory
    """
    hash_suffix = name_hash[-3:]
    return os.path.join(datadir, str(partition), hash_suffix, name_hash)
def hash_path(account, container=None, object=None, raw_digest=False):
    """
    Get the canonical hash for an account/container/object
    :param account: Account
    :param container: Container
    :param object: Object
    :param raw_digest: If True, return the raw version rather than a hex digest
    :returns: hash string
    :raises ValueError: if object is given without a container
    """
    if object and not container:
        raise ValueError('container is required if object is provided')
    # account is always included, even if falsy; the others only when set
    components = [account]
    components.extend(part for part in (container, object) if part)
    digest = md5(HASH_PATH_PREFIX + '/' + '/'.join(components)
                 + HASH_PATH_SUFFIX)
    return digest.digest() if raw_digest else digest.hexdigest()
@contextmanager
def lock_path(directory, timeout=10, timeout_class=None):
    """
    Context manager that acquires a lock on a directory. This will block until
    the lock can be acquired, or the timeout time has expired (whichever occurs
    first).
    For locking exclusively, file or directory has to be opened in Write mode.
    Python doesn't allow directories to be opened in Write Mode. So we
    workaround by locking a hidden file in the directory.
    :param directory: directory to be locked
    :param timeout: timeout (in seconds)
    :param timeout_class: The class of the exception to raise if the
        lock cannot be granted within the timeout. Will be
        constructed as timeout_class(timeout, lockpath). Default:
        LockTimeout
    """
    if timeout_class is None:
        timeout_class = swift.common.exceptions.LockTimeout
    mkdirs(directory)
    lockpath = '%s/.lock' % directory
    fd = os.open(lockpath, os.O_WRONLY | os.O_CREAT)
    sleep_time = 0.01
    # after roughly 1% of the timeout has been spent polling, fall back to
    # a coarser poll interval to reduce busy-waiting
    slower_sleep_time = max(timeout * 0.01, sleep_time)
    slowdown_at = timeout * 0.01
    time_slept = 0
    try:
        with timeout_class(timeout, lockpath):
            while True:
                try:
                    # non-blocking flock in a sleep loop (instead of a
                    # blocking flock) so eventlet green threads can run
                    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                    break
                except IOError as err:
                    if err.errno != errno.EAGAIN:
                        raise
                if time_slept > slowdown_at:
                    sleep_time = slower_sleep_time
                sleep(sleep_time)
                time_slept += sleep_time
        yield True
    finally:
        # releases the flock as well
        os.close(fd)
@contextmanager
def lock_file(filename, timeout=10, append=False, unlink=True):
    """
    Context manager that acquires a lock on a file. This will block until
    the lock can be acquired, or the timeout time has expired (whichever occurs
    first).
    :param filename: file to be locked
    :param timeout: timeout (in seconds)
    :param append: True if file should be opened in append mode
    :param unlink: True if the file should be unlinked at the end
    """
    flags = os.O_CREAT | os.O_RDWR
    if append:
        flags |= os.O_APPEND
        mode = 'a+'
    else:
        mode = 'r+'
    # outer loop: retry from scratch if the file we locked was replaced or
    # removed (by a previous holder's unlink) while we were waiting
    while True:
        fd = os.open(filename, flags)
        file_obj = os.fdopen(fd, mode)
        try:
            with swift.common.exceptions.LockTimeout(timeout, filename):
                while True:
                    try:
                        # non-blocking flock + sleep so green threads run
                        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                        break
                    except IOError as err:
                        if err.errno != errno.EAGAIN:
                            raise
                    sleep(0.01)
            try:
                # verify the path still names the inode we locked; if not,
                # someone unlinked/replaced it -- start over
                if os.stat(filename).st_ino != os.fstat(fd).st_ino:
                    continue
            except OSError as err:
                if err.errno == errno.ENOENT:
                    continue
                raise
            yield file_obj
            if unlink:
                os.unlink(filename)
            break
        finally:
            file_obj.close()
def lock_parent_directory(filename, timeout=10):
    """
    Context manager that acquires a lock on the parent directory of the given
    file path. This will block until the lock can be acquired, or the timeout
    time has expired (whichever occurs first).
    :param filename: file path of the parent directory to be locked
    :param timeout: timeout (in seconds)
    """
    parent_dir = os.path.dirname(filename)
    return lock_path(parent_dir, timeout=timeout)
def get_time_units(time_amount):
    """
    Get a normalized length of time in the largest unit of time (hours,
    minutes, or seconds.)
    :param time_amount: length of time in seconds
    :returns: A tuple of (length of time, unit of time) where unit of time is
              one of ('h', 'm', 's')
    """
    # promote through seconds -> minutes -> hours while the amount exceeds 60
    for unit in ('s', 'm'):
        if time_amount <= 60:
            return time_amount, unit
        time_amount /= 60
    return time_amount, 'h'
def compute_eta(start_time, current_value, final_value):
    """
    Compute an ETA. Now only if we could also have a progress bar...
    :param start_time: Unix timestamp when the operation began
    :param current_value: Current value
    :param final_value: Final value
    :returns: ETA as a tuple of (length of time, unit of time) where unit of
              time is one of ('h', 'm', 's')
    """
    elapsed = time.time() - start_time
    # guard against zero progress so the division below stays finite
    completion = (float(current_value) / final_value) or 0.00001
    remaining = 1.0 / completion * elapsed - elapsed
    return get_time_units(remaining)
def unlink_older_than(path, mtime):
    """
    Remove any file in a given path that that was last modified before mtime.
    Errors (vanished files, permission problems) are silently ignored.
    :param path: path to remove file from
    :param mtime: timestamp of oldest file to keep
    """
    for fname in listdir(path):
        fpath = os.path.join(path, fname)
        try:
            stale = os.path.getmtime(fpath) < mtime
        except OSError:
            continue
        if stale:
            try:
                os.unlink(fpath)
            except OSError:
                pass
def item_from_env(env, item_name, allow_none=False):
    """
    Get a value from the wsgi environment, logging an error when the item
    is absent (unless allow_none is True).
    :param env: wsgi environment dict
    :param item_name: name of item to get
    :param allow_none: suppress the error log for a missing item
    :returns: the value from the environment, or None
    """
    value = env.get(item_name)
    if value is None and not allow_none:
        logging.error("ERROR: %s could not be found in env!", item_name)
    return value
def cache_from_env(env, allow_none=False):
    """
    Get memcache connection pool from the environment (which had been
    previously set by the memcache middleware).
    :param env: wsgi environment dict
    :param allow_none: suppress the error log if the cache is missing
    :returns: swift.common.memcached.MemcacheRing from environment
    """
    cache_key = 'swift.cache'
    return item_from_env(env, cache_key, allow_none)
def read_conf_dir(parser, conf_dir):
    """Feed every non-hidden ``*.conf`` file in conf_dir to parser.read(),
    in sorted order, and return the list of files successfully read."""
    candidates = sorted(
        os.path.join(conf_dir, entry)
        for entry in os.listdir(conf_dir)
        if entry.endswith('.conf') and not entry.startswith('.'))
    return parser.read(candidates)
def readconf(conf_path, section_name=None, log_name=None, defaults=None,
             raw=False):
    """
    Read config file(s) and return config items as a dict
    :param conf_path: path to config file/directory, or a file-like object
                      (hasattr readline)
    :param section_name: config section to read (will return all sections if
                         not defined)
    :param log_name: name to be used with logging (will use section_name if
                     not defined)
    :param defaults: dict of default values to pre-populate the config with
    :param raw: if True use RawConfigParser (no value interpolation)
    :returns: dict of config items
    """
    if defaults is None:
        defaults = {}
    if raw:
        c = RawConfigParser(defaults)
    else:
        c = ConfigParser(defaults)
    if hasattr(conf_path, 'readline'):
        # file-like object: parse it directly
        c.readfp(conf_path)
    else:
        if os.path.isdir(conf_path):
            # read all configs in directory
            success = read_conf_dir(c, conf_path)
        else:
            success = c.read(conf_path)
        if not success:
            print(_("Unable to read config from %s") % conf_path)
            sys.exit(1)
    if section_name:
        # flat dict of just the requested section
        if c.has_section(section_name):
            conf = dict(c.items(section_name))
        else:
            print(_("Unable to find %s config section in %s") %
                  (section_name, conf_path))
            sys.exit(1)
        if "log_name" not in conf:
            if log_name is not None:
                conf['log_name'] = log_name
            else:
                conf['log_name'] = section_name
    else:
        # no section requested: nested {section: {item: value}} for all
        conf = {}
        for s in c.sections():
            conf.update({s: dict(c.items(s))})
        if 'log_name' not in conf:
            conf['log_name'] = log_name
    # NOTE: for a file-like conf_path this stores the object itself
    conf['__file__'] = conf_path
    return conf
def write_pickle(obj, dest, tmp=None, pickle_protocol=0):
    """
    Ensure that a pickle file gets written to disk. The file
    is first written to a tmp location, ensure it is synced to disk, then
    perform a move to its final location
    :param obj: python object to be pickled
    :param dest: path of final destination file
    :param tmp: path to tmp to use, defaults to None
    :param pickle_protocol: protocol to pickle the obj with, defaults to 0
    """
    # default the scratch directory to the destination's own directory so
    # the final rename stays on one filesystem
    tmp_dir = os.path.dirname(dest) if tmp is None else tmp
    fd, tmppath = mkstemp(dir=tmp_dir, suffix='.tmp')
    with os.fdopen(fd, 'wb') as tmp_file:
        pickle.dump(obj, tmp_file, pickle_protocol)
        tmp_file.flush()
        os.fsync(fd)
    renamer(tmppath, dest)
def search_tree(root, glob_match, ext='', exts=None, dir_ext=None):
    """Look in root, for any files/dirs matching glob, recursively traversing
    any found directories looking for files ending with ext
    :param root: start of search path
    :param glob_match: glob to match in root, matching dirs are traversed with
                       os.walk
    :param ext: only files that end in ext will be returned
    :param exts: a list of file extensions; only files that end in one of these
                 extensions will be returned; if set this list overrides any
                 extension specified using the 'ext' param.
    :param dir_ext: if present directories that end with dir_ext will not be
                    traversed and instead will be returned as a matched path
    :returns: list of full paths to matching files, sorted
    """
    exts = exts or [ext]
    found_files = []
    for path in glob.glob(os.path.join(root, glob_match)):
        if os.path.isdir(path):
            # NOTE: 'root' is rebound by os.walk here, shadowing the param
            for root, dirs, files in os.walk(path):
                if dir_ext and root.endswith(dir_ext):
                    found_files.append(root)
                    # the root is a config dir, descend no further
                    break
                for file_ in files:
                    if any(exts) and not any(file_.endswith(e) for e in exts):
                        continue
                    found_files.append(os.path.join(root, file_))
                found_dir = False
                for dir_ in dirs:
                    if dir_ext and dir_.endswith(dir_ext):
                        found_dir = True
                        found_files.append(os.path.join(root, dir_))
                if found_dir:
                    # do not descend further into matching directories
                    break
        else:
            if ext and not path.endswith(ext):
                continue
            found_files.append(path)
    return sorted(found_files)
def write_file(path, contents):
    """Write contents to file at path
    :param path: any path, subdirs will be created as needed
    :param contents: data to write to file, will be converted to string
    """
    dirname = os.path.dirname(path)
    if not os.path.exists(dirname):
        try:
            os.makedirs(dirname)
        except OSError as err:
            # permission trouble is fatal; anything else falls through and
            # the open() below will surface it
            if err.errno == errno.EACCES:
                sys.exit('Unable to create %s. Running as '
                         'non-root?' % dirname)
    with open(path, 'w') as f:
        f.write('%s' % contents)
def remove_file(path):
    """Best-effort wrapper for os.unlink; any OSError is suppressed.
    :param path: first and only argument passed to os.unlink
    """
    try:
        os.unlink(path)
    except OSError:
        # missing file, permission trouble, etc. -- caller doesn't care
        pass
def audit_location_generator(devices, datadir, suffix='',
                             mount_check=True, logger=None):
    '''
    Given a devices path and a data directory, yield (path, device,
    partition) for all files in that directory
    :param devices: parent directory of the devices to be audited
    :param datadir: a directory located under self.devices. This should be
                    one of the DATADIR constants defined in the account,
                    container, and object servers.
    :param suffix: path name suffix required for all names returned
    :param mount_check: Flag to check if a mount check should be performed
                        on devices
    :param logger: a logger object
    '''
    device_dir = listdir(devices)
    # randomize devices in case of process restart before sweep completed
    shuffle(device_dir)
    for device in device_dir:
        if mount_check and not ismount(os.path.join(devices, device)):
            if logger:
                logger.warning(
                    _('Skipping %s as it is not mounted'), device)
            continue
        datadir_path = os.path.join(devices, device, datadir)
        try:
            partitions = listdir(datadir_path)
        except OSError as e:
            if logger:
                logger.warning('Skipping %s because %s', datadir_path, e)
            continue
        # layout: <devices>/<device>/<datadir>/<partition>/<suffix>/<hash>/
        for partition in partitions:
            part_path = os.path.join(datadir_path, partition)
            try:
                suffixes = listdir(part_path)
            except OSError as e:
                # a plain file at partition level is just skipped
                if e.errno != errno.ENOTDIR:
                    raise
                continue
            for asuffix in suffixes:
                suff_path = os.path.join(part_path, asuffix)
                try:
                    hashes = listdir(suff_path)
                except OSError as e:
                    if e.errno != errno.ENOTDIR:
                        raise
                    continue
                for hsh in hashes:
                    # NOTE: local name shadows the module-level hash_path()
                    hash_path = os.path.join(suff_path, hsh)
                    try:
                        # newest files first within a hash dir
                        files = sorted(listdir(hash_path), reverse=True)
                    except OSError as e:
                        if e.errno != errno.ENOTDIR:
                            raise
                        continue
                    for fname in files:
                        if suffix and not fname.endswith(suffix):
                            continue
                        path = os.path.join(hash_path, fname)
                        yield path, device, partition
def ratelimit_sleep(running_time, max_rate, incr_by=1, rate_buffer=5):
    '''
    Will eventlet.sleep() for the appropriate time so that the max_rate
    is never exceeded. If max_rate is 0, will not ratelimit. The
    maximum recommended rate should not exceed (1000 * incr_by) a second
    as eventlet.sleep() does involve some overhead. Returns running_time
    that should be used for subsequent calls.
    :param running_time: the running time in milliseconds of the next
                         allowable request. Best to start at zero.
    :param max_rate: The maximum rate per second allowed for the process.
    :param incr_by: How much to increment the counter. Useful if you want
                    to ratelimit 1024 bytes/sec and have differing sizes
                    of requests. Must be > 0 to engage rate-limiting
                    behavior.
    :param rate_buffer: Number of seconds the rate counter can drop and be
                        allowed to catch up (at a faster than listed rate).
                        A larger number will result in larger spikes in rate
                        but better average accuracy. Must be > 0 to engage
                        rate-limiting behavior.
    '''
    if max_rate <= 0 or incr_by <= 0:
        # rate limiting disabled; pass the clock through unchanged
        return running_time
    # 1,000 milliseconds = 1 second
    clock_accuracy = 1000.0
    # Convert seconds to milliseconds
    now = time.time() * clock_accuracy
    # Calculate time per request in milliseconds
    time_per_request = clock_accuracy * (float(incr_by) / max_rate)
    # Convert rate_buffer to milliseconds and compare
    if now - running_time > rate_buffer * clock_accuracy:
        # we fell far behind schedule: reset rather than allow a huge burst
        running_time = now
    elif running_time - now > time_per_request:
        # Convert diff back to a floating point number of seconds and sleep
        eventlet.sleep((running_time - now) / clock_accuracy)
    # Return the absolute time for the next interval in milliseconds; note
    # that time could have passed well beyond that point, but the next call
    # will catch that and skip the sleep.
    return running_time + time_per_request
class ContextPool(GreenPool):
    "GreenPool subclassed to kill its coros when it gets gc'ed"

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # snapshot the set first: kill() mutates coroutines_running
        for greenthread in list(self.coroutines_running):
            greenthread.kill()
class GreenAsyncPileWaitallTimeout(Timeout):
    # Distinct Timeout subclass so GreenAsyncPile.waitall() can catch only
    # the timeout it raised itself, not timeouts raised by the jobs it runs.
    pass
class GreenAsyncPile(object):
    """
    Runs jobs in a pool of green threads, and the results can be retrieved by
    using this object as an iterator.
    This is very similar in principle to eventlet.GreenPile, except it returns
    results as they become available rather than in the order they were
    launched.
    Correlating results with jobs (if necessary) is left to the caller.
    """
    def __init__(self, size_or_pool):
        """
        :param size_or_pool: thread pool size or a pool to use
        """
        if isinstance(size_or_pool, GreenPool):
            self._pool = size_or_pool
            size = self._pool.size
        else:
            self._pool = GreenPool(size_or_pool)
            size = size_or_pool
        # queue is bounded to the pool size so producers apply backpressure
        self._responses = eventlet.queue.LightQueue(size)
        # _inflight: jobs spawned but not yet finished
        # _pending: results spawned but not yet consumed by the iterator
        self._inflight = 0
        self._pending = 0
    def _run_func(self, func, args, kwargs):
        # runs inside the pool's green thread
        try:
            self._responses.put(func(*args, **kwargs))
        finally:
            self._inflight -= 1
    def spawn(self, func, *args, **kwargs):
        """
        Spawn a job in a green thread on the pile.
        """
        self._pending += 1
        self._inflight += 1
        self._pool.spawn(self._run_func, func, args, kwargs)
    def waitall(self, timeout):
        """
        Wait timeout seconds for any results to come in.
        :param timeout: seconds to wait for results
        :returns: list of results accrued in that time
        """
        results = []
        try:
            with GreenAsyncPileWaitallTimeout(timeout):
                while True:
                    results.append(next(self))
        except (GreenAsyncPileWaitallTimeout, StopIteration):
            # timed out, or all spawned jobs already consumed
            pass
        return results
    def __iter__(self):
        return self
    def next(self):
        # non-blocking first: only block when a job is still in flight
        try:
            rv = self._responses.get_nowait()
        except eventlet.queue.Empty:
            if self._inflight == 0:
                raise StopIteration()
            rv = self._responses.get()
        self._pending -= 1
        return rv
class ModifiedParseResult(ParseResult):
    "Parse results class for urlparse."

    @property
    def hostname(self):
        # drop any userinfo ('user:pass@') before inspecting the host
        netloc = self.netloc.split('@', 1)[-1]
        if netloc.startswith('['):
            # RFC 2732 bracketed IPv6 literal: take what's inside []
            return netloc[1:].partition(']')[0]
        if ':' in netloc:
            return netloc.rsplit(':')[0]
        return netloc

    @property
    def port(self):
        netloc = self.netloc.split('@', 1)[-1]
        if netloc.startswith('['):
            # strip the bracketed IPv6 literal, keeping any ':port' tail
            netloc = netloc.rsplit(']')[1]
        return int(netloc.rsplit(':')[1]) if ':' in netloc else None
def urlparse(url):
    """
    urlparse augmentation.
    This is necessary because urlparse can't handle RFC 2732 URLs.
    :param url: URL to parse.
    :returns: a ModifiedParseResult, whose hostname/port properties cope
        with bracketed IPv6 netlocs
    """
    return ModifiedParseResult(*stdlib_urlparse(url))
def validate_sync_to(value, allowed_sync_hosts, realms_conf):
    """
    Validates an X-Container-Sync-To header value, returning the
    validated endpoint, realm, and realm_key, or an error string.
    :param value: The X-Container-Sync-To header value to validate.
    :param allowed_sync_hosts: A list of allowed hosts in endpoints,
                               if realms_conf does not apply.
    :param realms_conf: An instance of
                        swift.common.container_sync_realms.ContainerSyncRealms
                        to validate against.
    :returns: A tuple of (error_string, validated_endpoint, realm,
              realm_key). The error_string will be None if the rest of the
              values have been validated. The validated_endpoint will be
              the validated endpoint to sync to. The realm and realm_key
              will be set if validation was done through realms_conf.
    """
    orig_value = value
    value = value.rstrip('/')
    if not value:
        # empty value clears the sync-to setting; nothing to validate
        return (None, None, None, None)
    if value.startswith('//'):
        # "//realm/cluster/account/container" form: validate via realms_conf
        if not realms_conf:
            return (None, None, None, None)
        data = value[2:].split('/')
        if len(data) != 4:
            return (
                _('Invalid X-Container-Sync-To format %r') % orig_value,
                None, None, None)
        realm, cluster, account, container = data
        realm_key = realms_conf.key(realm)
        if not realm_key:
            return (_('No realm key for %r') % realm, None, None, None)
        endpoint = realms_conf.endpoint(realm, cluster)
        if not endpoint:
            return (
                _('No cluster endpoint for %r %r') % (realm, cluster),
                None, None, None)
        return (
            None,
            '%s/%s/%s' % (endpoint.rstrip('/'), account, container),
            realm.upper(), realm_key)
    # otherwise it must be a plain http(s) URL whose host appears in
    # allowed_sync_hosts
    p = urlparse(value)
    if p.scheme not in ('http', 'https'):
        return (
            _('Invalid scheme %r in X-Container-Sync-To, must be "//", '
              '"http", or "https".') % p.scheme,
            None, None, None)
    if not p.path:
        return (_('Path required in X-Container-Sync-To'), None, None, None)
    if p.params or p.query or p.fragment:
        return (
            _('Params, queries, and fragments not allowed in '
              'X-Container-Sync-To'),
            None, None, None)
    if p.hostname not in allowed_sync_hosts:
        return (
            _('Invalid host %r in X-Container-Sync-To') % p.hostname,
            None, None, None)
    return (None, value, None, None)
def affinity_key_function(affinity_str):
    """Turns an affinity config value into a function suitable for passing to
    sort(). After doing so, the array will be sorted with respect to the given
    ordering.
    For example, if affinity_str is "r1=1, r2z7=2, r2z8=2", then the array
    will be sorted with all nodes from region 1 (r1=1) first, then all the
    nodes from region 2 zones 7 and 8 (r2z7=2 and r2z8=2), then everything
    else.
    Note that the order of the pieces of affinity_str is irrelevant; the
    priority values are what comes after the equals sign.
    If affinity_str is empty or all whitespace, then the resulting function
    will not alter the ordering of the nodes.
    :param affinity_str: affinity config value, e.g. "r1z2=3"
                         or "r1=1, r2z1=2, r2z2=2"
    :returns: single-argument function
    :raises: ValueError if argument invalid
    """
    affinity_str = affinity_str.strip()
    if not affinity_str:
        # no affinity configured: constant key leaves sort order unchanged
        return lambda x: 0
    priority_matchers = []
    pieces = [s.strip() for s in affinity_str.split(',')]
    for piece in pieces:
        # matches r<number>=<number> or r<number>z<number>=<number>
        # (raw string: "\d" is a deprecated/invalid escape in a plain
        # string literal on modern Python)
        match = re.match(r"r(\d+)(?:z(\d+))?=(\d+)$", piece)
        if match:
            region, zone, priority = match.groups()
            region = int(region)
            priority = int(priority)
            zone = int(zone) if zone else None
            matcher = {'region': region, 'priority': priority}
            if zone is not None:
                matcher['zone'] = zone
            priority_matchers.append(matcher)
        else:
            raise ValueError("Invalid affinity value: %r" % affinity_str)
    priority_matchers.sort(key=operator.itemgetter('priority'))

    def keyfn(ring_node):
        # first (lowest-priority-value) matcher wins; unmatched nodes sort
        # after all matched ones
        for matcher in priority_matchers:
            if (matcher['region'] == ring_node['region']
                    and ('zone' not in matcher
                         or matcher['zone'] == ring_node['zone'])):
                return matcher['priority']
        return 4294967296  # 2^32, i.e. "a big number"
    return keyfn
def affinity_locality_predicate(write_affinity_str):
    """
    Turns a write-affinity config value into a predicate function for nodes.
    The returned value will be a 1-arg function that takes a node dictionary
    and returns a true value if it is "local" and a false value otherwise. The
    definition of "local" comes from the affinity_str argument passed in here.
    For example, if affinity_str is "r1, r2z2", then only nodes where region=1
    or where (region=2 and zone=2) are considered local.
    If affinity_str is empty or all whitespace, then the resulting function
    will consider everything local
    :param write_affinity_str: affinity config value, e.g. "r1z2"
                               or "r1, r2z1, r2z2"
    :returns: single-argument function, or None if affinity_str is empty
    :raises: ValueError if argument invalid
    """
    affinity_str = write_affinity_str.strip()
    if not affinity_str:
        return None
    matchers = []
    pieces = [s.strip() for s in affinity_str.split(',')]
    for piece in pieces:
        # matches r<number> or r<number>z<number>
        # (raw string: "\d" is a deprecated/invalid escape in a plain
        # string literal on modern Python)
        match = re.match(r"r(\d+)(?:z(\d+))?$", piece)
        if match:
            region, zone = match.groups()
            region = int(region)
            zone = int(zone) if zone else None
            matcher = {'region': region}
            if zone is not None:
                matcher['zone'] = zone
            matchers.append(matcher)
        else:
            raise ValueError("Invalid write-affinity value: %r" % affinity_str)

    def is_local(ring_node):
        for matcher in matchers:
            if (matcher['region'] == ring_node['region']
                    and ('zone' not in matcher
                         or matcher['zone'] == ring_node['zone'])):
                return True
        return False
    return is_local
def get_remote_client(req):
    """Return the best-guess client IP for *req*.

    Preference order: the X-Cluster-Client-Ip header (Zeus), then the first
    entry of X-Forwarded-For (other load balancers), then the socket's
    remote address.
    """
    client = req.headers.get('x-cluster-client-ip')
    if not client:
        forwarded = req.headers.get('x-forwarded-for')
        if forwarded:
            client = forwarded.split(',')[0].strip()
    return client or req.remote_addr
def human_readable(value):
    """
    Returns the number in a human readable format; for example 1048576 = "1Mi".
    """
    magnitude = float(value)
    suffixes = 'KMGTPEZY'
    suffix_index = -1
    # repeatedly scale down by 1024, remembering which suffix we reached
    while magnitude >= 1024 and suffix_index + 1 < len(suffixes):
        suffix_index += 1
        magnitude = round(magnitude / 1024)
    if suffix_index < 0:
        return '%d' % magnitude
    return '%d%si' % (round(magnitude), suffixes[suffix_index])
def put_recon_cache_entry(cache_entry, key, item):
    """
    Function that will check if item is a dict, and if so put it under
    cache_entry[key]. We use nested recon cache entries when the object
    auditor runs in parallel or else in 'once' mode with a specified
    subset of devices.
    """
    if not isinstance(item, dict):
        # scalars just overwrite whatever was there
        cache_entry[key] = item
        return
    existing = cache_entry.get(key)
    if not isinstance(existing, dict):
        # missing or non-dict entry: start a fresh nested dict
        cache_entry[key] = {}
    elif item == {}:
        # an explicit empty dict clears the whole nested entry
        cache_entry.pop(key, None)
        return
    for sub_key, sub_value in item.items():
        if sub_value == {}:
            # empty sub-dict deletes that sub-entry
            cache_entry[key].pop(sub_key, None)
        else:
            cache_entry[key][sub_key] = sub_value
def dump_recon_cache(cache_dict, cache_file, logger, lock_timeout=2):
    """Update recon cache values
    :param cache_dict: Dictionary of cache key/value pairs to write out
    :param cache_file: cache file to update
    :param logger: the logger to use to log an encountered error
    :param lock_timeout: timeout (in seconds) to wait for the cache-file lock
    """
    try:
        with lock_file(cache_file, lock_timeout, unlink=False) as cf:
            cache_entry = {}
            try:
                # merge into whatever JSON entry is already on disk
                existing_entry = cf.readline()
                if existing_entry:
                    cache_entry = json.loads(existing_entry)
            except ValueError:
                # file doesn't have a valid entry, we'll recreate it
                pass
            for cache_key, cache_value in cache_dict.items():
                put_recon_cache_entry(cache_entry, cache_key, cache_value)
            # write to a temp file in the same directory, then atomically
            # rename it over the cache file
            # NOTE(review): if NamedTemporaryFile() itself fails, 'tf' is
            # unbound and the finally raises NameError, which is swallowed
            # by the outer except below — confirm this is intended.
            try:
                with NamedTemporaryFile(dir=os.path.dirname(cache_file),
                                        delete=False) as tf:
                    tf.write(json.dumps(cache_entry) + '\n')
                renamer(tf.name, cache_file, fsync=False)
            finally:
                try:
                    os.unlink(tf.name)
                except OSError as err:
                    if err.errno != errno.ENOENT:
                        raise
    except (Exception, Timeout):
        # best effort: recon cache updates must never take down the caller
        logger.exception(_('Exception dumping recon cache'))
def listdir(path):
    """os.listdir() that treats a missing directory as empty.

    Any other OSError (permissions, not-a-directory, ...) propagates.
    """
    try:
        entries = os.listdir(path)
    except OSError as err:
        if err.errno == errno.ENOENT:
            return []
        raise
    return entries
def streq_const_time(s1, s2):
    """Constant-time string comparison.
    :params s1: the first string
    :params s2: the second string
    :return: True if the strings are equal.
    This function takes two strings and compares them. It is intended to be
    used when doing a comparison for authentication purposes to help guard
    against timing attacks.
    """
    if len(s1) != len(s2):
        return False
    # OR together the XOR of every character pair; any difference leaves a
    # nonzero bit, and the loop always runs over the full length
    diff_bits = 0
    for ch1, ch2 in zip(s1, s2):
        diff_bits |= ord(ch1) ^ ord(ch2)
    return diff_bits == 0
def pairs(item_list):
    """
    Returns an iterator of all pairs of elements from item_list.
    :param item_list: items (no duplicates allowed)
    """
    # all 2-element subsequences, in the same order the nested-loop
    # formulation would produce them
    return itertools.combinations(item_list, 2)
def replication(func):
    """
    Decorator to declare which methods are accessible for different
    type of servers:
    * If option replication_server is None then this decorator
      doesn't matter.
    * If option replication_server is True then ONLY decorated with
      this decorator methods will be started.
    * If option replication_server is False then decorated with this
      decorator methods will NOT be started.
    :param func: function to mark accessible for replication
    :returns: the same function, with a ``replication`` attribute set True
    """
    func.replication = True
    return func
def public(func):
    """
    Decorator to declare which methods are publicly accessible as HTTP
    requests
    :param func: function to make public
    """
    # Mark the original first: functools.wraps copies func.__dict__ onto the
    # wrapper, so the flag shows up on the returned function too.
    func.publicly_accessible = True

    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapped
def quorum_size(n):
    """
    quorum size as it applies to services that use 'replication' for data
    integrity (Account/Container services). Object quorum_size is defined
    on a storage policy basis.
    Number of successful backend requests needed for the proxy to consider
    the client request successful.
    """
    # strict majority: (n + 2) // 2 == n // 2 + 1
    return (n + 2) // 2
def rsync_ip(ip):
    """
    Transform ip string to an rsync-compatible form
    Will return ipv4 addresses unchanged, but will nest ipv6 addresses
    inside square brackets.
    :param ip: an ip string (ipv4 or ipv6)
    :returns: a string ip address
    """
    try:
        socket.inet_pton(socket.AF_INET6, ip)
    except socket.error:
        # not parseable as IPv6 -> it's IPv4; leave untouched
        return ip
    return '[%s]' % ip
def rsync_module_interpolation(template, device):
    """
    Interpolate devices variables inside a rsync module template
    :param template: rsync module template as a string
    :param device: a device from a ring
    :returns: a string with all variables replaced by device attributes
    """
    fields = {
        'ip': rsync_ip(device.get('ip', '')),
        'port': device.get('port', ''),
        'replication_ip': rsync_ip(device.get('replication_ip', '')),
        'replication_port': device.get('replication_port', ''),
        'region': device.get('region', ''),
        'zone': device.get('zone', ''),
        'device': device.get('device', ''),
        'meta': device.get('meta', ''),
    }
    try:
        return template.format(**fields)
    except KeyError as e:
        # template referenced a placeholder we don't provide
        raise ValueError('Cannot interpolate rsync_module, invalid variable: '
                         '%s' % e)
def get_valid_utf8_str(str_or_unicode):
    """
    Get valid parts of utf-8 str from str, unicode and even invalid utf-8 str
    :param str_or_unicode: a string or an unicode which can be invalid utf-8
    :returns: a utf-8-encoded byte string with any invalid sequences
        replaced (via the codecs 'replace' error handler)
    """
    if isinstance(str_or_unicode, six.text_type):
        # encode text down to bytes first so the decode pass below can
        # scrub anything unencodable
        (str_or_unicode, _len) = utf8_encoder(str_or_unicode, 'replace')
    (valid_utf8_str, _len) = utf8_decoder(str_or_unicode, 'replace')
    return valid_utf8_str.encode('utf-8')
def list_from_csv(comma_separated_str):
    """
    Splits the str given and returns a properly stripped list of the comma
    separated values.
    """
    if not comma_separated_str:
        return []
    stripped = (piece.strip() for piece in comma_separated_str.split(','))
    # drop any pieces that were empty or all whitespace
    return [piece for piece in stripped if piece]
def csv_append(csv_string, item):
    """
    Appends an item to a comma-separated string.
    If the comma-separated string is empty/None, just returns item.
    """
    if not csv_string:
        return item
    return ",".join((csv_string, item))
class CloseableChain(object):
    """
    Like itertools.chain, but with a close method that will attempt to invoke
    its sub-iterators' close methods, if any.
    """
    def __init__(self, *iterables):
        self.iterables = iterables

    def __iter__(self):
        return iter(itertools.chain.from_iterable(self.iterables))

    def close(self):
        for iterable in self.iterables:
            closer = getattr(iterable, 'close', None)
            if closer:
                closer()
def reiterate(iterable):
    """
    Consume the first item from an iterator, then re-chain it to the rest of
    the iterator. This is useful when you want to make sure the prologue to
    downstream generators have been executed before continuing.
    :param iterable: an iterable object
    :returns: lists/tuples unchanged; otherwise a CloseableChain of the
        first non-empty chunk plus the rest of the iterator, or [] if the
        iterator was exhausted without yielding a non-empty chunk
    """
    if isinstance(iterable, (list, tuple)):
        return iterable
    else:
        iterator = iter(iterable)
        try:
            chunk = ''
            # skip leading empty chunks so the re-chained iterator starts
            # with real data
            while not chunk:
                chunk = next(iterator)
            return CloseableChain([chunk], iterator)
        except StopIteration:
            return []
class InputProxy(object):
    """
    File-like object that counts bytes read.
    To be swapped in for wsgi.input for accounting purposes.
    """
    def __init__(self, wsgi_input):
        """
        :param wsgi_input: file-like object to wrap the functionality of
        """
        self.wsgi_input = wsgi_input
        # running total of bytes handed out to the caller
        self.bytes_received = 0
        # set True if the underlying stream ever raised while reading
        self.client_disconnect = False

    def read(self, *args, **kwargs):
        """Proxy read() to the wrapped object, accumulating byte counts."""
        try:
            data = self.wsgi_input.read(*args, **kwargs)
        except Exception:
            self.client_disconnect = True
            raise
        self.bytes_received += len(data)
        return data

    def readline(self, *args, **kwargs):
        """Proxy readline() to the wrapped object, accumulating byte counts."""
        try:
            data = self.wsgi_input.readline(*args, **kwargs)
        except Exception:
            self.client_disconnect = True
            raise
        self.bytes_received += len(data)
        return data
class LRUCache(object):
    """
    Decorator for size/time bound memoization that evicts the least
    recently used members.
    """
    # Each cache entry is a 5-element list acting as a node of a doubly
    # linked list; the constants below name its slots.
    PREV, NEXT, KEY, CACHED_AT, VALUE = 0, 1, 2, 3, 4 # link fields
    def __init__(self, maxsize=1000, maxtime=3600):
        # maxsize: max number of cached entries
        # maxtime: max entry age in seconds before it is considered stale
        self.maxsize = maxsize
        self.maxtime = maxtime
        self.reset()
    def reset(self):
        # mapping: key -> link node; head/tail are sentinels bracketing the
        # recency-ordered list (head side = oldest, tail side = newest)
        self.mapping = {}
        self.head = [None, None, None, None, None] # oldest
        self.tail = [self.head, None, None, None, None] # newest
        self.head[self.NEXT] = self.tail
    def set_cache(self, value, *key):
        # Evict from the oldest end until there is room, then append the
        # new link at the tail (most recently used end).
        while len(self.mapping) >= self.maxsize:
            # slots NEXT and KEY of the oldest link
            old_next, old_key = self.head[self.NEXT][self.NEXT:self.NEXT + 2]
            self.head[self.NEXT], old_next[self.PREV] = old_next, self.head
            del self.mapping[old_key]
        last = self.tail[self.PREV]
        link = [last, self.tail, key, time.time(), value]
        self.mapping[key] = last[self.NEXT] = self.tail[self.PREV] = link
        return value
    def get_cached(self, link, *key):
        # Raise KeyError if the entry is stale; otherwise move the link to
        # the tail (mark most recently used) and return its value.
        link_prev, link_next, key, cached_at, value = link
        if cached_at + self.maxtime < time.time():
            raise KeyError('%r has timed out' % (key,))
        link_prev[self.NEXT] = link_next
        link_next[self.PREV] = link_prev
        last = self.tail[self.PREV]
        last[self.NEXT] = self.tail[self.PREV] = link
        link[self.PREV] = last
        link[self.NEXT] = self.tail
        return value
    def __call__(self, f):
        class LRUCacheWrapped(object):
            @functools.wraps(f)
            def __call__(im_self, *key):
                # the head sentinel doubles as the "miss" marker for .get()
                link = self.mapping.get(key, self.head)
                if link is not self.head:
                    try:
                        return self.get_cached(link, *key)
                    except KeyError:
                        # stale entry: fall through and recompute
                        pass
                value = f(*key)
                self.set_cache(value, *key)
                return value
            def size(im_self):
                """
                Return the size of the cache
                """
                return len(self.mapping)
            def reset(im_self):
                return self.reset()
            def get_maxsize(im_self):
                return self.maxsize
            def set_maxsize(im_self, i):
                self.maxsize = i
            def get_maxtime(im_self):
                return self.maxtime
            def set_maxtime(im_self, i):
                self.maxtime = i
            maxsize = property(get_maxsize, set_maxsize)
            maxtime = property(get_maxtime, set_maxtime)
            def __repr__(im_self):
                return '<%s %r>' % (im_self.__class__.__name__, f)
        return LRUCacheWrapped()
def tpool_reraise(func, *args, **kwargs):
    """
    Hack to work around Eventlet's tpool not catching and reraising Timeouts.
    """
    def _capture():
        # Return (never raise) so the exception survives the tpool hop.
        try:
            return func(*args, **kwargs)
        except BaseException as err:
            return err
    outcome = tpool.execute(_capture)
    if isinstance(outcome, BaseException):
        raise outcome
    return outcome
class ThreadPool(object):
    """
    Perform blocking operations in background threads.
    Call its methods from within greenlets to green-wait for results without
    blocking the eventlet reactor (hopefully).
    """
    # single byte written to the wake-up pipe after each completed job
    BYTE = 'a'.encode('utf-8')
    def __init__(self, nthreads=2):
        """
        :param nthreads: number of worker OS threads to spawn; if <= 0, no
            threads or pipes are created and run_in_thread() degenerates to
            calling the function inline (see run_in_thread).
        """
        self.nthreads = nthreads
        self._run_queue = stdlib_queue.Queue()
        self._result_queue = stdlib_queue.Queue()
        self._threads = []
        self._alive = True
        if nthreads <= 0:
            return
        # We spawn a greenthread whose job it is to pull results from the
        # worker threads via a real Queue and send them to eventlet Events so
        # that the calling greenthreads can be awoken.
        #
        # Since each OS thread has its own collection of greenthreads, it
        # doesn't work to have the worker thread send stuff to the event, as
        # it then notifies its own thread-local eventlet hub to wake up, which
        # doesn't do anything to help out the actual calling greenthread over
        # in the main thread.
        #
        # Thus, each worker sticks its results into a result queue and then
        # writes a byte to a pipe, signaling the result-consuming greenlet (in
        # the main thread) to wake up and consume results.
        #
        # This is all stuff that eventlet.tpool does, but that code can't have
        # multiple instances instantiated. Since the object server uses one
        # pool per disk, we have to reimplement this stuff.
        _raw_rpipe, self.wpipe = os.pipe()
        self.rpipe = greenio.GreenPipe(_raw_rpipe, 'rb')
        for _junk in range(nthreads):
            thr = stdlib_threading.Thread(
                target=self._worker,
                args=(self._run_queue, self._result_queue))
            thr.daemon = True
            thr.start()
            self._threads.append(thr)
        # This is the result-consuming greenthread that runs in the main OS
        # thread, as described above.
        self._consumer_coro = greenthread.spawn_n(self._consume_results,
                                                  self._result_queue)
    def _worker(self, work_queue, result_queue):
        """
        Pulls an item from the queue and runs it, then puts the result into
        the result queue. Repeats forever.
        :param work_queue: queue from which to pull work
        :param result_queue: queue into which to place results
        """
        while True:
            item = work_queue.get()
            if item is None:
                # sentinel enqueued by terminate(): this worker should exit
                break
            ev, func, args, kwargs = item
            try:
                result = func(*args, **kwargs)
                result_queue.put((ev, True, result))
            except BaseException:
                result_queue.put((ev, False, sys.exc_info()))
            finally:
                work_queue.task_done()
                # wake the consumer greenthread in the main OS thread
                os.write(self.wpipe, self.BYTE)
    def _consume_results(self, queue):
        """
        Runs as a greenthread in the same OS thread as callers of
        run_in_thread().
        Takes results from the worker OS threads and sends them to the waiting
        greenthreads.
        """
        while True:
            try:
                self.rpipe.read(1)
            except ValueError:
                # can happen at process shutdown when pipe is closed
                break
            # drain everything currently queued; more bytes on the pipe will
            # wake us again for later results
            while True:
                try:
                    ev, success, result = queue.get(block=False)
                except stdlib_queue.Empty:
                    break
                try:
                    if success:
                        ev.send(result)
                    else:
                        ev.send_exception(*result)
                finally:
                    queue.task_done()
    def run_in_thread(self, func, *args, **kwargs):
        """
        Runs ``func(*args, **kwargs)`` in a thread. Blocks the current greenlet
        until results are available.
        Exceptions thrown will be reraised in the calling thread.
        If the threadpool was initialized with nthreads=0, it invokes
        ``func(*args, **kwargs)`` directly, followed by eventlet.sleep() to
        ensure the eventlet hub has a chance to execute. It is more likely the
        hub will be invoked when queuing operations to an external thread.
        :returns: result of calling func
        :raises: whatever func raises
        """
        if not self._alive:
            raise swift.common.exceptions.ThreadPoolDead()
        if self.nthreads <= 0:
            result = func(*args, **kwargs)
            sleep()
            return result
        ev = event.Event()
        self._run_queue.put((ev, func, args, kwargs), block=False)
        # blocks this greenlet (and only *this* greenlet) until the real
        # thread calls ev.send().
        result = ev.wait()
        return result
    def _run_in_eventlet_tpool(self, func, *args, **kwargs):
        """
        Really run something in an external thread, even if we haven't got any
        threads of our own.
        """
        def inner():
            # capture success/failure as a value so it survives the tpool hop
            try:
                return (True, func(*args, **kwargs))
            except (Timeout, BaseException) as err:
                return (False, err)
        success, result = tpool.execute(inner)
        if success:
            return result
        else:
            raise result
    def force_run_in_thread(self, func, *args, **kwargs):
        """
        Runs ``func(*args, **kwargs)`` in a thread. Blocks the current greenlet
        until results are available.
        Exceptions thrown will be reraised in the calling thread.
        If the threadpool was initialized with nthreads=0, uses eventlet.tpool
        to run the function. This is in contrast to run_in_thread(), which
        will (in that case) simply execute func in the calling thread.
        :returns: result of calling func
        :raises: whatever func raises
        """
        if not self._alive:
            raise swift.common.exceptions.ThreadPoolDead()
        if self.nthreads <= 0:
            return self._run_in_eventlet_tpool(func, *args, **kwargs)
        else:
            return self.run_in_thread(func, *args, **kwargs)
    def terminate(self):
        """
        Releases the threadpool's resources (OS threads, greenthreads, pipes,
        etc.) and renders it unusable.
        Don't call run_in_thread() or force_run_in_thread() after calling
        terminate().
        """
        self._alive = False
        if self.nthreads <= 0:
            return
        # one None sentinel per worker makes each exit its loop
        for _junk in range(self.nthreads):
            self._run_queue.put(None)
        for thr in self._threads:
            thr.join()
        self._threads = []
        self.nthreads = 0
        greenthread.kill(self._consumer_coro)
        self.rpipe.close()
        os.close(self.wpipe)
def ismount(path):
    """
    Test whether a path is a mount point. This will catch any
    exceptions and translate them into a False return value
    Use ismount_raw to have the exceptions raised instead.
    :param path: path to check
    :returns: True if path is a mount point; False otherwise, including
        when the underlying stat calls raise OSError
    """
    try:
        return ismount_raw(path)
    except OSError:
        return False
def ismount_raw(path):
    """
    Test whether a path is a mount point. Whereas ismount will catch
    any exceptions and just return False, this raw version will not
    catch exceptions.
    This is code hijacked from C Python 2.6.8, adapted to remove the extra
    lstat() system call.
    """
    try:
        path_stat = os.lstat(path)
    except os.error as err:
        if err.errno != errno.ENOENT:
            raise
        # It doesn't exist -- so not a mount point :-)
        return False
    if stat.S_ISLNK(path_stat.st_mode):
        # A symlink can never be a mount point
        return False
    parent_stat = os.lstat(os.path.join(path, '..'))
    if path_stat.st_dev != parent_stat.st_dev:
        # path/.. on a different device as path
        return True
    if path_stat.st_ino == parent_stat.st_ino:
        # path/.. is the same i-node as path (e.g. '/')
        return True
    return False
def close_if_possible(maybe_closable):
    # Call close() if the object has a callable one; otherwise return None.
    closer = getattr(maybe_closable, 'close', None)
    if callable(closer):
        return closer()
@contextmanager
def closing_if_possible(maybe_closable):
    """
    Like contextlib.closing(), but doesn't crash if the object lacks a close()
    method.
    PEP 333 (WSGI) says: "If the iterable returned by the application has a
    close() method, the server or gateway must call that method upon
    completion of the current request[.]" This function makes that easier.
    :param maybe_closable: object that may or may not have a close() method
    """
    try:
        yield maybe_closable
    finally:
        close_if_possible(maybe_closable)
# An RFC 2616 "token": a run of characters excluding controls, whitespace,
# and the listed separators.
_rfc_token = r'[^()<>@,;:\"/\[\]?={}\x00-\x20\x7f]+'
# One ";key=value" content-type parameter; the value may be a token or a
# quoted-string with backslash escapes (value group optional).
_rfc_extension_pattern = re.compile(
    r'(?:\s*;\s*(' + _rfc_token + r")\s*(?:=\s*(" + _rfc_token +
    r'|"(?:[^"\\]|\\.)*"))?)')
# A simple "bytes <start>-<end>/<total>" Content-Range value.
_content_range_pattern = re.compile(r'^bytes (\d+)-(\d+)/(\d+)$')
def parse_content_range(content_range):
    """
    Parse a content-range header into (first_byte, last_byte, total_size).
    See RFC 7233 section 4.2 for details on the header format, but it's
    basically "Content-Range: bytes ${start}-${end}/${total}".
    :param content_range: Content-Range header value to parse,
        e.g. "bytes 100-1249/49004"
    :returns: 3-tuple (start, end, total)
    :raises: ValueError if malformed
    """
    match = _content_range_pattern.search(content_range)
    if match is None:
        raise ValueError("malformed Content-Range %r" % (content_range,))
    start, end, total = (int(group) for group in match.groups())
    return (start, end, total)
def parse_content_type(content_type):
    """
    Parse a content-type and its parameters into values.
    RFC 2616 sec 14.17 and 3.7 are pertinent.
    **Examples**::
        'text/plain; charset=UTF-8' -> ('text/plain', [('charset, 'UTF-8')])
        'text/plain; charset=UTF-8; level=1' ->
            ('text/plain', [('charset, 'UTF-8'), ('level', '1')])
    :param content_type: content_type to parse
    :returns: a tuple containing (content type, list of k, v parameter tuples)
    """
    params = []
    if ';' not in content_type:
        return content_type, params
    content_type, _semi, rest = content_type.partition(';')
    # re-prefix the ';' so the extension pattern matches each parameter
    for groups in _rfc_extension_pattern.findall(';' + rest):
        params.append((groups[0].strip(), groups[1].strip()))
    return content_type, params
def override_bytes_from_content_type(listing_dict, logger=None):
    """
    Takes a dict from a container listing and overrides the content_type,
    bytes fields if swift_bytes is set.
    """
    base_type, params = parse_content_type(listing_dict['content_type'])
    for param_key, param_value in params:
        if param_key != 'swift_bytes':
            # keep every other parameter on the content type
            base_type += ';%s=%s' % (param_key, param_value)
            continue
        try:
            listing_dict['bytes'] = int(param_value)
        except ValueError:
            if logger:
                logger.exception("Invalid swift_bytes")
    listing_dict['content_type'] = base_type
def clean_content_type(value):
    # Strip a trailing ";swift_bytes=N" parameter (internal bookkeeping)
    # from a content-type string, leaving everything else untouched.
    if ';' not in value:
        return value
    base, _semi, last_param = value.rpartition(';')
    if last_param.lstrip().startswith('swift_bytes='):
        return base
    return value
def quote(value, safe='/'):
    """
    Patched version of urllib.quote that encodes utf-8 strings before quoting
    :param value: str or unicode to quote (scrubbed to valid utf-8 first)
    :param safe: characters to leave unquoted, as in urllib.quote
    """
    return _quote(get_valid_utf8_str(value), safe)
def get_expirer_container(x_delete_at, expirer_divisor, acc, cont, obj):
    """
    Returns a expiring object container name for given X-Delete-At and
    a/c/o.
    """
    shard_int = int(hash_path(acc, cont, obj), 16) % 100
    # Use floor division: on Python 3 "/" is true division and would turn
    # the bucketed timestamp into a float (and round up instead of down);
    # "//" matches the intended integer bucketing on both py2 and py3.
    return normalize_delete_at_timestamp(
        int(x_delete_at) // expirer_divisor * expirer_divisor - shard_int)
class _MultipartMimeFileLikeObject(object):
    """
    File-like view over one MIME part of a multipart stream.

    Reads from wsgi_input, buffering in input_buffer, and stops handing out
    data once the part's boundary is reached; leftover buffered data (for
    the next part) stays in input_buffer for the caller to pass along.
    """
    def __init__(self, wsgi_input, boundary, input_buffer, read_chunk_size):
        # True once this part's data is exhausted (boundary seen)
        self.no_more_data_for_this_file = False
        # True once the whole stream is exhausted or final "--" seen
        self.no_more_files = False
        self.wsgi_input = wsgi_input
        self.boundary = boundary
        self.input_buffer = input_buffer
        self.read_chunk_size = read_chunk_size
    def read(self, length=None):
        """Read up to *length* bytes of this part (default read_chunk_size)."""
        if not length:
            length = self.read_chunk_size
        if self.no_more_data_for_this_file:
            return ''
        # read enough data to know whether we're going to run
        # into a boundary in next [length] bytes
        if len(self.input_buffer) < length + len(self.boundary) + 2:
            to_read = length + len(self.boundary) + 2
            while to_read > 0:
                try:
                    chunk = self.wsgi_input.read(to_read)
                except (IOError, ValueError) as e:
                    raise swift.common.exceptions.ChunkReadError(str(e))
                to_read -= len(chunk)
                self.input_buffer += chunk
                if not chunk:
                    # EOF on the underlying stream
                    self.no_more_files = True
                    break
        boundary_pos = self.input_buffer.find(self.boundary)
        # boundary does not exist in the next (length) bytes
        if boundary_pos == -1 or boundary_pos > length:
            ret = self.input_buffer[:length]
            self.input_buffer = self.input_buffer[length:]
        # if it does, just return data up to the boundary
        else:
            ret, self.input_buffer = self.input_buffer.split(self.boundary, 1)
            # "--" right after the boundary marks the end of the document
            self.no_more_files = self.input_buffer.startswith('--')
            self.no_more_data_for_this_file = True
            # drop the 2 chars following the boundary ("\r\n" or "--")
            self.input_buffer = self.input_buffer[2:]
        return ret
    def readline(self):
        """Read one CRLF-terminated line of this part (or up to a boundary)."""
        if self.no_more_data_for_this_file:
            return ''
        boundary_pos = newline_pos = -1
        # buffer until we have either a newline or a boundary in view
        while newline_pos < 0 and boundary_pos < 0:
            try:
                chunk = self.wsgi_input.read(self.read_chunk_size)
            except (IOError, ValueError) as e:
                raise swift.common.exceptions.ChunkReadError(str(e))
            self.input_buffer += chunk
            newline_pos = self.input_buffer.find('\r\n')
            boundary_pos = self.input_buffer.find(self.boundary)
            if not chunk:
                self.no_more_files = True
                break
        # found a newline
        if newline_pos >= 0 and \
                (boundary_pos < 0 or newline_pos < boundary_pos):
            # Use self.read to ensure any logic there happens...
            ret = ''
            to_read = newline_pos + 2
            while to_read > 0:
                chunk = self.read(to_read)
                # Should never happen since we're reading from input_buffer,
                # but just for completeness...
                if not chunk:
                    break
                to_read -= len(chunk)
                ret += chunk
            return ret
        else:  # no newlines, just return up to next boundary
            return self.read(len(self.input_buffer))
def iter_multipart_mime_documents(wsgi_input, boundary, read_chunk_size=4096):
    """
    Given a multi-part-mime-encoded input file object and boundary,
    yield file-like objects for each part. Note that this does not
    split each part into headers and body; the caller is responsible
    for doing that if necessary.
    :param wsgi_input: The file-like object to read from.
    :param boundary: The mime boundary to separate new file-like
                     objects on.
    :param read_chunk_size: size of reads against wsgi_input
    :returns: A generator of file-like objects for each part.
    :raises: MimeInvalid if the document is malformed
    """
    boundary = '--' + boundary
    blen = len(boundary) + 2  # \r\n
    try:
        got = wsgi_input.readline(blen)
        # skip any blank lines before the first boundary
        while got == '\r\n':
            got = wsgi_input.readline(blen)
    except (IOError, ValueError) as e:
        raise swift.common.exceptions.ChunkReadError(str(e))
    if got.strip() != boundary:
        # BUGFIX: the tuple was previously passed as a second positional
        # argument, so the %r placeholders were never interpolated into
        # the exception message.
        raise swift.common.exceptions.MimeInvalid(
            'invalid starting boundary: wanted %r, got %r' % (boundary, got))
    boundary = '\r\n' + boundary
    input_buffer = ''
    done = False
    while not done:
        it = _MultipartMimeFileLikeObject(wsgi_input, boundary, input_buffer,
                                          read_chunk_size)
        yield it
        done = it.no_more_files
        # leftover buffered data belongs to the next part
        input_buffer = it.input_buffer
def parse_mime_headers(doc_file):
    """
    Takes a file-like object containing a MIME document and returns a
    HeaderKeyDict containing the headers. The body of the message is not
    consumed: the position in doc_file is left at the beginning of the body.
    This function was inspired by the Python standard library's
    http.client.parse_headers.
    :param doc_file: binary file-like object containing a MIME document
    :returns: a swift.common.swob.HeaderKeyDict containing the headers
    """
    from swift.common.swob import HeaderKeyDict  # avoid circular import
    headers = []
    while True:
        line = doc_file.readline()
        headers.append(line)
        # stop at the blank line separating headers from body (or at EOF)
        if line in (b'\r\n', b'\n', b''):
            break
    header_string = b''.join(headers)
    if not isinstance(header_string, str):
        # On Python 3 email.parser.Parser.parsestr() requires text, but we
        # read bytes; latin-1 maps every byte 1:1 to a code point, so the
        # decode is lossless for arbitrary header octets. On Python 2
        # bytes is str and this branch is skipped.
        header_string = header_string.decode('latin1')
    return HeaderKeyDict(email.parser.Parser().parsestr(header_string))
def mime_to_document_iters(input_file, boundary, read_chunk_size=4096):
    """
    Takes a file-like object containing a multipart MIME document and
    returns an iterator of (headers, body-file) tuples.
    :param input_file: file-like object with the MIME doc in it
    :param boundary: MIME boundary, sans dashes
                     (e.g. "divider", not "--divider")
    :param read_chunk_size: size of strings read via input_file.read()
    """
    doc_files = iter_multipart_mime_documents(input_file, boundary,
                                              read_chunk_size)
    # (previously used enumerate(), but the index was never used)
    for doc_file in doc_files:
        # this consumes the headers and leaves just the body in doc_file
        headers = parse_mime_headers(doc_file)
        yield (headers, doc_file)
def maybe_multipart_byteranges_to_document_iters(app_iter, content_type):
    """
    Takes an iterator that may or may not contain a multipart MIME document
    as well as content type and returns an iterator of body iterators.
    :param app_iter: iterator that may contain a multipart MIME document
    :param content_type: content type of the app_iter, used to determine
                         whether it conains a multipart document and, if
                         so, what the boundary is between documents
    """
    content_type, params_list = parse_content_type(content_type)
    if content_type != 'multipart/byteranges':
        # not multipart: the whole app_iter is the single body
        yield app_iter
        return
    body_file = FileLikeIter(app_iter)
    boundary = dict(params_list)['boundary']
    for _headers, body in mime_to_document_iters(body_file, boundary):
        # read each part body in 64KiB chunks until exhausted
        yield (chunk for chunk in iter(lambda: body.read(65536), ''))
def document_iters_to_multipart_byteranges(ranges_iter, boundary):
    """
    Takes an iterator of range iters and yields a multipart/byteranges MIME
    document suitable for sending as the body of a multi-range 206 response.
    See document_iters_to_http_response_body for parameter descriptions.
    """
    divider = "--" + boundary + "\r\n"
    terminator = "--" + boundary + "--"
    for range_spec in ranges_iter:
        entity_length = range_spec.get("entity_length", "*")
        # Part header: divider line, Content-Type and Content-Range headers,
        # then the blank line that ends the headers.
        yield ''.join((
            divider,
            "Content-Type: ", str(range_spec["content_type"]), "\r\n",
            "Content-Range: ", "bytes %d-%d/%s\r\n" % (
                range_spec["start_byte"], range_spec["end_byte"],
                entity_length),
            "\r\n"
        ))
        for chunk in range_spec["part_iter"]:
            yield chunk
        yield "\r\n"
    yield terminator
def document_iters_to_http_response_body(ranges_iter, boundary, multipart,
                                         logger):
    """
    Takes an iterator of range iters and turns it into an appropriate
    HTTP response body, whether that's multipart/byteranges or not.

    This is almost, but not quite, the inverse of
    http_response_to_document_iters(). This function only yields chunks of
    the body, not any headers.

    :param ranges_iter: an iterator of dictionaries, one per range.
        Each dictionary must contain at least the following key:
        "part_iter": iterator yielding the bytes in the range

        Additionally, if multipart is True, then the following other keys
        are required:

        "start_byte": index of the first byte in the range
        "end_byte": index of the last byte in the range
        "content_type": value for the range's Content-Type header

        Finally, there is one optional key that is used in the
        multipart/byteranges case:

        "entity_length": length of the requested entity (not necessarily
        equal to the response length). If omitted, "*" will be used.

        Each part_iter will be exhausted prior to calling next(ranges_iter).

    :param boundary: MIME boundary to use, sans dashes (e.g. "boundary", not
        "--boundary").
    :param multipart: True if the response should be multipart/byteranges,
        False otherwise. This should be True if and only if you have 2 or
        more ranges.
    :param logger: a logger
    :returns: an iterable of body chunks, or '' if ranges_iter was empty
    """
    if multipart:
        return document_iters_to_multipart_byteranges(ranges_iter, boundary)
    else:
        try:
            response_body_iter = next(ranges_iter)['part_iter']
        except StopIteration:
            return ''

        # We need to make sure ranges_iter does not get garbage-collected
        # before response_body_iter is exhausted. The reason is that
        # ranges_iter has a finally block that calls close_swift_conn, and
        # so if that finally block fires before we read response_body_iter,
        # there's nothing there.
        def string_along(useful_iter, useless_iter_iter, logger):
            for x in useful_iter:
                yield x

            try:
                next(useless_iter_iter)
            except StopIteration:
                pass
            else:
                # Logger.warn() is a deprecated alias of warning(); use the
                # canonical spelling.
                logger.warning(
                    "More than one part in a single-part response?")
        return string_along(response_body_iter, ranges_iter, logger)
def multipart_byteranges_to_document_iters(input_file, boundary,
                                           read_chunk_size=4096):
    """
    Takes a file-like object containing a multipart/byteranges MIME document
    (see RFC 7233, Appendix A) and returns an iterator of (first-byte,
    last-byte, length, document-headers, body-file) 5-tuples.

    :param input_file: file-like object with the MIME doc in it
    :param boundary: MIME boundary, sans dashes
                     (e.g. "divider", not "--divider")
    :param read_chunk_size: size of strings read via input_file.read()
    """
    doc_iter = mime_to_document_iters(input_file, boundary, read_chunk_size)
    for doc_headers, doc_body in doc_iter:
        # Byte positions come from each part's own Content-Range header.
        first_byte, last_byte, total_length = parse_content_range(
            doc_headers.get('content-range'))
        yield (first_byte, last_byte, total_length,
               doc_headers.items(), doc_body)
def http_response_to_document_iters(response, read_chunk_size=4096):
    """
    Takes a successful object-GET HTTP response and turns it into an
    iterator of (first-byte, last-byte, length, headers, body-file)
    5-tuples.

    The response must either be a 200 or a 206; if you feed in a 204 or
    something similar, this probably won't work.

    :param response: HTTP response, like from bufferedhttp.http_connect(),
                     not a swob.Response.
    """
    if response.status == 200:
        # A plain 200: one "range" that spans the entire object.
        whole_length = int(response.getheader('Content-Length'))
        return iter([(0, whole_length - 1, whole_length,
                      response.getheaders(), response)])

    mime_type, params_list = parse_content_type(
        response.getheader('Content-Type'))
    if mime_type != 'multipart/byteranges':
        # Single range; no MIME framing, just the bytes. The start and end
        # byte indices are in the Content-Range header.
        start, end, length = parse_content_range(
            response.getheader('Content-Range'))
        return iter([(start, end, length, response.getheaders(), response)])

    # Multiple ranges; the response body is a multipart/byteranges MIME
    # document, and we have to parse it using the MIME boundary
    # extracted from the Content-Type header.
    return multipart_byteranges_to_document_iters(
        response, dict(params_list)['boundary'], read_chunk_size)
#: Regular expression to match form attributes.
ATTRIBUTES_RE = re.compile(r'(\w+)=(".*?"|[^";]+)(; ?|$)')


def parse_content_disposition(header):
    """
    Given the value of a header like:

    Content-Disposition: form-data; name="somefile"; filename="test.html"

    Return data like

    ("form-data", {"name": "somefile", "filename": "test.html"})

    :param header: Value of a header (the part after the ': ').

    :returns: (value name, dict) of the attribute data parsed (see above).
    """
    parsed_attrs = {}
    remainder = ''
    if ';' in header:
        header, remainder = [piece.strip() for piece in header.split(';', 1)]

    # Peel one "key=value" attribute off the front of the string per match.
    match = ATTRIBUTES_RE.match(remainder)
    while match:
        remainder = remainder[match.end():]
        parsed_attrs[match.group(1)] = match.group(2).strip('"')
        match = ATTRIBUTES_RE.match(remainder)
    return header, parsed_attrs
class sockaddr_alg(ctypes.Structure):
    # ctypes counterpart of the Linux kernel's struct sockaddr_alg, used
    # below (get_md5_socket) to bind an AF_ALG socket to a hash algorithm.
    # The field sizes are part of the kernel ABI -- do not alter them.
    _fields_ = [("salg_family", ctypes.c_ushort),
                ("salg_type", ctypes.c_ubyte * 14),    # e.g. "hash", NUL-padded
                ("salg_feat", ctypes.c_uint),
                ("salg_mask", ctypes.c_uint),
                ("salg_name", ctypes.c_ubyte * 64)]    # e.g. "md5", NUL-padded
# Lazily-initialized, process-wide bound AF_ALG socket; see get_md5_socket().
_bound_md5_sockfd = None


def get_md5_socket():
    """
    Get an MD5 socket file descriptor. One can MD5 data with it by writing it
    to the socket with os.write, then os.read the 16 bytes of the checksum out
    later.

    NOTE: It is the caller's responsibility to ensure that os.close() is
    called on the returned file descriptor. This is a bare file descriptor,
    not a Python object. It doesn't close itself.

    :returns: an integer file descriptor for a fresh MD5 hashing socket
    :raises IOError: if the underlying socket()/bind()/accept() call fails
    """
    # Linux's AF_ALG sockets work like this:
    #
    # First, initialize a socket with socket() and bind(). This tells the
    # socket what algorithm to use, as well as setting up any necessary bits
    # like crypto keys. Of course, MD5 doesn't need any keys, so it's just the
    # algorithm name.
    #
    # Second, to hash some data, get a second socket by calling accept() on
    # the first socket. Write data to the socket, then when finished, read the
    # checksum from the socket and close it. This lets you checksum multiple
    # things without repeating all the setup code each time.
    #
    # Since we only need to bind() one socket, we do that here and save it for
    # future re-use. That way, we only use one file descriptor to get an MD5
    # socket instead of two, and we also get to save some syscalls.

    global _bound_md5_sockfd
    global _libc_socket
    global _libc_bind
    global _libc_accept

    # The libc entry points are resolved once and cached at module level.
    if _libc_accept is None:
        _libc_accept = load_libc_function('accept', fail_if_missing=True)
    if _libc_socket is None:
        _libc_socket = load_libc_function('socket', fail_if_missing=True)
    if _libc_bind is None:
        _libc_bind = load_libc_function('bind', fail_if_missing=True)

    # Do this at first call rather than at import time so that we don't use a
    # file descriptor on systems that aren't using any MD5 sockets.
    if _bound_md5_sockfd is None:
        # salg_type is "hash" and salg_name is "md5", both NUL-terminated.
        sockaddr_setup = sockaddr_alg(
            AF_ALG,
            (ord('h'), ord('a'), ord('s'), ord('h'), 0),
            0, 0,
            (ord('m'), ord('d'), ord('5'), 0))
        hash_sockfd = _libc_socket(ctypes.c_int(AF_ALG),
                                   ctypes.c_int(socket.SOCK_SEQPACKET),
                                   ctypes.c_int(0))
        if hash_sockfd < 0:
            raise IOError(ctypes.get_errno(),
                          "Failed to initialize MD5 socket")

        bind_result = _libc_bind(ctypes.c_int(hash_sockfd),
                                 ctypes.pointer(sockaddr_setup),
                                 ctypes.c_int(ctypes.sizeof(sockaddr_alg)))
        if bind_result < 0:
            # Don't leak the fd if bind() fails.
            os.close(hash_sockfd)
            raise IOError(ctypes.get_errno(), "Failed to bind MD5 socket")

        _bound_md5_sockfd = hash_sockfd

    # Each caller gets its own accept()ed fd; the bound one is never returned.
    md5_sockfd = _libc_accept(ctypes.c_int(_bound_md5_sockfd), None, 0)
    if md5_sockfd < 0:
        raise IOError(ctypes.get_errno(), "Failed to accept MD5 socket")

    return md5_sockfd
| {
"content_hash": "3089f2bffd6759e969a29bdfdc79f01e",
"timestamp": "",
"source": "github",
"line_count": 3711,
"max_line_length": 79,
"avg_line_length": 34.65642683912692,
"alnum_prop": 0.5988492341186533,
"repo_name": "thiagodasilva/swift",
"id": "d6cc5d7afb4700ff218e30ff5845c33f7cb8c387",
"size": "129205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "swift/common/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6810284"
},
{
"name": "Shell",
"bytes": "1452"
}
],
"symlink_target": ""
} |
"""Canonicalizes for loops into while loops.
This canonicalizer uses the len function on its argument. That should be
converted to a tf.shape separately.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.py2tf.pyct import anno
from tensorflow.contrib.py2tf.pyct import templates
from tensorflow.contrib.py2tf.pyct import transformer
from tensorflow.contrib.py2tf.pyct.static_analysis.annos import NodeAnno
class ForLoopCanonicalizationTransformer(transformer.Base):
  """Canonicalizes for loops (e.g. into while loops)."""

  def __init__(self, context):
    super(ForLoopCanonicalizationTransformer, self).__init__(context)

  def visit_For(self, node):
    self.generic_visit(node)
    body_scope = anno.getanno(node, NodeAnno.BODY_SCOPE)
    # Fresh symbols for the counter, the bound and the iterated value so
    # they cannot collide with anything referenced in the loop body.
    counter_name = self.context.namer.new_symbol('i', body_scope.referenced)
    bound_name = self.context.namer.new_symbol('n', body_scope.referenced)
    iterated_name = self.context.namer.new_symbol('iterated',
                                                  body_scope.referenced)
    # TODO(mdan): Use TensorListFromTensor(loop_iter) here.
    replacement_kwargs = dict(
        loop_iter=node.iter,
        target=node.target,
        body=node.body,
        i=counter_name,
        n=bound_name,
        iterated=iterated_name)
    if anno.hasanno(node, 'extra_cond'):
      template = """
        i = 0
        iterated = loop_iter
        n = len(iterated)
        while i < n and extra_cond:
          target = iterated[i]
          body
          i += 1
      """
      replacement_kwargs['extra_cond'] = anno.getanno(node, 'extra_cond')
      return templates.replace(template, **replacement_kwargs)
    else:
      template = """
        i = 0
        iterated = loop_iter
        n = len(iterated)
        while i < n:
          target = iterated[i]
          body
          i += 1
      """
      return templates.replace(template, **replacement_kwargs)

  def visit_Continue(self, node):
    assert False, 'continue statement should be desugared at this point'

  def visit_Break(self, node):
    assert False, 'break statement should be desugared at this point'
def transform(node, context):
  """Entry point: canonicalize every for loop found under `node`."""
  canonicalizer = ForLoopCanonicalizationTransformer(context)
  return canonicalizer.visit(node)
| {
"content_hash": "05bb3573b07eea21b128083714d5fda8",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 72,
"avg_line_length": 30.76923076923077,
"alnum_prop": 0.62625,
"repo_name": "Xeralux/tensorflow",
"id": "4297c1cf2a3632e097973280cc985fc48da64475",
"size": "3089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/py2tf/converters/for_loops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9274"
},
{
"name": "C",
"bytes": "340972"
},
{
"name": "C++",
"bytes": "39479562"
},
{
"name": "CMake",
"bytes": "194702"
},
{
"name": "Go",
"bytes": "1046987"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "567239"
},
{
"name": "Jupyter Notebook",
"bytes": "1940883"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "48231"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94385"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "6179"
},
{
"name": "Perl 6",
"bytes": "1357"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "33675501"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "425916"
}
],
"symlink_target": ""
} |
"""Calculates mold growth indication from temperature and humidity."""
import logging
import math
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
EVENT_HOMEASSISTANT_START,
PERCENTAGE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change_event
_LOGGER = logging.getLogger(__name__)

# Extra state attributes exposed by the sensor.
ATTR_CRITICAL_TEMP = "estimated_critical_temp"
ATTR_DEWPOINT = "dewpoint"

# Configuration keys.
CONF_CALIBRATION_FACTOR = "calibration_factor"
CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor"
CONF_INDOOR_TEMP = "indoor_temp_sensor"
CONF_OUTDOOR_TEMP = "outdoor_temp_sensor"

DEFAULT_NAME = "Mold Indicator"

# Magnus-formula coefficients used by the dewpoint approximation in
# _calc_dewpoint (temperatures in Celsius).
MAGNUS_K2 = 17.62
MAGNUS_K3 = 243.12

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_INDOOR_TEMP): cv.entity_id,
        vol.Required(CONF_OUTDOOR_TEMP): cv.entity_id,
        vol.Required(CONF_INDOOR_HUMIDITY): cv.entity_id,
        vol.Optional(CONF_CALIBRATION_FACTOR): vol.Coerce(float),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up MoldIndicator sensor."""
    sensor = MoldIndicator(
        config.get(CONF_NAME, DEFAULT_NAME),
        hass.config.units.is_metric,
        config.get(CONF_INDOOR_TEMP),
        config.get(CONF_OUTDOOR_TEMP),
        config.get(CONF_INDOOR_HUMIDITY),
        config.get(CONF_CALIBRATION_FACTOR),
    )
    # No initial forced update; the entity refreshes via state listeners.
    async_add_entities([sensor], False)
class MoldIndicator(Entity):
    """Represents a MoldIndication sensor.

    Combines indoor temperature, indoor humidity and outdoor temperature
    readings to estimate the relative humidity at a critical (cold) point
    of the building envelope, which indicates the risk of mold growth.
    """

    def __init__(
        self,
        name,
        is_metric,
        indoor_temp_sensor,
        outdoor_temp_sensor,
        indoor_humidity_sensor,
        calib_factor,
    ):
        """Initialize the sensor.

        name: friendly name for this entity.
        is_metric: True when Home Assistant is configured for metric units.
        *_sensor: entity ids of the source sensors.
        calib_factor: calibration factor relating the indoor/outdoor
            temperature difference to the critical point; must be non-zero.
        """
        self._state = None
        self._name = name
        self._indoor_temp_sensor = indoor_temp_sensor
        self._indoor_humidity_sensor = indoor_humidity_sensor
        self._outdoor_temp_sensor = outdoor_temp_sensor
        self._calib_factor = calib_factor
        self._is_metric = is_metric
        self._available = False
        # All source entities whose state changes we track.
        self._entities = {
            self._indoor_temp_sensor,
            self._indoor_humidity_sensor,
            self._outdoor_temp_sensor,
        }

        # Cached readings (Celsius / percent); None until first valid update.
        self._dewpoint = None
        self._indoor_temp = None
        self._outdoor_temp = None
        self._indoor_hum = None
        self._crit_temp = None

    async def async_added_to_hass(self):
        """Register callbacks."""

        @callback
        def mold_indicator_sensors_state_listener(event):
            """Handle for state changes for dependent sensors."""
            new_state = event.data.get("new_state")
            old_state = event.data.get("old_state")
            entity = event.data.get("entity_id")
            _LOGGER.debug(
                "Sensor state change for %s that had old state %s and new state %s",
                entity,
                old_state,
                new_state,
            )

            # Only refresh when the change produced a usable reading.
            if self._update_sensor(entity, old_state, new_state):
                self.async_schedule_update_ha_state(True)

        @callback
        def mold_indicator_startup(event):
            """Add listeners and get 1st state."""
            _LOGGER.debug("Startup for %s", self.entity_id)

            async_track_state_change_event(
                self.hass, list(self._entities), mold_indicator_sensors_state_listener
            )

            # Read initial state
            indoor_temp = self.hass.states.get(self._indoor_temp_sensor)
            outdoor_temp = self.hass.states.get(self._outdoor_temp_sensor)
            indoor_hum = self.hass.states.get(self._indoor_humidity_sensor)

            # Schedule the first update only if every source sensor yielded
            # a usable initial reading.
            schedule_update = self._update_sensor(
                self._indoor_temp_sensor, None, indoor_temp
            )

            schedule_update = (
                False
                if not self._update_sensor(
                    self._outdoor_temp_sensor, None, outdoor_temp
                )
                else schedule_update
            )

            schedule_update = (
                False
                if not self._update_sensor(
                    self._indoor_humidity_sensor, None, indoor_hum
                )
                else schedule_update
            )

            if schedule_update:
                self.async_schedule_update_ha_state(True)

        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_START, mold_indicator_startup
        )

    def _update_sensor(self, entity, old_state, new_state):
        """Update information based on new sensor states.

        Returns True when a (possibly None) reading was stored and a
        recalculation should be scheduled, False when the event carried no
        usable information.
        """
        _LOGGER.debug("Sensor update for %s", entity)
        if new_state is None:
            return False

        # If old_state is not set and new state is unknown then it means
        # that the sensor just started up
        if old_state is None and new_state.state == STATE_UNKNOWN:
            return False

        if entity == self._indoor_temp_sensor:
            self._indoor_temp = MoldIndicator._update_temp_sensor(new_state)
        elif entity == self._outdoor_temp_sensor:
            self._outdoor_temp = MoldIndicator._update_temp_sensor(new_state)
        elif entity == self._indoor_humidity_sensor:
            self._indoor_hum = MoldIndicator._update_hum_sensor(new_state)

        return True

    @staticmethod
    def _update_temp_sensor(state):
        """Parse temperature sensor value, returning Celsius or None."""
        _LOGGER.debug("Updating temp sensor with value %s", state.state)

        # Return an error if the sensor change its state to Unknown.
        if state.state == STATE_UNKNOWN:
            _LOGGER.error(
                "Unable to parse temperature sensor %s with state: %s",
                state.entity_id,
                state.state,
            )
            return None

        unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        temp = util.convert(state.state, float)

        if temp is None:
            _LOGGER.error(
                "Unable to parse temperature sensor %s with state: %s",
                state.entity_id,
                state.state,
            )
            return None

        # convert to celsius if necessary
        if unit == TEMP_FAHRENHEIT:
            return util.temperature.fahrenheit_to_celsius(temp)
        if unit == TEMP_CELSIUS:
            return temp
        _LOGGER.error(
            "Temp sensor %s has unsupported unit: %s (allowed: %s, %s)",
            state.entity_id,
            unit,
            TEMP_CELSIUS,
            TEMP_FAHRENHEIT,
        )

        return None

    @staticmethod
    def _update_hum_sensor(state):
        """Parse humidity sensor value, returning percent (0-100) or None."""
        _LOGGER.debug("Updating humidity sensor with value %s", state.state)

        # Return an error if the sensor change its state to Unknown.
        if state.state == STATE_UNKNOWN:
            _LOGGER.error(
                "Unable to parse humidity sensor %s, state: %s",
                state.entity_id,
                state.state,
            )
            return None

        unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        hum = util.convert(state.state, float)

        if hum is None:
            _LOGGER.error(
                "Unable to parse humidity sensor %s, state: %s",
                state.entity_id,
                state.state,
            )
            return None

        if unit != PERCENTAGE:
            _LOGGER.error(
                "Humidity sensor %s has unsupported unit: %s %s",
                state.entity_id,
                unit,
                " (allowed: %)",
            )
            return None

        if hum > 100 or hum < 0:
            _LOGGER.error(
                "Humidity sensor %s is out of range: %s %s",
                state.entity_id,
                hum,
                "(allowed: 0-100%)",
            )
            return None

        return hum

    async def async_update(self):
        """Calculate latest state."""
        _LOGGER.debug("Update state for %s", self.entity_id)
        # check all sensors
        if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
            self._available = False
            self._dewpoint = None
            self._crit_temp = None
            return

        # re-calculate dewpoint and mold indicator
        self._calc_dewpoint()
        self._calc_moldindicator()
        if self._state is None:
            self._available = False
            self._dewpoint = None
            self._crit_temp = None
        else:
            self._available = True

    def _calc_dewpoint(self):
        """Calculate the dewpoint for the indoor air."""
        # Use magnus approximation to calculate the dew point
        alpha = MAGNUS_K2 * self._indoor_temp / (MAGNUS_K3 + self._indoor_temp)
        beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._indoor_temp)

        if self._indoor_hum == 0:
            self._dewpoint = -50  # not defined, assume very low value
        else:
            self._dewpoint = (
                MAGNUS_K3
                * (alpha + math.log(self._indoor_hum / 100.0))
                / (beta - math.log(self._indoor_hum / 100.0))
            )
        _LOGGER.debug("Dewpoint: %f %s", self._dewpoint, TEMP_CELSIUS)

    def _calc_moldindicator(self):
        """Calculate the humidity at the (cold) calibration point."""
        if None in (self._dewpoint, self._calib_factor) or self._calib_factor == 0:
            _LOGGER.debug(
                "Invalid inputs - dewpoint: %s, calibration-factor: %s",
                self._dewpoint,
                self._calib_factor,
            )
            self._state = None
            self._available = False
            self._crit_temp = None
            return

        # first calculate the approximate temperature at the calibration point
        self._crit_temp = (
            self._outdoor_temp
            + (self._indoor_temp - self._outdoor_temp) / self._calib_factor
        )

        _LOGGER.debug(
            "Estimated Critical Temperature: %f %s", self._crit_temp, TEMP_CELSIUS
        )

        # Then calculate the humidity at this point
        alpha = MAGNUS_K2 * self._crit_temp / (MAGNUS_K3 + self._crit_temp)
        beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._crit_temp)

        crit_humidity = (
            math.exp(
                (self._dewpoint * beta - MAGNUS_K3 * alpha)
                / (self._dewpoint + MAGNUS_K3)
            )
            * 100.0
        )

        # check bounds and format
        if crit_humidity > 100:
            self._state = "100"
        elif crit_humidity < 0:
            self._state = "0"
        else:
            self._state = f"{int(crit_humidity):d}"

        _LOGGER.debug("Mold indicator humidity: %s", self._state)

    @property
    def should_poll(self):
        """Return the polling state."""
        return False

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return PERCENTAGE

    @property
    def state(self):
        """Return the state of the entity."""
        return self._state

    @property
    def available(self):
        """Return the availability of this sensor."""
        return self._available

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        # Attributes can be requested while the sensor is unavailable, in
        # which case dewpoint/critical temp are None. Guard the round()
        # calls (in both unit systems) so we return None instead of raising
        # a TypeError; previously only the Fahrenheit conversion was
        # guarded.
        if self._is_metric:
            return {
                ATTR_DEWPOINT: round(self._dewpoint, 2)
                if self._dewpoint is not None
                else None,
                ATTR_CRITICAL_TEMP: round(self._crit_temp, 2)
                if self._crit_temp is not None
                else None,
            }

        dewpoint = (
            util.temperature.celsius_to_fahrenheit(self._dewpoint)
            if self._dewpoint is not None
            else None
        )

        crit_temp = (
            util.temperature.celsius_to_fahrenheit(self._crit_temp)
            if self._crit_temp is not None
            else None
        )

        return {
            ATTR_DEWPOINT: round(dewpoint, 2) if dewpoint is not None else None,
            ATTR_CRITICAL_TEMP: round(crit_temp, 2)
            if crit_temp is not None
            else None,
        }
| {
"content_hash": "e9795df02cdf12cfe16d7e05d7e98704",
"timestamp": "",
"source": "github",
"line_count": 401,
"max_line_length": 86,
"avg_line_length": 31.591022443890274,
"alnum_prop": 0.5558888538048626,
"repo_name": "turbokongen/home-assistant",
"id": "e2d9909c7ca1d47c104e74d513e89684108b401b",
"size": "12668",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mold_indicator/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
try:
    # Older sip API exposes QString; use its UTF-8 constructor when present.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # Newer API configurations use native strings; no conversion needed.
    def _fromUtf8(s):
        return s

try:
    # Older API: translate() takes an explicit encoding argument.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer API: the encoding argument was removed from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_AimsQueueWidget(object):
    """UI builder for the AIMS queue widget.

    NOTE(review): this follows the layout of pyuic-generated code; if the
    widget originates from a Designer .ui file, manual edits here may be
    overwritten on regeneration -- confirm before changing.
    """

    def setupUi(self, AimsQueueWidget):
        # Build the widget tree: a tab widget (Edit Feature / Review tabs)
        # above a single-row message area.
        AimsQueueWidget.setObjectName(_fromUtf8("AimsQueueWidget"))
        AimsQueueWidget.resize(661, 428)
        self.verticalLayout = QtGui.QVBoxLayout(AimsQueueWidget)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.tabWidget = QtGui.QTabWidget(AimsQueueWidget)
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # Tab 1: feature editing (custom widget, imported at module bottom).
        self.uEditFeatureTab = EditFeatureWidget()
        self.uEditFeatureTab.setObjectName(_fromUtf8("uEditFeatureTab"))
        self.tabWidget.addTab(self.uEditFeatureTab, _fromUtf8(""))
        # Tab 2: review queue (custom widget, imported at module bottom).
        self.uResolutionTab = ReviewQueueWidget()
        self.uResolutionTab.setObjectName(_fromUtf8("uResolutionTab"))
        self.tabWidget.addTab(self.uResolutionTab, _fromUtf8(""))
        self.verticalLayout.addWidget(self.tabWidget)
        self.horizontalLayout_5 = QtGui.QHBoxLayout()
        self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
        # Status/message label under the tabs; starts empty.
        self.uMessageLabel = QtGui.QLabel(AimsQueueWidget)
        self.uMessageLabel.setText(_fromUtf8(""))
        self.uMessageLabel.setObjectName(_fromUtf8("uMessageLabel"))
        self.horizontalLayout_5.addWidget(self.uMessageLabel)
        self.horizontalLayout_5.setStretch(0, 1)
        self.verticalLayout.addLayout(self.horizontalLayout_5)

        self.retranslateUi(AimsQueueWidget)
        # Start on the Review tab.
        self.tabWidget.setCurrentIndex(1)
        QtCore.QMetaObject.connectSlotsByName(AimsQueueWidget)

    def retranslateUi(self, AimsQueueWidget):
        # Apply translatable UI strings (window title and tab labels).
        AimsQueueWidget.setWindowTitle(_translate("AimsQueueWidget", "Form", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.uEditFeatureTab), _translate("AimsQueueWidget", "Edit Feature", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.uResolutionTab), _translate("AimsQueueWidget", "Review", None))
from ReviewQueueWidget import ReviewQueueWidget
from EditFeatureWidget import EditFeatureWidget
| {
"content_hash": "0b7afd2018ba92f53fda525bc6b18331",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 132,
"avg_line_length": 48.01960784313726,
"alnum_prop": 0.735810534912209,
"repo_name": "linz/QGIS-AIMS-Plugin",
"id": "4b83db545fea9cede0b7bf4c5c58c7b2ce6295f4",
"size": "2711",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "AimsUI/AimsClient/Gui/Ui_AimsQueueWidget.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1572"
},
{
"name": "Python",
"bytes": "594004"
},
{
"name": "QML",
"bytes": "112051"
}
],
"symlink_target": ""
} |
import logging
from six.moves.urllib import parse as urlparse
import testtools
from tempest.api.compute import base
from tempest.common import compute
from tempest.common.utils import data_utils
from tempest.common.utils.linux import remote_client
from tempest.common import waiters
from tempest import config
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class ServerActionsTestJSON(base.BaseV2ComputeTest):
run_ssh = CONF.validation.run_validation
    def setUp(self):
        """Ensure the shared server is usable before each test runs."""
        # NOTE(afazekas): Normally we use the same server with all test cases,
        # but if it has an issue, we build a new one
        super(ServerActionsTestJSON, self).setUp()
        # Check if the server is in a clean state after test
        try:
            waiters.wait_for_server_status(self.client,
                                           self.server_id, 'ACTIVE')
        except lib_exc.NotFound:
            # The server was deleted by previous test, create a new one
            # (stored on the class so subsequent tests reuse it).
            server = self.create_test_server(
                validatable=True,
                wait_until='ACTIVE')
            self.__class__.server_id = server['id']
        except Exception:
            # Rebuild server if something happened to it during a test.
            # The broad catch is deliberate: any unexpected server state is
            # repaired by rebuilding rather than failing the next test.
            self.__class__.server_id = self.rebuild_server(
                self.server_id, validatable=True)
    def tearDown(self):
        # Run the base class's per-test server check before the standard
        # teardown releases resources.
        self.server_check_teardown()
        super(ServerActionsTestJSON, self).tearDown()
    @classmethod
    def setup_credentials(cls):
        # Prepare instance networking before the base class allocates
        # credentials, so later validation can reach the servers.
        cls.prepare_instance_network()
        super(ServerActionsTestJSON, cls).setup_credentials()
    @classmethod
    def setup_clients(cls):
        super(ServerActionsTestJSON, cls).setup_clients()
        # Shorthand used throughout this class for the servers client.
        cls.client = cls.servers_client
    @classmethod
    def resource_setup(cls):
        cls.set_validation_resources()
        super(ServerActionsTestJSON, cls).resource_setup()
        # Create the initial shared server (None => build fresh) that the
        # test methods operate on via cls.server_id.
        cls.server_id = cls.rebuild_server(None, validatable=True)
@test.idempotent_id('6158df09-4b82-4ab3-af6d-29cf36af858d')
@testtools.skipUnless(CONF.compute_feature_enabled.change_password,
'Change password not available.')
def test_change_server_password(self):
# Since this test messes with the password and makes the
# server unreachable, it should create its own server
newserver = self.create_test_server(
validatable=True,
wait_until='ACTIVE')
# The server's password should be set to the provided password
new_password = 'Newpass1234'
self.client.change_password(newserver['id'], adminPass=new_password)
waiters.wait_for_server_status(self.client, newserver['id'], 'ACTIVE')
if CONF.validation.run_validation:
# Verify that the user can authenticate with the new password
server = self.client.show_server(newserver['id'])['server']
linux_client = remote_client.RemoteClient(
self.get_server_ip(server),
self.ssh_user,
new_password,
server=server,
servers_client=self.client)
linux_client.validate_authentication()
def _test_reboot_server(self, reboot_type):
if CONF.validation.run_validation:
# Get the time the server was last rebooted,
server = self.client.show_server(self.server_id)['server']
linux_client = remote_client.RemoteClient(
self.get_server_ip(server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=server,
servers_client=self.client)
boot_time = linux_client.get_boot_time()
# NOTE: This sync is for avoiding the loss of pub key data
# in a server
linux_client.exec_command("sync")
self.client.reboot_server(self.server_id, type=reboot_type)
waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
if CONF.validation.run_validation:
# Log in and verify the boot time has changed
linux_client = remote_client.RemoteClient(
self.get_server_ip(server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=server,
servers_client=self.client)
new_boot_time = linux_client.get_boot_time()
self.assertTrue(new_boot_time > boot_time,
'%s > %s' % (new_boot_time, boot_time))
    @test.attr(type='smoke')
    @test.idempotent_id('2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32')
    def test_reboot_server_hard(self):
        """Hard-reboot the shared server and verify it returns to ACTIVE."""
        # The server should be power cycled
        self._test_reboot_server('HARD')
    @decorators.skip_because(bug="1014647")
    @test.idempotent_id('4640e3ef-a5df-482e-95a1-ceeeb0faa84d')
    def test_reboot_server_soft(self):
        """Soft-reboot the shared server (currently skipped, bug 1014647)."""
        # The server should be signaled to reboot gracefully
        self._test_reboot_server('SOFT')
def _rebuild_server_and_check(self, image_ref):
rebuilt_server = (self.client.rebuild_server(self.server_id, image_ref)
['server'])
waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
msg = ('Server was not rebuilt to the original image. '
'The original image: {0}. The current image: {1}'
.format(image_ref, rebuilt_server['image']['id']))
self.assertEqual(image_ref, rebuilt_server['image']['id'], msg)
@test.idempotent_id('aaa6cdf3-55a7-461a-add9-1c8596b9a07c')
def test_rebuild_server(self):
# The server should be rebuilt using the provided image and data
meta = {'rebuild': 'server'}
new_name = data_utils.rand_name(self.__class__.__name__ + '-server')
password = 'rebuildPassw0rd'
rebuilt_server = self.client.rebuild_server(
self.server_id,
self.image_ref_alt,
name=new_name,
metadata=meta,
adminPass=password)['server']
# If the server was rebuilt on a different image, restore it to the
# original image once the test ends
if self.image_ref_alt != self.image_ref:
self.addCleanup(self._rebuild_server_and_check, self.image_ref)
# Verify the properties in the initial response are correct
self.assertEqual(self.server_id, rebuilt_server['id'])
rebuilt_image_id = rebuilt_server['image']['id']
self.assertTrue(self.image_ref_alt.endswith(rebuilt_image_id))
self.assertEqual(self.flavor_ref, rebuilt_server['flavor']['id'])
# Verify the server properties after the rebuild completes
waiters.wait_for_server_status(self.client,
rebuilt_server['id'], 'ACTIVE')
server = self.client.show_server(rebuilt_server['id'])['server']
rebuilt_image_id = server['image']['id']
self.assertTrue(self.image_ref_alt.endswith(rebuilt_image_id))
self.assertEqual(new_name, server['name'])
if CONF.validation.run_validation:
# Authentication is attempted in the following order of priority:
# 1.The key passed in, if one was passed in.
# 2.Any key we can find through an SSH agent (if allowed).
# 3.Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
# ~/.ssh/ (if allowed).
# 4.Plain username/password auth, if a password was given.
linux_client = remote_client.RemoteClient(
self.get_server_ip(rebuilt_server),
self.ssh_user,
password,
self.validation_resources['keypair']['private_key'],
server=rebuilt_server,
servers_client=self.client)
linux_client.validate_authentication()
@test.idempotent_id('30449a88-5aff-4f9b-9866-6ee9b17f906d')
def test_rebuild_server_in_stop_state(self):
# The server in stop state should be rebuilt using the provided
# image and remain in SHUTOFF state
server = self.client.show_server(self.server_id)['server']
old_image = server['image']['id']
new_image = (self.image_ref_alt
if old_image == self.image_ref else self.image_ref)
self.client.stop_server(self.server_id)
waiters.wait_for_server_status(self.client, self.server_id, 'SHUTOFF')
rebuilt_server = (self.client.rebuild_server(self.server_id, new_image)
['server'])
# If the server was rebuilt on a different image, restore it to the
# original image once the test ends
if self.image_ref_alt != self.image_ref:
self.addCleanup(self._rebuild_server_and_check, old_image)
# Verify the properties in the initial response are correct
self.assertEqual(self.server_id, rebuilt_server['id'])
rebuilt_image_id = rebuilt_server['image']['id']
self.assertEqual(new_image, rebuilt_image_id)
self.assertEqual(self.flavor_ref, rebuilt_server['flavor']['id'])
# Verify the server properties after the rebuild completes
waiters.wait_for_server_status(self.client,
rebuilt_server['id'], 'SHUTOFF')
server = self.client.show_server(rebuilt_server['id'])['server']
rebuilt_image_id = server['image']['id']
self.assertEqual(new_image, rebuilt_image_id)
self.client.start_server(self.server_id)
    @test.idempotent_id('b68bd8d6-855d-4212-b59b-2e704044dace')
    @test.services('volume')
    def test_rebuild_server_with_volume_attached(self):
        """Rebuild a server with a volume attached; attachment must survive."""
        # create a new volume and attach it to the server
        volume = self.create_volume()
        server = self.client.show_server(self.server_id)['server']
        self.attach_volume(server, volume)
        # run general rebuild test
        self.test_rebuild_server()
        # make sure the volume is attached to the instance after rebuild
        vol_after_rebuild = self.volumes_client.show_volume(volume['id'])
        vol_after_rebuild = vol_after_rebuild['volume']
        self.assertEqual('in-use', vol_after_rebuild['status'])
        self.assertEqual(self.server_id,
                         vol_after_rebuild['attachments'][0]['server_id'])
    def _test_resize_server_confirm(self, stop=False):
        """Resize the server to flavor_ref_alt and confirm the resize.

        The server's RAM and disk space should be modified to that of
        the provided flavor.

        :param stop: when True, resize while the server is SHUTOFF and
                     expect it to remain SHUTOFF after confirmation.
        """
        if stop:
            self.client.stop_server(self.server_id)
            waiters.wait_for_server_status(self.client, self.server_id,
                                           'SHUTOFF')
        self.client.resize_server(self.server_id, self.flavor_ref_alt)
        # NOTE(jlk): Explicitly delete the server to get a new one for later
        # tests. Avoids resize down race issues.
        self.addCleanup(self.delete_server, self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id,
                                       'VERIFY_RESIZE')
        self.client.confirm_resize_server(self.server_id)
        expected_status = 'SHUTOFF' if stop else 'ACTIVE'
        waiters.wait_for_server_status(self.client, self.server_id,
                                       expected_status)
        server = self.client.show_server(self.server_id)['server']
        self.assertEqual(self.flavor_ref_alt, server['flavor']['id'])
        if stop:
            # NOTE(mriedem): tearDown requires the server to be started.
            self.client.start_server(self.server_id)
    @test.idempotent_id('1499262a-9328-4eda-9068-db1ac57498d2')
    @testtools.skipUnless(CONF.compute_feature_enabled.resize,
                          'Resize not available.')
    def test_resize_server_confirm(self):
        """Resize and confirm while the server stays ACTIVE."""
        self._test_resize_server_confirm(stop=False)
    @test.idempotent_id('138b131d-66df-48c9-a171-64f45eb92962')
    @testtools.skipUnless(CONF.compute_feature_enabled.resize,
                          'Resize not available.')
    def test_resize_server_confirm_from_stopped(self):
        """Resize and confirm while the server is SHUTOFF."""
        self._test_resize_server_confirm(stop=True)
    @test.idempotent_id('c03aab19-adb1-44f5-917d-c419577e9e68')
    @testtools.skipUnless(CONF.compute_feature_enabled.resize,
                          'Resize not available.')
    def test_resize_server_revert(self):
        """Revert a resize and check the original flavor is restored.

        The server's RAM and disk space should return to its original
        values after a resize is reverted.
        """
        self.client.resize_server(self.server_id, self.flavor_ref_alt)
        # NOTE(zhufl): Explicitly delete the server to get a new one for later
        # tests. Avoids resize down race issues.
        self.addCleanup(self.delete_server, self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id,
                                       'VERIFY_RESIZE')
        self.client.revert_resize_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
        server = self.client.show_server(self.server_id)['server']
        self.assertEqual(self.flavor_ref, server['flavor']['id'])
    @test.idempotent_id('b963d4f1-94b3-4c40-9e97-7b583f46e470')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting not available, backup not possible.')
    @test.services('image')
    def test_create_backup(self):
        """Create backups successfully and verify rotation deletes the oldest.

        Positive test: with rotation=2, creating a third backup must
        automatically delete the first one.
        """
        # Check if glance v1 is available to determine which client to use. We
        # prefer glance v1 for the compute API tests since the compute image
        # API proxy was written for glance v1.
        if CONF.image_feature_enabled.api_v1:
            glance_client = self.os.image_client
        elif CONF.image_feature_enabled.api_v2:
            glance_client = self.os.image_client_v2
        else:
            raise lib_exc.InvalidConfiguration(
                'Either api_v1 or api_v2 must be True in '
                '[image-feature-enabled].')
        # Create the first backup.
        backup1 = data_utils.rand_name('backup-1')
        resp = self.client.create_backup(self.server_id,
                                         backup_type='daily',
                                         rotation=2,
                                         name=backup1).response
        oldest_backup_exist = True
        # the oldest one should be deleted automatically in this test
        def _clean_oldest_backup(oldest_backup):
            # Cleanup closure: only delete the first backup ourselves if
            # rotation failed to remove it during the test.
            if oldest_backup_exist:
                try:
                    glance_client.delete_image(oldest_backup)
                except lib_exc.NotFound:
                    pass
                else:
                    LOG.warning("Deletion of oldest backup %s should not have "
                                "been successful as it should have been "
                                "deleted during rotation." % oldest_backup)
        image1_id = data_utils.parse_image_id(resp['location'])
        self.addCleanup(_clean_oldest_backup, image1_id)
        waiters.wait_for_image_status(glance_client,
                                      image1_id, 'active')
        # Create the second backup once the server is ACTIVE again.
        backup2 = data_utils.rand_name('backup-2')
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
        resp = self.client.create_backup(self.server_id,
                                         backup_type='daily',
                                         rotation=2,
                                         name=backup2).response
        image2_id = data_utils.parse_image_id(resp['location'])
        self.addCleanup(glance_client.delete_image, image2_id)
        waiters.wait_for_image_status(glance_client,
                                      image2_id, 'active')
        # verify they have been created
        properties = {
            'image_type': 'backup',
            'backup_type': "daily",
            'instance_uuid': self.server_id,
        }
        params = {
            'status': 'active',
            'sort_key': 'created_at',
            'sort_dir': 'asc'
        }
        if CONF.image_feature_enabled.api_v1:
            for key, value in properties.items():
                params['property-%s' % key] = value
            image_list = glance_client.list_images(
                detail=True,
                **params)['images']
        else:
            # Additional properties are flattened in glance v2.
            params.update(properties)
            image_list = glance_client.list_images(params)['images']
        self.assertEqual(2, len(image_list))
        self.assertEqual((backup1, backup2),
                         (image_list[0]['name'], image_list[1]['name']))
        # create the third one, due to the rotation is 2,
        # the first one will be deleted
        backup3 = data_utils.rand_name('backup-3')
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
        resp = self.client.create_backup(self.server_id,
                                         backup_type='daily',
                                         rotation=2,
                                         name=backup3).response
        image3_id = data_utils.parse_image_id(resp['location'])
        self.addCleanup(glance_client.delete_image, image3_id)
        # the first back up should be deleted
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
        glance_client.wait_for_resource_deletion(image1_id)
        oldest_backup_exist = False
        if CONF.image_feature_enabled.api_v1:
            image_list = glance_client.list_images(
                detail=True, **params)['images']
        else:
            image_list = glance_client.list_images(params)['images']
        self.assertEqual(2, len(image_list),
                         'Unexpected number of images for '
                         'v2:test_create_backup; was the oldest backup not '
                         'yet deleted? Image list: %s' %
                         [image['name'] for image in image_list])
        self.assertEqual((backup2, backup3),
                         (image_list[0]['name'], image_list[1]['name']))
def _get_output(self):
output = self.client.get_console_output(
self.server_id, length=10)['output']
self.assertTrue(output, "Console output was empty.")
lines = len(output.split('\n'))
self.assertEqual(lines, 10)
    @test.idempotent_id('4b8867e6-fffa-4d54-b1d1-6fdda57be2f3')
    @testtools.skipUnless(CONF.compute_feature_enabled.console_output,
                          'Console output not supported.')
    def test_get_console_output(self):
        """GET the console output for a given server_id and number of lines."""
        # This reboot is necessary for outputting some console log after
        # creating an instance backup. If an instance backup, the console
        # log file is truncated and we cannot get any console log through
        # "console-log" API.
        # The detail is https://bugs.launchpad.net/nova/+bug/1251920
        self.client.reboot_server(self.server_id, type='HARD')
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
        self.wait_for(self._get_output)
    @test.idempotent_id('89104062-69d8-4b19-a71b-f47b7af093d7')
    @testtools.skipUnless(CONF.compute_feature_enabled.console_output,
                          'Console output not supported.')
    def test_get_console_output_with_unlimited_size(self):
        """Omitting 'length' must return the full console log."""
        server = self.create_test_server(wait_until='ACTIVE')
        def _check_full_length_console_log():
            output = self.client.get_console_output(server['id'])['output']
            self.assertTrue(output, "Console output was empty.")
            lines = len(output.split('\n'))
            # NOTE: This test tries to get full length console log, and the
            # length should be bigger than the one of test_get_console_output.
            self.assertTrue(lines > 10, "Cannot get enough console log length."
                            " (lines: %s)" % lines)
        self.wait_for(_check_full_length_console_log)
@test.idempotent_id('5b65d4e7-4ecd-437c-83c0-d6b79d927568')
@testtools.skipUnless(CONF.compute_feature_enabled.console_output,
'Console output not supported.')
def test_get_console_output_server_id_in_shutoff_status(self):
# Positive test:Should be able to GET the console output
# for a given server_id in SHUTOFF status
# NOTE: SHUTOFF is irregular status. To avoid test instability,
# one server is created only for this test without using
# the server that was created in setupClass.
server = self.create_test_server(wait_until='ACTIVE')
temp_server_id = server['id']
self.client.stop_server(temp_server_id)
waiters.wait_for_server_status(self.client, temp_server_id, 'SHUTOFF')
self.wait_for(self._get_output)
    @test.idempotent_id('bd61a9fd-062f-4670-972b-2d6c3e3b9e73')
    @testtools.skipUnless(CONF.compute_feature_enabled.pause,
                          'Pause is not available.')
    def test_pause_unpause_server(self):
        """Pause the shared server, then unpause it back to ACTIVE."""
        self.client.pause_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'PAUSED')
        self.client.unpause_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
    @test.idempotent_id('0d8ee21e-b749-462d-83da-b85b41c86c7f')
    @testtools.skipUnless(CONF.compute_feature_enabled.suspend,
                          'Suspend is not available.')
    def test_suspend_resume_server(self):
        """Suspend the shared server, then resume it back to ACTIVE."""
        self.client.suspend_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id,
                                       'SUSPENDED')
        self.client.resume_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
    @test.idempotent_id('77eba8e0-036e-4635-944b-f7a8f3b78dc9')
    @testtools.skipUnless(CONF.compute_feature_enabled.shelve,
                          'Shelve is not available.')
    def test_shelve_unshelve_server(self):
        """Shelve-offload the server, verify its snapshot, then unshelve."""
        compute.shelve_server(self.client, self.server_id,
                              force_shelve_offload=True)
        # Shelving snapshots the server to an image named '<name>-shelved';
        # verify exactly one such image exists.
        server = self.client.show_server(self.server_id)['server']
        image_name = server['name'] + '-shelved'
        params = {'name': image_name}
        images = self.compute_images_client.list_images(**params)['images']
        self.assertEqual(1, len(images))
        self.assertEqual(image_name, images[0]['name'])
        self.client.unshelve_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
    @test.idempotent_id('af8eafd4-38a7-4a4b-bdbc-75145a580560')
    def test_stop_start_server(self):
        """Stop the shared server, then start it back to ACTIVE."""
        self.client.stop_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'SHUTOFF')
        self.client.start_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
    @test.idempotent_id('80a8094c-211e-440a-ab88-9e59d556c7ee')
    def test_lock_unlock_server(self):
        """Lock the server, try server stop (exceptions throw), unlock it
        and retry."""
        self.client.lock_server(self.server_id)
        self.addCleanup(self.client.unlock_server, self.server_id)
        server = self.client.show_server(self.server_id)['server']
        self.assertEqual(server['status'], 'ACTIVE')
        # Locked server is not allowed to be stopped by non-admin user
        self.assertRaises(lib_exc.Conflict,
                          self.client.stop_server, self.server_id)
        self.client.unlock_server(self.server_id)
        self.client.stop_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'SHUTOFF')
        self.client.start_server(self.server_id)
        waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
def _validate_url(self, url):
valid_scheme = ['http', 'https']
parsed_url = urlparse.urlparse(url)
self.assertNotEqual('None', parsed_url.port)
self.assertNotEqual('None', parsed_url.hostname)
self.assertIn(parsed_url.scheme, valid_scheme)
    @test.idempotent_id('c6bc11bf-592e-4015-9319-1c98dc64daf5')
    @testtools.skipUnless(CONF.compute_feature_enabled.vnc_console,
                          'VNC Console feature is disabled.')
    def test_get_vnc_console(self):
        """Get the VNC console of type 'novnc' and 'xvpvnc'."""
        console_types = ['novnc', 'xvpvnc']
        for console_type in console_types:
            body = self.client.get_vnc_console(self.server_id,
                                               type=console_type)['console']
            # Each console must echo its type and carry a well-formed URL.
            self.assertEqual(console_type, body['type'])
            self.assertNotEqual('', body['url'])
            self._validate_url(body['url'])
| {
"content_hash": "5f0d39a4518e1d2c3443da3f6d55af14",
"timestamp": "",
"source": "github",
"line_count": 545,
"max_line_length": 79,
"avg_line_length": 46.4954128440367,
"alnum_prop": 0.6098263614838201,
"repo_name": "sebrandon1/tempest",
"id": "9077801ea9b2f6aca80feeae21a6f076972716a1",
"size": "25976",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/compute/servers/test_server_actions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3618834"
},
{
"name": "Shell",
"bytes": "9310"
}
],
"symlink_target": ""
} |
"""Volume-related Utilities and helpers."""
import ast
import math
import re
import time
import uuid
from Crypto.Random import random
import eventlet
from eventlet import tpool
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from six.moves import range
from cinder.brick.local_dev import lvm as brick_lvm
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _, _LI, _LW, _LE
from cinder import objects
from cinder import rpc
from cinder import utils
from cinder.volume import throttling
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def null_safe_str(s):
    """Return ``str(s)`` for a truthy value, otherwise an empty string."""
    if not s:
        return ''
    return str(s)
def _usage_from_volume(context, volume_ref, **kw):
    """Build the usage-notification payload for a volume.

    :param context: request context, used for the follow-up DB lookups
    :param volume_ref: volume dict/object supporting item access
    :param kw: extra fields that override or extend the defaults
    :returns: dict ready to be handed to the notifier
    """
    now = timeutils.utcnow()
    # Fall back to "now" so the .isoformat() calls below never see None.
    launched_at = volume_ref['launched_at'] or now
    created_at = volume_ref['created_at'] or now
    usage_info = dict(
        tenant_id=volume_ref['project_id'],
        host=volume_ref['host'],
        user_id=volume_ref['user_id'],
        availability_zone=volume_ref['availability_zone'],
        volume_id=volume_ref['id'],
        volume_type=volume_ref['volume_type_id'],
        display_name=volume_ref['display_name'],
        launched_at=launched_at.isoformat(),
        created_at=created_at.isoformat(),
        status=volume_ref['status'],
        snapshot_id=volume_ref['snapshot_id'],
        size=volume_ref['size'],
        replication_status=volume_ref['replication_status'],
        replication_extended_status=volume_ref['replication_extended_status'],
        replication_driver_data=volume_ref['replication_driver_data'],
        metadata=volume_ref.get('volume_metadata'),)
    usage_info.update(kw)
    # Enrich with attachment and glance metadata, best-effort: missing
    # glance metadata is expected and silently ignored; a vanished volume
    # is only logged.
    try:
        attachments = db.volume_attachment_get_all_by_volume_id(
            context, volume_ref['id'])
        usage_info['volume_attachment'] = attachments
        glance_meta = db.volume_glance_metadata_get(context, volume_ref['id'])
        if glance_meta:
            usage_info['glance_metadata'] = glance_meta
    except exception.GlanceMetadataNotFound:
        pass
    except exception.VolumeNotFound:
        LOG.debug("Can not find volume %s at notify usage", volume_ref['id'])
    return usage_info
def _usage_from_backup(backup, **kw):
num_dependent_backups = backup.num_dependent_backups
usage_info = dict(tenant_id=backup.project_id,
user_id=backup.user_id,
availability_zone=backup.availability_zone,
backup_id=backup.id,
host=backup.host,
display_name=backup.display_name,
created_at=str(backup.created_at),
status=backup.status,
volume_id=backup.volume_id,
size=backup.size,
service_metadata=backup.service_metadata,
service=backup.service,
fail_reason=backup.fail_reason,
parent_id=backup.parent_id,
num_dependent_backups=num_dependent_backups,
snapshot_id=backup.snapshot_id,
)
usage_info.update(kw)
return usage_info
def notify_about_volume_usage(context, volume, event_suffix,
                              extra_usage_info=None, host=None):
    """Emit a 'volume.<event_suffix>' usage notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_volume(context, volume,
                                    **(extra_usage_info or {}))
    notifier = rpc.get_notifier("volume", notifier_host)
    notifier.info(context, 'volume.%s' % event_suffix, usage_info)
def notify_about_backup_usage(context, backup, event_suffix,
                              extra_usage_info=None,
                              host=None):
    """Emit a 'backup.<event_suffix>' usage notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_backup(backup, **(extra_usage_info or {}))
    notifier = rpc.get_notifier("backup", notifier_host)
    notifier.info(context, 'backup.%s' % event_suffix, usage_info)
def _usage_from_snapshot(snapshot, **extra_usage_info):
    """Build the usage-notification payload for a snapshot object.

    Keyword arguments override or extend the default fields.
    """
    usage_info = dict(
        tenant_id=snapshot.project_id,
        user_id=snapshot.user_id,
        availability_zone=snapshot.volume['availability_zone'],
        volume_id=snapshot.volume_id,
        volume_size=snapshot.volume_size,
        snapshot_id=snapshot.id,
        display_name=snapshot.display_name,
        created_at=str(snapshot.created_at),
        status=snapshot.status,
        deleted=null_safe_str(snapshot.deleted),
        metadata=null_safe_str(snapshot.metadata),
    )
    usage_info.update(extra_usage_info)
    return usage_info
def notify_about_snapshot_usage(context, snapshot, event_suffix,
                                extra_usage_info=None, host=None):
    """Emit a 'snapshot.<event_suffix>' usage notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_snapshot(snapshot, **(extra_usage_info or {}))
    notifier = rpc.get_notifier('snapshot', notifier_host)
    notifier.info(context, 'snapshot.%s' % event_suffix, usage_info)
def notify_about_replication_usage(context, volume, suffix,
                                   extra_usage_info=None, host=None):
    """Emit a 'replication.<suffix>' usage notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_volume(context, volume,
                                    **(extra_usage_info or {}))
    notifier = rpc.get_notifier('replication', notifier_host)
    notifier.info(context, 'replication.%s' % suffix, usage_info)
def notify_about_replication_error(context, volume, suffix,
                                   extra_error_info=None, host=None):
    """Emit a 'replication.<suffix>' error notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_volume(context, volume,
                                    **(extra_error_info or {}))
    notifier = rpc.get_notifier('replication', notifier_host)
    notifier.error(context, 'replication.%s' % suffix, usage_info)
def _usage_from_consistencygroup(group_ref, **kw):
usage_info = dict(tenant_id=group_ref.project_id,
user_id=group_ref.user_id,
availability_zone=group_ref.availability_zone,
consistencygroup_id=group_ref.id,
name=group_ref.name,
created_at=group_ref.created_at.isoformat(),
status=group_ref.status)
usage_info.update(kw)
return usage_info
def notify_about_consistencygroup_usage(context, group, event_suffix,
                                        extra_usage_info=None, host=None):
    """Emit a 'consistencygroup.<event_suffix>' usage notification."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_consistencygroup(group,
                                              **(extra_usage_info or {}))
    notifier = rpc.get_notifier("consistencygroup", notifier_host)
    notifier.info(context, 'consistencygroup.%s' % event_suffix, usage_info)
def _usage_from_cgsnapshot(cgsnapshot, **kw):
usage_info = dict(
tenant_id=cgsnapshot.project_id,
user_id=cgsnapshot.user_id,
cgsnapshot_id=cgsnapshot.id,
name=cgsnapshot.name,
consistencygroup_id=cgsnapshot.consistencygroup_id,
created_at=cgsnapshot.created_at.isoformat(),
status=cgsnapshot.status)
usage_info.update(kw)
return usage_info
def notify_about_cgsnapshot_usage(context, cgsnapshot, event_suffix,
                                  extra_usage_info=None, host=None):
    """Emit a 'cgsnapshot.<event_suffix>' usage notification over RPC."""
    notifier_host = host or CONF.host
    usage_info = _usage_from_cgsnapshot(cgsnapshot,
                                        **(extra_usage_info or {}))
    notifier = rpc.get_notifier("cgsnapshot", notifier_host)
    notifier.info(context, 'cgsnapshot.%s' % event_suffix, usage_info)
def _calculate_count(size_in_m, blocksize):
    """Validate the dd blocksize and derive the matching block count.

    :param size_in_m: amount of data to copy, in MiB
    :param blocksize: dd-style blocksize string (e.g. '1M')
    :returns: tuple of (blocksize actually used, integer dd ``count``)
    """
    # Check if volume_dd_blocksize is valid
    try:
        # Rule out zero-sized/negative/float dd blocksize which
        # cannot be caught by strutils
        if blocksize.startswith(('-', '0')) or '.' in blocksize:
            raise ValueError
        bs = strutils.string_to_bytes('%sB' % blocksize)
    except ValueError:
        LOG.warning(_LW("Incorrect value error: %(blocksize)s, "
                        "it may indicate that \'volume_dd_blocksize\' "
                        "was configured incorrectly. Fall back to default."),
                    {'blocksize': blocksize})
        # Fall back to default blocksize: drop any runtime override so the
        # configured default is read back from CONF.
        CONF.clear_override('volume_dd_blocksize')
        blocksize = CONF.volume_dd_blocksize
        bs = strutils.string_to_bytes('%sB' % blocksize)
    # Round up so the full size_in_m MiB is always covered.
    count = math.ceil(size_in_m * units.Mi / bs)
    return blocksize, int(count)
def check_for_odirect_support(src, dest, flag='oflag=direct'):
    """Probe whether dd accepts O_DIRECT between *src* and *dest*.

    Runs a zero-count dd so no data is actually copied.
    """
    # The iflag=direct and if=/dev/zero combination does not work
    # (error: dd: failed to open '/dev/zero': Invalid argument).
    if src == '/dev/zero' and flag == 'iflag=direct':
        return False
    try:
        utils.execute('dd', 'count=0', 'if=%s' % src,
                      'of=%s' % dest,
                      flag, run_as_root=True)
    except processutils.ProcessExecutionError:
        return False
    return True
def _copy_volume_with_path(prefix, srcstr, deststr, size_in_m, blocksize,
                           sync=False, execute=utils.execute, ionice=None,
                           sparse=False):
    """Copy size_in_m MiB from path *srcstr* to path *deststr* using dd.

    :param prefix: command prefix list (e.g. a throttling wrapper)
    :param sync: request fdatasync so unprovisioned data is persisted
    :param ionice: ionice class argument, prepended to the command if set
    :param sparse: pass conv=sparse so dd skips writing zero blocks
    """
    # Use O_DIRECT to avoid thrashing the system buffer cache
    extra_flags = []
    if check_for_odirect_support(srcstr, deststr, 'iflag=direct'):
        extra_flags.append('iflag=direct')
    if check_for_odirect_support(srcstr, deststr, 'oflag=direct'):
        extra_flags.append('oflag=direct')
    # If the volume is being unprovisioned then
    # request the data is persisted before returning,
    # so that it's not discarded from the cache.
    conv = []
    if sync and not extra_flags:
        conv.append('fdatasync')
    if sparse:
        conv.append('sparse')
    if conv:
        conv_options = 'conv=' + ",".join(conv)
        extra_flags.append(conv_options)
    blocksize, count = _calculate_count(size_in_m, blocksize)
    cmd = ['dd', 'if=%s' % srcstr, 'of=%s' % deststr,
           'count=%d' % count, 'bs=%s' % blocksize]
    cmd.extend(extra_flags)
    if ionice is not None:
        cmd = ['ionice', ionice] + cmd
    cmd = prefix + cmd
    # Perform the copy
    start_time = timeutils.utcnow()
    execute(*cmd, run_as_root=True)
    duration = timeutils.delta_seconds(start_time, timeutils.utcnow())
    # NOTE(jdg): use a default of 1, mostly for unit test, but in
    # some incredible event this is 0 (cirros image?) don't barf
    if duration < 1:
        duration = 1
    mbps = (size_in_m / duration)
    LOG.debug("Volume copy details: src %(src)s, dest %(dest)s, "
              "size %(sz).2f MB, duration %(duration).2f sec",
              {"src": srcstr,
               "dest": deststr,
               "sz": size_in_m,
               "duration": duration})
    LOG.info(_LI("Volume copy %(size_in_m).2f MB at %(mbps).2f MB/s"),
             {'size_in_m': size_in_m, 'mbps': mbps})
def _open_volume_with_path(path, mode):
    """Open *path* under a temporary ownership change.

    Returns an open file handle on success; on any failure the error is
    logged and None is returned implicitly — callers are expected to
    check for a falsy result.
    """
    try:
        with utils.temporary_chown(path):
            handle = open(path, mode)
            return handle
    except Exception:
        LOG.error(_LE("Failed to open volume from %(path)s."), {'path': path})
def _transfer_data(src, dest, length, chunk_size):
    """Transfer data between files (Python IO objects).

    :param src: readable file-like object
    :param dest: writable file-like object
    :param length: total number of bytes to transfer
    :param chunk_size: bytes read/written per iteration
    """
    chunks = int(math.ceil(length / chunk_size))
    remaining_length = length
    LOG.debug("%(chunks)s chunks of %(bytes)s bytes to be transferred.",
              {'chunks': chunks, 'bytes': chunk_size})
    for chunk in range(0, chunks):
        before = time.time()
        # Reads/writes go through tpool so they don't block the hub.
        data = tpool.execute(src.read, min(chunk_size, remaining_length))
        # If we have reached end of source, discard any extraneous bytes from
        # destination volume if trim is enabled and stop writing.
        if data == b'':
            break
        tpool.execute(dest.write, data)
        remaining_length -= len(data)
        delta = (time.time() - before)
        rate = (chunk_size / delta) / units.Ki
        LOG.debug("Transferred chunk %(chunk)s of %(chunks)s (%(rate)dK/s).",
                  {'chunk': chunk + 1, 'chunks': chunks, 'rate': rate})
        # yield to any other pending operations
        eventlet.sleep(0)
    tpool.execute(dest.flush)
def _copy_volume_with_file(src, dest, size_in_m):
    """Copy size_in_m MiB between paths and/or open file handles.

    'src' and 'dest' may each be a filesystem path (str) or an
    already-open file-like object. Handles opened here are always closed
    again — including when the transfer raises (the original leaked them
    on failure); caller-provided handles remain the caller's to close.
    """
    src_handle = src
    if isinstance(src, six.string_types):
        src_handle = _open_volume_with_path(src, 'rb')
    dest_handle = dest
    if isinstance(dest, six.string_types):
        dest_handle = _open_volume_with_path(dest, 'wb')
    if not src_handle:
        raise exception.DeviceUnavailable(
            _("Failed to copy volume, source device unavailable."))
    if not dest_handle:
        raise exception.DeviceUnavailable(
            _("Failed to copy volume, destination device unavailable."))
    start_time = timeutils.utcnow()
    try:
        _transfer_data(src_handle, dest_handle,
                       size_in_m * units.Mi, units.Mi * 4)
    finally:
        # Close only the handles this function opened itself.
        if isinstance(src, six.string_types):
            src_handle.close()
        if isinstance(dest, six.string_types):
            dest_handle.close()
    duration = max(1, timeutils.delta_seconds(start_time, timeutils.utcnow()))
    mbps = (size_in_m / duration)
    LOG.info(_LI("Volume copy completed (%(size_in_m).2f MB at "
                 "%(mbps).2f MB/s)."),
             {'size_in_m': size_in_m, 'mbps': mbps})
def copy_volume(src, dest, size_in_m, blocksize, sync=False,
                execute=utils.execute, ionice=None, throttle=None,
                sparse=False):
    """Copy data from the source volume to the destination volume.

    The parameters 'src' and 'dest' are both typically of type str, which
    represents the path to each volume on the filesystem.  Connectors can
    optionally return a volume handle of type RawIOBase for volumes that are
    not available on the local filesystem for open/close operations.

    If either 'src' or 'dest' are not of type str, then they are assumed to be
    of type RawIOBase or any derivative that supports file operations such as
    read and write.  In this case, the handles are treated as file handles
    instead of file paths and, at present moment, throttling is unavailable.
    """
    path_based = (isinstance(src, six.string_types) and
                  isinstance(dest, six.string_types))
    if not path_based:
        # File-handle copy path: no dd, and therefore no throttling.
        _copy_volume_with_file(src, dest, size_in_m)
        return
    throttle = throttle or throttling.Throttle.get_default()
    with throttle.subcommand(src, dest) as throttle_cmd:
        _copy_volume_with_path(throttle_cmd['prefix'], src, dest,
                               size_in_m, blocksize, sync=sync,
                               execute=execute, ionice=ionice,
                               sparse=sparse)
def clear_volume(volume_size, volume_path, volume_clear=None,
                 volume_clear_size=None, volume_clear_ionice=None,
                 throttle=None):
    """Unprovision old volumes to prevent data leaking between users.

    :param volume_size: full size of the volume, in MiB
    :param volume_clear: 'zero' or 'shred'; defaults from CONF
    :param volume_clear_size: MiB to wipe; 0/None means the whole volume
    :raises InvalidConfigurationValue: for an unknown volume_clear method
    """
    if volume_clear is None:
        volume_clear = CONF.volume_clear
    if volume_clear_size is None:
        volume_clear_size = CONF.volume_clear_size
    if volume_clear_size == 0:
        volume_clear_size = volume_size
    if volume_clear_ionice is None:
        volume_clear_ionice = CONF.volume_clear_ionice
    LOG.info(_LI("Performing secure delete on volume: %s"), volume_path)
    # We pass sparse=False explicitly here so that zero blocks are not
    # skipped in order to clear the volume.
    if volume_clear == 'zero':
        return copy_volume('/dev/zero', volume_path, volume_clear_size,
                           CONF.volume_dd_blocksize,
                           sync=True, execute=utils.execute,
                           ionice=volume_clear_ionice,
                           throttle=throttle, sparse=False)
    elif volume_clear == 'shred':
        clear_cmd = ['shred', '-n3']
        if volume_clear_size:
            clear_cmd.append('-s%dMiB' % volume_clear_size)
    else:
        raise exception.InvalidConfigurationValue(
            option='volume_clear',
            value=volume_clear)
    # NOTE: only the 'shred' branch reaches this point — 'zero' returned
    # above and any other method raised.
    clear_cmd.append(volume_path)
    start_time = timeutils.utcnow()
    utils.execute(*clear_cmd, run_as_root=True)
    duration = timeutils.delta_seconds(start_time, timeutils.utcnow())
    # NOTE(jdg): use a default of 1, mostly for unit test, but in
    # some incredible event this is 0 (cirros image?) don't barf
    if duration < 1:
        duration = 1
    LOG.info(_LI('Elapsed time for clear volume: %.2f sec'), duration)
def supports_thin_provisioning():
    """Return whether the local LVM backend supports thin provisioning."""
    root_helper = utils.get_root_helper()
    return brick_lvm.LVM.supports_thin_provisioning(root_helper)
def get_all_physical_volumes(vg_name=None):
    """List LVM physical volumes, optionally limited to one volume group."""
    root_helper = utils.get_root_helper()
    return brick_lvm.LVM.get_all_physical_volumes(root_helper, vg_name)
def get_all_volume_groups(vg_name=None):
    """List LVM volume groups, optionally limited to a single name."""
    root_helper = utils.get_root_helper()
    return brick_lvm.LVM.get_all_volume_groups(root_helper, vg_name)
# Default symbols to use for passwords. Avoids visually confusing characters:
# each group drops glyphs easily mistaken for one another (~6 bits per symbol).
DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0, 1 (look like O, l)
                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l
def generate_password(length=16, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random password from the supplied symbol groups.

    At least one symbol from each group will be included. Unpredictable
    results if length is less than the number of symbol groups.

    Believed to be reasonably secure (with a reasonable password length!)
    """
    # Some password policies require at least one character from each
    # class of symbol, so seed the result with one pick per group.
    chars = [random.choice(group) for group in symbolgroups]
    # Shuffle before truncating so that, when length < len(symbolgroups),
    # the characters kept are not predictably from the first groups.
    random.shuffle(chars)
    chars = chars[:length]
    # Pad up to the requested length from the union of all groups.
    all_symbols = ''.join(symbolgroups)
    while len(chars) < length:
        chars.append(random.choice(all_symbols))
    # Final shuffle so the mandatory per-group characters do not sit at a
    # predictable position.
    random.shuffle(chars)
    return ''.join(chars)
def generate_username(length=20, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random username using the password-generation scheme."""
    return generate_password(length=length, symbolgroups=symbolgroups)
# Pool name used by extract_host() when a host string carries no pool info.
DEFAULT_POOL_NAME = '_pool0'
def extract_host(host, level='backend', default_pool_name=False):
    """Extract Host, Backend or Pool information from host string.

    :param host: String for host, which could include host@backend#pool info
    :param level: Indicate which level of information should be extracted
                  from host string. Level can be 'host', 'backend' or 'pool',
                  default value is 'backend'
    :param default_pool_name: this flag specify what to do if level == 'pool'
                              and there is no 'pool' info encoded in host
                              string. default_pool_name=True will return
                              DEFAULT_POOL_NAME, otherwise we return None.
                              Default value of this parameter is False.
    :return: expected level of information

    For example:
        host = 'HostA@BackendB#PoolC'
        ret = extract_host(host, 'host')
        # ret is 'HostA'
        ret = extract_host(host, 'backend')
        # ret is 'HostA@BackendB'
        ret = extract_host(host, 'pool')
        # ret is 'PoolC'

        host = 'HostX@BackendY'
        ret = extract_host(host, 'pool')
        # ret is None
        ret = extract_host(host, 'pool', True)
        # ret is '_pool0'
    """
    parts = host.split('#')
    if level == 'host':
        # strip both the pool suffix and the backend suffix
        return parts[0].split('@')[0]
    if level == 'backend':
        return parts[0]
    if level == 'pool':
        if len(parts) == 2:
            return parts[1]
        if default_pool_name is True:
            return DEFAULT_POOL_NAME
        return None
def get_volume_rpc_host(host):
    """Return the host string to use when addressing the volume RPC service."""
    if CONF.rpc_backend == "zmq":
        # ZeroMQ RPC driver requires only the hostname.
        # So, return just that.
        return extract_host(host, 'host')
    return extract_host(host)
def append_host(host, pool):
    """Encode pool into host info.

    Returns 'host#pool', or the original host unchanged when either
    piece is missing.
    """
    if host and pool:
        return '%s#%s' % (host, pool)
    return host
def matching_backend_name(src_volume_type, volume_type):
    """Return True when both types declare the same volume_backend_name."""
    src_backend = src_volume_type.get('volume_backend_name')
    dst_backend = volume_type.get('volume_backend_name')
    # Either side missing a backend name means "no match".
    if src_backend and dst_backend:
        return src_backend == dst_backend
    return False
def hosts_are_equivalent(host_1, host_2):
    """Return True when both hosts resolve to the same host@backend."""
    backend_1 = extract_host(host_1)
    backend_2 = extract_host(host_2)
    return backend_1 == backend_2
def read_proc_mounts():
    """Read the /proc/mounts file.

    It's a dummy function but it eases the writing of unit tests as mocking
    __builtin__open() for a specific file only is not trivial.
    """
    with open('/proc/mounts') as mounts:
        lines = mounts.readlines()
    return lines
def _extract_id(vol_name):
    """Extract the uuid portion from a templated volume name, or None."""
    pattern = CONF.volume_name_template.replace('%s', '(?P<uuid>.+)')
    match = re.compile(pattern).match(vol_name)
    if match:
        return match.group('uuid')
    return None
def check_already_managed_volume(vol_name):
    """Check cinder db for already managed volume.

    :param vol_name: volume name parameter
    :returns: bool -- return True, if db entry with specified
                      volume name exist, otherwise return False
    """
    vol_id = _extract_id(vol_name)
    try:
        if not vol_id:
            # Preserve the original short-circuit: a falsy id is
            # returned as-is (e.g. None).
            return vol_id
        # Raises ValueError when the extracted id is not a valid UUID4.
        uuid.UUID(vol_id, version=4)
        return objects.Volume.exists(context.get_admin_context(), vol_id)
    except ValueError:
        return False
def convert_config_string_to_dict(config_string):
    """Convert config file replication string to a dict.

    The only supported form is as follows:
    "{'key-1'='val-1' 'key-2'='val-2'...}"

    :param config_string: Properly formatted string to convert to dict.
    :response: dict of string values
    """
    try:
        # Rewrite "'k'='v' 'k2'='v2'" pairs into dict-literal syntax and
        # let ast.literal_eval build the dict safely.
        literal = config_string.replace("=", ":").replace(" ", ", ")
        return ast.literal_eval(literal)
    except Exception:
        LOG.warning(_LW("Error encountered translating config_string: "
                        "%(config_string)s to dict"),
                    {'config_string': config_string})
        return {}
| {
"content_hash": "f1610d0deac46e9f2617ad48d25823b8",
"timestamp": "",
"source": "github",
"line_count": 696,
"max_line_length": 79,
"avg_line_length": 34.08764367816092,
"alnum_prop": 0.6003793466807166,
"repo_name": "bswartz/cinder",
"id": "53fd2bdb7cb2b72ee66cecea1ee4cffd9aaaebf4",
"size": "24342",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16345375"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
} |
import traceback
class ObjectImportError(Exception):
    """Error raised for failed object imports."""
def import_items(import_directives):
    """Import the items in import_directives and return the imported items.

    Each item in import_directives should be one of the following forms
        * a tuple like ('module.submodule', ('classname1', 'classname2')),
          which indicates a 'from module.submodule import classname1, classname2'
        * a tuple like ('module.submodule', 'classname1'), which indicates a
          'from module.submodule import classname1'
        * a tuple like ('module.submodule', '*'), which indicates a
          'from module.submodule import *'
        * a simple 'module.submodule' which indicates 'import module.submodule'.

    Returns a dict mapping the names to the imported items.
    """
    imported_objects = {}
    for directive in import_directives:
        try:
            if isinstance(directive, str):
                # 'module.submodule' -> import module.submodule; bind the
                # top-level package name, as the import statement would.
                imported_objects[directive.split('.')[0]] = __import__(directive)
                print("import %s" % directive)
            elif isinstance(directive[1], str):
                # Explicit string check replaces the old approach of
                # iterating directive[1] and relying on AttributeError,
                # which iterated the string character by character and
                # could leak single-character attributes into the result.
                if directive[1] == '*':
                    # from module.submodule import *
                    imported_object = __import__(directive[0], {}, {},
                                                 directive[1])
                    for k in dir(imported_object):
                        imported_objects[k] = getattr(imported_object, k)
                    print("from %s import *" % directive[0])
                else:
                    # from module.submodule import classname1
                    imported_object = getattr(
                        __import__(directive[0], {}, {}, directive[1]),
                        directive[1])
                    imported_objects[directive[1]] = imported_object
                    print("from %s import %s" % (directive[0], directive[1]))
            else:
                # from module.submodule import classname1, classname2
                for name in directive[1]:
                    imported_objects[name] = getattr(
                        __import__(directive[0], {}, {}, name), name)
                print("from %s import %s" % (directive[0],
                                             ', '.join(directive[1])))
        except ImportError:
            try:
                # Works when directive is a plain string.
                print("Unable to import %s" % directive)
            except TypeError:
                # directive is a 2-tuple; unpack it into the format string.
                print("Unable to import %s from %s" % directive)
    return imported_objects
def import_objects(options, style):
    """Assemble the default interactive namespace for shell_plus.

    Runs configured pre-imports, optionally pulls in common Django
    helpers, loads every model from installed apps (and mongoengine
    documents when mongoengine is importable), applies
    SHELL_PLUS_DONT_LOAD exclusions and SHELL_PLUS_MODEL_ALIASES
    renames, then runs configured post-imports.

    :param options: management-command options dict; 'dont_load',
        'quiet_load' and 'traceback' are read here
    :param style: Django output style object used for colored messages
    :returns: dict mapping names (or aliases) to imported objects

    NOTE(review): uses ``dict.iteritems()`` throughout, so this code is
    Python 2 only.
    """
    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
    # models from installed apps. (this is fixed by now, but leaving it here
    # for people using 0.96 or older trunk (pre [5919]) versions.
    from django.db.models.loading import get_models, get_apps
    mongoengine = False
    try:
        from mongoengine.base import _document_registry
        mongoengine = True
    except:  # mongoengine is optional; any failure simply disables it
        pass

    loaded_models = get_models()  # NOQA

    from django.conf import settings
    imported_objects = {}
    # optparse will set this to [] if it doesn't exist
    dont_load_cli = options.get('dont_load')
    dont_load_conf = getattr(settings, 'SHELL_PLUS_DONT_LOAD', [])
    dont_load = dont_load_cli + dont_load_conf
    quiet_load = options.get('quiet_load')

    model_aliases = getattr(settings, 'SHELL_PLUS_MODEL_ALIASES', {})

    # Perform pre-imports before any other imports
    imports = import_items(getattr(settings, 'SHELL_PLUS_PRE_IMPORTS', {}))
    for k, v in imports.iteritems():
        imported_objects[k] = v

    # Map of module path -> list of names to import from it.
    load_models = {}

    if getattr(settings, 'SHELL_PLUS_DJANGO_IMPORTS', True):
        # Commonly used Django helpers made available by default.
        load_models.update({
            'django.core.cache': ['cache'],
            'django.core.urlresolvers': ['reverse'],
            'django.conf': ['settings'],
            'django.db': ['transaction'],
            'django.db.models': ['Avg', 'Count', 'F', 'Max', 'Min', 'Sum', 'Q'],
            'django.utils': ['timezone'],
        })

    if mongoengine:
        # Register mongoengine documents by defining module, honoring
        # dont_load at both "app" and "app.Document" granularity.
        for name, mod in _document_registry.iteritems():
            name = name.split('.')[-1]
            app_name = mod.__module__.split('.')[-2]
            if app_name in dont_load or ("%s.%s" % (app_name, name)) in dont_load:
                continue
            load_models.setdefault(mod.__module__, [])
            load_models[mod.__module__].append(name)

    for app_mod in get_apps():
        app_models = get_models(app_mod)
        if not app_models:
            continue
        app_name = app_mod.__name__.split('.')[-2]
        if app_name in dont_load:
            continue
        app_aliases = model_aliases.get(app_name, {})
        for mod in app_models:
            if "%s.%s" % (app_name, mod.__name__) in dont_load:
                continue
            load_models.setdefault(mod.__module__, [])
            load_models[mod.__module__].append(mod.__name__)

    # Import everything collected above, module by module.
    for app_mod, models in sorted(load_models.iteritems()):
        app_name = app_mod.split('.')[-2]
        app_aliases = model_aliases.get(app_name, {})
        model_labels = []

        for model_name in sorted(models):
            try:
                imported_object = getattr(__import__(app_mod, {}, {}, model_name), model_name)

                # dont_load re-checked per name (mongoengine entries were
                # keyed by document name rather than model name above).
                if "%s.%s" % (app_name, model_name) in dont_load:
                    continue

                alias = app_aliases.get(model_name, model_name)
                imported_objects[alias] = imported_object
                if model_name == alias:
                    model_labels.append(model_name)
                else:
                    model_labels.append("%s (as %s)" % (model_name, alias))

            except AttributeError as e:
                if options.get("traceback"):
                    traceback.print_exc()
                if not quiet_load:
                    print(style.ERROR("Failed to import '%s' from '%s' reason: %s" % (model_name, app_mod, str(e))))
                continue

        if not quiet_load:
            print(style.SQL_COLTYPE("from %s import %s" % (app_mod, ", ".join(model_labels))))

    # Perform post-imports after any other imports
    imports = import_items(getattr(settings, 'SHELL_PLUS_POST_IMPORTS', {}))
    for k, v in imports.iteritems():
        imported_objects[k] = v

    return imported_objects
| {
"content_hash": "c9549865da0c0a6d30b122ee8898ad0e",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 146,
"avg_line_length": 40.45569620253165,
"alnum_prop": 0.5622653316645807,
"repo_name": "bop/bauhaus",
"id": "a10cdd50a8fb82d06ef604706492d812fc8b7012",
"size": "6392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/django_extensions/management/shells.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "145210"
},
{
"name": "Groff",
"bytes": "22"
},
{
"name": "HTML",
"bytes": "1013469"
},
{
"name": "JavaScript",
"bytes": "267371"
},
{
"name": "Python",
"bytes": "6660999"
},
{
"name": "Shell",
"bytes": "4317"
}
],
"symlink_target": ""
} |
import copy
import json
from oslo_log import log as logging
from testtools import matchers
from heat_integrationtests.common import test
from heat_integrationtests.functional import functional_base
LOG = logging.getLogger(__name__)
class AutoscalingGroupTest(functional_base.FunctionalTestsBase):
    """Base class for AWS::AutoScaling::AutoScalingGroup integration tests.

    Holds the shared CFN group template, provider templates used to
    override AWS::EC2::Instance, and assertion helpers used by the
    concrete test classes below.
    """

    # CFN template creating an autoscaling group and its launch
    # configuration; "size" feeds MinSize so tests can vary group size.
    template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "Template to create multiple instances.",
  "Parameters" : {"size": {"Type": "String", "Default": "1"},
                  "AZ": {"Type": "String", "Default": "nova"},
                  "image": {"Type": "String"},
                  "flavor": {"Type": "String"}},
  "Resources": {
    "JobServerGroup": {
      "Type" : "AWS::AutoScaling::AutoScalingGroup",
      "Properties" : {
        "AvailabilityZones" : [{"Ref": "AZ"}],
        "LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
        "MinSize" : {"Ref": "size"},
        "MaxSize" : "20"
      }
    },
    "JobServerConfig" : {
      "Type" : "AWS::AutoScaling::LaunchConfiguration",
      "Metadata": {"foo": "bar"},
      "Properties": {
        "ImageId" : {"Ref": "image"},
        "InstanceType" : {"Ref": "flavor"},
        "SecurityGroups" : [ "sg-1" ],
        "UserData" : "jsconfig data"
      }
    }
  },
  "Outputs": {
    "InstanceList": {"Value": {
      "Fn::GetAtt": ["JobServerGroup", "InstanceList"]}},
    "JobServerConfigRef": {"Value": {
      "Ref": "JobServerConfig"}}
  }
}
'''

    # Lightweight provider standing in for AWS::EC2::Instance; a
    # RandomString resource avoids booting real servers.
    instance_template = '''
heat_template_version: 2013-05-23
parameters:
  ImageId: {type: string}
  InstanceType: {type: string}
  SecurityGroups: {type: comma_delimited_list}
  UserData: {type: string}
  Tags: {type: comma_delimited_list, default: "x,y"}
resources:
  random1:
    type: OS::Heat::RandomString
    properties:
      salt: {get_param: ImageId}
outputs:
  PublicIp: {value: {get_attr: [random1, value]}}
  AvailabilityZone: {value: 'not-used11'}
  PrivateDnsName: {value: 'not-used12'}
  PublicDnsName: {value: 'not-used13'}
  PrivateIp: {value: 'not-used14'}
'''

    # This is designed to fail: the wait condition times out (1 second)
    # with nothing signalling its handle, so creation always fails.
    bad_instance_template = '''
heat_template_version: 2013-05-23
parameters:
  ImageId: {type: string}
  InstanceType: {type: string}
  SecurityGroups: {type: comma_delimited_list}
  UserData: {type: string}
  Tags: {type: comma_delimited_list, default: "x,y"}
resources:
  random1:
    type: OS::Heat::RandomString
    depends_on: waiter
  ready_poster:
    type: AWS::CloudFormation::WaitConditionHandle
  waiter:
    type: AWS::CloudFormation::WaitCondition
    properties:
      Handle: {get_resource: ready_poster}
      Timeout: 1
outputs:
  PublicIp:
    value: {get_attr: [random1, value]}
'''

    def setUp(self):
        """Skip the test early unless image/flavor config is present."""
        super(AutoscalingGroupTest, self).setUp()
        if not self.conf.image_ref:
            raise self.skipException("No image configured to test")
        if not self.conf.minimal_image_ref:
            raise self.skipException("No minimal image configured to test")
        if not self.conf.instance_type:
            raise self.skipException("No flavor configured to test")

    def assert_instance_count(self, stack, expected_count):
        """Assert the group's InstanceList output has expected_count items."""
        inst_list = self._stack_output(stack, 'InstanceList')
        self.assertEqual(expected_count, len(inst_list.split(',')))

    def _assert_instance_state(self, nested_identifier,
                               num_complete, num_failed):
        """Assert exact counts of COMPLETE and FAILED nested resources."""
        # Decrement each expectation as a matching resource is seen;
        # both counters must land exactly on zero.
        for res in self.client.resources.list(nested_identifier):
            if 'COMPLETE' in res.resource_status:
                num_complete = num_complete - 1
            elif 'FAILED' in res.resource_status:
                num_failed = num_failed - 1
        self.assertEqual(0, num_failed)
        self.assertEqual(0, num_complete)
class AutoscalingGroupBasicTest(AutoscalingGroupTest):
    """Basic lifecycle tests: create, resize, replace, create/update
    failure propagation, and suspend/resume.
    """

    def test_basic_create_works(self):
        """Make sure the working case is good.

        Note this combines test_override_aws_ec2_instance into this test as
        well, which is:
        If AWS::EC2::Instance is overridden, AutoScalingGroup will
        automatically use that overridden resource type.
        """
        # Map AWS::EC2::Instance onto the lightweight provider template.
        files = {'provider.yaml': self.instance_template}
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': 4,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}
        stack_identifier = self.stack_create(template=self.template,
                                             files=files, environment=env)
        initial_resources = {
            'JobServerConfig': 'AWS::AutoScaling::LaunchConfiguration',
            'JobServerGroup': 'AWS::AutoScaling::AutoScalingGroup'}
        self.assertEqual(initial_resources,
                         self.list_resources(stack_identifier))

        stack = self.client.stacks.get(stack_identifier)
        self.assert_instance_count(stack, 4)

    def test_size_updates_work(self):
        """Raising the size parameter grows the group in place."""
        files = {'provider.yaml': self.instance_template}
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': 2,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}

        stack_identifier = self.stack_create(template=self.template,
                                             files=files,
                                             environment=env)
        stack = self.client.stacks.get(stack_identifier)
        self.assert_instance_count(stack, 2)

        # Increase min size to 5
        env2 = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
                'parameters': {'size': 5,
                               'image': self.conf.image_ref,
                               'flavor': self.conf.instance_type}}
        self.update_stack(stack_identifier, self.template,
                          environment=env2, files=files)
        stack = self.client.stacks.get(stack_identifier)
        self.assert_instance_count(stack, 5)

    def test_update_group_replace(self):
        """Make sure that during a group update the non updatable
        properties cause a replacement.
        """
        files = {'provider.yaml': self.instance_template}
        env = {'resource_registry':
               {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': '1',
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}

        stack_identifier = self.stack_create(template=self.template,
                                             files=files,
                                             environment=env)
        rsrc = self.client.resources.get(stack_identifier, 'JobServerGroup')
        orig_asg_id = rsrc.physical_resource_id

        # Changing the AZ is not an in-place update, forcing replacement.
        env2 = {'resource_registry':
                {'AWS::EC2::Instance': 'provider.yaml'},
                'parameters': {'size': '1',
                               'AZ': 'wibble',
                               'image': self.conf.image_ref,
                               'flavor': self.conf.instance_type}}
        self.update_stack(stack_identifier, self.template,
                          environment=env2, files=files)

        # replacement will cause the resource physical_resource_id to change.
        rsrc = self.client.resources.get(stack_identifier, 'JobServerGroup')
        self.assertNotEqual(orig_asg_id, rsrc.physical_resource_id)

    def test_create_instance_error_causes_group_error(self):
        """If a resource in an instance group fails to be created, the instance
        group itself will fail and the broken inner resource will remain.
        """
        stack_name = self._stack_rand_name()
        files = {'provider.yaml': self.bad_instance_template}
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': 2,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}

        # Create via the raw client (not stack_create) with rollback
        # disabled so the CREATE_FAILED state can be observed.
        self.client.stacks.create(
            stack_name=stack_name,
            template=self.template,
            files=files,
            disable_rollback=True,
            parameters={},
            environment=env
        )
        self.addCleanup(self._stack_delete, stack_name)
        stack = self.client.stacks.get(stack_name)
        stack_identifier = '%s/%s' % (stack_name, stack.id)
        self._wait_for_stack_status(stack_identifier, 'CREATE_FAILED')
        initial_resources = {
            'JobServerConfig': 'AWS::AutoScaling::LaunchConfiguration',
            'JobServerGroup': 'AWS::AutoScaling::AutoScalingGroup'}
        self.assertEqual(initial_resources,
                         self.list_resources(stack_identifier))

        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')
        # All inner instances should have failed to create.
        self._assert_instance_state(nested_ident, 0, 2)

    def test_update_instance_error_causes_group_error(self):
        """If a resource in an instance group fails to be created during an
        update, the instance group itself will fail and the broken inner
        resource will remain.
        """
        files = {'provider.yaml': self.instance_template}
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': 2,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}

        stack_identifier = self.stack_create(template=self.template,
                                             files=files,
                                             environment=env)
        initial_resources = {
            'JobServerConfig': 'AWS::AutoScaling::LaunchConfiguration',
            'JobServerGroup': 'AWS::AutoScaling::AutoScalingGroup'}
        self.assertEqual(initial_resources,
                         self.list_resources(stack_identifier))

        stack = self.client.stacks.get(stack_identifier)
        self.assert_instance_count(stack, 2)
        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')
        self._assert_instance_state(nested_ident, 2, 0)
        initial_list = [res.resource_name
                        for res in self.client.resources.list(nested_ident)]

        # Grow the group while switching to the failing provider template.
        env['parameters']['size'] = 3
        files2 = {'provider.yaml': self.bad_instance_template}
        self.client.stacks.update(
            stack_id=stack_identifier,
            template=self.template,
            files=files2,
            disable_rollback=True,
            parameters={},
            environment=env
        )
        self._wait_for_stack_status(stack_identifier, 'UPDATE_FAILED')

        # assert that there are 3 bad instances
        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')

        # 2 resources should be in update failed, and one create failed.
        for res in self.client.resources.list(nested_ident):
            if res.resource_name in initial_list:
                self._wait_for_resource_status(nested_ident,
                                               res.resource_name,
                                               'UPDATE_FAILED')
            else:
                self._wait_for_resource_status(nested_ident,
                                               res.resource_name,
                                               'CREATE_FAILED')

    def test_group_suspend_resume(self):
        """Suspend then resume propagates to every nested resource."""
        files = {'provider.yaml': self.instance_template}
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': 4,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}
        stack_identifier = self.stack_create(template=self.template,
                                             files=files, environment=env)

        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')

        self.stack_suspend(stack_identifier)
        self._wait_for_all_resource_status(nested_ident, 'SUSPEND_COMPLETE')

        self.stack_resume(stack_identifier)
        self._wait_for_all_resource_status(nested_ident, 'RESUME_COMPLETE')
class AutoscalingGroupUpdatePolicyTest(AutoscalingGroupTest):
    """Exercise AutoScalingRollingUpdate policies on group updates."""

    def ig_tmpl_with_updt_policy(self):
        """Return the group template (as a dict) with a rolling-update
        policy attached to JobServerGroup.
        """
        # json.loads already builds a brand-new structure; the previous
        # copy.deepcopy of the immutable template string was a no-op.
        templ = json.loads(self.template)
        up = {"AutoScalingRollingUpdate": {
            "MinInstancesInService": "1",
            "MaxBatchSize": "2",
            "PauseTime": "PT1S"}}
        templ['Resources']['JobServerGroup']['UpdatePolicy'] = up
        return templ

    def update_instance_group(self, updt_template,
                              num_updates_expected_on_updt,
                              num_creates_expected_on_updt,
                              num_deletes_expected_on_updt,
                              update_replace):
        """Create a size-10 group, update it with updt_template and
        verify the expected mix of kept, created and deleted instances.

        ``update_replace`` records the caller's intent; both paths are
        verified through the same name/status checks below.
        """
        # setup stack from the initial template
        files = {'provider.yaml': self.instance_template}
        size = 10
        env = {'resource_registry': {'AWS::EC2::Instance': 'provider.yaml'},
               'parameters': {'size': size,
                              'image': self.conf.image_ref,
                              'flavor': self.conf.instance_type}}
        stack_name = self._stack_rand_name()
        stack_identifier = self.stack_create(
            stack_name=stack_name,
            template=self.ig_tmpl_with_updt_policy(),
            files=files,
            environment=env)
        stack = self.client.stacks.get(stack_identifier)
        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')

        # test that physical resource name of launch configuration is used
        conf_name = self._stack_output(stack, 'JobServerConfigRef')
        conf_name_pattern = '%s-JobServerConfig-[a-zA-Z0-9]+$' % stack_name
        self.assertThat(conf_name,
                        matchers.MatchesRegex(conf_name_pattern))

        # test the number of instances created
        self.assert_instance_count(stack, size)
        # saves info from initial list of instances for comparison later
        init_instances = self.client.resources.list(nested_ident)
        init_names = [inst.resource_name for inst in init_instances]

        # test stack update
        self.update_stack(stack_identifier, updt_template,
                          environment=env, files=files)
        updt_stack = self.client.stacks.get(stack_identifier)

        # test that the launch configuration is replaced
        updt_conf_name = self._stack_output(updt_stack, 'JobServerConfigRef')
        self.assertThat(updt_conf_name,
                        matchers.MatchesRegex(conf_name_pattern))
        self.assertNotEqual(conf_name, updt_conf_name)

        # test that the group size are the same
        updt_instances = self.client.resources.list(nested_ident)
        updt_names = [inst.resource_name for inst in updt_instances]
        self.assertEqual(len(init_names), len(updt_names))
        for res in updt_instances:
            self.assertEqual('UPDATE_COMPLETE', res.resource_status)

        # test that the appropriate number of instance names are the same
        matched_names = set(updt_names) & set(init_names)
        self.assertEqual(num_updates_expected_on_updt, len(matched_names))

        # test that the appropriate number of new instances are created
        self.assertEqual(num_creates_expected_on_updt,
                         len(set(updt_names) - set(init_names)))

        # test that the appropriate number of instances are deleted
        self.assertEqual(num_deletes_expected_on_updt,
                         len(set(init_names) - set(updt_names)))

        # test that the older instances are the ones being deleted
        if num_deletes_expected_on_updt > 0:
            deletes_expected = init_names[:num_deletes_expected_on_updt]
            # Check each expected-deleted name individually; passing the
            # whole list to assertNotIn would look for the list itself as
            # an element of updt_names and could never fail.
            for expected_name in deletes_expected:
                self.assertNotIn(expected_name, updt_names)

    def test_instance_group_update_replace(self):
        """Test simple update replace with no conflict in batch size and
        minimum instances in service.
        """
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        policy['MinInstancesInService'] = '1'
        policy['MaxBatchSize'] = '3'
        config = updt_template['Resources']['JobServerConfig']
        # A new image forces replacement of every instance.
        config['Properties']['ImageId'] = self.conf.minimal_image_ref

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=10,
                                   num_creates_expected_on_updt=0,
                                   num_deletes_expected_on_updt=0,
                                   update_replace=True)

    def test_instance_group_update_replace_with_adjusted_capacity(self):
        """Test update replace with capacity adjustment due to conflict in
        batch size and minimum instances in service.
        """
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        policy['MinInstancesInService'] = '8'
        policy['MaxBatchSize'] = '4'
        config = updt_template['Resources']['JobServerConfig']
        config['Properties']['ImageId'] = self.conf.minimal_image_ref

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=8,
                                   num_creates_expected_on_updt=2,
                                   num_deletes_expected_on_updt=2,
                                   update_replace=True)

    def test_instance_group_update_replace_huge_batch_size(self):
        """Test update replace with a huge batch size."""
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        policy['MinInstancesInService'] = '0'
        # Batch size exceeding the group size must still update cleanly.
        policy['MaxBatchSize'] = '20'
        config = updt_template['Resources']['JobServerConfig']
        config['Properties']['ImageId'] = self.conf.minimal_image_ref

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=10,
                                   num_creates_expected_on_updt=0,
                                   num_deletes_expected_on_updt=0,
                                   update_replace=True)

    def test_instance_group_update_replace_huge_min_in_service(self):
        """Test update replace with a huge number of minimum instances in
        service.
        """
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        # More min-in-service than group members forces an extra instance.
        policy['MinInstancesInService'] = '20'
        policy['MaxBatchSize'] = '1'
        policy['PauseTime'] = 'PT0S'
        config = updt_template['Resources']['JobServerConfig']
        config['Properties']['ImageId'] = self.conf.minimal_image_ref

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=9,
                                   num_creates_expected_on_updt=1,
                                   num_deletes_expected_on_updt=1,
                                   update_replace=True)

    def test_instance_group_update_no_replace(self):
        """Test simple update only and no replace (i.e. updated instance
        flavor in Launch Configuration) with no conflict in batch size and
        minimum instances in service.
        """
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        policy['MinInstancesInService'] = '1'
        policy['MaxBatchSize'] = '3'
        policy['PauseTime'] = 'PT0S'
        config = updt_template['Resources']['JobServerConfig']
        # Flavor-only change updates in place, no replacement.
        config['Properties']['InstanceType'] = 'm1.tiny'

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=10,
                                   num_creates_expected_on_updt=0,
                                   num_deletes_expected_on_updt=0,
                                   update_replace=False)

    def test_instance_group_update_no_replace_with_adjusted_capacity(self):
        """Test update only and no replace (i.e. updated instance flavor in
        Launch Configuration) with capacity adjustment due to conflict in
        batch size and minimum instances in service.
        """
        updt_template = self.ig_tmpl_with_updt_policy()
        group = updt_template['Resources']['JobServerGroup']
        policy = group['UpdatePolicy']['AutoScalingRollingUpdate']
        policy['MinInstancesInService'] = '8'
        policy['MaxBatchSize'] = '4'
        policy['PauseTime'] = 'PT0S'
        config = updt_template['Resources']['JobServerConfig']
        config['Properties']['InstanceType'] = 'm1.tiny'

        self.update_instance_group(updt_template,
                                   num_updates_expected_on_updt=8,
                                   num_creates_expected_on_updt=2,
                                   num_deletes_expected_on_updt=2,
                                   update_replace=False)
class AutoScalingSignalTest(AutoscalingGroupTest):
    """Scaling-policy signal behaviour: metadata propagation, policy
    updates, and signalling while the stack is suspended.
    """

    # Adds a custom_lb instance whose metadata mirrors the group's
    # InstanceList, plus scale-up (+1) and scale-down (-2) policies.
    # NOTE: test_signal_with_policy_update edits this template with
    # str.replace, so the exact spacing of the ScalingAdjustment and
    # DesiredCapacity lines below is load-bearing.
    template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "Template to create multiple instances.",
  "Parameters" : {"size": {"Type": "String", "Default": "1"},
                  "AZ": {"Type": "String", "Default": "nova"},
                  "image": {"Type": "String"},
                  "flavor": {"Type": "String"}},
  "Resources": {
    "custom_lb": {
      "Type": "AWS::EC2::Instance",
      "Properties": {
        "ImageId": {"Ref": "image"},
        "InstanceType": {"Ref": "flavor"},
        "UserData": "foo",
        "SecurityGroups": [ "sg-1" ],
        "Tags": []
      },
      "Metadata": {
        "IPs": {"Fn::GetAtt": ["JobServerGroup", "InstanceList"]}
      }
    },
    "JobServerGroup": {
      "Type" : "AWS::AutoScaling::AutoScalingGroup",
      "Properties" : {
        "AvailabilityZones" : [{"Ref": "AZ"}],
        "LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
        "DesiredCapacity" : {"Ref": "size"},
        "MinSize" : "0",
        "MaxSize" : "20"
      }
    },
    "JobServerConfig" : {
      "Type" : "AWS::AutoScaling::LaunchConfiguration",
      "Metadata": {"foo": "bar"},
      "Properties": {
        "ImageId" : {"Ref": "image"},
        "InstanceType" : {"Ref": "flavor"},
        "SecurityGroups" : [ "sg-1" ],
        "UserData" : "jsconfig data"
      }
    },
    "ScaleUpPolicy" : {
      "Type" : "AWS::AutoScaling::ScalingPolicy",
      "Properties" : {
        "AdjustmentType" : "ChangeInCapacity",
        "AutoScalingGroupName" : { "Ref" : "JobServerGroup" },
        "Cooldown" : "0",
        "ScalingAdjustment": "1"
      }
    },
    "ScaleDownPolicy" : {
      "Type" : "AWS::AutoScaling::ScalingPolicy",
      "Properties" : {
        "AdjustmentType" : "ChangeInCapacity",
        "AutoScalingGroupName" : { "Ref" : "JobServerGroup" },
        "Cooldown" : "0",
        "ScalingAdjustment" : "-2"
      }
    }
  },
  "Outputs": {
    "InstanceList": {"Value": {
      "Fn::GetAtt": ["JobServerGroup", "InstanceList"]}}
  }
}
'''

    # Provider for the custom_lb instance: no resources, only static
    # outputs, so its metadata can be inspected cheaply.
    lb_template = '''
heat_template_version: 2013-05-23
parameters:
  ImageId: {type: string}
  InstanceType: {type: string}
  SecurityGroups: {type: comma_delimited_list}
  UserData: {type: string}
  Tags: {type: comma_delimited_list, default: "x,y"}
resources:
outputs:
  PublicIp: {value: "not-used"}
  AvailabilityZone: {value: 'not-used1'}
  PrivateDnsName: {value: 'not-used2'}
  PublicDnsName: {value: 'not-used3'}
  PrivateIp: {value: 'not-used4'}
'''

    def setUp(self):
        """Prepare the files/environment shared by all signal tests."""
        super(AutoScalingSignalTest, self).setUp()
        self.build_timeout = self.conf.build_timeout
        self.build_interval = self.conf.build_interval
        self.files = {'provider.yaml': self.instance_template,
                      'lb.yaml': self.lb_template}
        # custom_lb uses the output-only lb.yaml; every other
        # AWS::EC2::Instance maps to the RandomString provider.
        self.env = {'resource_registry':
                    {'resources':
                     {'custom_lb': {'AWS::EC2::Instance': 'lb.yaml'}},
                     'AWS::EC2::Instance': 'provider.yaml'},
                    'parameters': {'size': 2,
                                   'image': self.conf.image_ref,
                                   'flavor': self.conf.instance_type}}

    def check_instance_count(self, stack_identifier, expected):
        """Return True when both the custom_lb metadata and the stack's
        InstanceList output report `expected` instances.

        Designed for polling via test.call_until_true.
        """
        md = self.client.resources.metadata(stack_identifier, 'custom_lb')
        actual_md = len(md['IPs'].split(','))
        if actual_md != expected:
            LOG.warn('check_instance_count exp:%d, meta:%s' % (expected,
                                                               md['IPs']))
            return False

        stack = self.client.stacks.get(stack_identifier)
        inst_list = self._stack_output(stack, 'InstanceList')
        actual = len(inst_list.split(','))
        if actual != expected:
            LOG.warn('check_instance_count exp:%d, act:%s' % (expected,
                                                              inst_list))
        return actual == expected

    def test_scaling_meta_update(self):
        """Use heatclient to signal the up and down policy.

        Then confirm that the metadata in the custom_lb is updated each
        time.
        """
        stack_identifier = self.stack_create(template=self.template,
                                             files=self.files,
                                             environment=self.env)
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 2))

        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')
        # Scale up one, Trigger alarm
        self.client.resources.signal(stack_identifier, 'ScaleUpPolicy')
        self._wait_for_stack_status(nested_ident, 'UPDATE_COMPLETE')
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 3))

        # Scale down two, Trigger alarm
        self.client.resources.signal(stack_identifier, 'ScaleDownPolicy')
        self._wait_for_stack_status(nested_ident, 'UPDATE_COMPLETE')
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 1))

    def test_signal_with_policy_update(self):
        """Prove that an updated policy is used in the next signal."""
        stack_identifier = self.stack_create(template=self.template,
                                             files=self.files,
                                             environment=self.env)
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 2))

        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')
        # Scale up one, Trigger alarm
        self.client.resources.signal(stack_identifier, 'ScaleUpPolicy')
        self._wait_for_stack_status(nested_ident, 'UPDATE_COMPLETE')
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 3))

        # increase the adjustment to "+2" and remove the DesiredCapacity
        # so we don't go from 3 to 2.
        new_template = self.template.replace(
            '"ScalingAdjustment": "1"',
            '"ScalingAdjustment": "2"').replace(
                '"DesiredCapacity" : {"Ref": "size"},', '')

        self.update_stack(stack_identifier, template=new_template,
                          environment=self.env, files=self.files)

        # Scale up two, Trigger alarm
        self.client.resources.signal(stack_identifier, 'ScaleUpPolicy')
        self._wait_for_stack_status(nested_ident, 'UPDATE_COMPLETE')
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 5))

    def test_signal_during_suspend(self):
        """Prove that a signal will fail when the stack is in suspend."""
        stack_identifier = self.stack_create(template=self.template,
                                             files=self.files,
                                             environment=self.env)
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 2))

        nested_ident = self.assert_resource_is_a_stack(stack_identifier,
                                                       'JobServerGroup')

        # suspend the top level stack.
        self.client.actions.suspend(stack_id=stack_identifier)
        self._wait_for_resource_status(
            stack_identifier, 'JobServerGroup', 'SUSPEND_COMPLETE')

        # Send a signal and confirm nothing happened.
        self.client.resources.signal(stack_identifier, 'ScaleUpPolicy')
        ev = self.wait_for_event_with_reason(
            stack_identifier,
            reason='Cannot signal resource during SUSPEND',
            rsrc_name='ScaleUpPolicy')
        self.assertEqual('SUSPEND_COMPLETE', ev[0].resource_status)

        # still SUSPEND_COMPLETE (not gone to UPDATE_COMPLETE)
        self._wait_for_stack_status(nested_ident, 'SUSPEND_COMPLETE')
        self._wait_for_stack_status(stack_identifier, 'SUSPEND_COMPLETE')
        # still 2 instances.
        self.assertTrue(test.call_until_true(
            self.build_timeout, self.build_interval,
            self.check_instance_count, stack_identifier, 2))
| {
"content_hash": "4f2c90e285110f94fe15fe36eed91ac7",
"timestamp": "",
"source": "github",
"line_count": 733,
"max_line_length": 79,
"avg_line_length": 41.99317871759891,
"alnum_prop": 0.5652837789545498,
"repo_name": "srznew/heat",
"id": "1b9fe991ba2e7e0d7c6085f41a358ed05f8601b8",
"size": "31354",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "heat_integrationtests/functional/test_autoscaling.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6529810"
},
{
"name": "Shell",
"bytes": "33395"
}
],
"symlink_target": ""
} |
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import context
from cinder import exception
from cinder.i18n import _, _LI
from cinder.volume.drivers.huawei import constants
from cinder.volume import qos_specs
LOG = logging.getLogger(__name__)
class SmartQos(object):
    """Manage Huawei SmartQoS policies for LUNs through the array client."""

    def __init__(self, client):
        # REST client used to talk to the Huawei array.
        self.client = client

    @staticmethod
    def get_qos_by_volume_type(volume_type):
        """Extract and validate Huawei QoS settings from a volume type.

        :param volume_type: volume type dict, possibly None/empty
        :returns: dict of upper-cased QoS keys to their string values, or
                  an empty dict when no qos_specs are associated
        :raises exception.InvalidInput: on malformed or conflicting specs
        """
        # We prefer the qos_specs association
        # and override any existing extra-specs settings
        # if present.
        if not volume_type:
            return {}

        qos_specs_id = volume_type.get('qos_specs_id')
        if not qos_specs_id:
            return {}

        qos = {}
        io_type_flag = None
        ctxt = context.get_admin_context()
        kvs = qos_specs.get_qos_specs(ctxt, qos_specs_id)['specs']
        LOG.info(_LI('The QoS specs is: %s.'), kvs)

        for k, v in kvs.items():
            if k not in constants.HUAWEI_VALID_KEYS:
                continue
            if k != 'IOType' and int(v) <= 0:
                msg = _('QoS config is wrong. %s must > 0.') % k
                LOG.error(msg)
                raise exception.InvalidInput(reason=msg)
            if k == 'IOType':
                if v not in ['0', '1', '2']:
                    msg = _('Illegal value specified for IOTYPE: 0, 1, or 2.')
                    LOG.error(msg)
                    raise exception.InvalidInput(reason=msg)
                io_type_flag = 1
            # Both the IOType and non-IOType branches store the value the
            # same way, so do it once here.
            qos[k.upper()] = v

        if not io_type_flag:
            msg = (_('QoS policy must specify for IOTYPE: 0, 1, or 2, '
                     'QoS policy: %(qos_policy)s ') % {'qos_policy': qos})
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)

        # QoS policy must specify for IOTYPE and another qos_specs.
        if len(qos) < 2:
            msg = (_('QoS policy must specify for IOTYPE and another '
                     'qos_specs, QoS policy: %(qos_policy)s.')
                   % {'qos_policy': qos})
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)

        # An upper-limit key (MAX*) and a lower-limit key (MIN*/LATENCY)
        # cannot coexist in one policy.
        for upper_limit in constants.UPPER_LIMIT_KEYS:
            for lower_limit in constants.LOWER_LIMIT_KEYS:
                if upper_limit in qos and lower_limit in qos:
                    msg = (_('QoS policy upper_limit and lower_limit '
                             'conflict, QoS policy: %(qos_policy)s.')
                           % {'qos_policy': qos})
                    LOG.error(msg)
                    raise exception.InvalidInput(reason=msg)

        return qos

    def _is_high_priority(self, qos):
        """Check QoS priority.

        Any lower-bound guarantee (MIN*) or latency target makes the LUN
        high priority on the array.
        """
        # str.startswith with a tuple replaces the old find(...) == 0 pair.
        return any(key.startswith(('MIN', 'LATENCY')) for key in qos)

    def add(self, qos, lun_id):
        """Apply a QoS policy to a LUN, creating or reusing array policies.

        :raises exception.VolumeBackendAPIException: re-raised after any
            partially created policy has been cleaned up
        """
        policy_id = None
        try:
            # Check QoS priority.
            if self._is_high_priority(qos):
                self.client.change_lun_priority(lun_id)
            # Create QoS policy and activate it.
            version = self.client.find_array_version()
            if version >= constants.ARRAY_VERSION:
                # Newer arrays can share one policy between several LUNs.
                (qos_id, lun_list) = self.client.find_available_qos(qos)
                if qos_id:
                    self.client.add_lun_to_qos(qos_id, lun_id, lun_list)
                else:
                    policy_id = self.client.create_qos_policy(qos, lun_id)
                    self.client.activate_deactivate_qos(policy_id, True)
            else:
                policy_id = self.client.create_qos_policy(qos, lun_id)
                self.client.activate_deactivate_qos(policy_id, True)
        except exception.VolumeBackendAPIException:
            with excutils.save_and_reraise_exception():
                # Don't leave a half-configured policy behind.
                if policy_id is not None:
                    self.client.delete_qos_policy(policy_id)

    def remove(self, qos_id, lun_id):
        """Detach a LUN from a QoS policy; delete the policy when this
        LUN is its last (or only) member.
        """
        qos_info = self.client.get_qos_info(qos_id)
        lun_list = self.client.get_lun_list_in_qos(qos_id, qos_info)
        if len(lun_list) <= 1:
            qos_status = qos_info['RUNNINGSTATUS']
            # 2: Active status.
            if qos_status == constants.STATUS_QOS_ACTIVE:
                self.client.activate_deactivate_qos(qos_id, False)
            self.client.delete_qos_policy(qos_id)
        else:
            self.client.remove_lun_from_qos(lun_id, lun_list, qos_id)
class SmartPartition(object):
    """Assign LUNs to a named Huawei SmartPartition cache partition."""

    def __init__(self, client):
        self.client = client

    def add(self, opts, lun_id):
        """Add *lun_id* to the partition named in *opts*, if enabled."""
        if opts['smartpartition'] != 'true':
            # Feature not requested for this volume; nothing to do.
            return
        name = opts['partitionname']
        if not name:
            raise exception.InvalidInput(
                reason=_('Partition name is None, please set '
                         'smartpartition:partitionname in key.'))
        partition_id = self.client.get_partition_id_by_name(name)
        if not partition_id:
            raise exception.InvalidInput(
                reason=(_('Can not find partition id by name %(name)s.')
                        % {'name': name}))
        self.client.add_lun_to_partition(lun_id, partition_id)
class SmartCache(object):
    """Bind LUNs to a named Huawei SmartCache cache pool."""

    def __init__(self, client):
        self.client = client

    def add(self, opts, lun_id):
        """Add *lun_id* to the cache named in *opts*, if enabled."""
        if opts['smartcache'] != 'true':
            # Feature not requested for this volume; nothing to do.
            return
        name = opts['cachename']
        if not name:
            raise exception.InvalidInput(
                reason=_('Cache name is None, please set '
                         'smartcache:cachename in key.'))
        cache_id = self.client.get_cache_id_by_name(name)
        if not cache_id:
            raise exception.InvalidInput(
                reason=(_('Can not find cache id by cache name %(name)s.')
                        % {'name': name}))
        self.client.add_lun_to_cache(lun_id, cache_id)
class SmartX(object):
    """Validate and normalise Huawei SmartX options (tier/thin/cache/partition)."""

    def get_smartx_specs_opts(self, opts):
        """Run *opts* through every SmartX validator and return it."""
        # Check that smarttier is 0/1/2/3
        for validate in (self.get_smarttier_opts,
                         self.get_smartthin_opts,
                         self.get_smartcache_opts,
                         self.get_smartpartition_opts):
            opts = validate(opts)
        return opts

    def get_smarttier_opts(self, opts):
        """Default/validate the tiering policy ('0' when smarttier is off)."""
        if opts['smarttier'] != 'true':
            opts['policy'] = '0'
            return opts
        if not opts['policy']:
            opts['policy'] = '1'
        elif opts['policy'] not in ['0', '1', '2', '3']:
            raise exception.InvalidInput(
                reason=(_('Illegal value specified for smarttier: '
                          'set to either 0, 1, 2, or 3.')))
        return opts

    def get_smartthin_opts(self, opts):
        """Map the thin/thick provisioning flags onto the LUNType option."""
        thin = opts['thin_provisioning_support'] == 'true'
        thick = opts['thick_provisioning_support'] == 'true'
        if thin:
            if thick:
                raise exception.InvalidInput(
                    reason=(_('Illegal value specified for thin: '
                              'Can not set thin and thick at the same time.')))
            opts['LUNType'] = constants.THIN_LUNTYPE
        if thick:
            opts['LUNType'] = constants.THICK_LUNTYPE
        return opts

    def get_smartcache_opts(self, opts):
        """Require a cache name when smartcache is on; clear it otherwise."""
        if opts['smartcache'] != 'true':
            opts['cachename'] = None
        elif not opts['cachename']:
            raise exception.InvalidInput(
                reason=_('Cache name is None, please set '
                         'smartcache:cachename in key.'))
        return opts

    def get_smartpartition_opts(self, opts):
        """Require a partition name when smartpartition is on; clear it otherwise."""
        if opts['smartpartition'] != 'true':
            opts['partitionname'] = None
        elif not opts['partitionname']:
            raise exception.InvalidInput(
                reason=_('Partition name is None, please set '
                         'smartpartition:partitionname in key.'))
        return opts
| {
"content_hash": "cc8d11d821ef209489e5d182e1ac8c0b",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 79,
"avg_line_length": 37.096330275229356,
"alnum_prop": 0.531222950414245,
"repo_name": "Hybrid-Cloud/cinder",
"id": "2569e57073efe9e98d0645326865fb6d96109e09",
"size": "8736",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/huawei/smartx.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17513896"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
} |
import django
class APIConfig(django.apps.AppConfig):
    """Django AppConfig for the modelmanager.plugins.browser REST API app."""

    # Dotted path of the app package plus the labels Django uses for it.
    name = 'modelmanager.plugins.browser.api'
    label = 'api'
    verbose_name = 'API'
| {
"content_hash": "e8321cc15fb4938515ce41f37198dbee",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 74,
"avg_line_length": 23.6,
"alnum_prop": 0.673728813559322,
"repo_name": "mwort/modelmanager",
"id": "e0bafbeb7648bc9537e68a6024147d1652202d60",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modelmanager/plugins/browser/api/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "538"
},
{
"name": "HTML",
"bytes": "2995"
},
{
"name": "JavaScript",
"bytes": "378"
},
{
"name": "Makefile",
"bytes": "1732"
},
{
"name": "Python",
"bytes": "138890"
}
],
"symlink_target": ""
} |
from django.conf.urls import include, url
from . import views
# URL routes: a versioned JSON API under /api/v1/ plus the HTML views.
urlpatterns = [
    url(r'^api/v1/', include([
        url(r'^pastes/$', views.api_paste_list, name='api_paste_list'),
        url(r'^pastes/([a-zA-Z0-9]+)/$', views.api_paste_detail, name='api_paste_detail'),
        url(r'^star/$', views.api_star_create, name='api_star_create'),
        url(r'^star/list/$', views.api_star_list, name='api_star_list'),
        url(r'^star/delete/$', views.api_star_delete, name='api_star_delete'),
    ])),
    url(r'^$', views.home, name='home'),
    url(r'^new/$', views.paste_create, name='paste_create'),
    url(r'^recent/$', views.paste_list, name='paste_list'),
    url(r'^search/$', views.paste_list, name='paste_search'),
    url(r'^admin/$', views.admin, name='admin'),
    url(r'^admin/lexers/$', views.admin_lexers, name='admin_lexers'),
    url(r'^p/([a-zA-Z0-9]+)/$', views.paste_redirect, name='paste_redirect'),
    url(r'^([a-zA-Z0-9]+)/$', views.paste_detail, name='paste_detail'),
    # BUGFIX: escape the dot so this matches "<id>.zip" only, not "<id>Xzip".
    url(r'^([a-zA-Z0-9]+)\.zip$', views.paste_download, name='paste_download'),
    url(r'^([a-zA-Z0-9]+)/(.+)$', views.paste_raw, name='paste_raw'),
]
| {
"content_hash": "23c0baf8ee49acd748ac039da78403c9",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 90,
"avg_line_length": 42.77777777777778,
"alnum_prop": 0.593939393939394,
"repo_name": "davidwtbuxton/captain-pasty",
"id": "efc0c2a07ec7d912999331c048f7408d5fb58a43",
"size": "1155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pasty/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "112642"
},
{
"name": "HTML",
"bytes": "8347"
},
{
"name": "JavaScript",
"bytes": "3393"
},
{
"name": "Python",
"bytes": "77333"
},
{
"name": "Shell",
"bytes": "1336"
}
],
"symlink_target": ""
} |
"""
BarMap.py -- cotranscriptional folding using *barriers* and *treekin*
"""
import logging
import os
import re
import sys
import argparse
import numpy as np
import subprocess as s
import collections as c
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import ribolands as ril
from ribolands.syswraps import which, ExecError, SubprocessError
class LostPopulationError(Exception):
    """Raised when a significant fraction of the population is lost."""

    def __init__(self, var):
        # Keep the formatted text around as .message for callers/printing.
        self.message = str(var)
        super().__init__(self.message)
def get_plot_data(tfiles, plist, args):
    """Combine per-length treekin trajectories into plottable courses.

    :param tfiles: one treekin output file per transcription step.
    :param plist: lmin trajectories across steps (from barmap_mapping()).
    :param args: argparse namespace providing start, stop, t8, tX, verbose.
    :returns: tlist, where tlist[0] collects the (time-shifted) time axes,
        one list per step, and tlist[i] (i >= 1) collects trajectory i's
        occupancy per step, with [None] placeholders where the trajectory
        is absent or below the plot threshold.
    """
    verb = args.verbose
    start = args.start
    stop = args.stop
    t8 = args.t8
    tX = args.tX

    plim = 0.02  # former argument plot-cutoff, seems unnecessary
    # NOTE: setting plim to a very small number leads to vertical lines in the plot. Why?
    # Because sometimes low populated minima merge into a high populated minimum, and then
    # this will be visible.

    tot_time = 0.
    # List of trajectories: index 0 is the time axis, 1..n the lmin courses.
    tlist = [[] for i in range(len(plist) + 1)]

    reg_flt = re.compile('[-+]?[0-9]*.?[0-9]+([eE][-+]?[0-9]+)?.')
    # http://www.regular-expressions.info/floatingpoint.html

    for el, l in enumerate(range(start, stop + 1)):
        tfile = tfiles[el]

        # Get raw data => nxy
        #    t 1 2 3 4
        # [x y y y y]
        # [x y y y y]
        nxy = []
        with open(tfile) as tkn:
            for line in tkn:
                if re.match('#', line):
                    continue
                elif not reg_flt.match(line):
                    raise Exception(tfile, "neither comment nor time course!")
                # BUGFIX: materialize the row -- a lazy map object cannot be
                # transposed or indexed under Python 3.
                nxy.append([float(x) for x in line.strip().split()])
        if nxy == []:
            nxy.append([t8, 1.0])

        # Transpose to column-major and shift this step's time axis so it
        # continues where the previous step ended.
        # t [x x x x x]
        # 1 [y y y y y]
        # 2 [y y y y y]
        # ...
        traject = [list(col) for col in zip(*nxy)]
        timeline = [x + tot_time for x in traject[0]]
        tot_time = timeline[-1]
        tlist[0].extend([timeline])

        # Replace every position in tlist with the time-course of the treekin
        # output (or [None]), using the pathlist column for transcription
        # step l.  Remove `seen` here to get all trajectories start to end.
        seen = set()
        # BUGFIX: a zip object is not subscriptable in Python 3; take the
        # column for step l directly from plist.
        pmapping = [path[l - start] for path in plist]
        for e, idx in enumerate(pmapping, 1):
            # Make sure that we are above the limit of detection.
            if (idx == 0) or (idx in seen):
                line = [None]
            else:
                seen.add(idx)
                if max(traject[idx]) > plim:
                    line = traject[idx]
                elif el > 1 and tlist[e][-1][-1] is not None:
                    # Keep an already-visible trajectory visible even when it
                    # momentarily dips below the threshold.
                    line = traject[idx]
                else:
                    line = [None]
            tlist[e].extend([line])
    return tlist
def plot_xmgrace(all_in, plist, args):
    """Write the stitched trajectories into an xmgrace (.gr) plot file.

    :param all_in: get_plot_data() output; all_in[0] holds the per-step
        time axes, all_in[e] (e >= 1) the occupancy course of trajectory e.
    :param plist: lmin trajectories; plist[e - 1][-1] (the final lmin of
        course e) selects the line color.
    :param args: argparse namespace; args.name is the output basename.
    :returns: the name of the written .gr file.
    """
    # Static xmgrace header: line defaults plus the 16-color palette used below.
    head = """
@with line
@line on
@line loctype world
@line g0
@line linewidth .1
@line linestyle 1
@line color 7
@line arrow 0
@line arrow type 0
@line arrow length 1.000000
@line arrow layout 1.000000, 1.000000
@line def
@map color 0 to (255, 255, 255), "white"
@map color 1 to (0, 0, 0), "black"
@map color 2 to (255, 0, 0), "red"
@map color 3 to (0, 255, 0), "green"
@map color 4 to (0, 0, 255), "blue"
@map color 5 to (255, 255, 0), "yellow"
@map color 6 to (188, 143, 143), "brown"
@map color 7 to (220, 220, 220), "grey"
@map color 8 to (148, 0, 211), "violet"
@map color 9 to (0, 255, 255), "cyan"
@map color 10 to (255, 0, 255), "magenta"
@map color 11 to (255, 165, 0), "orange"
@map color 12 to (114, 33, 188), "indigo"
@map color 13 to (103, 7, 72), "maroon"
@map color 14 to (64, 224, 208), "turquoise"
@map color 15 to (0, 139, 0), "green4"
"""
    # Palette indices 1..15; trajectories past the palette share the last color.
    best = range(1, 16)
    gfile = args.name + '.gr'
    c = 0  # running xmgrace data-set number
    with open(gfile, 'w') as gfh:
        gfh.write(head)
        for e, course in enumerate(all_in):
            if e == 0:
                # all_in[0] is the time axis, not a trajectory.
                continue
            finalmin = plist[e - 1][-1]
            color = best[finalmin - 1] if len(best) > finalmin else best[-1]
            flag = 0  # becomes 1 once this set's style lines were written
            for x, y in zip(all_in[0], all_in[e]):
                # A [None] segment means the lmin is absent at that step.
                if y[0] is None:
                    continue
                if flag == 0:
                    gfh.write("@ s{} line color {}\n".format(c, color))
                    if color == best[-1]:
                        gfh.write("@ s{} line linewidth 1\n".format(c))
                    else:
                        gfh.write("@ s{} line linewidth 2\n".format(c))
                for i in range(len(x)):
                    gfh.write("{:f} {:f}\n".format(x[i], y[i]))
                flag = 1
            if flag:
                # Terminate the xmgrace data set and advance the set counter.
                gfh.write("&\n")
                c += 1
    return gfile
def plot_matplotlib(name, seq, tlist, plist, args):
    """Plot the stitched trajectories with matplotlib and save a PDF.

    The x-axis is linear over the transcription phase and continues on a
    logarithmic axis for the post-transcriptional simulation.

    :param name: sequence name (shadowed by args.name below).
    :param seq: input sequence (not used in the plot itself).
    :param tlist: get_plot_data() output; tlist[0] holds the time axes.
    :param plist: lmin trajectories; the final lmin selects the line color.
    :param args: argparse namespace (start, stop, t8, tX, name).
    :returns: the name of the written PDF file.
    """
    start = args.start
    stop = args.stop
    t8 = args.t8
    # Duration of the (linear) transcription phase ...
    lin_time = (stop - start) * float(t8)
    # ... and the end of the log phase: at least ten times the linear part.
    tX = lin_time + args.tX if (lin_time +
                                args.tX) >= lin_time * 10 else lin_time * 10

    name = args.name
    title = args.name

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_ylim([0, 1.01])
    ax.set_xscale('linear')

    # Make the second part of the plot logarithmic
    ax.set_xlim((0, lin_time))
    divider = make_axes_locatable(ax)
    axLog = divider.append_axes("right", size=2.5, pad=0, sharey=ax)
    axLog.set_xscale('log')
    axLog.set_xlim((lin_time + 0.00001, tX))
    axLog.set_ylim([0, 1.01])
    axLog.yaxis.set_visible(False)

    # Color by the trajectory's final lmin; extras share the last color.
    best = ['black', 'red', 'green', 'blue', 'yellow', 'brown', 'grey', 'violet',
            'magenta', 'orange', 'indigo', 'maroon', 'cyan']

    seen = set()
    for e, traject in enumerate(tlist):
        if e == 0:
            # tlist[0]: draw a dashed marker at each transcription step.
            for i in range(len(traject)):
                ax.axvline(x=traject[i][-1], linewidth=0.1,
                           color='black', linestyle='--')
            continue
        # Concatenate the visible segments of this trajectory.
        fulltime = []
        fulltrajectory = []
        for i in range(len(traject)):
            if traject[i][0] is None:
                continue
            fulltime += tlist[0][i]
            fulltrajectory += traject[i]

        finalmin = plist[e - 1][-1]
        color = best[finalmin - 1] if len(best) > finalmin else best[-1]
        lin, = ax.plot(fulltime, fulltrajectory, '-', color=color)
        log, = axLog.plot(fulltime, fulltrajectory, color=color)

        if finalmin not in seen:
            seen.add(finalmin)
            # NOTE: Adjust here for legend
            # NOTE(review): traject[-1] may be [None]; max([None]) > 0.1 would
            # raise under Python 3 -- confirm this branch is reachable then.
            if traject[-1] and max(traject[-1]) > 0.1:
                # if fulltrajectory and max(fulltrajectory) > 0.1:
                log.set_label("lmin {:d}".format(finalmin))

    fig.set_size_inches(7, 3)
    fig.text(0.5, 0.95, title, ha='center', va='center')
    plt.legend()

    ax.set_ylabel('occupancy [mol/l]', fontsize=11)
    ax.set_xlabel('time [seconds]', ha='center', va='center', fontsize=11)
    ax.xaxis.set_label_coords(.9, -0.15)

    # plt.show()
    pfile = name + '.pdf'
    plt.savefig(pfile, bbox_inches='tight')
    return pfile
def barmap_treekin(bname, seq, bfiles, plist, args):
    """ write treekin files from bfiles and plist information

    Runs one treekin simulation per transcript length; the final occupancy
    vector of each step is remapped (via set_p0()) into the p0 input of the
    next step.  Landscapes with a single minimum are skipped with an empty
    output file.  Prints one report line per populated minimum.

    :returns: list of treekin output file names, one per length.
    :raises SubprocessError: when a treekin trajectory looks crashed.
    """
    tmpdir = args.tmpdir
    start = args.start
    stop = args.stop
    verb = args.verbose
    tX = args.tX
    t8 = args.t8
    cutoff = args.occupancy_cutoff
    p0 = args.p0

    tt = 0  # accumulated simulation time over all lengths
    tfiles = []
    # The treekin output is read as raw bytes below, hence the bytes pattern.
    reg_flt = re.compile(b'[-+]?[0-9]*.?[0-9]+([eE][-+]?[0-9]+)?.')
    # http://www.regular-expressions.info/floatingpoint.html

    for e, l in enumerate(range(start, stop + 1)):
        # The final length simulates until tX, all earlier ones until t8.
        t8 = tX if l == stop else t8
        cseq = seq[0:l]
        cname = "{}-t8_{}-len_{}".format(bname, t8, l)
        [bfile, efile, rfile, psfile, bmfile] = bfiles[e]

        # Count lines of the barriers output: fewer than three lines means a
        # single minimum, i.e. nothing to simulate.
        # NOTE(review): 'i' stays unbound for an empty file -- assumes the
        # barriers output always has at least one line; confirm.
        with open(bfile) as bf:
            for i, _ in enumerate(bf):
                pass
        if i >= 2:
            try:
                ctfile, _ = ril.sys_treekin_051(cname, rfile,
                                                treekin = args.treekin,
                                                bofile = bfile,
                                                p0 = p0,
                                                t0 = args.t0,
                                                ti = args.ti,
                                                t8 = t8,
                                                binrates = True,
                                                useplusI = False,
                                                exponent = False,
                                                mpack_method = None,
                                                force = args.force,
                                                verbose = verb)
            except SubprocessError:
                # Solver trouble: retry once with matrix exponentials.
                print("# repeating treekin calculations with --exponent")
                ctfile, _ = ril.sys_treekin_051(cname, rfile,
                                                treekin = args.treekin,
                                                bofile = bfile,
                                                p0 = p0,
                                                t0 = args.t0,
                                                ti = args.ti,
                                                t8 = t8,
                                                binrates = True,
                                                useplusI = False,
                                                exponent = True,
                                                mpack_method = None,
                                                force = args.force,
                                                verbose = verb)

            # The last treekin output line holds the final occupancy vector.
            lastlines = s.check_output(
                ['tail', '-2', ctfile]).strip().split(b'\n')
            if reg_flt.match(lastlines[0]):
                # NOTE(review): the split tokens are bytes; float(bytes)
                # raises TypeError on Python 3 -- confirm intended runtime.
                tt += float(lastlines[0].split()[0])
                if l < stop:
                    # Remap the final densities onto the next landscape.
                    curlmin = np.array(plist)[:, e]
                    newlmin = np.array(plist)[:, e + 1]
                    p0 = set_p0(bfile, l, lastlines, curlmin, newlmin,
                                cutoff, verb)
                else:
                    # Final length: report every minimum above the cutoff.
                    for (i, pop) in enumerate(lastlines[0].split()):
                        if i != 0 and float(pop) > cutoff:
                            ss, en = get_structure(bfile, i, energy=True)
                            print("{:3d} {:3d} {:f} {:s} {:6.2f}".format(l, i,
                                float(pop), ss, float(en)))
            else:
                raise SubprocessError(
                    'found crashed treekin trajectory.', ctfile)
        else:
            # Create an empty file
            ctfile = cname + '.tkn'
            open(ctfile, 'a').close()
            tt += t8
            # Single minimum: the full population sits in lmin 1.
            print("{:3d} {:3d} {:f} {:s} {:6.2f}".format(l, 1, 1.0, get_structure(bfile, 1), 0.00))
        tfiles.append(ctfile)
    return tfiles
def barmap_mapping(_bname, seq, args):
    """Collect (or load cached) lmin mapping information into a pathlist.

    The mapping file "<_bname>-map_<start>_to_<stop>.map" caches one line
    per trajectory ("a => b => c ...").  When it exists (and --force is
    off) it is parsed instead of re-reading the barriers output.

    :param _bname: basename of the barriers output files.
    :param seq: the full input sequence (only prefix lengths are used).
    :param args: argparse namespace (tmpdir, start, stop, verbose, force).
    :returns: plist, a list of lmin index trajectories across lengths.
    """
    tmpdir = args.tmpdir
    start = args.start
    stop = args.stop
    verb = args.verbose
    force = args.force

    plist = []
    mapinfo = "{}-map_{:d}_to_{:d}.map".format(_bname, start, stop)
    if os.path.exists(mapinfo) and not force:
        if verb:
            print("# {} <= File exists".format(mapinfo))
        with open(mapinfo, 'r') as m:
            m.readline().strip()  # skip the header comment line
            for line in m:
                path = [int(x) for x in line.strip().split(" => ")]
                plist.append(path)
    else:
        mlist = []
        for l in range(start, stop + 1):
            cseq = seq[0:l]
            cname = "{}-len_{:02d}_barriers".format(_bname, l)
            pname = "{}-len_{:02d}_barriers".format(_bname, l - 1)
            # The first length has no predecessor tree, so no mapping entry.
            if os.path.exists(pname + '.bar'):
                if verb:
                    print("# Get mapping info {:d} -> {:d}".format(l - 1, l))
                mlist.append(get_mapping_dict(pname + '.bar', cname + '.ms'))
        plist = pathlist(mlist)
        # Cache the result for the next invocation.
        with open(mapinfo, 'w') as m:
            m.write("# Mapping information for barrier trees {:d} to {:d}\n".format(
                start, stop))
            for path in plist:
                m.write(" => ".join(map("{:4d}".format, path)) + "\n")
    return plist
def barmap_barriers(_bname, seq, sfiles, args):
    """Run barriers for every transcript length.

    For each length l > start, the previous tree's minima are dumped to
    "<pname>.map" (one structure per line, extended by a '.') so barriers
    can map them into the new landscape.

    :param _bname: basename for the barriers output files.
    :param seq: the full input sequence (only prefix lengths are used).
    :param sfiles: RNAsubopt files, one per length (from barmap_subopts()).
    :returns: list of [bfile, efile, rbfile, psfile, bmfile] per length.
    """
    bfiles = []
    prog = ril.ProgressBar(args.stop - args.start + 1)
    for e, l in enumerate(range(args.start, args.stop + 1)):
        cseq = seq[0:l]
        cname = "{}-len_{:02d}".format(_bname, l)
        pname = "{}-len_{:02d}".format(_bname, l - 1)
        sfile = sfiles[e]
        if os.path.exists(pname + '.bar'):
            # Write the previous tree's minima as mapping input.
            with open(pname + '.bar', 'r') as oldbar, \
                    open(pname + '.map', 'w') as mapfile:
                # BUGFIX: loop variable renamed -- it used to shadow the
                # outer enumeration variable 'e'.
                for n, line in enumerate(oldbar):
                    if n == 0:
                        continue  # skip the barriers header line
                    cols = line.strip().split()
                    mapfile.write(cols[1] + '.' + "\n")
        # Make sure the first round for mapping is always recomputed
        # force = True if e == 1 else args.force
        [bfile, efile, rfile, rbfile, psfile, bmfile] = ril.sys_barriers_180(cname, sfile,
                                                                             barriers = args.barriers,
                                                                             minh = args.b_minh,
                                                                             maxn = args.b_maxn,
                                                                             temp = args.temperature,
                                                                             noLP = args.noLP,
                                                                             moves = 'single-base-pair',
                                                                             zipped = True,
                                                                             rates = True,
                                                                             k0 = args.k0,
                                                                             bsize = False,
                                                                             saddle = False,
                                                                             bmfile = True,
                                                                             force = args.force)
        bfiles.append([bfile, efile, rbfile, psfile, bmfile])
        prog.inc()
    return bfiles
def barmap_subopts(_sname, seq, args):
    """Compute RNAsubopt files for every transcript length.

    :returns: list of suboptimal-structure file names, one per length.
    """
    sfiles = []
    progress = ril.ProgressBar(args.stop - args.start + 1)
    for length in range(args.start, args.stop + 1):
        prefix = "{}-len_{}".format(_sname, length)
        # Sort the suboptimal structures externally (energy, then structure).
        sfiles.append(ril.sys_suboptimals(
            prefix, seq[0:length],
            RNAsubopt=args.RNAsubopt,
            ener=args.s_ener,
            temp=args.temperature,
            noLP=args.noLP,
            sort=[
                '|', 'sort', '-T', args.s_sortdir, '-k3r', '-k2n'],
            force=args.force))
        progress.inc()
    return sfiles
def set_p0(bfile, l, lastlines, curlmin, newlmin, cutoff, verb):
    """
    remap densities from :lastlines: using the information in plist

    Translates the occupancy vector of the last treekin timepoint into a
    treekin p0 vector for the next landscape, merging minima according to
    the curlmin -> newlmin mapping.

    :param bfile: barriers file (read only to report minima above *cutoff*).
    :param l: current transcript length (only used in the report lines).
    :param lastlines: last treekin output lines (bytes or str).
    :param curlmin: lmin indices at the current length, by trajectory.
    :param newlmin: corresponding lmin indices at the next length.
    :param cutoff: occupancy threshold for reporting / p0 entries.
    :param verb: print the total tracked population when True.
    :returns: p0 list in treekin syntax, e.g. ['2=0.350000', ...].
    :raises LostPopulationError: if population above *cutoff* maps to 0.
    """
    lminmap = c.defaultdict(int)
    for x, y in zip(curlmin, newlmin):
        lminmap[x] = y

    # BUGFIX: treekin output arrives as bytes (check_output().split(b'\n'));
    # float(bytes) raises TypeError on Python 3, so decode first.
    firstline = lastlines[0]
    if isinstance(firstline, bytes):
        firstline = firstline.decode()

    p0dict = c.defaultdict(float)
    for (i, pop) in enumerate(firstline.split()):
        if i == 0:
            time = float(pop)  # first column is the timestamp
        else:
            # Merge the occupancy into the mapped minimum of the next tree.
            p0dict[lminmap[i]] += float(pop)
            if float(pop) > cutoff:
                ss, en = get_structure(bfile, i, energy=True)
                print("{:3d} {:3d} {:f} {:s} {:6.2f} {:4d} => {:d}".format(
                    l, i, float(pop), ss, float(en), i, lminmap[i]))
                if lminmap[i] == 0:
                    # A significantly populated minimum has no successor.
                    raise LostPopulationError('Lost significant population!')

    p0 = []
    p0sum = 0.0
    for (x, y) in p0dict.items():
        if x == 0:
            continue  # unmapped population is dropped from p0
        if y > cutoff:
            p0.append("{:d}={:f}".format(x, y))
            p0sum += y
    if verb:
        print("# Total population {:.3f}\n".format(p0sum))
    return p0
def get_structure(bfile, idx, energy=False):
    """Return the structure on line *idx* of a barriers file.

    Line 0 is the header, line *idx* holds minimum *idx*.  With
    energy=True a (structure, energy) tuple is returned instead; both
    entries are '' when the index was not found.
    """
    structure, kcal = '', ''
    with open(bfile, 'r') as bar:
        for lineno, line in enumerate(bar):
            if lineno != idx:
                continue
            # Columns: index, structure, energy, ...
            structure, kcal = line.strip().split()[1:3]
            break
    if energy:
        return structure, kcal
    return structure
def get_mapping_dict(oldbar, minfo):
    '''
    Map lmin indices of the previous barrier tree onto the current one.

    * parse the old barfile
      o = [[idx, struct],...]
    * parse the current mapping file
      c = [[idx, struct],...]
    * dictionary[old_idx] = new_idx (0 when the structure is unknown)

    :param oldbar: previous barriers .bar file.
    :param minfo: barriers mapping (.ms) output for the current length.
    :returns: defaultdict(int) mapping old lmin index -> new lmin index.
    '''
    mapper = c.defaultdict(int)
    with open(oldbar, 'r') as old, \
            open(minfo, 'r') as info:
        mapinfo = []
        for n, line in enumerate(info):
            # BUGFIX: raw string -- '[\.\(\)]+' contains invalid escape
            # sequences; inside a character class the dots/parens need no
            # escaping at all.
            if re.match(r'[.()]+', line.strip()):
                gstr, sptidx, energy, fmin, fminT, gmin, gminT = line.strip().split()
                mapinfo.append([gminT, gstr])
            elif re.match('not in hash', line.strip()):
                print("# structure not in hash")
                mapinfo.append([0, ''])
            elif re.match('not yet assigned', line.strip()):
                print("# structure not yet assigned")
                mapinfo.append([0, ''])
        for n, line in enumerate(old):
            if n == 0:
                continue  # skip the barriers header line
            else:  # old idx = enumerate n
                [old_idx, old_min] = line.strip().split()[0:2]
                if n > len(mapinfo):
                    raise Exception('ERROR: To many lines in barfile: '
                                    + minfo + ' missing mapping information!')
                mapper[int(old_idx)] = int(mapinfo[n - 1][0])
    return mapper
def pathlist(mapdata):
    '''
    Translate a list of dictionaries with mapping information into
    a list of trajectories for lmin-transitions.

    :param mapdata: per-length dicts mapping an lmin index to its index in
        the next tree (0 for "unknown"), as built by get_mapping_dict().
    :return: plist, sorted by each trajectory's final lmin index.
    '''
    # BUGFIX: removed leftover debug print(mapdata).
    # initialize path-lists with first lmins
    plist = [[i + 1] for i in range(len(mapdata[0]))]

    # append the mapping for each current minimum
    for m, mdict in enumerate(mapdata):
        seen = set()
        for path in plist:
            last = path[-1]
            path.append(mdict[last])
            seen.add(mdict[last])

        # add new pathlists for lmins that appear in
        # the next landscape for the first time
        if m == len(mapdata) - 1:
            break
        for newmin in sorted(
                mapdata[m + 1], key=mapdata[m + 1].get, reverse=False):
            if newmin not in seen:
                # Pad with zeros up to the current length, then the new lmin.
                newpath = [0 for i in range(len(plist[0]) - 1)]
                newpath.append(newmin)
                plist.append((newpath))
    return sorted(plist, key=lambda m: m[-1])
def add_barmap_args(parser):
    """ A collection of arguments that are used by BarMap """
    # Shared ribolands arguments: external executables, folding parameters,
    # transcription range, rate constant, simulation times, cutoff, etc.
    ril.argparse_add_arguments(parser,
                               RNAsubopt=True,
                               barriers=True,
                               treekin=True,
                               noLP=True, temperature=True,
                               tmpdir=True, name=True, force=True, verbose=True,
                               start=True, stop=True, k0=True, tX=True, cutoff=True)

    parser.add_argument("--plot_title", default='')
    parser.add_argument("--pyplot", action="store_true",
                        help="Plot the simulation using matplotlib. Interpret the legend \
            using the *log* output")
    parser.add_argument("--xmgrace", action="store_true",
                        help="Print a plot for xmgrace. " +
                        "Interpret the legend using the *log* output")
    parser.add_argument("--adaptive", action="store_true",
                        help="Automatically raise suboptimal energy range if computations fail.")
    # Hidden knob: directory where the external 'sort' puts temporary files.
    parser.add_argument("--s_sortdir", default="/tmp", action="store",
                        help=argparse.SUPPRESS)
    return
def main():
    """ BarMap -- cotranscriptional folding

    Dependencies: RNAsubopt, barriers, treekin

    TODO:
      - change occupancy threshold to: Oc = Omin/c
      - reimplement to determine RNAsubopt energy range on the fly.
        The relevant local minima determine the necessary energy range for RNAsubopt.
        If a simulation requires a lager subopt than the previous one, well then recompute.
        Make use of the connect flag, if it is actually helpful.
    """
    parser = argparse.ArgumentParser(
        # formatter_class=argparse.RawTextHelpFormatter,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        # formatter_class=argparse.MetavarTypeHelpFormatter,
        description='echo sequence | %(prog)s [options]')
    add_barmap_args(parser)
    args = parser.parse_args()

    # ~~~~~~~~~~~~~
    # Logging Setup
    # ~~~~~~~~~~~~~
    logger = logging.getLogger('ribolands')
    logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    formatter = logging.Formatter('# %(levelname)s - %(message)s')
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    # Read Input & Update Arguments
    name, seq = ril.parse_vienna_stdin(sys.stdin)

    # One name, just to be clear ... a user-supplied --name wins.
    if not args.name:
        args.name = name
    name = args.name

    if args.stop is None:
        args.stop = len(seq)
    else:
        seq = seq[:args.stop]

    print("# Input: {:s} {:s}".format(name, seq))

    if args.s_ener is None:
        args.s_ener, args.s_maxn = ril.sys_subopt_range(seq,
                                                        nos=args.s_maxn, maxe=args.s_maxe)
        print("# Energyrange {:.2f} computes {:d} sequences".format(
            args.s_ener, args.s_maxn))
    elif args.verbose:
        args.s_ener, args.s_maxn = ril.sys_subopt_range(seq,
                                                        nos=0, maxe=args.s_ener)
        print("# Energyrange {:.2f} computes {:d} sequences".format(
            args.s_ener, args.s_maxn))

    if not args.tmpdir:
        args.tmpdir = 'BarMap_' + args.name
    if not os.path.exists(args.tmpdir):
        os.makedirs(args.tmpdir)

    """# Starting with BarMap computations ... """
    tfiles = None  # set once a treekin simulation finished successfully
    while True:
        print("""# writing RNAsubopt files ... """)
        sname = "{}/{}-ener_{:.2f}".format(args.tmpdir, args.name, args.s_ener)
        #if args.circ: myfile += '_circ'
        if args.noLP:
            sname += '_noLP'
        sfiles = barmap_subopts(sname, seq, args)

        print("""# writing barriers files ... """)
        bname = "{}-minh_{}-maxn_{}-k0_{}".format(sname,
                                                  args.b_minh, args.b_maxn, args.k0)
        bfiles = barmap_barriers(bname, seq, sfiles, args)

        print("""# writing/parsing mapping information ... """)
        plist = barmap_mapping(bname, seq, args)

        print("""# simulations using treekin ... """)
        try:
            tfiles = barmap_treekin(bname, seq, bfiles, plist, args)
            break
        except (LostPopulationError, SubprocessError):
            # Both failure modes are handled identically: either retry with
            # a larger suboptimal energy range, or give up.
            if args.adaptive:
                args.s_ener += 2
                print('repeating calculations with higher energy:', args.s_ener)
            else:
                print('calculations failed with current suboptimal energy range:', args.s_ener)
                break

    # BUGFIX: tfiles stays None when the simulation failed -- plotting
    # would previously crash with a NameError in that case.
    if tfiles is not None and (args.xmgrace or args.pyplot):
        print("""# Processing treekin results for plotting ... """)
        courses = get_plot_data(tfiles, plist, args)
        if args.xmgrace:
            grfile = plot_xmgrace(courses, plist, args)
            print("# Your results have been plotted in the file: {}".format(grfile))
        if args.pyplot:
            plotfile = plot_matplotlib(name, seq, courses, plist, args)
            print("# Your results have been plotted in the file: {}".format(plotfile))

    print("# Thank you for using BarMap b(^.^)d")
# Script entry point: run BarMap when invoked directly.
if __name__ == '__main__':
    main()
| {
"content_hash": "fc8928651b3b110d51d1ceaacc97f48f",
"timestamp": "",
"source": "github",
"line_count": 724,
"max_line_length": 99,
"avg_line_length": 33.680939226519335,
"alnum_prop": 0.507033012097601,
"repo_name": "bad-ants-fleet/ribolands",
"id": "694bd2427fc076d723a1e09f403c3d9ff58a9d60",
"size": "24408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ribolands/barmap.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "PostScript",
"bytes": "4089"
},
{
"name": "Python",
"bytes": "213844"
}
],
"symlink_target": ""
} |
from .. import tutils, mastertest
from mitmproxy.builtins import stickycookie
from mitmproxy.flow import master
from mitmproxy.flow import state
from mitmproxy import options
from netlib import tutils as ntutils
def test_domain_match():
    # Both the bare domain and a subdomain must match the cookie domain.
    for host in ("www.google.com", "google.com"):
        assert stickycookie.domain_match(host, ".google.com")
class TestStickyCookie(mastertest.MasterTest):
    """Exercise the stickycookie addon's cookie jar handling."""

    def mk(self):
        # Build a FlowMaster with the stickycookie addon matching all flows.
        s = state.State()
        o = options.Options(stickycookie = ".*")
        m = master.FlowMaster(o, None, s)
        sc = stickycookie.StickyCookie()
        m.addons.add(o, sc)
        return s, m, sc

    def test_config(self):
        # A non-filter expression must be rejected at configure time.
        sc = stickycookie.StickyCookie()
        o = options.Options(stickycookie = "~b")
        tutils.raises(
            "invalid filter",
            sc.configure, o, o.keys()
        )

    def test_simple(self):
        s, m, sc = self.mk()
        m.addons.add(m.options, sc)
        f = tutils.tflow(resp=True)
        f.response.headers["set-cookie"] = "foo=bar"
        self.invoke(m, "request", f)
        f.reply.acked = False
        self.invoke(m, "response", f)
        # The cookie lands in the jar, but is not replayed on the flow
        # that set it.
        assert sc.jar
        assert "cookie" not in f.request.headers
        # A subsequent request to the same host gets the cookie attached.
        f = f.copy()
        f.reply.acked = False
        self.invoke(m, "request", f)
        assert f.request.headers["cookie"] == "foo=bar"

    def _response(self, s, m, sc, cookie, host):
        # Helper: run one response carrying *cookie* from *host* through
        # the addon and return the flow.
        f = tutils.tflow(req=ntutils.treq(host=host, port=80), resp=True)
        f.response.headers["Set-Cookie"] = cookie
        self.invoke(m, "response", f)
        return f

    def test_response(self):
        s, m, sc = self.mk()

        c = "SSID=mooo; domain=.google.com, FOO=bar; Domain=.google.com; Path=/; " \
            "Expires=Wed, 13-Jan-2021 22:23:01 GMT; Secure; "

        # A cookie scoped to .google.com must not be stored for 'host'.
        self._response(s, m, sc, c, "host")
        assert not sc.jar.keys()

        self._response(s, m, sc, c, "www.google.com")
        assert sc.jar.keys()

        sc.jar.clear()
        # Without an explicit domain, the jar key falls back to the
        # request's (host, port, path).
        self._response(
            s, m, sc, "SSID=mooo", "www.google.com"
        )
        assert list(sc.jar.keys())[0] == ('www.google.com', 80, '/')

    def test_response_multiple(self):
        s, m, sc = self.mk()

        # Test setting of multiple cookies
        c1 = "somecookie=test; Path=/"
        c2 = "othercookie=helloworld; Path=/"
        f = self._response(s, m, sc, c1, "www.google.com")
        f.response.headers["Set-Cookie"] = c2
        self.invoke(m, "response", f)
        googlekey = list(sc.jar.keys())[0]
        assert len(sc.jar[googlekey].keys()) == 2

    def test_response_weird(self):
        s, m, sc = self.mk()

        # Test setting of weird cookie keys
        f = tutils.tflow(req=ntutils.treq(host="www.google.com", port=80), resp=True)
        cs = [
            "foo/bar=hello",
            "foo:bar=world",
            "foo@bar=fizz",
            "foo,bar=buzz",
        ]
        for c in cs:
            f.response.headers["Set-Cookie"] = c
            self.invoke(m, "response", f)
        googlekey = list(sc.jar.keys())[0]
        assert len(sc.jar[googlekey].keys()) == len(cs)

    def test_response_overwrite(self):
        s, m, sc = self.mk()

        # Test overwriting of a cookie value
        c1 = "somecookie=helloworld; Path=/"
        c2 = "somecookie=newvalue; Path=/"
        f = self._response(s, m, sc, c1, "www.google.com")
        f.response.headers["Set-Cookie"] = c2
        self.invoke(m, "response", f)
        googlekey = list(sc.jar.keys())[0]
        assert len(sc.jar[googlekey].keys()) == 1
        assert list(sc.jar[googlekey]["somecookie"].items())[0][1] == "newvalue"

    def test_response_delete(self):
        s, m, sc = self.mk()

        # Test that a cookie is be deleted
        # by setting the expire time in the past
        f = self._response(s, m, sc, "duffer=zafar; Path=/", "www.google.com")
        f.response.headers["Set-Cookie"] = "duffer=; Expires=Thu, 01-Jan-1970 00:00:00 GMT"
        self.invoke(m, "response", f)
        assert not sc.jar.keys()

    def test_request(self):
        s, m, sc = self.mk()

        # A stored cookie is attached to later requests to the same host.
        f = self._response(s, m, sc, "SSID=mooo", "www.google.com")
        assert "cookie" not in f.request.headers
        self.invoke(m, "request", f)
        assert "cookie" in f.request.headers
| {
"content_hash": "02ed60bfb9642b375d650f3958c4b2b3",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 91,
"avg_line_length": 32.666666666666664,
"alnum_prop": 0.5640074211502782,
"repo_name": "dufferzafar/mitmproxy",
"id": "e9d92c831af5f56b7e132e85cd5136241e8198b4",
"size": "4312",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/mitmproxy/builtins/test_stickycookie.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "186239"
},
{
"name": "HTML",
"bytes": "3034"
},
{
"name": "JavaScript",
"bytes": "146506"
},
{
"name": "PowerShell",
"bytes": "362"
},
{
"name": "Python",
"bytes": "1324129"
},
{
"name": "Shell",
"bytes": "3717"
}
],
"symlink_target": ""
} |
import os
import sys
from typing import List
from django.core.management.base import AppCommand
from django_extensions.management.utils import _make_writeable, signalcommand
class Command(AppCommand):
    """Management command that scaffolds a templatetags package for an app."""

    help = "Creates a Django template tags directory structure for the given app name in the apps's directory"

    # No system checks are required to run this command.
    requires_system_checks: List[str] = []

    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = True

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--name', '-n',
            action='store',
            dest='tag_library_name',
            default='appname_tags',
            help='The name to use for the template tag base name. Defaults to `appname`_tags.'
        )

    @signalcommand
    def handle_app_config(self, app_config, **options):
        # Fall back to "<appname>_tags" when the user kept the default.
        library = options['tag_library_name']
        if library == 'appname_tags':
            library = '%s_tags' % os.path.basename(app_config.path)
        copy_template('template_tags_template', app_config.path, library)
def copy_template(template_name, copy_to, tag_library_name):
    """Copy the specified template directory to the copy_to location.

    Files containing 'sample' in their name are renamed using
    *tag_library_name*, a trailing '.tmpl' extension is stripped, hidden
    directories and compiled files are skipped, and existing destination
    files are never overwritten.
    """
    import django_extensions
    import shutil
    template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)

    # walk the template structure and copy it
    for d, subdirs, files in os.walk(template_dir):
        relative_dir = d[len(template_dir) + 1:]
        if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        # BUGFIX: prune hidden directories via slice assignment.  The old
        # `del subdirs[i]` while iterating skipped entries and used stale
        # indices after a deletion.
        subdirs[:] = [subdir for subdir in subdirs if not subdir.startswith('.')]
        for f in files:
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(copy_to, relative_dir, f.replace('sample', tag_library_name))
            if os.path.exists(path_new):
                # Renamed target exists: fall back to the original filename.
                path_new = os.path.join(copy_to, relative_dir, f)
            if os.path.exists(path_new):
                continue
            # BUGFIX: strip the '.tmpl' suffix explicitly.  rstrip(".tmpl")
            # removed any trailing '.', 't', 'm', 'p', 'l' characters
            # (e.g. "html" would become "h").
            if path_new.endswith('.tmpl'):
                path_new = path_new[:-len('.tmpl')]
            # Copy contents with both files closed deterministically.
            with open(path_old, 'r') as fp_old, open(path_new, 'w') as fp_new:
                fp_new.write(fp_old.read())
            try:
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except OSError:
                sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
| {
"content_hash": "2e42b6345f7568f6377b03e45df85d36",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 156,
"avg_line_length": 39.31944444444444,
"alnum_prop": 0.6015542211232779,
"repo_name": "django-extensions/django-extensions",
"id": "83ab9cf9332d6f4e968b2d915ae42fadf96a6be5",
"size": "2855",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "django_extensions/management/commands/create_template_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "740"
},
{
"name": "HTML",
"bytes": "2126"
},
{
"name": "JavaScript",
"bytes": "41410"
},
{
"name": "Makefile",
"bytes": "1257"
},
{
"name": "Python",
"bytes": "826197"
}
],
"symlink_target": ""
} |
import os
import zipfile
def print_seconds_nice(seconds, prefix=""):
"""
Static method for interval print in human readable format
:param seconds: seconds count
:param prefix: prefix for print
:return: string which contains human readable representation of interval
"""
if seconds < 60:
return "{}{}s".format(prefix, seconds)
minutes = seconds // 60
seconds -= minutes * 60
if minutes < 60:
seconds = round(seconds, 2)
return "{}{}m {}s".format(prefix, minutes, seconds)
hours = minutes // 60
minutes -= hours * 60
if hours < 24:
minutes = int(minutes)
seconds = round(seconds, 2)
return "{}{}h {}m {}s".format(prefix, hours, minutes, seconds)
days = hours // 24
hours -= days * 24
seconds = round(seconds, 2)
return "{}{}d {}h {}m {}s".format(prefix, days, hours, minutes, seconds)
def get_file_extension(file_name):
    """Return the extension of *file_name*, including the leading dot."""
    return os.path.splitext(file_name)[1]
def is_file_zipped(file_name):
    """Return True when *file_name* has a .zip extension (case-insensitive)."""
    extension = os.path.splitext(file_name)[1]
    return extension.lower() == ".zip"
def decompress_file(compressed_file_name, tmp_path):
    """Extract the first member of a zip archive into *tmp_path*.

    :param compressed_file_name: path to the .zip archive
    :param tmp_path: directory to extract into
    :return: absolute path of the extracted file
    """
    with zipfile.ZipFile(compressed_file_name, "r") as archive:
        member_name = archive.filelist[0].filename
        archive.extract(member_name, tmp_path)
    return os.path.abspath(os.path.join(tmp_path, member_name))
| {
"content_hash": "8c7131e64f35c63fce0dfbe4fb08de13",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 27.5,
"alnum_prop": 0.6405594405594406,
"repo_name": "JFF-Bohdan/tamaku",
"id": "5188ea9d47bbef6abd0577ffd86653d35eabe611",
"size": "1430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "support/support_funcs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1936"
},
{
"name": "Python",
"bytes": "7698"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the 'scattergeo.marker.colorbar.tickfont.color' property."""

    def __init__(
        self,
        plotly_name="color",
        parent_name="scattergeo.marker.colorbar.tickfont",
        **kwargs,
    ):
        # Default the edit type to "calc" unless the caller overrides it.
        kwargs.setdefault("edit_type", "calc")
        super().__init__(plotly_name=plotly_name, parent_name=parent_name, **kwargs)
| {
"content_hash": "e62ab608efad94b4a441d35270b7caf7",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 66,
"avg_line_length": 28.5625,
"alnum_prop": 0.5798687089715536,
"repo_name": "plotly/plotly.py",
"id": "cfbcf0432d63b822c797597b499c1ba59d4d76dc",
"size": "457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scattergeo/marker/colorbar/tickfont/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
"""
Generate morse sounds without using numpy.
"""
import math
import time
import pyaudio
#sudo apt-get install python-pyaudio
PyAudio = pyaudio.PyAudio
#See http://en.wikipedia.org/wiki/Bit_rate#Audio
BITRATE = 22050 #number of frames per second/frameset.
#See http://www.phy.mtu.edu/~suits/notefreqs.html
FREQUENCY = 700.00 #Hz, waves per second, 261.63=C4-note.
LENGTH = 10.0000 #seconds to play sound
NUMBEROFFRAMES = int(BITRATE * LENGTH)
RESTFRAMES = NUMBEROFFRAMES % BITRATE
start = time.time()
WAVEDATA = ''
for x in range(NUMBEROFFRAMES):
WAVEDATA = WAVEDATA+chr(int(math.sin(x/((BITRATE/FREQUENCY)/math.pi))*127+128))
delta = time.time() - start
print('time=%f' % delta)
p = PyAudio()
stream = p.open(format = p.get_format_from_width(1),
channels = 1,
rate = BITRATE,
output = True)
stream.write(WAVEDATA)
stream.stop_stream()
stream.close()
p.terminate()
| {
"content_hash": "86b57531a529dbb1bbab71ee70aadafd",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 83,
"avg_line_length": 23.76923076923077,
"alnum_prop": 0.6871628910463862,
"repo_name": "rzzzwilson/morse",
"id": "bb6b5d0512677ca010ce2b59a210540e2482ad68",
"size": "947",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "morse/test8.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "3257"
},
{
"name": "Makefile",
"bytes": "120"
},
{
"name": "Python",
"bytes": "175420"
}
],
"symlink_target": ""
} |
# -- Path setup -------------------------------------------------------------
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# Make sure sphinx finds the packages: put the project root (the parent of
# this docs directory) on sys.path so autodoc can import the code. Resolving
# the path relative to this file -- rather than the current working directory
# -- keeps the build working no matter where sphinx-build is invoked from.
# (This section previously appeared twice, with a leftover `print(sys.path)`
# debug statement and a CWD-relative duplicate insert; both removed.)
code_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, code_dir)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MEANS'
copyright = u'2015, Sisi Fan, Quentin Geissmann, Eszter Lakatos, Saulius Lukauskas, Angelique Ale, Ann C. Babtie, Paul D.W. Kirk, and Michael P. H. Stumpf'
author = u'Sisi Fan, Quentin Geissmann, Eszter Lakatos, Saulius Lukauskas, Angelique Ale, Ann C. Babtie, Paul D.W. Kirk, and Michael P. H. Stumpf'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# Use the Read the Docs theme (third-party package imported here on purpose).
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'MEANSdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  (master_doc, 'MEANS.tex', u'MEANS Documentation',
   u'Sisi Fan, Quentin Geissmann, Eszter Lakatos, Saulius Lukauskas, Angelique Ale, Ann C. Babtie, Paul D.W. Kirk, and Michael P. H. Stumpf', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'means', u'MEANS Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  (master_doc, 'MEANS', u'MEANS Documentation',
   author, 'MEANS', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): these inventory URLs date from the Python 2.7 era; refresh
# them if cross-references stop resolving.
intersphinx_mapping = {
    'python':('http://docs.python.org/2.7',None),
    'matplotlib':('http://matplotlib.sourceforge.net', None),
    'numpy':('http://docs.scipy.org/doc/numpy',None),
    'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
    'assimulo': ('http://www.jmodelica.org/assimulo_home/', None),
    'luigi': ('http://luigi.readthedocs.org/en/latest/', None)
}
# Mock out heavy binary dependencies (listed in MOCK_MODULES below) so the
# package can be imported for autodoc on machines where they are not
# installed. Prefer the stdlib implementation; fall back to the third-party
# backport for old interpreters.
try:
    from unittest.mock import Mock as MagicMock  # Python >= 3.3
except ImportError:
    from mock import Mock as MagicMock  # third-party backport


class Mock(MagicMock):
    """A mock whose every attribute access yields another Mock instance."""

    @classmethod
    def __getattr__(cls, name):
        return Mock()
# Heavy / compiled dependencies that may be absent on the docs build host.
MOCK_MODULES = [ 'cython',
                 'matplotlib',
                 'matplotlib.artist',
                 'sbml',
                 'numpy',
                 'numpy.testing',
                 'assimulo',
                 'assimulo.problem',
                 'assimulo.solvers',
                 'assimulo.solvers.sundials',
                 'assimulo.solvers.runge_kutta',
                 'assimulo.exception',
                 'sympy',
                 'sympy.utilities.autowrap',
                 'sympy.utilities',
                 'sympy.core.sympify',
                 'sympy.core',
                 'sympy.utilities.iterables',
                 'scipy',
                 'scipy.optimize',
                 'scipy.special']
# Pre-register a Mock for each module so `import numpy` etc. succeed during
# autodoc's imports even when the real packages are not installed.
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# Include both the class docstring and __init__'s docstring in class docs.
autoclass_content = 'both'
def autodoc_skip_member(app, what, name, obj, skip, options):
    """Tell autodoc which members to leave out of the generated docs.

    Excludes the interpreter-injected dunder attributes and the internal
    ``_abc_*`` helpers, on top of whatever autodoc already decided to skip.
    """
    # Dunder bookkeeping attributes (special-members / undoc-members) that
    # add no documentation value.
    hidden_dunders = {'__weakref__', '__doc__', '__module__', '__dict__'}
    return skip or name in hidden_dunders or name.startswith('_abc_')
| {
"content_hash": "6a980e6df5d567a80e4b1b949147984d",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 155,
"avg_line_length": 32.9349593495935,
"alnum_prop": 0.6725088455525384,
"repo_name": "lukauskas/means",
"id": "4c4f2e246e139769a2d8c22a8d74a0f61021f562",
"size": "12570",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3396375"
},
{
"name": "Makefile",
"bytes": "7673"
},
{
"name": "Python",
"bytes": "586094"
}
],
"symlink_target": ""
} |
"""Blank an SSD1306 OLED display driven over hardware SPI."""
import Adafruit_GPIO.SPI as SPI
import Adafruit_SSD1306
# Raspberry Pi pin configuration:
RST = 24
# Note the following are only used with SPI:
DC = 23
SPI_PORT = 0
SPI_DEVICE = 0
# Beaglebone Black pin configuration:
# RST = 'P9_12'
# Note the following are only used with SPI:
# DC = 'P9_15'
# SPI_PORT = 1
# SPI_DEVICE = 0
# 128x32 display with hardware I2C:
#disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST)
# 128x64 display with hardware I2C:
#disp = Adafruit_SSD1306.SSD1306_128_64(rst=RST)
# Note you can change the I2C address by passing an i2c_address parameter like:
# disp = Adafruit_SSD1306.SSD1306_128_64(rst=RST, i2c_address=0x3C)
# Alternatively you can specify an explicit I2C bus number, for example
# with the 128x32 display you would use:
# disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST, i2c_bus=2)
# 128x32 display with hardware SPI:
# disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST, dc=DC, spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE, max_speed_hz=8000000))
# 128x64 display with hardware SPI (the configuration actually used here):
disp = Adafruit_SSD1306.SSD1306_128_64(rst=RST, dc=DC, spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE, max_speed_hz=8000000))
# Alternatively you can specify a software SPI implementation by providing
# digital GPIO pin numbers for all the required display pins. For example
# on a Raspberry Pi with the 128x32 display you might use:
# disp = Adafruit_SSD1306.SSD1306_128_32(rst=RST, dc=DC, sclk=18, din=25, cs=22)
# Initialize library.
disp.begin()
# Clear display: zero the buffer, then push it to the panel.
disp.clear()
disp.display()
| {
"content_hash": "aa29664f5477f2acfa501554f8607e6a",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 116,
"avg_line_length": 31.333333333333332,
"alnum_prop": 0.742686170212766,
"repo_name": "PrinzJuliano/Linux-Image-Display",
"id": "1a8f609fff4e049818fc0885e1473bef6d2e29aa",
"size": "1504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clear.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9818"
}
],
"symlink_target": ""
} |
import os
from dataclasses import dataclass, field
from typing import Any, List
import hydra
from mephisto.operations.hydra_config import register_script_config
from omegaconf import DictConfig
from parlai.crowdsourcing.tasks.dialcrowd.dialcrowd_blueprint import (
STATIC_BLUEPRINT_TYPE,
)
from parlai.crowdsourcing.utils.mturk import MTurkRunScriptConfig, run_static_task
# Absolute path of the folder holding this script; handed to the task runner.
TASK_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
# Touch the imported constant so the import above is not flagged as unused;
# presumably importing dialcrowd_blueprint registers the blueprint with
# Mephisto as a side effect -- confirm against the blueprint module.
_ = STATIC_BLUEPRINT_TYPE
# Hydra defaults list: this config first, then the 'example' conf group.
defaults = ["_self_", {"conf": "example"}]
@dataclass
class ScriptConfig(MTurkRunScriptConfig):
    """Hydra configuration for running the dialcrowd static task."""
    # Hydra defaults list (module-level `defaults` defined above).
    defaults: List[Any] = field(default_factory=lambda: defaults)
    # Directory containing this task's code and its hydra_configs folder.
    task_dir: str = TASK_DIRECTORY
    monitoring_log_rate: int = field(
        default=30,
        metadata={
            'help': 'Frequency in seconds of logging the monitoring of the crowdsourcing task'
        },
    )
# Expose ScriptConfig to hydra under the name used by @hydra.main below.
register_script_config(name='scriptconfig', module=ScriptConfig)
@hydra.main(config_path="hydra_configs", config_name="scriptconfig")
def main(cfg: DictConfig) -> None:
    """Launch the static 'dialcrowd' crowdsourcing task with the given config."""
    run_static_task(cfg=cfg, task_directory=TASK_DIRECTORY, task_id='dialcrowd')
if __name__ == "__main__":
    main()
| {
"content_hash": "c7a2d993de5335f66ea22ad70d7bd8ac",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 94,
"avg_line_length": 27.38095238095238,
"alnum_prop": 0.7234782608695652,
"repo_name": "facebookresearch/ParlAI",
"id": "67d5222515bdf92ac32740b26f7c2ef2e9fdf8a5",
"size": "1350",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "parlai/crowdsourcing/tasks/dialcrowd/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "2000"
},
{
"name": "CSS",
"bytes": "38474"
},
{
"name": "Cuda",
"bytes": "4118"
},
{
"name": "Dockerfile",
"bytes": "1218"
},
{
"name": "HTML",
"bytes": "645771"
},
{
"name": "JavaScript",
"bytes": "405110"
},
{
"name": "Makefile",
"bytes": "289"
},
{
"name": "Python",
"bytes": "6802410"
},
{
"name": "Shell",
"bytes": "26147"
}
],
"symlink_target": ""
} |
"""
Attach to a MIDI device and print events to standard output.
"""
import sys
import time
import midi
import midi.sequencer as sequencer
if len(sys.argv) != 3:
print("Usage: {0} <client> <port>".format(sys.argv[0]))
exit(2)
client = sys.argv[1]
port = sys.argv[2]
seq = sequencer.SequencerRead(sequencer_resolution=120)
seq.subscribe_port(client, port)
seq.start_sequencer()
while True:
event = seq.event_read()
if event is not None:
print(event)
| {
"content_hash": "d0ae49afa620ae71633a5087c4900891",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 60,
"avg_line_length": 20.565217391304348,
"alnum_prop": 0.6955602536997886,
"repo_name": "jameswenzel/mydy",
"id": "03eba642ff545a62cfe61a754bdf970163d14894",
"size": "495",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/midilisten.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "99871"
},
{
"name": "Python",
"bytes": "79114"
},
{
"name": "Shell",
"bytes": "188"
}
],
"symlink_target": ""
} |
import multiprocessing
from pxpoint import pxpointsc
from pxpoint.util.datacatalog import * # NOQA
from geospatialdefaults import * # NOQA
import geospatiallib
import zmq
class GeoSpatialWorker(multiprocessing.Process):
    """Worker process that services geospatial layer queries over ZeroMQ.

    Pulls JSON messages from a PULL socket, runs each query against the
    PxPointSC spatial engine, and pushes the serialized result to a PUSH
    socket.
    """
    def __init__(self, options):
        """Spatial Worker Initialization Routine"""
        multiprocessing.Process.__init__(self)
        # options is expected to provide: datacatalog_path, pxse_dir,
        # geospatial_workurl and resulturl (see init_spatial/run below).
        self.options = options
    def init_spatial(self):
        """Initialize Spatial for PxPointSC.

        Returns a (spatial_handle, layer_alias_fields_map) pair; raises
        RuntimeError when the engine reports a non-zero return code.
        """
        catalog = read_catalog_to_dict(
            self.options.datacatalog_path, self.options.pxse_dir)
        spatial_handle, rc, msg = pxpointsc.geospatial_init_catalog(catalog)
        if rc != 0:
            raise RuntimeError('Code: {c}. Message: {m}'.format(
                c=rc, m=msg))
        layer_alias_fields_map = pxpointsc.geospatial_prepare(
            spatial_handle, catalog, GeoSpatialDefaults.LAYER_ALIASES)
        return spatial_handle, layer_alias_fields_map
    def run(self):
        """Process loop: receive queries, run them, push results. Never returns."""
        sh, layer_alias_fields_map = self.init_spatial()
        self.spatial_handle = sh
        # Init ZMQ sockets
        self.context = zmq.Context()
        self.socket_pull = self.context.socket(zmq.PULL)
        self.socket_pull.connect(self.options.geospatial_workurl)
        self.socket_push = self.context.socket(zmq.PUSH)
        self.socket_push.connect(self.options.resulturl)
        while True:
            # Each message carries WebSocketId, lat, lon plus either a
            # 'custom' layer/field spec or a plain 'layer' name.
            msg = self.socket_pull.recv_json()
            in_tbl = geospatiallib.create_query_input_table(
                msg['WebSocketId'], msg['lat'], msg['lon'])
            if('custom' in msg):
                layername = msg['custom']['Layer']
                fieldmap = msg['custom']['Fields']
                fieldmap['INPUT.Id'] = 'INPUT.Id'
                out_cols = ';'.join(['[%s]%s' % (layername, k) for k in fieldmap])
            else:
                layername = msg['layer']
                fieldmap = None
                # NOTE(review): when layername has no alias entry, join()
                # iterates the fallback *string* character by character --
                # confirm the fallback is intended to be a field list.
                out_cols = '[{a}]INPUT.Id;'.format(a=layername) + ';'.join(
                    layer_alias_fields_map.get(layername, layername))
            query_options = geospatiallib.create_query_options(layername)
            out_tbl, err_tbl, rc, pxmsg = pxpointsc.geospatial_query(
                self.spatial_handle,
                in_tbl,
                out_cols,
                GeoSpatialDefaults.get_query_error_columns(layername),
                query_options)
            # Create output JSON dictionary
            output = geospatiallib.create_json_result_with_status(
                msg['WebSocketId'],
                out_tbl,
                err_tbl,
                rc,
                pxmsg,
                fieldmap)
            # put this back on the pipe
            self.socket_push.send(output)
| {
"content_hash": "2f147be2eb4941df5056b36deefc5d4d",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 82,
"avg_line_length": 35.64935064935065,
"alnum_prop": 0.570856102003643,
"repo_name": "safou/spatial",
"id": "1b327c3025aad060c44fad61053625cdca4f60cc",
"size": "2745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "socketserver/geospatialworker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "13396"
},
{
"name": "C++",
"bytes": "71040"
},
{
"name": "CSS",
"bytes": "1358"
},
{
"name": "CoffeeScript",
"bytes": "64"
},
{
"name": "D",
"bytes": "4970"
},
{
"name": "JavaScript",
"bytes": "1030008"
},
{
"name": "Python",
"bytes": "26273"
},
{
"name": "Shell",
"bytes": "4556"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
from sqlite3 import dbapi2 as sqlite3
from hashlib import md5
from flask import Flask, request, session, g, redirect, url_for, \
abort, render_template, flash, _app_ctx_stack, jsonify, Response
from werkzeug import check_password_hash, generate_password_hash
import uuid
import base64
import sys
# configuration
DATABASE = './users.db'
#DEBUG = True
# create the application
app = Flask(__name__)
app.config.from_object(__name__)
app.config.from_envvar('COMPOTHNGS_SETTINGS', silent=True);
# init a blank db
def init_db():
    """Creates the database tables.

    Executes schema.sql inside an application context against the
    configured DATABASE file and commits the result.
    """
    with app.app_context():
        db = get_db()
        with app.open_resource('schema.sql') as f:
            db.cursor().executescript(f.read())
        db.commit()
# connect to the db
def get_db():
    """Return the SQLite connection for the current application context.

    The connection is created lazily and cached on the context, so repeated
    calls within the same context reuse a single connection.
    """
    ctx = _app_ctx_stack.top
    if not hasattr(ctx, 'sqlite_db'):
        connection = sqlite3.connect(app.config['DATABASE'])
        connection.row_factory = sqlite3.Row
        ctx.sqlite_db = connection
    return ctx.sqlite_db
# query the database
def query_db(query, args=(), one=False):
    """Run *query* with *args* and return the resulting rows.

    Returns a list of rows, or -- when *one* is true -- just the first row
    (None if the result set is empty).
    """
    rows = get_db().execute(query, args).fetchall()
    if one:
        return rows[0] if rows else None
    return rows
# query the database
def query_all(query, args=(), one=False):
    """Run *query* and serialize every row's username/token pair as JSON.

    Returns a string of the form
    ``{"users": [{"user": <username>, "token": <api_token>}, ...]}``.
    The *one* parameter is kept for signature compatibility and ignored.
    """
    import json
    # Execute the query once (the previous version executed it twice: once
    # for an unused cursor, once for the loop) and build valid JSON instead
    # of hand-concatenated pseudo-JSON (unquoted keys, trailing commas).
    rows = get_db().execute(query, args).fetchall()
    users = [{"user": row['username'], "token": row['api_token']}
             for row in rows]
    return json.dumps({"users": users})
def make_plain_response(response, code):
    """Set *code* as the HTTP status on *response* and return it."""
    response.status_code = code
    return response
# JSON response
def make_json_response(msg, code):
    """Return a JSON body ``{"message": msg}`` with HTTP status *code*."""
    resp = jsonify(message=msg)
    resp.status_code = code
    return resp
# before request
@app.before_request
def before_request():
    """Load the logged-in user (if any) onto flask.g before each request."""
    g.user = None
    if 'user_id' in session:
        g.user = query_db('select * from user where user_id = ?',
                          [session['user_id']], one=True)
# close the db connection in the teardown
@app.teardown_appcontext
def close_database(exception):
    """Close the per-context SQLite connection when the app context ends."""
    ctx = _app_ctx_stack.top
    if hasattr(ctx, 'sqlite_db'):
        ctx.sqlite_db.close()
@app.route('/resetDB', methods=['POST'])
def resetDB():
    """Drop and recreate the database schema (destructive)."""
    if request.method != 'POST':
        return make_json_response('POST method only', 400)
    init_db()
    return make_json_response('DB INITIALITATED', 200)
@app.route('/getToken/<username>', methods=['GET'])
def servioticyGetUser(username):
    """Return the stored API token for *username*, or 400 if unknown."""
    user = query_db('select * from user where username= ?', [username], one=True)
    if user is None:
        return make_json_response('Invalid username', 400)
    return make_json_response(user['api_token'], 200)
@app.route('/getAllTokens', methods=['GET'])
def servioticyGetAllUsers():
    """Return every username/token pair as a JSON document."""
    user = query_all('select * from user', one=True)
    if user is None:
        return make_json_response('Empty user database', 400)
    # NOTE(review): query_all returns a string, so the `is None` branch
    # above appears unreachable -- confirm before relying on it.
    return make_plain_response(Response(user, mimetype='application/json'), 200)
@app.route('/registerUser/<username>', methods=['POST'])
def servioticyCreateUser(username):
    """Register *username* and return a freshly generated API token (201)."""
    user = query_db('select * from user where username = ?', [username], one=True)
    if user is not None:
        return make_json_response('Someone already has that username', 400)
    db = get_db()
    # Random identifiers: a dashless UUID for the user id, and two
    # base64-encoded UUID strings concatenated as the API token.
    # NOTE(review): base64.b64encode(str(...)) relies on Python 2 str/bytes
    # semantics; under Python 3 this raises TypeError -- confirm the
    # deployment interpreter.
    user_uuid = str(uuid.uuid4()).replace("-", "")
    user_token = base64.b64encode(str(uuid.uuid4())) + base64.b64encode(str(uuid.uuid4()))
    db.execute('''insert into user(username, uuid, api_token)
        values(?, ?, ?)''', [username, user_uuid, user_token])
    db.commit()
    return make_json_response(user_token, 201)
if __name__ == '__main__':
    # Listen on all interfaces, port 5010.
    app.run(host='0.0.0.0',port=5010)
| {
"content_hash": "0639f1abb567babf86abfc791b23ff52",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 88,
"avg_line_length": 30.563492063492063,
"alnum_prop": 0.6756686574915607,
"repo_name": "servioticy/servioticy-vagrant",
"id": "842963fab160c0850d9b112316740101181cca51",
"size": "3851",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "puppet/files/userDB/userDB.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10437"
},
{
"name": "HTML",
"bytes": "56921"
},
{
"name": "JavaScript",
"bytes": "279977"
},
{
"name": "Pascal",
"bytes": "7964"
},
{
"name": "Puppet",
"bytes": "311920"
},
{
"name": "Python",
"bytes": "36665"
},
{
"name": "Ruby",
"bytes": "1297061"
},
{
"name": "Scala",
"bytes": "1353"
},
{
"name": "Shell",
"bytes": "49835"
}
],
"symlink_target": ""
} |
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    # ONS/GSS-style identifier for this council (South Kesteven) --
    # confirm against the council register if reusing this pattern.
    council_id = 'E07000141'
    # The same Xpress 'Democracy Club' TSV export supplies both the address
    # list and the polling-station data for this council.
    addresses_name = 'parl.2017-06-08/Version 1/South-Kesteven Democracy_Club__08June2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/South-Kesteven Democracy_Club__08June2017.tsv'
    # Election this data applies to.
    elections = ['parl.2017-06-08']
    # The source file is tab-separated.
    csv_delimiter = '\t'
| {
"content_hash": "bf5a9c024a85c2989bd077512f79b6a3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 94,
"avg_line_length": 51.75,
"alnum_prop": 0.7657004830917874,
"repo_name": "chris48s/UK-Polling-Stations",
"id": "2013969faf7d320e574a746dff0de62042f79fe8",
"size": "414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "polling_stations/apps/data_collection/management/commands/import_south_kesteven.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "347"
},
{
"name": "Gherkin",
"bytes": "3720"
},
{
"name": "HTML",
"bytes": "30715"
},
{
"name": "JavaScript",
"bytes": "3226"
},
{
"name": "Python",
"bytes": "589520"
}
],
"symlink_target": ""
} |
"""
Middleware provided and used by Horizon.
"""
import json
import logging
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.views import redirect_to_login
from django.contrib import messages as django_messages
from django import http
from django import shortcuts
from django.utils.encoding import iri_to_uri
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from openstack_auth import views as auth_views
from horizon import exceptions
from horizon.utils import functions as utils
LOG = logging.getLogger(__name__)
class HorizonMiddleware(object):
    """The main Horizon middleware class. Required for use of Horizon."""

    # Remembers the reason for the most recent logout so it can be attached
    # to the 401 response built later in process_response().
    logout_reason = None

    def _logout(self, request, login_url=None, message=None, status='success'):
        """Log the user out and record *message* as the logout reason."""
        response = auth_views.logout(request, login_url)
        if message is not None:
            self.logout_reason = message
            utils.add_logout_reason(request, response, message, status)
        return response

    def process_request(self, request):
        """Adds data necessary for Horizon to function to the request."""
        # Placeholder populated later by the dashboard/panel machinery;
        # async_messages collects messages to be delivered via header.
        request.horizon = {'dashboard': None,
                           'panel': None,
                           'async_messages': []}
        if not hasattr(request, "user") or not request.user.is_authenticated():
            # proceed no further if the current request is already known
            # not to be authenticated
            # it is CRITICAL to perform this check as early as possible
            # to avoid creating too many sessions
            return None
        if request.is_ajax():
            # if the request is Ajax we do not want to proceed, as clients can
            # 1) create pages with constant polling, which can create race
            #    conditions when a page navigation occurs
            # 2) might leave a user seemingly left logged in forever
            # 3) thrashes db backed session engines with tons of changes
            return None
        # If we use cookie-based sessions, check that the cookie size does not
        # reach the max size accepted by common web browsers.
        if (
            settings.SESSION_ENGINE ==
            'django.contrib.sessions.backends.signed_cookies'
        ):
            max_cookie_size = getattr(
                settings, 'SESSION_COOKIE_MAX_SIZE', None)
            session_cookie_name = getattr(
                settings, 'SESSION_COOKIE_NAME', None)
            session_key = request.COOKIES.get(session_cookie_name)
            if max_cookie_size is not None and session_key is not None:
                # Total size of ALL cookies, since they share the browser's
                # per-domain budget with the session cookie.
                cookie_size = sum((
                    len(key) + len(value)
                    for key, value in request.COOKIES.items()
                ))
                if cookie_size >= max_cookie_size:
                    LOG.error(
                        'Total Cookie size for user_id: %(user_id)s is '
                        '%(cookie_size)sB >= %(max_cookie_size)sB. '
                        'You need to configure file-based or database-backed '
                        'sessions instead of cookie-based sessions: '
                        'http://docs.openstack.org/developer/horizon/topics/'
                        'deployment.html#session-storage',
                        {
                            'user_id': request.session.get(
                                'user_id', 'Unknown'),
                            'cookie_size': cookie_size,
                            'max_cookie_size': max_cookie_size,
                        }
                    )
        # Activate the user's preferred timezone for this request, if any.
        tz = utils.get_timezone(request)
        if tz:
            timezone.activate(tz)

    def process_exception(self, request, exception):
        """Catch internal Horizon exception classes.

        Exception classes such as NotAuthorized, NotFound and Http302
        are caught and handled gracefully.
        """
        if isinstance(exception, (exceptions.NotAuthorized,
                                  exceptions.NotAuthenticated)):
            auth_url = settings.LOGIN_URL
            next_url = iri_to_uri(request.get_full_path())
            # Avoid a redirect loop back to the login page itself.
            if next_url != auth_url:
                field_name = REDIRECT_FIELD_NAME
            else:
                field_name = None
            login_url = request.build_absolute_uri(auth_url)
            response = redirect_to_login(next_url, login_url=login_url,
                                         redirect_field_name=field_name)
            if isinstance(exception, exceptions.NotAuthorized):
                logout_reason = _("Unauthorized. Please try logging in again.")
                utils.add_logout_reason(request, response, logout_reason,
                                        'error')
                # delete messages, created in get_data() method
                # since we are going to redirect user to the login page
                response.delete_cookie('messages')
            if request.is_ajax():
                # Ajax clients cannot follow a normal redirect; signal the
                # target location through a custom header on a 401 instead.
                response_401 = http.HttpResponse(status=401)
                response_401['X-Horizon-Location'] = response['location']
                return response_401
            return response
        # If an internal "NotFound" error gets this far, return a real 404.
        if isinstance(exception, exceptions.NotFound):
            raise http.Http404(exception)
        if isinstance(exception, exceptions.Http302):
            # TODO(gabriel): Find a way to display an appropriate message to
            # the user *on* the login form...
            return shortcuts.redirect(exception.location)

    @staticmethod
    def copy_headers(src, dst, headers):
        """Copy the named *headers* from response *src* to response *dst*."""
        for header in headers:
            dst[header] = src[header]

    def process_response(self, request, response):
        """Convert HttpResponseRedirect to HttpResponse if request is via ajax.

        This is to allow ajax request to redirect url.
        """
        if request.is_ajax() and hasattr(request, 'horizon'):
            queued_msgs = request.horizon['async_messages']
            if type(response) == http.HttpResponseRedirect:
                # Drop our messages back into the session as per usual so they
                # don't disappear during the redirect. Note that we explicitly
                # use django's messages methods here.
                for tag, message, extra_tags in queued_msgs:
                    getattr(django_messages, tag)(request, message, extra_tags)
                if response['location'].startswith(settings.LOGOUT_URL):
                    redirect_response = http.HttpResponse(status=401)
                    # This header is used for handling the logout in JS
                    redirect_response['logout'] = True
                    if self.logout_reason is not None:
                        utils.add_logout_reason(
                            request, redirect_response, self.logout_reason,
                            'error')
                else:
                    redirect_response = http.HttpResponse()
                # Use a set while checking if we want a cookie's attributes
                # copied
                cookie_keys = {'max_age', 'expires', 'path', 'domain',
                               'secure', 'httponly', 'logout_reason'}
                # Copy cookies from HttpResponseRedirect towards HttpResponse
                for cookie_name, cookie in response.cookies.items():
                    cookie_kwargs = dict((
                        (key, value) for key, value in cookie.items()
                        if key in cookie_keys and value
                    ))
                    redirect_response.set_cookie(
                        cookie_name, cookie.value, **cookie_kwargs)
                redirect_response['X-Horizon-Location'] = response['location']
                upload_url_key = 'X-File-Upload-URL'
                if upload_url_key in response:
                    self.copy_headers(response, redirect_response,
                                      (upload_url_key, 'X-Auth-Token'))
                return redirect_response
            if queued_msgs:
                # TODO(gabriel): When we have an async connection to the
                # client (e.g. websockets) this should be pushed to the
                # socket queue rather than being sent via a header.
                # The header method has notable drawbacks (length limits,
                # etc.) and is not meant as a long-term solution.
                response['X-Horizon-Messages'] = json.dumps(queued_msgs)
        return response
| {
"content_hash": "9c02ffa34b4edf7a7a400af12d93905d",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 79,
"avg_line_length": 45.64736842105263,
"alnum_prop": 0.565663553557016,
"repo_name": "yeming233/horizon",
"id": "8fd3c407fda8c20c7e949bed1a94dbd6a9207b12",
"size": "9436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "horizon/middleware/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "105527"
},
{
"name": "HTML",
"bytes": "517093"
},
{
"name": "JavaScript",
"bytes": "953373"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "4845896"
},
{
"name": "Shell",
"bytes": "18658"
}
],
"symlink_target": ""
} |
import subprocess
import sys
import sqlite3
import os
# from thrift import Thrift
# from thrift.transport import TSocket
# from thrift.transport import TTransport
# from thrift.protocol import TBinaryProtocol
from xml.dom.minidom import Document
import pexpect
import pxssh
sys.path.append('..')
# from THService import THService
# class ThriftClient:
# def __init__(self, host, port):
# self.host = host
# self.port = port
# try:
# self.transport = TSocket.TSocket(self.host, self.port)
# transport = TTransport.TBufferedTransport(self.transport)
# protocol = TBinaryProtocol.TBinaryProtocol(transport)
# self.client = THService.Client(protocol)
# transport.open()
# except Thrift.TException, ex:
# print "%s" % (ex.message)
#
# def execute(self, cmd):
# return self.client.Exec(cmd)
#
# def transfer(self, filename, content):
# return self.client.FileTransfer(filename, content)
#
# def echo(self):
# return self.client.Echo()
#
# def close(self):
# self.transport.close()
class DbClient:
    """Thin wrapper around the local SQLite database ``thmanager.db``.

    On first use, creates a ``master`` table of (id, name, ip) rows.
    NOTE: this module is Python 2 code (print statements).
    """
    def __init__(self):
        self.conn = sqlite3.connect('thmanager.db')
        cursor = self.conn.cursor()
        # Probe sqlite_master to see whether the schema exists yet.
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' "
                       "AND name='master'")
        if len(cursor.fetchall()) == 0:
            print "empty db"
            cursor.execute("create table master(id integer primary key,"
                           "name varchar(50),"
                           "ip varchar(20))")
        else:
            print "normal db"
        cursor.close()
    def close(self):
        """Close the underlying SQLite connection."""
        self.conn.close()
class SshClient:
    """Interactive SSH session to a host as root, driven through pxssh."""
    def __init__(self, host, pwd):
        self.host = host
        client = pxssh.pxssh()
        if not client.login(host, 'root', pwd):
            print "cannot connect server %s" % host
        self.client = client
    def execute(self, cmd):
        """Run *cmd* on the remote host and return its captured output."""
        print 'root@%s> %s' % (self.host, cmd)
        client = self.client
        client.sendline(cmd)
        client.prompt(timeout=3600000)
        # client.before holds the echoed command plus its output; strip the
        # echo and the surrounding line-ending characters.
        return client.before.replace(cmd, '')[2:][:-2]
    def send_line(self, cmd):
        """Send a raw line without waiting for the prompt to return."""
        self.client.sendline(cmd)
    def close(self):
        """Terminate the SSH session."""
        self.client.logout()
class ScpClient:
    """scp-based file transfer helpers that answer the password prompt
    via pexpect."""
    @staticmethod
    def local2remote(host, password, source, target):
        """Copy local *source* to root@*host*:*target*."""
        cmd = 'scp %s root@%s:%s' % (source, host, target)
        print cmd
        child = pexpect.spawn(cmd)
        # Either scp asks for a password, or it finishes (e.g. key auth).
        i = child.expect(['password:', pexpect.EOF])
        if i == 0:
            child.sendline(password)
            child.expect(pexpect.EOF)
        child.close()
    @staticmethod
    def remote2local(host, password, source, target):
        """Copy root@*host*:*source* to local *target*."""
        child = pexpect.spawn('scp root@%s:%s %s' % (host, source, target))
        i = child.expect(['password', pexpect.EOF])
        if i == 0:
            child.sendline(password)
        child.close()
class SshCopyIdClient:
    """Generates an SSH key on a master host and distributes it to nodes."""
    @staticmethod
    def genkey(master, password):
        """Run ``ssh-keygen -t rsa`` on *master*, accepting all defaults."""
        ssh = pxssh.pxssh()
        if not ssh.login(master, 'root', password):
            print "cannot connect server %s" % master
        print 'send ssh-keygen'
        ssh.sendline('ssh-keygen -t rsa')
        try:
            # Answer the three interactive prompts (key file location,
            # passphrase, passphrase confirmation) with empty lines.
            ssh.expect('Generating.*:')
            ssh.sendline()
            ssh.expect('Enter.*:')
            ssh.sendline()
            ssh.expect('Enter.*:')
            ssh.sendline()
        except pexpect.TIMEOUT:
            print 'timeout'
        finally:
            ssh.logout()
    @staticmethod
    def copy(master, nodes, password):
        """Run ``ssh-copy-id`` from *master* to every host in *nodes*."""
        ssh = pxssh.pxssh()
        if not ssh.login(master, 'root', password):
            print "cannot connect server %s" % master
        for node in nodes:
            ssh.sendline('ssh-copy-id root@%s' % node)
            try:
                # index 0: first-connection host-key confirmation;
                # index 1: password prompt; index 2: already done (EOF).
                index = ssh.expect(['continue connecting \(yes/no\)', '\'s password:', pexpect.EOF], timeout=8)
                if index == 0:
                    ssh.sendline('yes')
                if index == 1:
                    ssh.sendline(password)
                    ssh.sendline(password)
            except pexpect.TIMEOUT:
                print 'timeout'
            else:
                pass
        ssh.logout()
class CmdClient:
    """Runs a local shell command and captures its combined output."""
    @staticmethod
    def execute(cmd):
        """Execute *cmd* through the shell; return stdout followed by stderr
        joined into one string."""
        # shell=True: *cmd* is a complete shell command line.
        process = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        tmp_out = process.stdout.readlines()
        tmp_err = process.stderr.readlines()
        tmp = tmp_out + tmp_err
        tmp = "".join(tmp)
        return tmp
class HadoopConfigGen:
    """Builds a Hadoop-style XML configuration document.

    The document has a ``<configuration>`` root containing ``<property>``
    elements, each with ``<name>`` and ``<value>`` children.
    """
    def __init__(self):
        self.doc = Document()
        self.root = self.doc.createElement('configuration')
        self.doc.appendChild(self.root)

    def _text_element(self, tag, text):
        """Create ``<tag>text</tag>`` within this document."""
        element = self.doc.createElement(tag)
        element.appendChild(self.doc.createTextNode(text))
        return element

    def add_property(self, name, value):
        """Append a ``<property>`` with the given *name* and *value*."""
        prop_element = self.doc.createElement('property')
        prop_element.appendChild(self._text_element('name', name))
        prop_element.appendChild(self._text_element('value', value))
        self.root.appendChild(prop_element)

    def save(self, path):
        """Write the XML document to *path*.

        BUGFIX: uses a context manager so the file handle is closed even if
        the write raises (the original leaked the handle on error).
        """
        with open(path, 'w') as f:
            f.write(self.doc.toxml())

    def content(self):
        """Return the document as pretty-printed XML."""
        return self.doc.toprettyxml(indent=' ')
| {
"content_hash": "a81d3b56669562c65c987e80f063d612",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 111,
"avg_line_length": 29.891304347826086,
"alnum_prop": 0.5689090909090909,
"repo_name": "openedbox/bigdata-all-in-one",
"id": "7bbaf9f1c19517457f39db3a396a7f0bfc64bde4",
"size": "5500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18919"
},
{
"name": "Shell",
"bytes": "1985"
}
],
"symlink_target": ""
} |
from .run_command import run_command
def make_volume_dict():
    """Parse ``sudo blkid`` output into ``{device: {FIELD: value}}``.

    Each blkid line looks like ``/dev/sda1: UUID="..." TYPE="ext4"``;
    the trailing colon is stripped from the device name and the quoted
    FIELD=value pairs become the inner dict.
    """
    volumes = {}
    blkid_lines = run_command("sudo blkid").stdout.strip("\n").split(sep="\n")
    for raw_line in blkid_lines:
        tokens = raw_line.split()
        device = tokens[0][:-1]  # drop the trailing ':' from "/dev/sda1:"
        fields = {}
        for token in tokens[1:]:
            field, value = token.replace('"', "").split(sep="=")
            fields[field] = value
        volumes[device] = fields
    return volumes
| {
"content_hash": "358a16d1fa6110153710224883a4f7e2",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 77,
"avg_line_length": 20.681818181818183,
"alnum_prop": 0.5626373626373626,
"repo_name": "UCSD-CCAL/ccal",
"id": "fcbe27ad97b1a5f72e8aa76de5b2a0c3cb1f3831",
"size": "455",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ccal/make_volume_dict.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "20830"
},
{
"name": "Python",
"bytes": "294577"
}
],
"symlink_target": ""
} |
'''
Meshes for hamitonian Generator
'''
from numpy import *
from multithreading import mpido
from scipy.linalg import eigh,eigvalsh
from matplotlib.pyplot import *
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from utils import H2G
import pdb,time
__all__=['Emesh','Hmesh','Gmesh']
class Emesh(object):
    '''
    Energy Mesh.

    Construct
    ----------------
    Emesh(data), data is an N-D(N>=2) array or the filename of data file.

    Attributes
    ---------------
    data:
        The mesh data of energy arrange as [dim_1,...dim_n,nband].
    '''
    def __init__(self,data):
        if type(data)==str:
            #a filename was passed in; load the array from disk.
            self.data=loadtxt(data)
        else:
            self.data=data

    def save(self,filename):
        '''
        Save the energy data to `filename` (numpy format).
        '''
        #BUGFIX: `self` was missing from the signature, so any call raised
        #a TypeError/NameError and `self.data` was unreachable.
        save(filename,self.data)

    @property
    def nband(self):
        '''The number of bands'''
        return self.data.shape[-1]

    @property
    def size(self):
        '''The size of mesh.'''
        return self.data.shape[:-1]

    def show(self,kmesh=None):
        '''
        Show the dispersion relation.

        kmesh:
            N-D(N=1,3) array, The k-mesh holding this <Emesh>.
        '''
        dim=ndim(self.data)
        nband=self.nband
        colors=cm.get_cmap('autumn')(linspace(0,1,nband))
        if dim>3 or dim<=1:
            raise Exception('Not Implemented for mesh with dimension %s!'%(dim))
        elif dim==2:
            #1D k-path: plot each band as a line.
            if kmesh is None:
                kmesh=arange(len(self.data))
            plot(self.data.T)
        else:
            #2D k-mesh: scatter each band in 3D.
            N1,N2=self.size
            if kmesh is None:
                kx,ky=meshgrid(arange(N1),arange(N2),indexing='ij')
            else:
                kx,ky=kmesh[...,0],kmesh[...,1]
            fig=gcf()
            ax=fig.add_subplot(111,projection='3d')
            for i in xrange(nband):
                ax.scatter(kx,ky,self.data[...,i],s=5,edgecolor='none',c=colors[i])
            legend(arange(nband))

    def show_dos(self,wlist,geta=3e-2,inverse_axis=False,weights=None):
        '''
        Compute and plot the density of states.

        wlist:
            the frequency space.
        geta:
            smearing factor.
        inverse_axis:
            inverse x-y axis.
        weights:
            weights of energies, defaults to unit weight for every energy.
        '''
        nw=len(wlist)
        if weights is None:
            #BUGFIX: the stated default was unusable (None/(...) raised a
            #TypeError); a scalar 1. broadcasts to unit weights.
            weights=1.
        #BUGFIX: the original allocated `dos` with an undefined `List` type;
        #an ordinary float ndarray is what the arithmetic below requires.
        dos=zeros(nw,dtype='float64')
        dos[...]=(weights/(wlist[:,newaxis]+1j*geta-reshape(self.data,[1,-1]))).imag.sum(axis=-1)
        dos*=-1./pi/prod(self.data.shape)
        if inverse_axis:
            wlist,dos=dos,wlist
        plot(wlist,dos)
        return wlist,dos
class Hmesh(object):
    '''
    Mesh of Hamiltonians.

    Construct
    ----------------
    Hmesh(data), data is an N-D(N>=3) array or the filename of data file.

    Attributes
    ---------------
    data:
        The mesh data of Hamiltonian arrange as [dim_1,...dim_n,nband,nband].
    '''
    def __init__(self,data):
        if type(data)==str:
            self.data=loadtxt(data)
        else:
            self.data=data

    @property
    def size(self):
        '''The size of mesh.'''
        return self.data.shape[:-2]

    @property
    def nband(self):
        '''The number of bands'''
        return self.data.shape[-1]

    def save(self,filename):
        '''Save the hamiltonian data to `filename`.'''
        #BUGFIX: `self` was missing from the signature, making the method
        #uncallable (`self.data` was undefined inside the body).
        save(filename,self.data)

    def getemesh(self,evalvk=False):
        '''
        Get an Ek(with or without vk) mesh.

        evalvk:
            Evaluate vkmesh if True.
        '''
        nband=self.nband
        #diagonalize every k-point's Hamiltonian (in parallel via mpido).
        dmesh=mpido(func=eigh if evalvk else eigvalsh,inputlist=self.data.reshape([-1,nband,nband]))
        if evalvk:
            ekl,vkl=[],[]
            for ek,vk in dmesh:
                ekl.append(ek)
                vkl.append(vk)
            #BUGFIX: `self.data.shape[:,-1]` is invalid tuple indexing;
            #the eigenvalue mesh has shape size+(nband,) == shape[:-1].
            return reshape(ekl,self.data.shape[:-1]),reshape(vkl,self.data.shape)
        else:
            return reshape(dmesh,self.data.shape[:-1])

    def getgmesh(self,w,sigma=None,tp='r',geta=1e-2,**kwargs):
        '''
        Get the Green's function mesh(Gwmesh) instance.

        w:
            an array(or a float number) of energy(frequency).
        sigma:
            self energy correction.
        tp:
            type of green's function.

            * 'r' - retarded.(default)
            * 'a' - advanced.(default)
            * 'matsu' - finite temperature.
        geta:
            smearing factor, default is 1e-2.
        '''
        if ndim(w)==0:
            #only 1 w is to be computed
            return H2G(w=w,h=self.data,sigma=sigma,tp=tp,geta=geta)
        else:
            #generate a mesh on w-list
            #BUGFIX(compat): index with a tuple -- indexing an ndarray with
            #a list of slices is an error in modern numpy.
            gmesh=H2G(w=w[(slice(None),)+(newaxis,)*ndim(self.data)],h=self.data,sigma=sigma,tp=tp,geta=geta)
            return gmesh
class Gmesh(object):
    '''
    Mesh of Green's function

    Construct
    --------------------
    Gmesh(data,geta,tp,T=None)

    Attributes
    ---------------------
    data:
        A array of Green's function data
    geta:
        The smearing factor.
    tp:
        The type of Green's function.

        * 'r' - retarded.(default)
        * 'a' - advanced.
        * 'matsu' - matsubara Green's function.
    T:
        The temperature for matsubara Green's function.
    '''
    def __init__(self,data,geta,tp,T=None):
        #matsubara Green's functions additionally require a temperature.
        assert(tp=='r' or tp=='a' or (tp=='matsu' and not T is None))
        self.data=data
        self.tp=tp
        self.geta=geta
        self.T=T

    @property
    def dimension(self):
        '''Number of k-space dimensions (data axes minus the two band axes).'''
        return ndim(self.data)-2

    @property
    def Amesh(self):
        '''
        Get the spectrum function Ak-mesh, A = +-i/(2*pi)*(G - G^dagger).
        '''
        data=self.data
        #BUGFIX: the Hermitian conjugate needs the last TWO axes swapped;
        #swapaxes(...,axis1=-1,axis2=-1) was a no-op, so the result was
        #-Im(G)/pi elementwise instead of the proper spectral matrix.
        GH=swapaxes(data.conj(),axis1=-1,axis2=-2)
        if self.tp=='r':
            res= 1j/2./pi*(data-GH)
        elif self.tp=='a':
            res= -1j/2./pi*(data-GH)
        else:
            raise Exception('Error','Rules for spectrum of matsubara Green\'s function is not set.')
        return res

    def show_dos(self,lw=3,inverse=False,**kwargs):
        '''
        show dos.
        '''
        nflv=self.data.shape[-1]
        #trace over the band indices of the spectral function.
        dos=trace(self.Amesh[...,:nflv,:nflv],axis1=-1,axis2=-2)
        ddim=ndim(self.data)
        if ddim!=3:
            #NOTE(review): `self.iaxis` is never defined on this class, so
            #this branch raises AttributeError as written; the intended
            #energy-axis index is unclear, left unchanged. TODO confirm.
            kaxis=range(self.dimension)
            kaxis.remove(self.iaxis)
            dos=mean(dos,axis=kaxis)
| {
"content_hash": "51716e5b7a66077fdd4df9fefa45af6b",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 107,
"avg_line_length": 27.06008583690987,
"alnum_prop": 0.5246629659000793,
"repo_name": "Lynn-015/NJU_DMRG",
"id": "af742f54c601455f125675b76f64dd94b4d9b86c",
"size": "6305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "giggleliu/tba/hgen/mesh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5106"
},
{
"name": "Makefile",
"bytes": "13010"
},
{
"name": "Python",
"bytes": "314933"
}
],
"symlink_target": ""
} |
import re
from babik_card_primitives.exceptions import (
InvalidCardNumber,
IssuerNotRecognised
)
# Known issuers, checked in order; each entry pairs a full-number pattern
# with the issuer's machine slug and display name.
CARD_ISSUERS = [
    {
        'regex': re.compile(r'^4[0-9]{12,18}$'),
        'slug': 'visa',
        'name': 'Visa',
    },
    {
        'regex': re.compile(r'^5[1-5][0-9]{14}$'),
        'slug': 'mastercard',
        'name': 'MasterCard',
    },
]


def get_card_issuer(number):
    """Identify the issuer of a card from its number.

    Returns a ``(slug, name)`` pair for the first issuer whose pattern
    matches; raises ``IssuerNotRecognised`` when none does.
    """
    candidate = str(number)
    for issuer_data in CARD_ISSUERS:
        if issuer_data['regex'].match(candidate):
            return issuer_data['slug'], issuer_data['name']
    raise IssuerNotRecognised()
def card_number_luhn_test(number):
    """Verify a credit card number with Luhn's checksum algorithm.

    Working from the rightmost digit, every second digit is doubled (with
    its decimal digits summed); the number is valid when the grand total
    is a multiple of ten.
    """
    digits = [int(ch) for ch in str(number)]
    digits.reverse()
    total = 0
    for position, digit in enumerate(digits):
        if position % 2:
            total += sum(divmod(digit * 2, 10))
        else:
            total += digit
    return total % 10 == 0
# BUGFIX: regex patterns are now raw strings -- "\s" in a plain string is an
# invalid escape sequence (DeprecationWarning, SyntaxWarning in newer Pythons).
whitespace_re = re.compile(r"\s+")
dash_re = re.compile(r"([0-9])-+([0-9])")
card_number_re = re.compile(r"^[0-9]+$")


def clean_card_number(raw_number):
    """
    Removes any whitespace and dashes from a card number, throws an
    InvalidCardNumber if there are not numeric characters remaining
    """
    # Strip all whitespace first, then collapse runs of dashes that sit
    # between two digits.
    whitespaced_stripped_number = whitespace_re.sub("", raw_number)
    number = dash_re.sub(r"\1\2", whitespaced_stripped_number)
    if card_number_re.match(number):
        return number
    else:
        raise InvalidCardNumber
| {
"content_hash": "1a518e3626fa8015d98cee155cef7408",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 76,
"avg_line_length": 26.551724137931036,
"alnum_prop": 0.6025974025974026,
"repo_name": "aubreystarktoller/django-babik-card-primitives",
"id": "3bdef2087e3d83ac27c77a5c6bb84b02ef10fd14",
"size": "1540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "babik_card_primitives/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5399"
},
{
"name": "HTML",
"bytes": "1593"
},
{
"name": "JavaScript",
"bytes": "137686"
},
{
"name": "Makefile",
"bytes": "1493"
},
{
"name": "Python",
"bytes": "62386"
}
],
"symlink_target": ""
} |
"""Offer zone automation rules."""
import voluptuous as vol
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
CONF_ENTITY_ID,
CONF_EVENT,
CONF_PLATFORM,
CONF_ZONE,
)
from homeassistant.core import CALLBACK_TYPE, HassJob, callback
from homeassistant.helpers import condition, config_validation as cv, location
from homeassistant.helpers.event import async_track_state_change_event
# mypy: allow-incomplete-defs, allow-untyped-defs
# mypy: no-check-untyped-defs
# The two trigger transitions: fire when the tracked entity enters or
# leaves the configured zone.
EVENT_ENTER = "enter"
EVENT_LEAVE = "leave"
DEFAULT_EVENT = EVENT_ENTER

# Present-participle forms used when building the human-readable
# trigger description string.
_EVENT_DESCRIPTION = {EVENT_ENTER: "entering", EVENT_LEAVE: "leaving"}

# Configuration schema for a zone trigger: the entity id(s) to watch,
# the zone entity, and which transition fires the trigger.
TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_PLATFORM): "zone",
        vol.Required(CONF_ENTITY_ID): cv.entity_ids,
        vol.Required(CONF_ZONE): cv.entity_id,
        vol.Required(CONF_EVENT, default=DEFAULT_EVENT): vol.Any(
            EVENT_ENTER, EVENT_LEAVE
        ),
    }
)
async def async_attach_trigger(
    hass, config, action, automation_info, *, platform_type: str = "zone"
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration.

    Returns the unsubscribe callable from the state-change tracker.
    """
    trigger_data = automation_info["trigger_data"]
    entity_id = config.get(CONF_ENTITY_ID)
    zone_entity_id = config.get(CONF_ZONE)
    event = config.get(CONF_EVENT)
    job = HassJob(action)
    @callback
    def zone_automation_listener(zone_event):
        """Listen for state changes and call the action on a zone transition."""
        entity = zone_event.data.get("entity_id")
        from_s = zone_event.data.get("old_state")
        to_s = zone_event.data.get("new_state")
        # Skip updates where a present state lacks location data
        # (note: `and` binds tighter than `or` here).
        if (
            from_s
            and not location.has_location(from_s)
            or not location.has_location(to_s)
        ):
            return
        zone_state = hass.states.get(zone_entity_id)
        from_match = condition.zone(hass, zone_state, from_s) if from_s else False
        to_match = condition.zone(hass, zone_state, to_s) if to_s else False
        # Fire only on the configured transition: outside->inside for
        # "enter", inside->outside for "leave".
        if (
            event == EVENT_ENTER
            and not from_match
            and to_match
            or event == EVENT_LEAVE
            and from_match
            and not to_match
        ):
            description = f"{entity} {_EVENT_DESCRIPTION[event]} {zone_state.attributes[ATTR_FRIENDLY_NAME]}"
            hass.async_run_hass_job(
                job,
                {
                    "trigger": {
                        **trigger_data,
                        "platform": platform_type,
                        "entity_id": entity,
                        "from_state": from_s,
                        "to_state": to_s,
                        "zone": zone_state,
                        "event": event,
                        "description": description,
                    }
                },
                to_s.context,
            )
    return async_track_state_change_event(hass, entity_id, zone_automation_listener)
| {
"content_hash": "575704153bafac3deee5719650cfc15d",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 109,
"avg_line_length": 32.6,
"alnum_prop": 0.5725971370143149,
"repo_name": "Danielhiversen/home-assistant",
"id": "ef054b397140db805e83c6f8e7e935afcb82ebb4",
"size": "2934",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/zone/trigger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "36870185"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from reversion.admin import VersionAdmin
from profiles.models import Profile
def username(obj):
    """Admin list column: the username of the profile's related auth user."""
    return obj.user.username
username.short_description = "User username"
def user_email(obj):
    """Admin list column: the email address of the profile's related auth user."""
    return obj.user.email
user_email.short_description = "User email"
class ProfileAdmin(VersionAdmin):
    """Admin configuration for Profile records (django-reversion enabled)."""
    # Searchable by both the profile's own fields and the linked user's.
    search_fields = ("user__username", "github_account", "user__email", "email")
    list_display = ("github_account", "email", username, user_email)
admin.site.register(Profile, ProfileAdmin)
| {
"content_hash": "d0c62a133b682b78bdf43624c1371781",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 80,
"avg_line_length": 23.26086956521739,
"alnum_prop": 0.7383177570093458,
"repo_name": "pydanny/djangopackages",
"id": "166e810847c0eb68ea916f856bcf4e9a9ec72c9e",
"size": "535",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "profiles/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16316"
},
{
"name": "HTML",
"bytes": "101813"
},
{
"name": "Makefile",
"bytes": "2785"
},
{
"name": "Python",
"bytes": "313114"
},
{
"name": "Shell",
"bytes": "4859"
}
],
"symlink_target": ""
} |
import json
import os
import webapp2
from google.appengine.ext import ndb
DEFAULT_TOKEN_VALUE = 'default_token'
# Key methods
def token_key(token_value=DEFAULT_TOKEN_VALUE):
    """Return the ndb Key used as the ancestor for Token entities with
    this token value."""
    return ndb.Key('Token', token_value)
# Helper methods
def valid_secret_key(request):
    """Return True iff the request's X-Secret-Key header matches the
    SECRET_KEY environment variable."""
    headers = request.headers
    if 'X-Secret-Key' not in headers:
        return False
    return headers["X-Secret-Key"] == os.environ.get("SECRET_KEY")
# Models
class Token(ndb.Model):
    """A GCM token to send the app users push notifications"""
    # The registration token string (indexed=False).
    value = ndb.StringProperty(indexed=False)
# Pages
class MainPage(webapp2.RequestHandler):
    """Health-check endpoint reporting whether the caller's secret key
    is valid."""
    def get(self):
        if valid_secret_key(self.request):
            self.response.out.write('Valid Secret Key - nice!')
        else:
            self.response.out.write('Secret Key is not valid')
class GetTokens(webapp2.RequestHandler):
    """Return up to 1000 registered GCM tokens as a JSON list.

    Silently returns nothing when the secret key check fails.
    """
    def get(self):
        if valid_secret_key(self.request):
            token_query = Token.query()
            tokens = token_query.fetch(1000)
            # NOTE: Python 2 only -- `lambda (x):` is tuple-parameter syntax.
            response_dict = {"result": map(lambda (x): x.value, tokens)}
            self.response.write(json.dumps(response_dict, encoding='latin1'))
class AddToken(webapp2.RequestHandler):
    """Register a GCM token supplied as POST parameter ``token``."""
    def post(self):
        if valid_secret_key(self.request):
            value = self.request.get('token')
            # The token value doubles as the entity-group ancestor key, so
            # all copies of the same token share one entity group.
            token = Token(parent=token_key(value))
            token.value = value
            token.put()
            self.response.write('Success')
class DeleteToken(webapp2.RequestHandler):
    """Delete all stored copies of the GCM token supplied as POST
    parameter ``token``."""
    def post(self):
        if valid_secret_key(self.request):
            value = self.request.get('token')
            # All copies live under the token's ancestor key.
            token_query = Token.query(ancestor=token_key(value))
            tokens = token_query.fetch(100)
            if len(tokens) > 0:
                for token in tokens:
                    token.key.delete()
                self.response.write('Success')
            else:
                self.response.write('No token found')
app = webapp2.WSGIApplication([
('/', MainPage),
('/add_token', AddToken),
('/delete_token', DeleteToken),
('/get_tokens', GetTokens),
], debug=True)
| {
"content_hash": "1a6ca72f1ad683f4ce5d4c659615d085",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 112,
"avg_line_length": 26.39240506329114,
"alnum_prop": 0.6153477218225419,
"repo_name": "WGierke/weightlifting_schwedt",
"id": "de8bee7a54fc2f2e121ed6b16383b2ba529718a3",
"size": "2085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/schwedt_app_engine/schwedt_app_engine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "190728"
},
{
"name": "Python",
"bytes": "23947"
},
{
"name": "Shell",
"bytes": "327"
}
],
"symlink_target": ""
} |
import collections
import pickle
import os
import re
import subprocess
import sys
UTF8 = "utf-8"
TRANSFORM, SUMMARIZE = ("TRANSFORM", "SUMMARIZE")
Code = collections.namedtuple("Code", "name code kind")
def main():
    """Run every CODE snippet against a tripled copy of GENOME."""
    genome = 3 * GENOME
    # IDIOM: the original used `for i, code in enumerate(CODE)` but never
    # used `i`; iterate the snippets directly.
    for code in CODE:
        # A fresh context per snippet so one snippet cannot leak state
        # into the next.
        context = dict(genome=genome, target="G[AC]{2}TT", replace="TCGA")
        execute(code, context)
if sys.version_info[:2] > (3, 1):
    # Python 3.2+: subprocess.Popen supports the context-manager protocol.
    def execute(code, context):
        """Run *code* in a child interpreter with the given *context*."""
        module, offset = create_module(code.code, context)
        with subprocess.Popen([sys.executable, "-"], stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
            communicate(process, code, module, offset)
else:
    # Older interpreters: Popen is not a context manager, so no `with`.
    def execute(code, context):
        """Run *code* in a child interpreter with the given *context*."""
        module, offset = create_module(code.code, context)
        process = subprocess.Popen([sys.executable, "-"],
                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        communicate(process, code, module, offset)
def create_module(code, context):
    """Wrap *code* in a standalone module that pickles (result, error)
    to stdout.

    Returns ``(module_source, offset)`` where *offset* is the number of
    generated lines preceding the user's code, used later to correct
    traceback line numbers.
    """
    preamble = ["import pickle", "import sys", "result = error = None"]
    preamble.extend("{} = {!r}".format(name, value)
                    for name, value in context.items())
    offset = len(preamble) + 1
    footer = "\nsys.stdout.buffer.write(pickle.dumps((result, error)))"
    module_source = "\n".join(preamble) + "\n" + code + footer
    return module_source, offset
def communicate(process, code, module, offset):
    """Feed *module* to the child interpreter and report its outcome.

    On stderr output, rewrites the child's traceback so line numbers refer
    to the user's snippet rather than the generated wrapper module; on
    stdout, unpickles the (result, error) pair and dispatches it.
    """
    stdout, stderr = process.communicate(module.encode(UTF8))
    if stderr:
        stderr = stderr.decode(UTF8).lstrip().replace(", in <module>", ":")
        # Shift every reported line number back by the wrapper's offset.
        stderr = re.sub(", line (\d+)",
                lambda match: str(int(match.group(1)) - offset), stderr)
        print(re.sub(r'File."[^"]+?"', "'{}' has an error on line "
                .format(code.name), stderr))
        return
    if stdout:
        # NOTE: pickle.loads on child output is safe here only because the
        # child runs our own generated module.
        result, error = pickle.loads(stdout)
        handle_result(code, result, error)
        return
    print("'{}' produced no result\n".format(code.name))
def handle_result(code, result, error):
    """Print the snippet's outcome according to its kind.

    TRANSFORM snippets are expected to yield a sequence (a new genome);
    SUMMARIZE snippets yield a printable value.
    """
    if error is not None:
        print("'{}' error: {}".format(code.name, error))
    elif result is None:
        print("'{}' produced no result".format(code.name))
    elif code.kind == TRANSFORM:
        # NOTE(review): this rebinds a local only; the caller's genome is
        # not updated -- TODO confirm whether that was intended.
        genome = result
        try:
            print("'{}' produced a genome of length {}".format(code.name,
                len(genome)))
        except TypeError as err:
            print("'{}' error: expected a sequence result: {}".format(
                code.name, err))
    elif code.kind == SUMMARIZE:
        print("'{}' produced a result of {}".format(code.name, result))
    print()
CODE = (
Code("Count",
"""
import re
matches = re.findall(target, genome)
if matches:
result = len(matches)
else:
error = "'{}' not found".format(target)
""", SUMMARIZE)
,
Code("Replace",
"""
import re
result, count = re.subn(target, replace, genome)
if not count:
error = "no '{}' replacements made".format(target)
""", TRANSFORM)
,
Code("Exception Test",
"""
result = 0
for i in range(len(genome)):
if genome[i] = "A":
result += 1
""", SUMMARIZE)
,
Code("Error Test",
"""
import re
matches = re.findall(target * 5, genome)
if matches:
result = len(matches)
else:
error = "'{}' not found".format(target)
""", TRANSFORM)
,
Code("No Result Test",
"""
# No result
""", TRANSFORM)
,
Code("Wrong Kind Test",
"""
result = len(genome)
""", TRANSFORM)
,
Code("Termination Test",
"""
import sys
result = "terminating"
sys.exit()
""", SUMMARIZE)
,
Code("Length",
"""
result = len(genome)
""", SUMMARIZE)
)
GENOME = """TGTTAGTCGCTCCTCGGTCTAAGACATCAAAGTCGGTCTGCGCGGCTGCTCCCTTAGCGCTG
CATAAGAGCGGGGCAGAGAGAGATAGGCGTTTTGACCGTGGCGAGCAAGGCGCGTCATAGTGTCGCCGTGACTG
ATCCTACTGGGTTCTTGCTACTGCCCGGGTCGCAATCCAAAATCTCCACGCGCTGCCACCCCGAAGAAGATATA
TGTCACTGAATTGTATTGGTAACATAGTCGAATTGGGTTCAGGTAAGTTAGTCGTTTAGCCGCTGCGACAGTGG
TGGAAGGGCGAATAGTGTAAAATTTCGCCTGTTAGTGAACATTATCAGGCTGCCATCGTTGATCGCCCCTCTTA
AACTCAGTCTTAAATGAGTTCCCGCCTAAGGTCATTCGTGCCTTGATGATTGATAGCTCGATTGGTCCCTTATG
AAACCGGACCAGAAATGTACCCGCTGAACCGGTGTCATAAGTGTCGCCGTCCCTACGATCGACACTTCCTGAGC
ACGAACGATTTGCGACGCTGTAATGCCACGAGGACTGCATTGAAGATTTTTTGTCCTAGGTGTATGTGCTTCTC
AGGAAGATGCACTACGCACTCCCCTTATCACGGGTGTGACCATCAGGTAGCGTAGGAAGATTAAGACCGCGTAA
CTATCCCTTTCCGTCGCACTCCGACGTCTCAGCACATGTGCGGGGGCCCCTAATTGAGAAACAGTCCATGGTTG
TCCGTAAGTTTCGGAAATCAACTTCACTGCTAGATGGTTGGACGCCAAGGCTCAATAGGTTGGACTCTAAGAAG
""".replace("\n", "")
if __name__ == "__main__":
main()
| {
"content_hash": "c11c745a9944a2018af3585aba20fcac",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 75,
"avg_line_length": 29.083870967741934,
"alnum_prop": 0.6645962732919255,
"repo_name": "johnobrien/PyPractice",
"id": "e55996d0f8b4daf9c4fecf81193c24e170d55c16",
"size": "5117",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pipeg/genome3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "713211"
}
],
"symlink_target": ""
} |
import sys
from traceback import format_exc, extract_tb
from sanic.exceptions import (
ContentRangeError,
HeaderNotFound,
INTERNAL_SERVER_ERROR_HTML,
InvalidRangeType,
SanicException,
TRACEBACK_LINE_HTML,
TRACEBACK_STYLE,
TRACEBACK_WRAPPER_HTML)
from sanic.log import log
from sanic.response import text, html
class ErrorHandler:
    """Maps exception classes to handler callables and renders responses.

    ``handlers`` holds (exception_class, handler) registrations in order;
    ``cached_handlers`` memoizes lookup results per concrete exception type.
    """
    handlers = None
    cached_handlers = None
    # Sentinel distinguishing "not cached yet" from a cached None result.
    _missing = object()

    def __init__(self):
        self.handlers = []
        self.cached_handlers = {}
        self.debug = False

    def _render_traceback_html(self, exception, request):
        """Build the debug-mode HTML traceback page for the active exception."""
        exc_type, exc_value, tb = sys.exc_info()
        frames = extract_tb(tb)
        frame_html = []
        for frame in frames:
            frame_html.append(TRACEBACK_LINE_HTML.format(frame))
        return TRACEBACK_WRAPPER_HTML.format(
            style=TRACEBACK_STYLE,
            exc_name=exc_type.__name__,
            exc_value=exc_value,
            frame_html=''.join(frame_html),
            path=request.path)

    def add(self, exception, handler):
        """Register *handler* for exceptions of class *exception*."""
        self.handlers.append((exception, handler))

    def lookup(self, exception):
        """Return the handler registered for *exception*'s type, or None.

        Results (including misses) are memoized in ``cached_handlers``.
        """
        # BUGFIX: the cache is written keyed by type(exception) below, but
        # was read keyed by the exception *instance*, so it never hit and
        # every lookup re-walked the full handler list.
        handler = self.cached_handlers.get(type(exception), self._missing)
        if handler is self._missing:
            for exception_class, handler in self.handlers:
                if isinstance(exception, exception_class):
                    self.cached_handlers[type(exception)] = handler
                    return handler
            self.cached_handlers[type(exception)] = None
            handler = None
        return handler

    def response(self, request, exception):
        """Fetches and executes an exception handler and returns a response
        object

        :param request: Request
        :param exception: Exception to handle
        :return: Response object
        """
        handler = self.lookup(exception)
        response = None
        try:
            if handler:
                response = handler(request=request, exception=exception)
            if response is None:
                response = self.default(request=request, exception=exception)
        except Exception:
            # The handler itself failed; report that instead of crashing.
            self.log(format_exc())
            if self.debug:
                url = getattr(request, 'url', 'unknown')
                response_message = (
                    'Exception raised in exception handler "{}" '
                    'for uri: "{}"\n{}').format(
                        handler.__name__, url, format_exc())
                log.error(response_message)
                return text(response_message, 500)
            else:
                return text('An error occurred while handling an error', 500)
        return response

    def log(self, message, level='error'):
        """
        Override this method in an ErrorHandler subclass to prevent
        logging exceptions.
        """
        getattr(log, level)(message)

    def default(self, request, exception):
        """Fallback handler: plain text for SanicExceptions, an HTML
        traceback in debug mode, or the generic 500 page otherwise."""
        self.log(format_exc())
        if issubclass(type(exception), SanicException):
            return text(
                'Error: {}'.format(exception),
                status=getattr(exception, 'status_code', 500),
                headers=getattr(exception, 'headers', dict())
            )
        elif self.debug:
            html_output = self._render_traceback_html(exception, request)
            response_message = (
                'Exception occurred while handling uri: "{}"\n{}'.format(
                    request.url, format_exc()))
            log.error(response_message)
            return html(html_output, status=500)
        else:
            return html(INTERNAL_SERVER_ERROR_HTML, status=500)
class ContentRangeHandler:
    """Parse an HTTP ``Range`` request header against a file's size.

    On success exposes ``start``, ``end``, ``size``, ``total`` and a
    ready-made ``Content-Range`` response header.
    """
    __slots__ = ('start', 'end', 'size', 'total', 'headers')

    def __init__(self, request, stats):
        self.total = stats.st_size
        range_header = request.headers.get('Range')
        if range_header is None:
            raise HeaderNotFound('Range Header Not Found')

        # "bytes=<start>-<end>" -> unit / raw range value
        unit, _, value = [piece.strip() for piece in range_header.partition('=')]
        if unit != 'bytes':
            raise InvalidRangeType(
                '%s is not a valid Range Type' % (unit,), self)

        raw_start, _, raw_end = [piece.strip() for piece in value.partition('-')]
        if raw_start:
            try:
                self.start = int(raw_start)
            except ValueError:
                raise ContentRangeError(
                    '\'%s\' is invalid for Content Range' % (raw_start,), self)
        else:
            self.start = None
        if raw_end:
            try:
                self.end = int(raw_end)
            except ValueError:
                raise ContentRangeError(
                    '\'%s\' is invalid for Content Range' % (raw_end,), self)
        else:
            self.end = None

        if self.end is not None:
            if self.start is None:
                # suffix form, e.g. `Content-Range: bytes -5`: last N bytes
                self.start = self.total - self.end
                self.end = self.total
        else:
            if self.start is None:
                # neither bound given -> malformed
                raise ContentRangeError(
                    'Invalid for Content Range parameters', self)
            # open-ended form, e.g. `Content-Range: bytes 5-`
            self.end = self.total

        if self.start >= self.end:
            raise ContentRangeError(
                'Invalid for Content Range parameters', self)

        self.size = self.end - self.start
        self.headers = {
            'Content-Range': "bytes %s-%s/%s" % (
                self.start, self.end, self.total)}

    def __bool__(self):
        return self.size > 0
| {
"content_hash": "8228170d66d0c1efb190a9802bff808c",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 77,
"avg_line_length": 35.45859872611465,
"alnum_prop": 0.5579306628345608,
"repo_name": "ai0/sanic",
"id": "64df2c2cb794d421f0035eb0a15aaeea99028b1b",
"size": "5567",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sanic/handlers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "482"
},
{
"name": "Makefile",
"bytes": "108"
},
{
"name": "Python",
"bytes": "224370"
}
],
"symlink_target": ""
} |
from farmfs.fs import Path
from farmfs.fs import ensure_file
from farmfs.fs import walk
from hashlib import md5
from json import loads, JSONEncoder
from errno import ENOENT as NoSuchFile
from errno import EISDIR as IsDirectory
from os.path import sep
from farmfs.util import egest, safetype
# Shared JSON encoder: sort_keys gives deterministic output (stable checksums),
# ensure_ascii=False keeps non-ASCII values as unicode instead of \u escapes.
keydb_encoder = JSONEncoder(ensure_ascii=False, sort_keys=True)
def checksum(value_bytes):
    """Return the hex MD5 digest of *value_bytes*.

    The caller must pass already-encoded bytes, not a unicode string.
    """
    digest = md5(value_bytes)
    return digest.hexdigest()
class KeyDB:
    """Filesystem-backed key/value store.

    Each key maps to one file under ``db_path``; a value is stored as a line
    of JSON followed by a line holding the MD5 checksum of that JSON, which
    is verified on every read.
    """
    def __init__(self, db_path):
        assert isinstance(db_path, Path)
        self.root = db_path

    # TODO I DONT THINK THIS SHOULD BE A PROPERTY OF THE DB UNLESS WE HAVE SOME
    # ITERATOR BASED RECORD TYPE.
    def write(self, key, value):
        """Encode ``value`` as JSON and write it (plus its checksum) to ``key``."""
        key = safetype(key)
        value_json = keydb_encoder.encode(value)
        value_bytes = egest(value_json)
        value_hash = egest(checksum(value_bytes))
        key_path = self.root.join(key)
        # ensure_file creates missing parent directories / the file itself.
        # NOTE(review): assumed from the name -- confirm against farmfs.fs.
        with ensure_file(key_path, 'wb') as f:
            f.write(value_bytes)
            f.write(b"\n")
            f.write(value_hash)
            f.write(b"\n")

    def readraw(self, key):
        """Return the stored JSON string for ``key`` after verifying its
        checksum, or None when the key is absent (missing file or directory).

        Raises ValueError on checksum mismatch (corrupt entry).
        """
        key = safetype(key)
        try:
            with self.root.join(key).open('rb') as f:
                obj_bytes = f.readline().strip()
                obj_bytes_checksum = checksum(obj_bytes).encode('utf-8')
                key_checksum = f.readline().strip()
                if obj_bytes_checksum != key_checksum:
                    raise ValueError(
                        "Checksum mismatch for key %s. Expected %s, calculated %s" % (key, key_checksum, obj_bytes_checksum))
                obj_str = egest(obj_bytes)
                return obj_str
        except IOError as e:
            # Absent key (or key that is actually a directory) reads as None.
            if e.errno == NoSuchFile or e.errno == IsDirectory:
                return None
            else:
                raise e

    def read(self, key):
        """Return the decoded JSON value stored under ``key``, or None."""
        obj_str = self.readraw(key)
        if obj_str is None:
            return None
        else:
            obj = loads(obj_str)
            return obj

    def list(self, query=None):
        """List keys (paths relative to root) under the prefix ``query``."""
        if query is None:
            query = ""
        query = safetype(query)
        query_path = self.root.join(query)
        assert self.root in query_path.parents(), \
            "%s is not a parent of %s" % (self.root, query_path)
        # NOTE(review): `query_path.exists` is not called here -- if `exists`
        # is a method rather than a property on farmfs.fs.Path, this operand
        # is always truthy; confirm against the Path implementation.
        if query_path.exists and query_path.isdir():
            return [p.relative_to(self.root)
                    for (p, t) in walk(query_path)
                    if t == 'file']
        else:
            return []

    def delete(self, key):
        """Remove the file stored under ``key``.

        ``clean=self.root`` presumably prunes now-empty parent directories up
        to the DB root -- TODO confirm against farmfs.fs.Path.unlink.
        """
        key = safetype(key)
        path = self.root.join(key)
        path.unlink(clean=self.root)
class KeyDBWindow(KeyDB):
    """A KeyDB facade restricted to keys beneath one prefix ("window").

    All keys passed in are transparently prefixed with ``<window><sep>``;
    keys returned by ``list`` have the prefix stripped again.
    """

    def __init__(self, window, keydb):
        window = safetype(window)
        assert isinstance(keydb, KeyDB)
        self.prefix = window + sep
        self.keydb = keydb

    def write(self, key, value):
        assert key is not None
        assert value is not None
        self.keydb.write(self.prefix + key, value)

    def read(self, key):
        return self.keydb.read(self.prefix + key)

    def list(self):
        skip = len(self.prefix)
        return [full[skip:] for full in self.keydb.list(self.prefix)]

    def delete(self, key):
        self.keydb.delete(self.prefix + key)
class KeyDBFactory():
    """Wrap a KeyDB-like store, encoding values on write and decoding on read.

    ``encoder(value)`` produces the stored representation; ``decoder(raw, key)``
    reconstructs the value (the key is passed through for context).
    """

    def __init__(self, keydb, encoder, decoder):
        self.keydb = keydb
        self.encoder = encoder
        self.decoder = decoder

    def write(self, key, value):
        encoded = self.encoder(value)
        self.keydb.write(key, encoded)

    def read(self, key):
        raw = self.keydb.read(key)
        return self.decoder(raw, key)

    def list(self):
        return self.keydb.list()

    def delete(self, key):
        self.keydb.delete(key)

    # TODO I don't think I used this.
    def copy(self, key, remote):
        self.write(key, remote.read(key))
| {
"content_hash": "4558f5bde642de6bce80a80c7f081ac1",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 121,
"avg_line_length": 30.77777777777778,
"alnum_prop": 0.5796802475502837,
"repo_name": "andrewguy9/farmfs",
"id": "0c8ff6b5ccafcec18cdb6df246c31df7dcd15ab5",
"size": "3878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "farmfs/keydb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "141852"
},
{
"name": "Shell",
"bytes": "355"
}
],
"symlink_target": ""
} |
import collections
import cPickle as pickle
import hashlib
import json
import math
import os.path as osp
import cv2
import numpy as np
import skimage.color
import skimage.io
import skimage.transform
import rospkg
from .data import get_object_images
# Absolute path of the jsk_arc2017_common ROS package, resolved via rospkg.
PKG_DIR = rospkg.RosPack().get_path('jsk_arc2017_common')
def get_tile_shape(num, ratio_hw=None):
    """Return (rows, cols) of a tile grid able to hold *num* images.

    With *ratio_hw* given, rows grow until a grid of that aspect ratio has
    strictly more than *num* cells; otherwise a near-square grid is used.
    """
    if ratio_hw:
        for rows in xrange(num):
            cols = int(rows / ratio_hw)
            if cols * rows > num:
                return rows, cols
    else:
        cols = int(math.sqrt(num))
        rows = 0
        while rows * cols < num:
            rows += 1
        return rows, cols
def mask_to_rect(mask):
    """Return the tight bounding box (y1, x1, y2, x2) of truthy pixels.

    The box is half-open: ``mask[y1:y2, x1:x2]`` covers every truthy pixel.
    """
    coords = np.argwhere(mask)
    y1, x1 = coords.min(axis=0)
    y2, x2 = coords.max(axis=0) + 1
    return y1, x1, y2, x2
def centerize(src, shape, margin_color=None, return_mask=False):
    """Centerize image for specified image size

    Parameters
    ----------
    src: numpy.ndarray
        Image to centerize
    shape: tuple of int
        Image shape (height, width) or (height, width, channel)
    margin_color: numpy.ndarray
        Color to be filled in the blank.
    return_mask: bool
        When True, also return the bool mask of where ``src`` was placed.
        (The original docstring mislabeled this flag as an ndarray.)
    """
    if src.shape[:2] == shape[:2]:
        # Already the target size: return the input unchanged.
        if return_mask:
            return src, np.ones(shape[:2], dtype=bool)
        else:
            return src
    if len(shape) != src.ndim:
        shape = list(shape) + [src.shape[2]]
    centerized = np.zeros(shape, dtype=src.dtype)
    # BUG FIX: `if margin_color:` raises "truth value of an array ... is
    # ambiguous" for a multi-element ndarray, which is the documented type of
    # margin_color.  Testing against None accepts arrays; a scalar 0 margin is
    # a no-op either way since the canvas is already zero-filled.
    if margin_color is not None:
        centerized[:, :] = margin_color
    # Scale src to fit inside `shape` while preserving its aspect ratio.
    src_h, src_w = src.shape[:2]
    scale_h, scale_w = 1. * shape[0] / src_h, 1. * shape[1] / src_w
    scale = min(scale_h, scale_w)
    dtype = src.dtype
    src = skimage.transform.rescale(src, scale, preserve_range=True)
    src = src.astype(dtype)
    # Pad evenly on both sides of whichever axis has slack.
    ph, pw = 0, 0
    h, w = src.shape[:2]
    dst_h, dst_w = shape[:2]
    if h < dst_h:
        ph = (dst_h - h) // 2
    if w < dst_w:
        pw = (dst_w - w) // 2
    mask = np.zeros(shape[:2], dtype=bool)
    mask[ph:ph + h, pw:pw + w] = True
    centerized[ph:ph + h, pw:pw + w] = src
    if return_mask:
        return centerized, mask
    else:
        return centerized
def _tile(imgs, shape, dst):
"""Tile images which have same size.
Parameters
----------
imgs: numpy.ndarray
Image list which should be tiled.
shape: tuple of int
Tile shape.
dst:
Image to put the tile on.
"""
y_num, x_num = shape
tile_w = imgs[0].shape[1]
tile_h = imgs[0].shape[0]
if dst is None:
if len(imgs[0].shape) == 3:
dst = np.zeros((tile_h * y_num, tile_w * x_num, 3), dtype=np.uint8)
else:
dst = np.zeros((tile_h * y_num, tile_w * x_num), dtype=np.uint8)
for y in range(y_num):
for x in range(x_num):
i = x + y * x_num
if i < len(imgs):
y1 = y * tile_h
y2 = (y + 1) * tile_h
x1 = x * tile_w
x2 = (x + 1) * tile_w
dst[y1:y2, x1:x2] = imgs[i]
return dst
def tile(imgs, shape=None, dst=None, margin_color=None):
    """Tile images which have different size.

    Parameters
    ----------
    imgs:
        Image list which should be tiled.  WARNING: mutated in place -- each
        entry is replaced by its ubyte, centerized version.  At least one
        caller (visualize_container) builds masks from the mutated list
        afterwards, so this side effect is load-bearing; do not "fix" it
        without updating callers.
    shape:
        The tile shape.
    dst:
        Image to put the tile on.
    margin_color: numpy.ndarray
        Color to be filled in the blank.
    """
    if shape is None:
        shape = get_tile_shape(len(imgs))
    # get max tile size to which each image should be resized
    # (despite the name, this is the *minimum* height/width over all images).
    max_h, max_w = np.inf, np.inf
    for img in imgs:
        max_h = min(max_h, img.shape[0])
        max_w = min(max_w, img.shape[1])
    # tile images
    is_color = False
    for i, img in enumerate(imgs):
        if img.ndim >= 3:
            is_color = True
        # Once any color image has been seen, promote grayscale to RGB and
        # drop alpha channels so all tiles are 3-channel.
        if is_color and img.ndim == 2:
            img = skimage.color.gray2rgb(img)
        if is_color and img.shape[2] == 4:
            img = img[:, :, :3]
        # NOTE(review): skimage.util is used but only skimage.color/.io/
        # .transform are imported at module top -- confirm skimage.util is
        # available transitively, otherwise this raises AttributeError.
        img = skimage.util.img_as_ubyte(img)
        img = centerize(img, (max_h, max_w, 3), margin_color)
        imgs[i] = img
    return _tile(imgs, shape, dst)
def visualize_container(container_id, contents, container_file, orders=None,
                        alpha=0.6, font_scale=5.5, thickness=4):
    """Render a container image with its content objects tiled inside.

    Parameters
    ----------
    container_id: str
        Label drawn in the bottom-right corner of the result.
    contents: sequence of str
        Object names to tile inside the container image.
    container_file: str
        Path of the container's background image.
    orders: sequence of str, optional
        Object names to highlight with a red circle.
    alpha: float
        Background whitening factor for the container image.
    font_scale, thickness:
        cv2 text / circle drawing parameters.
    """
    if not isinstance(contents, collections.Sequence):
        raise TypeError('contents must be a sequence')
    if orders is not None and not isinstance(orders, collections.Sequence):
        raise TypeError('orders must be a sequence')
    # Whiten the container background: blend toward white by (1 - alpha).
    img_container = skimage.io.imread(container_file) / 255.
    img_container = (img_container * alpha) + (1. - alpha)
    img_container = (img_container * 255).astype(np.uint8)
    if contents:
        ratio_hw = 1. * img_container.shape[0] / img_container.shape[1]
        tile_shape = get_tile_shape(len(contents), ratio_hw)
        object_imgs = get_object_images()
        if orders is not None:
            # Mark ordered objects with a red circle.
            # NOTE(review): cv2.circle draws on the images returned by
            # get_object_images() in place -- confirm whether those are
            # shared/cached across calls.
            for obj_name, img_obj in object_imgs.items():
                if obj_name in orders:
                    center = img_obj.shape[1] // 2, img_obj.shape[0] // 2
                    radius = min(center)
                    cv2.circle(img_obj, center, radius, (255, 0, 0), thickness)
        imgs = [object_imgs[obj] for obj in contents]
        img_tiled = tile(imgs, shape=tile_shape)
        img_tiled = centerize(img_tiled, img_container.shape)
        # Masks are built from `imgs` AFTER tile() has mutated it, so each
        # mask matches the resized/centerized object image.
        masks = [np.ones(img.shape[:2], dtype=np.uint8) * 255 for img in imgs]
        mask_tiled = tile(masks, shape=tile_shape)
        mask_tiled = centerize(mask_tiled, img_container.shape[:2])
        # Crop both to the occupied region, then re-center so the tiled
        # block sits centered in the container.
        y1, x1, y2, x2 = mask_to_rect(mask_tiled)
        assert mask_tiled.shape == img_tiled.shape[:2]
        img_tiled = img_tiled[y1:y2, x1:x2]
        mask_tiled = mask_tiled[y1:y2, x1:x2]
        assert mask_tiled.shape == img_tiled.shape[:2]
        img_tiled = centerize(img_tiled, img_container.shape)
        mask_tiled = centerize(mask_tiled, img_container.shape[:2])
        mask_tiled = mask_tiled == 255
        # Treat pure-black tiled pixels as background.
        mask_tiled[np.all(img_tiled == 0, axis=2)] = False
        img_container[mask_tiled] = img_tiled[mask_tiled]
    # Draw the container id in green at the bottom-right corner.
    font_face = cv2.FONT_HERSHEY_PLAIN
    size, baseline = cv2.getTextSize(
        container_id, font_face, font_scale, thickness)
    cv2.putText(img_container, container_id,
                (img_container.shape[1] - size[0],
                 img_container.shape[0] - size[1] + baseline),
                font_face, font_scale, color=(0, 255, 0), thickness=thickness)
    return img_container
# Process-wide result cache shared by all @memoize-decorated functions,
# keyed by a SHA1 of (function bytecode, call identity).
memos = {}


def memoize(key=None):
    """Decorator factory caching results by a SHA1 of the call.

    The cache key hashes the function's bytecode together with either the
    pickled ``(args, kwargs)`` or, when *key* is given, the pickled value of
    ``key(*args, **kwargs)`` -- useful to key on e.g. file *contents* instead
    of a filename argument.  The cache is bounded to roughly 50 entries.
    """
    def _memoize(func):
        def func_wrapper(*args, **kwargs):
            # func.__code__ is the py2.6+/py3 spelling of py2's func_code.
            if key:
                contents = pickle.dumps(
                    {'func': func.__code__.co_code,
                     'contents': key(*args, **kwargs)})
            else:
                contents = pickle.dumps(
                    {'func': func.__code__.co_code,
                     'args': args, 'kwargs': kwargs})
            sha1 = hashlib.sha1(contents).hexdigest()
            if sha1 in memos:
                return memos[sha1]
            res = func(*args, **kwargs)
            # BUG FIX: the old code either evicted OR stored -- once the cache
            # exceeded 50 entries, fresh results were never cached again.
            # Evict one (arbitrary) entry to bound the size, then always store.
            if len(memos) > 50:
                memos.popitem()
            memos[sha1] = res
            return res
        return func_wrapper
    return _memoize
# Memoize on the JSON *contents* (not the filename) so edits to the file
# invalidate the cached rendering.
@memoize(key=lambda filename, order_file:
         (json.load(open(filename)), order_file))
def visualize_item_location(filename, order_file=None):
    """Render the item-location JSON as one image: tote + bins on top,
    boxes (when present) stacked underneath, scaled to the same width."""
    item_location = json.load(open(filename))
    # orders: objects to highlight, collected from the optional order file.
    orders = []
    if order_file:
        data_order = json.load(open(order_file))
        for order in data_order['orders']:
            orders.extend(order['contents'])
    imgs_top = []
    # tote
    tote = item_location['tote']
    img_container = visualize_container(
        'tote', tote['contents'], orders=orders,
        container_file=osp.join(PKG_DIR, 'data/objects/tote/top.jpg'),
        font_scale=11, thickness=8)
    imgs_top.append(img_container)
    # bin
    # NOTE(review): sorting a list of dicts relies on Python 2 arbitrary
    # dict ordering semantics (this module is py2: xrange/cPickle); the
    # comment claims C, B, A order -- confirm.
    for bin_ in sorted(item_location['bins'], reverse=True):  # C, B, A
        if not bin_['contents']:
            continue  # skip empty bin
        img_container = visualize_container(
            bin_['bin_id'], bin_['contents'], orders=orders,
            container_file=osp.join(PKG_DIR, 'data/objects/bin/top.jpg'),
            font_scale=9, thickness=7)
        imgs_top.append(img_container)
    # visualize: single row of tote + non-empty bins
    img_top = tile(imgs_top, shape=(1, len(imgs_top)))
    if not item_location['boxes']:
        return img_top
    # box row, resized so both rows share the same width before stacking
    imgs_box = []
    for box in item_location['boxes']:
        img_container = visualize_container(
            box['size_id'], box['contents'], orders=orders,
            container_file=osp.join(PKG_DIR, 'data/objects/box/top.jpg'))
        imgs_box.append(img_container)
    img_box = tile(imgs_box, shape=(1, len(imgs_box)))
    scale = 1. * img_box.shape[1] / img_top.shape[1]
    img_top = cv2.resize(img_top, None, None, fx=scale, fy=scale)
    img = np.vstack([img_top, img_box])
    return img
# Memoize on the JSON contents so file edits invalidate the cache.
@memoize(key=lambda filename: json.load(open(filename)))
def visualize_order(filename):
    """Render one row of box visualizations, one per order in *filename*."""
    with open(filename) as f:
        data = json.load(f)
    imgs = []
    for order in data['orders']:
        img = visualize_container(
            order['size_id'], order['contents'],
            container_file=osp.join(PKG_DIR, 'data/objects/box/top.jpg'))
        imgs.append(img)
    # BUG FIX: the row width was hard-coded to 3, breaking order files with
    # fewer/more than 3 orders; size by the actual count like
    # visualize_item_location does.
    img = tile(imgs, shape=(1, len(imgs)))
    return img
| {
"content_hash": "9846162e03ba824aad0d8ab6c5cb0461",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 79,
"avg_line_length": 31.39072847682119,
"alnum_prop": 0.564873417721519,
"repo_name": "pazeshun/jsk_apc",
"id": "6dfe4fd931854e6da5f589002f478ad690b80949",
"size": "9480",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "jsk_arc2017_common/python/jsk_arc2017_common/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "101871"
},
{
"name": "CMake",
"bytes": "42995"
},
{
"name": "Common Lisp",
"bytes": "695864"
},
{
"name": "Dockerfile",
"bytes": "1503"
},
{
"name": "HTML",
"bytes": "6364"
},
{
"name": "Python",
"bytes": "406153"
},
{
"name": "Shell",
"bytes": "4475"
}
],
"symlink_target": ""
} |
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """UI builder for the fluid-properties dialog.

    NOTE(review): this follows the pyuic5-generated setupUi/retranslateUi
    structure, but both methods take a non-standard extra ``traduccion``
    argument, so the file has been hand-edited -- regenerating from a .ui
    file would discard that change.
    """

    def setupUi(self, Dialog, traduccion):
        Dialog.setObjectName("Dialog")
        Dialog.resize(499, 428)
        self.gridLayout = QtWidgets.QGridLayout(Dialog)
        self.gridLayout.setObjectName("gridLayout")
        # Fluid list (left) and property list (right).
        self.listWidget = QtWidgets.QListWidget(Dialog)
        self.listWidget.setObjectName("listWidget")
        self.gridLayout.addWidget(self.listWidget, 2, 0, 1, 2)
        self.textEdit = QtWidgets.QTextEdit(Dialog)
        self.textEdit.setObjectName("textEdit")
        self.gridLayout.addWidget(self.textEdit, 6, 0, 1, 3)
        self.label = QtWidgets.QLabel(Dialog)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 2)
        self.label_4 = QtWidgets.QLabel(Dialog)
        self.label_4.setObjectName("label_4")
        self.gridLayout.addWidget(self.label_4, 3, 0, 1, 2)
        self.label_3 = QtWidgets.QLabel(Dialog)
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 1, 2, 1, 1)
        self.label_7 = QtWidgets.QLabel(Dialog)
        self.label_7.setObjectName("label_7")
        self.gridLayout.addWidget(self.label_7, 5, 0, 1, 1)
        self.listWidget_2 = QtWidgets.QListWidget(Dialog)
        self.listWidget_2.setObjectName("listWidget_2")
        self.gridLayout.addWidget(self.listWidget_2, 2, 2, 1, 1)
        self.label_5 = QtWidgets.QLabel(Dialog)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 3, 2, 1, 1)
        self.listWidget_3 = QtWidgets.QListWidget(Dialog)
        self.listWidget_3.setObjectName("listWidget_3")
        self.gridLayout.addWidget(self.listWidget_3, 4, 0, 1, 2)
        self.label_2 = QtWidgets.QLabel(Dialog)
        self.label_2.setObjectName("label_2")
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        # Second-argument list starts disabled.
        self.listWidget_4 = QtWidgets.QListWidget(Dialog)
        self.listWidget_4.setEnabled(False)
        self.listWidget_4.setObjectName("listWidget_4")
        self.gridLayout.addWidget(self.listWidget_4, 4, 2, 1, 1)
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.gridLayout.addWidget(self.buttonBox, 7, 2, 1, 1)
        self.retranslateUi(Dialog, traduccion)
        QtCore.QMetaObject.connectSlotsByName(Dialog)
        # Wire the standard OK/Cancel buttons to the dialog's accept/reject.
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)

    def retranslateUi(self, Dialog, traduccion):
        # ``traduccion`` is accepted here but not used by this method.
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Propiedades fluido"))
        self.label.setText(_translate("Dialog", "Uso de propiedades de fluidos"))
        self.label_4.setText(_translate("Dialog", "Primer argumento"))
        self.label_3.setText(_translate("Dialog", "Propiedad buscada"))
        self.label_7.setText(_translate("Dialog", "Salida"))
        self.label_5.setText(_translate("Dialog", "Segundo Argumento"))
        self.label_2.setText(_translate("Dialog", "Fluido:"))
| {
"content_hash": "73dd8c3fd6ba38a9c82c7b1c8d0dbe96",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 106,
"avg_line_length": 51.79032258064516,
"alnum_prop": 0.6754905014014326,
"repo_name": "jon85p/pyENL",
"id": "05fc8e2a2b04ac70f7751f1533f55eff76d84afc",
"size": "3406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GUI/props.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "374242"
}
],
"symlink_target": ""
} |
"""Makes sure that all files contain proper licensing information."""
import json
import optparse
import os.path
import subprocess
import sys
def PrintUsage():
    """Print command-line usage help (Python 2 print statement; this whole
    script is py2-only)."""
    print """Usage: python checklicenses.py [--root <root>] [tocheck]
--root Specifies the repository root. This defaults to "../.." relative
to the script file. This will be correct given the normal location
of the script in "<root>/tools/checklicenses".
--ignore-suppressions Ignores path-specific license whitelist. Useful when
trying to remove a suppression/whitelist entry.
tocheck Specifies the directory, relative to root, to check. This defaults
to "." so it checks everything.
Examples:
python checklicenses.py
python checklicenses.py --root ~/chromium/src third_party"""
# License strings (as emitted by licensecheck.pl) that are acceptable
# anywhere in the tree.
WHITELISTED_LICENSES = [
    'Anti-Grain Geometry',
    'Apache (v2.0)',
    'Apache (v2.0) BSD (2 clause)',
    'Apache (v2.0) GPL (v2)',
    'Apple MIT',  # https://fedoraproject.org/wiki/Licensing/Apple_MIT_License
    'APSL (v2)',
    'APSL (v2) BSD (4 clause)',
    'BSD',
    'BSD (2 clause)',
    'BSD (2 clause) ISC',
    'BSD (2 clause) MIT/X11 (BSD like)',
    'BSD (3 clause)',
    'BSD (3 clause) GPL (v2)',
    'BSD (3 clause) ISC',
    'BSD (3 clause) LGPL (v2 or later)',
    'BSD (3 clause) LGPL (v2.1 or later)',
    'BSD (3 clause) MIT/X11 (BSD like)',
    'BSD (4 clause)',
    'BSD-like',
    # TODO(phajdan.jr): Make licensecheck not print BSD-like twice.
    'BSD-like MIT/X11 (BSD like)',
    'BSL (v1.0)',
    'FreeType (BSD like)',
    'FreeType (BSD like) with patent clause',
    'GPL (v2) LGPL (v2.1 or later)',
    'GPL (v2 or later) with Bison parser exception',
    'GPL (v2 or later) with libtool exception',
    'GPL (v3 or later) with Bison parser exception',
    'GPL with Bison parser exception',
    'Independent JPEG Group License',
    'ISC',
    'LGPL (unversioned/unknown version)',
    'LGPL (v2)',
    'LGPL (v2 or later)',
    'LGPL (v2.1)',
    'LGPL (v2.1 or later)',
    'LGPL (v3 or later)',
    'MIT/X11 (BSD like)',
    'MIT/X11 (BSD like) LGPL (v2.1 or later)',
    'MPL (v1.0) LGPL (v2 or later)',
    'MPL (v1.1)',
    'MPL (v1.1) BSD (3 clause) GPL (v2) LGPL (v2.1 or later)',
    'MPL (v1.1) BSD (3 clause) LGPL (v2.1 or later)',
    'MPL (v1.1) BSD-like',
    'MPL (v1.1) BSD-like GPL (unversioned/unknown version)',
    'MPL (v1.1) BSD-like GPL (v2) LGPL (v2.1 or later)',
    'MPL (v1.1) GPL (v2)',
    'MPL (v1.1) GPL (v2) LGPL (v2 or later)',
    'MPL (v1.1) GPL (v2) LGPL (v2.1 or later)',
    'MPL (v1.1) GPL (unversioned/unknown version)',
    'MPL (v1.1) LGPL (v2 or later)',
    'MPL (v1.1) LGPL (v2.1 or later)',
    'MPL (v2.0)',
    'Ms-PL',
    'Public domain',
    'Public domain BSD',
    'Public domain BSD (3 clause)',
    'Public domain BSD-like',
    'Public domain LGPL (v2.1 or later)',
    'libpng',
    'zlib/libpng',
    'SGI Free Software License B',
    'University of Illinois/NCSA Open Source License (BSD like)',
    ('University of Illinois/NCSA Open Source License (BSD like) '
     'MIT/X11 (BSD like)'),
]


# Per-path suppressions: additional licenses (often 'UNKNOWN') tolerated only
# under the given path prefix, each ideally linked to a tracking bug.
PATH_SPECIFIC_WHITELISTED_LICENSES = {
    'base/third_party/icu': [  # http://crbug.com/98087
        'UNKNOWN',
    ],

    # http://code.google.com/p/google-breakpad/issues/detail?id=450
    'breakpad/src': [
        'UNKNOWN',
    ],
    'chrome/common/extensions/docs/examples': [  # http://crbug.com/98092
        'UNKNOWN',
    ],
    'courgette/third_party/bsdiff_create.cc': [  # http://crbug.com/98095
        'UNKNOWN',
    ],
    'native_client': [  # http://crbug.com/98099
        'UNKNOWN',
    ],
    'native_client/toolchain': [
        'BSD GPL (v2 or later)',
        'BSD (2 clause) GPL (v2 or later)',
        'BSD (3 clause) GPL (v2 or later)',
        'BSL (v1.0) GPL',
        'BSL (v1.0) GPL (v3.1)',
        'GPL',
        'GPL (unversioned/unknown version)',
        'GPL (v2)',
        'GPL (v2 or later)',
        'GPL (v3.1)',
        'GPL (v3 or later)',
    ],
    'third_party/WebKit': [
        'UNKNOWN',
    ],

    # http://code.google.com/p/angleproject/issues/detail?id=217
    'third_party/angle': [
        'UNKNOWN',
    ],

    # http://crbug.com/222828
    # http://bugs.python.org/issue17514
    'third_party/chromite/third_party/argparse.py': [
        'UNKNOWN',
    ],

    # http://crbug.com/326117
    # https://bitbucket.org/chrisatlee/poster/issue/21
    'third_party/chromite/third_party/poster': [
        'UNKNOWN',
    ],

    # http://crbug.com/333508
    'third_party/clang_format/script': [
        'UNKNOWN',
    ],

    # http://crbug.com/333508
    'buildtools/clang_format/script': [
        'UNKNOWN',
    ],
    'third_party/devscripts': [
        'GPL (v2 or later)',
    ],
    'third_party/expat/files/lib': [  # http://crbug.com/98121
        'UNKNOWN',
    ],
    'third_party/ffmpeg': [
        'GPL',
        'GPL (v2)',
        'GPL (v2 or later)',
        'UNKNOWN',  # http://crbug.com/98123
    ],
    'third_party/fontconfig': [
        # https://bugs.freedesktop.org/show_bug.cgi?id=73401
        'UNKNOWN',
    ],
    'third_party/freetype2': [  # http://crbug.com/177319
        'UNKNOWN',
    ],
    'third_party/hunspell': [  # http://crbug.com/98134
        'UNKNOWN',
    ],
    'third_party/iccjpeg': [  # http://crbug.com/98137
        'UNKNOWN',
    ],
    'third_party/icu': [  # http://crbug.com/98301
        'UNKNOWN',
    ],
    'third_party/lcov': [  # http://crbug.com/98304
        'UNKNOWN',
    ],
    'third_party/lcov/contrib/galaxy/genflat.pl': [
        'GPL (v2 or later)',
    ],
    'third_party/libc++/trunk/include/support/solaris': [
        # http://llvm.org/bugs/show_bug.cgi?id=18291
        'UNKNOWN',
    ],
    'third_party/libc++/trunk/src/support/solaris/xlocale.c': [
        # http://llvm.org/bugs/show_bug.cgi?id=18291
        'UNKNOWN',
    ],
    'third_party/libc++/trunk/test': [
        # http://llvm.org/bugs/show_bug.cgi?id=18291
        'UNKNOWN',
    ],
    'third_party/libevent': [  # http://crbug.com/98309
        'UNKNOWN',
    ],
    'third_party/libjingle/source/talk': [  # http://crbug.com/98310
        'UNKNOWN',
    ],
    'third_party/libjpeg_turbo': [  # http://crbug.com/98314
        'UNKNOWN',
    ],
    'third_party/libpng': [  # http://crbug.com/98318
        'UNKNOWN',
    ],

    # The following files lack license headers, but are trivial.
    'third_party/libusb/src/libusb/os/poll_posix.h': [
        'UNKNOWN',
    ],
    'third_party/libvpx/source': [  # http://crbug.com/98319
        'UNKNOWN',
    ],
    'third_party/libxml': [
        'UNKNOWN',
    ],
    'third_party/libxslt': [
        'UNKNOWN',
    ],
    'third_party/lzma_sdk': [
        'UNKNOWN',
    ],
    'third_party/mesa/src': [
        'GPL (v2)',
        'GPL (v3 or later)',
        'MIT/X11 (BSD like) GPL (v3 or later) with Bison parser exception',
        'UNKNOWN',  # http://crbug.com/98450
    ],
    'third_party/modp_b64': [
        'UNKNOWN',
    ],
    'third_party/openmax_dl/dl' : [
        'Khronos Group',
    ],
    'third_party/openssl': [  # http://crbug.com/98451
        'UNKNOWN',
    ],
    'third_party/boringssl': [
        # There are some files in BoringSSL which came from OpenSSL and have no
        # license in them. We don't wish to add the license header ourselves
        # thus we don't expect to pass license checks.
        'UNKNOWN',
    ],
    'third_party/ots/tools/ttf-checksum.py': [  # http://code.google.com/p/ots/issues/detail?id=2
        'UNKNOWN',
    ],
    'third_party/molokocacao': [  # http://crbug.com/98453
        'UNKNOWN',
    ],
    'third_party/npapi/npspy': [
        'UNKNOWN',
    ],
    'third_party/ocmock/OCMock': [  # http://crbug.com/98454
        'UNKNOWN',
    ],
    'third_party/ply/__init__.py': [
        'UNKNOWN',
    ],
    'third_party/protobuf': [  # http://crbug.com/98455
        'UNKNOWN',
    ],

    # http://crbug.com/222831
    # https://bitbucket.org/eliben/pyelftools/issue/12
    'third_party/pyelftools': [
        'UNKNOWN',
    ],
    'third_party/scons-2.0.1/engine/SCons': [  # http://crbug.com/98462
        'UNKNOWN',
    ],
    'third_party/simplejson': [
        'UNKNOWN',
    ],
    'third_party/skia': [  # http://crbug.com/98463
        'UNKNOWN',
    ],
    'third_party/snappy/src': [  # http://crbug.com/98464
        'UNKNOWN',
    ],
    'third_party/smhasher/src': [  # http://crbug.com/98465
        'UNKNOWN',
    ],
    'third_party/speech-dispatcher/libspeechd.h': [
        'GPL (v2 or later)',
    ],
    'third_party/sqlite': [
        'UNKNOWN',
    ],

    # http://crbug.com/334668
    # MIT license.
    'tools/swarming_client/third_party/httplib2': [
        'UNKNOWN',
    ],

    # http://crbug.com/334668
    # Apache v2.0.
    'tools/swarming_client/third_party/oauth2client': [
        'UNKNOWN',
    ],

    # https://github.com/kennethreitz/requests/issues/1610
    'tools/swarming_client/third_party/requests': [
        'UNKNOWN',
    ],
    'third_party/swig/Lib/linkruntime.c': [  # http://crbug.com/98585
        'UNKNOWN',
    ],
    'third_party/talloc': [
        'GPL (v3 or later)',
        'UNKNOWN',  # http://crbug.com/98588
    ],
    'third_party/tcmalloc': [
        'UNKNOWN',  # http://crbug.com/98589
    ],
    'third_party/tlslite': [
        'UNKNOWN',
    ],
    'third_party/webdriver': [  # http://crbug.com/98590
        'UNKNOWN',
    ],

    # https://github.com/html5lib/html5lib-python/issues/125
    # https://github.com/KhronosGroup/WebGL/issues/435
    'third_party/webgl/src': [
        'UNKNOWN',
    ],
    'third_party/webrtc': [  # http://crbug.com/98592
        'UNKNOWN',
    ],
    'third_party/xdg-utils': [  # http://crbug.com/98593
        'UNKNOWN',
    ],
    'third_party/yasm/source': [  # http://crbug.com/98594
        'UNKNOWN',
    ],
    'third_party/zlib/contrib/minizip': [
        'UNKNOWN',
    ],
    'third_party/zlib/trees.h': [
        'UNKNOWN',
    ],
    'tools/emacs': [  # http://crbug.com/98595
        'UNKNOWN',
    ],
    'tools/gyp/test': [
        'UNKNOWN',
    ],
    'tools/python/google/__init__.py': [
        'UNKNOWN',
    ],
    'tools/stats_viewer/Properties/AssemblyInfo.cs': [
        'UNKNOWN',
    ],
    'tools/symsrc/pefile.py': [
        'UNKNOWN',
    ],
    'tools/telemetry/third_party/pyserial': [
        # https://sourceforge.net/p/pyserial/feature-requests/35/
        'UNKNOWN',
    ],
    'v8/test/cctest': [  # http://crbug.com/98597
        'UNKNOWN',
    ],
    'v8/src/third_party/kernel/tools/perf/util/jitdump.h': [  # http://crbug.com/391716
        'UNKNOWN',
    ],
}
def check_licenses(options, args):
    """Run licensecheck.pl over the tree and report non-whitelisted licenses.

    Returns 0 on success and 1 on failure (bad usage, licensecheck error,
    or offending files found).  Python 2 print statements throughout.
    """
    # Figure out which directory we have to check.
    if len(args) == 0:
        # No directory to check specified, use the repository root.
        start_dir = options.base_directory
    elif len(args) == 1:
        # Directory specified. Start here. It's supposed to be relative to the
        # base directory.
        start_dir = os.path.abspath(os.path.join(options.base_directory, args[0]))
    else:
        # More than one argument, we don't handle this.
        PrintUsage()
        return 1
    print "Using base directory:", options.base_directory
    print "Checking:", start_dir
    print
    licensecheck_path = os.path.abspath(os.path.join(options.base_directory,
                                                     'third_party',
                                                     'devscripts',
                                                     'licensecheck.pl'))
    licensecheck = subprocess.Popen([licensecheck_path,
                                     '-l', '100',
                                     '-r', start_dir],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
    stdout, stderr = licensecheck.communicate()
    if options.verbose:
        print '----------- licensecheck stdout -----------'
        print stdout
        print '--------- end licensecheck stdout ---------'
    # Any stderr output at all is treated as a failure.
    if licensecheck.returncode != 0 or stderr:
        print '----------- licensecheck stderr -----------'
        print stderr
        print '--------- end licensecheck stderr ---------'
        print "\nFAILED\n"
        return 1
    used_suppressions = set()
    errors = []
    # licensecheck emits "<path>: <license>" lines.
    for line in stdout.splitlines():
        filename, license = line.split(':', 1)
        filename = os.path.relpath(filename.strip(), options.base_directory)
        # All files in the build output directory are generated one way or another.
        # There's no need to check them.
        if filename.startswith('out/'):
            continue
        # For now we're just interested in the license.
        license = license.replace('*No copyright*', '').strip()
        # Skip generated files.
        if 'GENERATED FILE' in license:
            continue
        if license in WHITELISTED_LICENSES:
            continue
        if not options.ignore_suppressions:
            matched_prefixes = [
                prefix for prefix in PATH_SPECIFIC_WHITELISTED_LICENSES
                if filename.startswith(prefix) and
                license in PATH_SPECIFIC_WHITELISTED_LICENSES[prefix]]
            if matched_prefixes:
                used_suppressions.update(set(matched_prefixes))
                continue
        errors.append({'filename': filename, 'license': license})
    if options.json:
        with open(options.json, 'w') as f:
            json.dump(errors, f)
    if errors:
        for error in errors:
            print "'%s' has non-whitelisted license '%s'" % (
                error['filename'], error['license'])
        print "\nFAILED\n"
        print "Please read",
        print "http://www.chromium.org/developers/adding-3rd-party-libraries"
        print "for more info how to handle the failure."
        print
        print "Please respect OWNERS of checklicenses.py. Changes violating"
        print "this requirement may be reverted."
        # Do not print unused suppressions so that above message is clearly
        # visible and gets proper attention. Too much unrelated output
        # would be distracting and make the important points easier to miss.
        return 1
    print "\nSUCCESS\n"
    # Only report stale suppressions on full-tree runs, where every prefix
    # had a chance to match.
    if not len(args):
        unused_suppressions = set(
            PATH_SPECIFIC_WHITELISTED_LICENSES.iterkeys()).difference(
                used_suppressions)
        if unused_suppressions:
            print "\nNOTE: unused suppressions detected:\n"
            print '\n'.join(unused_suppressions)
    return 0
def main():
    """Parse command-line options and run the license check.

    Returns the exit status produced by check_licenses().
    """
    # Default repository root: two directories above this script
    # (tools/checklicenses/ -> repository root).
    default_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    parser = optparse.OptionParser()
    parser.add_option('--root', default=default_root,
                      dest='base_directory',
                      help='Specifies the repository root. This defaults '
                           'to "../.." relative to the script file, which '
                           'will normally be the repository root.')
    parser.add_option('-v', '--verbose', action='store_true',
                      default=False, help='Print debug logging')
    parser.add_option('--ignore-suppressions',
                      action='store_true',
                      default=False,
                      help='Ignore path-specific license whitelist.')
    parser.add_option('--json', help='Path to JSON output file')
    options, args = parser.parse_args()
    return check_licenses(options, args)


if __name__ == '__main__':
    sys.exit(main())
| {
"content_hash": "1f3ee2e58d5b58a979847dabc625eabc",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 97,
"avg_line_length": 29.594594594594593,
"alnum_prop": 0.5660795825179387,
"repo_name": "chromium2014/src",
"id": "d90b8eda4cf1911a2200357171bcf4db97211d50",
"size": "15519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/checklicenses/checklicenses.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1889381"
},
{
"name": "Awk",
"bytes": "8660"
},
{
"name": "C",
"bytes": "39993418"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "220757674"
},
{
"name": "CSS",
"bytes": "973910"
},
{
"name": "Java",
"bytes": "6583410"
},
{
"name": "JavaScript",
"bytes": "20967999"
},
{
"name": "Mercury",
"bytes": "9480"
},
{
"name": "Objective-C",
"bytes": "943237"
},
{
"name": "Objective-C++",
"bytes": "7190130"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "674461"
},
{
"name": "Python",
"bytes": "10430892"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "1337040"
},
{
"name": "Standard ML",
"bytes": "3705"
},
{
"name": "Tcl",
"bytes": "277091"
},
{
"name": "XSLT",
"bytes": "13493"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
} |
from typing import Type
from pydantic import BaseModel
from pydantic import ValidationError
# Documentation example: pydantic's ``Type[Foo]`` field type accepts only
# the class Foo itself or one of its subclasses (class objects, not
# instances).
class Foo:
    pass
class Bar(Foo):
    # Subclass of Foo -- accepted by the Type[Foo] field below.
    pass
class Other:
    # Unrelated class -- rejected by the Type[Foo] field below.
    pass
class SimpleModel(BaseModel):
    # Validated to be Foo or any subclass of Foo.
    just_subclasses: Type[Foo]
SimpleModel(just_subclasses=Foo)
SimpleModel(just_subclasses=Bar)
try:
    SimpleModel(just_subclasses=Other)
except ValidationError as e:
    print(e)
| {
"content_hash": "0f002accb416b1a5a46c3bf80ec8b88e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 38,
"avg_line_length": 13.678571428571429,
"alnum_prop": 0.7467362924281984,
"repo_name": "samuelcolvin/pydantic",
"id": "933d7ee945985a7bd897ce014fbb2350d4a5c9ac",
"size": "383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/examples/types_type.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2905"
},
{
"name": "Python",
"bytes": "1140694"
},
{
"name": "Shell",
"bytes": "258"
}
],
"symlink_target": ""
} |
from invoke import Collection
from tasks import build, check, db
# Root `invoke` task namespace: exposes the build, check and db task
# modules as sub-collections (e.g. `invoke build.<task>`).
ns = Collection()
ns.add_collection(build)
ns.add_collection(check)
ns.add_collection(db)
| {
"content_hash": "e98bf617d776fee5774d69bab9fb4182",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 34,
"avg_line_length": 19.625,
"alnum_prop": 0.7770700636942676,
"repo_name": "xuhcc/airy",
"id": "90f8c9b5852336ddc14183680d8e9a4b28a08725",
"size": "157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5531"
},
{
"name": "HTML",
"bytes": "15388"
},
{
"name": "JavaScript",
"bytes": "98979"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "97742"
},
{
"name": "SaltStack",
"bytes": "1572"
}
],
"symlink_target": ""
} |
import numpy
import struct
import traceback
def header_creator():
    """Build the binary data-provider header.

    Layout: 8 native int32 values -- slot count, sequence flag, then a
    (type, dim) pair for each of the three slots (dense, sparse
    non-value, index).

    Returns the header as a byte string.
    """
    # b"" (not "") so the struct.pack() byte strings concatenate on
    # Python 3 as well; on Python 2, b"" and "" are the same type.
    ret = b""
    ret += struct.pack('i', 3)  # slot num
    ret += struct.pack('i', 1)  # sequence flag
    ret += struct.pack('i', 0)  # slot0 dense type
    ret += struct.pack('i', 3)  # slot0 dim
    ret += struct.pack('i', 1)  # slot1 sparse non value type
    ret += struct.pack('i', 7)  # slot1 dim
    ret += struct.pack('i', 3)  # slot2 index type
    ret += struct.pack('i', 2)  # slot2 dim
    return ret
def dense_value_creator(sample_num):
    """Serialize slot0 (dense): the sample count followed by a fixed
    (1.0, 2.0, 3.0) float vector for every sample.

    :param sample_num: number of samples to emit
    :returns: byte string
    """
    # b"" keeps this working on Python 3, where struct.pack returns bytes.
    ret = b""
    ret += struct.pack('i', sample_num)  # slot0 sample num
    for _ in range(sample_num):  # slot0 value
        ret += struct.pack('f', 1.0)
        ret += struct.pack('f', 2.0)
        ret += struct.pack('f', 3.0)
    return ret
def sparse_value_creator(sample_num):
    """Serialize slot1 (sparse non-value): the sample count, one index
    (i*2) per sample, the total value count, then the values (1, 2) for
    every sample.

    :param sample_num: number of samples to emit
    :returns: byte string
    """
    # b"" keeps this working on Python 3, where struct.pack returns bytes.
    ret = b""
    ret += struct.pack('i', sample_num)  # slot1 sample num
    for i in range(sample_num):  # slot1 index
        ret += struct.pack('i', i * 2)
    ret += struct.pack('i', sample_num * 2)  # slot1 length
    for _ in range(sample_num):  # slot1 value
        ret += struct.pack('i', 1)
        ret += struct.pack('i', 2)
    return ret
def index_value_creator(sample_num):
    """Serialize slot2 (index): the sample count followed by a zero
    label for every sample.

    :param sample_num: number of samples to emit
    :returns: byte string
    """
    # b"" keeps this working on Python 3, where struct.pack returns bytes.
    ret = b""
    ret += struct.pack('i', sample_num)  # slot2 sample num
    for _ in range(sample_num):  # slot2 value
        ret += struct.pack('i', 0)
    return ret
def sequenceStartPositions_creator():
    """Serialize the fixed sequence start positions: for each of the
    three slots, a sequence count followed by that many start offsets.

    :returns: byte string of 8 native int32 values
    """
    # b"" keeps this working on Python 3, where struct.pack returns bytes.
    ret = b""
    ret += struct.pack('i', 2)  # slot0 sequence num
    ret += struct.pack('i', 0)  # slot0 sequence value1
    ret += struct.pack('i', 1)  # slot0 sequence value2
    ret += struct.pack('i', 1)  # slot1 sequence num
    ret += struct.pack('i', 0)  # slot1 sequence value1
    ret += struct.pack('i', 2)  # slot2 sequence num
    ret += struct.pack('i', 0)  # slot2 sequence value1
    ret += struct.pack('i', 1)  # slot2 sequence value2
    return ret
def subSequenceStartPositions_creator():
    """Serialize the fixed sub-sequence start positions: for each of the
    three slots, a sub-sequence count followed by that many start
    offsets.

    :returns: byte string of 11 native int32 values
    """
    # b"" keeps this working on Python 3, where struct.pack returns bytes.
    ret = b""
    ret += struct.pack('i', 3)  # slot0 subsequence num
    ret += struct.pack('i', 0)  # slot0 subsequence value1
    ret += struct.pack('i', 1)  # slot0 subsequence value2
    ret += struct.pack('i', 2)  # slot0 subsequence value3
    ret += struct.pack('i', 2)  # slot1 subsequence num
    ret += struct.pack('i', 0)  # slot1 subsequence value1
    ret += struct.pack('i', 1)  # slot1 subsequence value2
    ret += struct.pack('i', 3)  # slot2 subsequence num
    ret += struct.pack('i', 0)  # slot2 subsequence value1
    ret += struct.pack('i', 1)  # slot2 subsequence value2
    ret += struct.pack('i', 2)  # slot2 subsequence value3
    return ret
class SimpleDataProvider:
    """Minimal in-memory data provider emitting a fixed two-sample batch
    with sequence start positions (no sub-sequences)."""

    def __init__(self, *file_list):
        # File names are recorded but never read: all data is synthetic.
        self.file_list = file_list

    def shuffle(self):
        # No-op: the synthetic data has nothing to shuffle.
        pass

    def reset(self):
        # No-op: there is no read position to reset.
        pass

    def getHeader(self):
        """Return the binary slot header."""
        return header_creator()

    def getNextBatch(self, batch_size):
        """Return one fixed batch of 2 samples; batch_size is ignored."""
        # b"" so the struct.pack() byte strings concatenate on Python 3 too.
        ret = b""
        ret += struct.pack('i', 2)  # batch size
        ret += dense_value_creator(2)  # slot0
        ret += sparse_value_creator(2)  # slot1
        ret += index_value_creator(2)  # slot2
        ret += sequenceStartPositions_creator()
        return ret
class SimpleNestDataProvider:
    """Minimal in-memory data provider emitting a fixed batch with both
    sequence and sub-sequence start positions (nested sequences)."""

    def __init__(self, *file_list):
        # File names are recorded but never read: all data is synthetic.
        self.file_list = file_list

    def shuffle(self):
        # No-op: the synthetic data has nothing to shuffle.
        pass

    def reset(self):
        # No-op: there is no read position to reset.
        pass

    def getHeader(self):
        """Return the binary slot header."""
        return header_creator()

    def getNextBatch(self, batch_size):
        """Return one fixed nested batch of 4 samples; batch_size is ignored."""
        # b"" so the struct.pack() byte strings concatenate on Python 3 too.
        ret = b""
        ret += struct.pack('i', 2)  # batch size
        ret += dense_value_creator(4)  # slot0
        ret += sparse_value_creator(4)  # slot1
        ret += index_value_creator(4)  # slot2
        ret += sequenceStartPositions_creator()
        ret += subSequenceStartPositions_creator()
        return ret
if __name__ == "__main__":
    # Smoke test: print the byte length of the header and of one batch
    # for both provider flavours.  print() calls (instead of Python 2
    # print statements) work on both Python 2 and Python 3.
    data_provider = SimpleDataProvider('./test_batch')
    print(len(data_provider.getHeader()))
    print(len(data_provider.getNextBatch(2)))
    data_provider = SimpleNestDataProvider('./test_batch')
    print(len(data_provider.getHeader()))
    print(len(data_provider.getNextBatch(2)))
| {
"content_hash": "31b795f45ace510c50648fb43a0fc74d",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 61,
"avg_line_length": 30.541353383458645,
"alnum_prop": 0.5923190546528804,
"repo_name": "putcn/Paddle",
"id": "85ea90d6eec25eb709b19d06a18c7a955078be04",
"size": "4664",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "paddle/gserver/tests/pyDataProvider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "272910"
},
{
"name": "C++",
"bytes": "7598375"
},
{
"name": "CMake",
"bytes": "269313"
},
{
"name": "Cuda",
"bytes": "1078779"
},
{
"name": "Go",
"bytes": "109501"
},
{
"name": "Perl",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "3637137"
},
{
"name": "Shell",
"bytes": "157071"
}
],
"symlink_target": ""
} |
from evdev import InputDevice, ecodes, list_devices
import sys
import errno
import logging
import pulsectl
from logging import DEBUG
# evdev event value reported for a key-down event.
KEY_PRESS = 1
# Relative-axis event value that triggers a volume increase.
INCREASE = 1
# Volume step applied per rotation event (fraction of full volume).
INCREASE_AMOUNT = 0.02
DECREASE_AMOUNT = -0.02
# Module logger: DEBUG level, all output to stdout.
log = logging.getLogger('powermated')
log.setLevel(DEBUG)
log.addHandler(logging.StreamHandler(sys.stdout))
def find_device():
    """
    Search the evdev input devices for the Griffin Powermate interface.
    :return: a one-element list with the matching device path, or an
             empty list when no Powermate is connected.
    """
    log.info('Searching for Griffin Powermate input device')
    for candidate in (InputDevice(path) for path in list_devices()):
        if 'PowerMate' in candidate.name:
            log.info('Device found: %s (%s)', candidate.name, candidate.phys)
            return [candidate.fn]
    return []
def listen_on(device):
    """
    Monitor the given device and modify the sound volume: knob presses
    toggle mute on every PulseAudio sink, rotations raise or lower the
    volume of every sink.
    :param device: the name of the device to read
    """
    log.info('Listening on %s', device)
    try:
        with pulsectl.Pulse('Griffin Powermate') as pulse:
            for event in InputDevice(device).read_loop():
                # ignore synchronization events
                if event.type == ecodes.EV_SYN:
                    continue
                # event action: toggle mute
                if event.type == ecodes.EV_KEY:
                    if event.value == KEY_PRESS:
                        for sink in pulse.sink_list():
                            if sink.mute:
                                log.debug('Received %s, unmuting sink %s', event, sink.description)
                                pulse.mute(sink, False)
                            else:
                                log.debug('Received %s, muting sink %s', event, sink.description)
                                pulse.mute(sink, True)
                # event action: volume
                elif event.type == ecodes.EV_REL:
                    if event.value == INCREASE:
                        log.debug('Received %s: increasing volume', event)
                        for sink in pulse.sink_list():
                            # Skip sinks already at full volume.
                            if sink.volume.value_flat == 1.0:
                                log.debug('Skipping volume increasing for sink %s', sink)
                            else:
                                log.debug('Adjusting sink %s', sink)
                                pulse.volume_change_all_chans(sink, INCREASE_AMOUNT)
                    else:
                        log.debug('Received %s: decreasing volume', event)
                        for sink in pulse.sink_list():
                            log.debug('Adjusting sink %s', sink)
                            pulse.volume_change_all_chans(sink, DECREASE_AMOUNT)
                else:
                    log.debug('Ignoring unknown event type %s', event.type)
    except IOError as e:
        if e.errno == errno.ENODEV:
            log.debug('Device unplugged')
        else:
            # BUG FIX: IOError has no .message attribute on Python 3 (it was
            # already deprecated on Python 2.6+); log the exception itself.
            log.error(str(e))
            # Bare raise preserves the original traceback (raise e resets it).
            raise
    except KeyboardInterrupt:
        log.debug('Terminating')
def run(device):
    """Listen on the given device, or auto-detect one when device is None.

    Exits with status 1 when auto-detection finds no device or more than
    one device.
    """
    if device is not None:
        listen_on(device)
        return
    found = find_device()
    if not found:
        log.error("Couldn't find device, try: powermated <device>")
        sys.exit(1)
    if len(found) > 1:
        log.error("Multiple devices found, try: powermated <device>")
        sys.exit(1)
    listen_on(found[0])
def main():
    """Entry point: the optional first CLI argument names the device."""
    device = sys.argv[1] if len(sys.argv) > 1 else None
    run(device)


if __name__ == "__main__":
    main()
| {
"content_hash": "90bc8335f5d635098d5b7e4da6250f74",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 99,
"avg_line_length": 31.25,
"alnum_prop": 0.5249655172413793,
"repo_name": "xstefanox/powermated",
"id": "934168409489f1435d0f0bdcb3678a79d9b67056",
"size": "3625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "powermated/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2042"
}
],
"symlink_target": ""
} |
import abc
import string
from keystone import exception
# The characters used to generate verifiers are limited to alphanumerical
# values for ease of manual entry. Commonly confused characters are omitted.
VERIFIER_CHARS = string.ascii_letters + string.digits
CONFUSED_CHARS = 'jiIl1oO0'
# Drop the easily-confused characters from the verifier alphabet.
VERIFIER_CHARS = ''.join(c for c in VERIFIER_CHARS if c not in CONFUSED_CHARS)
def filter_token(access_token_ref):
    """Filter out private items in an access token dict.

    'access_secret' is never returned.  The input dict is not modified;
    a falsy input (e.g. None) is returned unchanged.

    :returns: access_token_ref
    """
    if not access_token_ref:
        return access_token_ref
    filtered = dict(access_token_ref)
    filtered.pop('access_secret', None)
    return filtered
def filter_consumer(consumer_ref):
    """Filter out private items in a consumer dict.

    'secret' is never returned.  The input dict is not modified; a falsy
    input (e.g. None) is returned unchanged.

    :returns: consumer_ref
    """
    if not consumer_ref:
        return consumer_ref
    filtered = dict(consumer_ref)
    filtered.pop('secret', None)
    return filtered
class Oauth1DriverBase(object, metaclass=abc.ABCMeta):
    """Interface description for an OAuth1 driver.

    Every method raises NotImplemented; concrete backends must override
    all abstract methods.
    """
    @abc.abstractmethod
    def create_consumer(self, consumer_ref):
        """Create consumer.
        :param consumer_ref: consumer ref with consumer name
        :type consumer_ref: dict
        :returns: consumer_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def update_consumer(self, consumer_id, consumer_ref):
        """Update consumer.
        :param consumer_id: id of consumer to update
        :type consumer_id: string
        :param consumer_ref: new consumer ref with consumer name
        :type consumer_ref: dict
        :returns: consumer_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def list_consumers(self):
        """List consumers.
        :returns: list of consumers
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def get_consumer(self, consumer_id):
        """Get consumer, returns the consumer id (key) and description.
        :param consumer_id: id of consumer to get
        :type consumer_id: string
        :returns: consumer_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def get_consumer_with_secret(self, consumer_id):
        """Like get_consumer(), but also returns consumer secret.
        Returned dictionary consumer_ref includes consumer secret.
        Secrets should only be shared upon consumer creation; the
        consumer secret is required to verify incoming OAuth requests.
        :param consumer_id: id of consumer to get
        :type consumer_id: string
        :returns: consumer_ref containing consumer secret
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def delete_consumer(self, consumer_id):
        """Delete consumer.
        :param consumer_id: id of consumer to delete
        :type consumer_id: string
        :returns: None.
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def list_access_tokens(self, user_id):
        """List access tokens.
        :param user_id: search for access tokens authorized by given user id
        :type user_id: string
        :returns: list of access tokens the user has authorized
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def delete_access_token(self, user_id, access_token_id):
        """Delete access token.
        :param user_id: authorizing user id
        :type user_id: string
        :param access_token_id: access token to delete
        :type access_token_id: string
        :returns: None
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def create_request_token(self, consumer_id, requested_project,
                             request_token_duration):
        """Create request token.
        :param consumer_id: the id of the consumer
        :type consumer_id: string
        :param requested_project: requested project id
        :type requested_project: string
        :param request_token_duration: duration of request token
        :type request_token_duration: string
        :returns: request_token_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def get_request_token(self, request_token_id):
        """Get request token.
        :param request_token_id: the id of the request token
        :type request_token_id: string
        :returns: request_token_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def get_access_token(self, access_token_id):
        """Get access token.
        :param access_token_id: the id of the access token
        :type access_token_id: string
        :returns: access_token_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def authorize_request_token(self, request_token_id, user_id, role_ids):
        """Authorize request token.
        :param request_token_id: the id of the request token, to be authorized
        :type request_token_id: string
        :param user_id: the id of the authorizing user
        :type user_id: string
        :param role_ids: list of role ids to authorize
        :type role_ids: list
        :returns: verifier
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def create_access_token(self, request_id, access_token_duration):
        """Create access token.
        :param request_id: the id of the request token, to be deleted
        :type request_id: string
        :param access_token_duration: duration of an access token
        :type access_token_duration: string
        :returns: access_token_ref
        """
        raise exception.NotImplemented()  # pragma: no cover
| {
"content_hash": "6b3b471acb32cc837faaa5542d259296",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 78,
"avg_line_length": 29.980295566502463,
"alnum_prop": 0.64196516595465,
"repo_name": "openstack/keystone",
"id": "f8142dd112e975ca3a1e13f6e754a9e85659a865",
"size": "6672",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keystone/oauth1/backends/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Mako",
"bytes": "976"
},
{
"name": "Python",
"bytes": "6213900"
},
{
"name": "Shell",
"bytes": "30491"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import codecs
import logging
import sys
import itertools
import re
from subprocess import Popen, PIPE
import config.seedev_types
from classification.rext.kernelmodels import ReModel
from classification.results import ResultsRE
from config import config
from text.pair import Pairs
class MirtexClassifier(ReModel):
    """Rule-based miRNA-gene relation extractor (Python 2).

    Loads the miRTex trigger-word list and Tregex patterns at
    construction time, then matches the patterns against sentence parse
    trees by shelling out to the Stanford Tregex tool.
    """
    def __init__(self, corpus, ptype):
        """
        Rule based classifier
        rules: List of rules to use
        """
        self.ptype = ptype
        self.corpus = corpus
        self.pairs = {}
        self.pids = {}  # pair id -> (mirna entity, gene entity), filled by test()
        self.trigger_words = {}  # trigger word -> set of allowed POS tags
        self.tregexes_agent = set()  # Tregex patterns for the agent (miRNA) side
        self.tregexes_theme = set()  # Tregex patterns for the theme (gene) side
        # trigger_words.csv: semicolon-separated "word;POS" rows.
        with open("corpora/miRTex/trigger_words.csv", 'r') as tfile:
            for l in tfile:
                csv = l.split(";")
                if csv[0].strip() != "":
                    tword = csv[0].strip()
                    pos = csv[1].strip()
                    if tword not in self.trigger_words:
                        self.trigger_words[tword] = set()
                    self.trigger_words[tword].add(pos)
        # print self.trigger_words
        # tregexes.csv groups patterns under "agent"/"theme" markers in the
        # first column; a marker applies until the next marker appears.
        tregex_type = "agent"
        with open("corpora/miRTex/tregexes.csv", 'r') as tfile:
            for l in tfile:
                csv = l.split(";")
                if csv[0].startswith("agent"):
                    tregex_type = "agent"
                elif csv[0].startswith("theme"):
                    tregex_type = "theme"
                if tregex_type == "agent":
                    self.tregexes_agent.add(csv[1].strip())
                elif tregex_type == "theme":
                    self.tregexes_theme.add(csv[1].strip())
        #print len(self.tregexes_agent), len(self.tregexes_theme)
        #print self.tregexes_agent, self.tregexes_theme
    def load_classifier(self):
        # Nothing to load: this classifier is purely rule based.
        pass
    def test(self):
        """Run the agent/theme Tregex rules over every sentence that has
        both miRNA and protein gold-standard entities, storing the
        matched (miRNA, gene) entity pairs in self.pids."""
        for sentence in self.corpus.get_sentences("goldstandard_mirna"): # get only sentences with miRNAs
            if "goldstandard_protein" in sentence.entities.elist: # and proteins
                #print sentence.parsetree.replace("\n", "").replace(" ", "")
                # print sentence.sid, sentence.text
                # check if the same mirna and trigger appear multiple times in the same sentence
                sentence_mirnas = {}
                for e in sentence.entities.elist["goldstandard_mirna"]:
                    if e.text in sentence_mirnas:
                        logging.info("repeated mirna ({}): {}".format(e.text, sentence.text))
                    sentence_mirnas[e.text] = e
                # NOTE(review): this dict comprehension is immediately
                # overwritten by the loop below -- dead assignment.
                sentence_genes = {e.text: e for e in sentence.entities.elist["goldstandard_protein"]}
                sentence_genes = {}
                for e in sentence.entities.elist["goldstandard_protein"]:
                    if e.text in sentence_genes:
                        logging.info("repeated gene ({}): {}".format(e.text, sentence.text))
                    sentence_genes[e.text] = e
                #write parse tree to file
                with open("temp/tregex_sentence.txt", 'w') as tfile: # write sentence parse to file
                    tfile.write(sentence.parsetree.replace("\n", "").replace(" ", ""))
                    #tfile.write(sentence.bio_parse)
                mirna_to_triggers = {} #mirna-> target for this sentence, assuming each mirna has only 1 mention
                # test each regex for agent (mirna)
                for tr in self.tregexes_agent:
                    tregexcall = Popen(["./bin/stanford-tregex-2015-12-09/tregex.sh", "-h", "tr", "-h", "arg", "-t", tr,
                                        "temp/tregex_sentence.txt"],
                                       stdout=PIPE, stderr=PIPE)
                    res = tregexcall.communicate()
                    if res[0] != "":
                        # print tr, "agent:", res[0]
                        for r in res[0].split("\n"): # each match
                            if r.strip() != "":
                                # Tregex output lines are word/POS tokens.
                                words = [w.split("/")[0] for w in r.split()] #just words, without POS
                                pos = [w.split("/")[1] for w in r.split()] # just POS
                                #assumption: each mirna and trigger appear only once in the sentence
                                mirna_agent = set(words) & set(sentence_mirnas.keys()) # mirnas found
                                mirna_trigger = set(words) & set(self.trigger_words.keys()) # triggers found
                                if mirna_agent and mirna_trigger:
                                    for trigger in mirna_trigger:
                                        trigger_i = words.index(trigger)
                                        # NOTE(review): the POS filter only warns here
                                        # (the `continue` is commented out), so the
                                        # match is still recorded.
                                        if pos[trigger_i] not in self.trigger_words[trigger]:
                                            print "skipped because POS did not match:", r
                                            # continue
                                        if trigger not in mirna_to_triggers:
                                            mirna_to_triggers[trigger] = set()
                                        for m in mirna_agent: #
                                            mirna_to_triggers[trigger].add(m)
                                else: # no mirna-trigger
                                    continue
                    else: # no result
                        continue
                if not mirna_to_triggers:
                    continue
                # print mirna_targets
                # Theme side: a gene matched with a trigger already seen on
                # the agent side completes a miRNA-gene pair.
                for tr in self.tregexes_theme:
                    tregexcall = Popen(["./bin/stanford-tregex-2015-12-09/tregex.sh", "-h", "tr", "-h", "arg", "-t", tr,
                                        "temp/tregex_sentence.txt"],
                                       stdout=PIPE, stderr=PIPE)
                    res = tregexcall.communicate()
                    if res[0] != "":
                        # print tr, "theme:", res[0]
                        for r in res[0].split("\n"):
                            if r.strip() != "":
                                words = [w.split("/")[0] for w in r.split()]
                                pos = [w.split("/")[1] for w in r.split()] # just POS
                                gene_agent = set(words) & set(sentence_genes.keys())
                                gene_trigger = set(words) & set(self.trigger_words.keys())
                                if gene_agent and gene_trigger:
                                    for trigger in gene_trigger:
                                        # print "target:", target
                                        trigger_i = words.index(trigger)
                                        if pos[trigger_i] not in self.trigger_words[trigger]:
                                            print "skipped because POS did not match:", r
                                            continue
                                        if trigger in mirna_to_triggers:
                                            for gene in gene_agent:
                                                print "+".join(mirna_to_triggers[trigger]), trigger, gene
                                                for mirna in mirna_to_triggers[trigger]:
                                                    self.pids["p{}".format(len(self.pids))] = (sentence_mirnas[mirna],
                                                                                               sentence_genes[gene])
                                                print sentence.text
                                                print
                                # print gene_agent, gene_target
                        #sys.exit()
                # print
    def get_predictions(self, corpus):
        """Convert the entity pairs collected by test() into a ResultsRE
        object, registering each pair on its document."""
        results = ResultsRE("")
        # print len(self.pids)
        for p, pid in enumerate(self.pids):
            did = self.pids[pid][0].did
            if did not in results.document_pairs:
                results.document_pairs[did] = Pairs()
            pair = corpus.documents[did].add_relation(self.pids[pid][0], self.pids[pid][1], self.ptype, relation=True)
            # print pair, pair[0], pair[1]
            #pair = self.get_pair(pid, corpus)
            results.document_pairs[did].add_pair(pair, "mirtex_rules")
            results.pairs[pid] = pair
            pair.recognized_by["mirtex_rules"] = 1
            logging.info("{0.eid}:{0.text} => {1.eid}:{1.text}".format(pair.entities[0],pair.entities[1]))
            #logging.info("{} - {} SST: {}".format(pair.entities[0], pair.entities[0], score))
        results.corpus = corpus
        return results
| {
"content_hash": "9586e53b4ab968a009146b077f6469e9",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 120,
"avg_line_length": 51.93413173652694,
"alnum_prop": 0.4537069064914101,
"repo_name": "AndreLamurias/IBEnt",
"id": "214efe1a24f9260565d6fbe9f2149ff8540e02a3",
"size": "8673",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/classification/rext/mirtex_rules.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "728197"
},
{
"name": "Shell",
"bytes": "105438"
}
],
"symlink_target": ""
} |
# Intentionally empty module -- presumably a package __init__ placeholder
# (the `pass` is redundant but harmless).
pass
| {
"content_hash": "deb150aa1931ce4035f97e90db88765b",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 4,
"avg_line_length": 5,
"alnum_prop": 0.8,
"repo_name": "francesconistri/p2ptv-pi",
"id": "e1214d359ccf5a6af86cda0c7a7d6aa39656b515",
"size": "65",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "acestream/ACEStream/TrackerChecking/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "129357"
},
{
"name": "HTML",
"bytes": "18485"
},
{
"name": "JavaScript",
"bytes": "4251422"
},
{
"name": "Makefile",
"bytes": "4250"
},
{
"name": "Python",
"bytes": "2617690"
}
],
"symlink_target": ""
} |
# Offsets of each bin level, finest to coarsest, for the standard
# (<= 512 Mb) and extended (< 2 Gb) UCSC binning schemes.
binOffsets = [512 + 64 + 8 + 1, 64 + 8 + 1, 8 + 1, 1, 0]
binOffsetsExtended = [4096 + 512 + 64 + 8 + 1, 512 + 64 + 8 + 1, 64 + 8 + 1, 8 + 1, 1, 0]
_binFirstShift = 17     # How much to shift to get to finest bin.
_binNextShift = 3       # How much to shift to get to next larger bin.
_binOffsetOldToExtended = 4681  # From binRange.h
def __bin_from_range_standard(start, end):
    """
    Given start,end in chromosome coordinates, assign it a bin.
    There's a bin for each 128k segment, for each 1M segment, for each 8M
    segment, for each 64M segment, and for each chromosome (assumed to be
    less than 512M).  A range goes into the smallest bin it will fit in.
    """
    lo = start >> _binFirstShift
    hi = (end - 1) >> _binFirstShift
    for offset in binOffsets:
        # When both endpoints land in the same bin at this level, done.
        if lo == hi:
            return offset + lo
        lo >>= _binNextShift
        hi >>= _binNextShift
    raise ValueError("start {}, end {} out of range in findBin (max is 512M)".format(start, end))
# Add one new level to get coverage past chrom sizes of 512 Mb.
# Effective limit is the size of an integer since chrom start and
# end coordinates are always int's == 2Gb-1.
def __bin_from_range_extended(start, end):
    """
    Given start,end in chromosome coordinates, assign it a bin.
    Same scheme as the standard variant plus a 512M level and one top
    level bin for 4Gb; since coordinates are int's the practical limit
    is 2Gb-1.  A range goes into the smallest bin it will fit in.
    """
    lo = start >> _binFirstShift
    hi = (end - 1) >> _binFirstShift
    for offset in binOffsetsExtended:
        if lo == hi:
            # Extended bins are shifted past the standard bin numbers.
            return _binOffsetOldToExtended + offset + lo
        lo >>= _binNextShift
        hi >>= _binNextShift
    raise ValueError("start {}, end {} out of range in findBin (max is 2Gb)".format(start, end))
def bin_from_range(start, end):
    """Return the UCSC bin containing [start, end).

    The standard scheme is used while chromEnd fits in 2^29 = 536,870,912;
    the extended scheme covers ends up to 2^31 - 1.
    """
    if end <= 2**29:
        return __bin_from_range_standard(start, end)
    return __bin_from_range_extended(start, end)
| {
"content_hash": "30d25b3817f3a4390e63cd2aec3199c0",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 104,
"avg_line_length": 38.77272727272727,
"alnum_prop": 0.6584603360687769,
"repo_name": "ramseylab/cerenkov",
"id": "fd2b88d95b4b384b097101a23f2da893b971540c",
"size": "2881",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ground_truth/osu17/genome_browser_tool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "261056"
},
{
"name": "R",
"bytes": "229621"
},
{
"name": "Shell",
"bytes": "1482"
}
],
"symlink_target": ""
} |
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest
class AddIncident(unittest.TestCase):
    """Selenium UI test: log in to the StopIt pre-production site, start
    creating an incident, cancel it, and log out."""
    def setUp(self):
        # Chrome driver path is hard-coded for a Windows test machine.
        self.wd = webdriver.Chrome("C:\\tests\\JARs\\chromedriver_win32\\chromedriver.exe")
        #self.wd=WebDriver.Firefox()
        self.wd.maximize_window()
        self.wd.implicitly_wait(30)
        self.verificationErrors = []
        self.accept_next_alert = True
    def test_add_incident(self):
        """Exercise the add-incident dialog and cancel out of it."""
        wd = self.wd
        wd.get("https://docit-preprod.stopit.fm/")
        # Log in with the fixed test account.
        wd.find_element_by_id("password").clear()
        wd.find_element_by_id("password").send_keys("Stopit1234")
        wd.find_element_by_id("email").clear()
        wd.find_element_by_id("email").send_keys("tj2@stopit.fm")
        wd.find_element_by_id("loginButton").click()
        # Open the add-incident dialog and type some notes.
        wd.find_element_by_css_selector("div.icon.add_incident").click()
        wd.find_element_by_id("notes").clear()
        wd.find_element_by_id("notes").send_keys("123")
        wd.find_element_by_name("notes").clear()
        wd.find_element_by_name("notes").send_keys("123")
        # Cancel without saving, go home, and log out via the menu.
        wd.find_element_by_id("cancelNewIncident").click()
        wd.find_element_by_css_selector("div.icon.home").click()
        wd.find_element_by_css_selector("div.down_arrow").click()
        wd.find_element_by_xpath("//li[@onclick=\"javascript:location.href='/logout/';\"]").click()
    def tearDown(self):
        self.wd.quit()
        self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "81647253b91140e8d33cd1504ef17878",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 99,
"avg_line_length": 41.69230769230769,
"alnum_prop": 0.6439114391143912,
"repo_name": "bzelmanov/My_Python",
"id": "655dc95ca84ccf69eb84423926c350a9b91b069d",
"size": "1650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "add_incident.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1707"
}
],
"symlink_target": ""
} |
'''
Copyright (c) 2008 Georgios Giannoudovardis, <vardis.g@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import logging
import StringIO
from ConfigParser import *
from pano.constants import PanoConstants
from pano.errors.ParseException import ParseException
class FontParser:
    """Parses ``.font`` resource files: INI-style files with a single
    ``[font]`` section whose options have the form
    ``locale_<language> = <font resource>``.
    """
    def __init__(self):
        self.log = logging.getLogger('pano.fontParser')
    def parse(self, font, fileContents):
        """Populate *font* with the localizations declared in *fileContents*.

        :param font: font resource object to fill in
        :param fileContents: raw text of the ``.font`` file
        :returns: the populated *font*
        :raises ParseException: on malformed content or I/O failure
        """
        try:
            cfg = SafeConfigParser()
            strFp = StringIO.StringIO(fileContents)
            cfg.readfp(strFp)
            options = cfg.options('font')
            for opt in options:
                val = cfg.get('font', opt)
                # Every option must be 'locale_' plus a language code of
                # at least two characters (len('locale_') + 2 == 9).
                if not(opt.startswith('locale_') and len(opt) >= 9):
                    # BUG FIX: resFile used `font + '.font'`, but font is an
                    # object (it has addLocalization/getName), so use
                    # getName() as the two handlers below already do.
                    raise ParseException(error='error.parse.invalidOption', resFile=font.getName() + '.font', args=(opt))
                language = opt[7:]
                font.addLocalization(language, val)
        except (MissingSectionHeaderError, ParsingError):
            raise ParseException(error='error.parse.invalid', resFile=font.getName() + '.font')
        # FIX: 'except IOError, e' is Python 2-only syntax; the 'as' form
        # works on Python 2.6+ and Python 3.
        except IOError as e:
            raise ParseException(error='error.parse.io', resFile=font.getName() + '.font', args=(str(e)))
        else:
            return font
| {
"content_hash": "c4095faea15655cfc1b70a387de0f37f",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 111,
"avg_line_length": 40.0655737704918,
"alnum_prop": 0.6415711947626841,
"repo_name": "vardis/pano",
"id": "828fdb27663c291494b1880e0e957dc3321f7c1f",
"size": "2444",
"binary": false,
"copies": "1",
"ref": "refs/heads/Panorama",
"path": "src/pano/resources/parsers/FontParser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "566158"
}
],
"symlink_target": ""
} |
"""
surveymonty.constants
---------------------
"""
DEFAULT_HOST = 'https://api.surveymonkey.net'
DEFAULT_VERSION = 'v3'
VERSIONS_MODULE = 'surveymonty.versions'
| {
"content_hash": "7b3c43041490fa5e3e11180e3315a55a",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 45,
"avg_line_length": 23.142857142857142,
"alnum_prop": 0.6358024691358025,
"repo_name": "andrewkshim/surveymonty",
"id": "4c57c3b90ec156f34c901aa263d60a3add1c60f2",
"size": "162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "surveymonty/constants.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "354"
},
{
"name": "Python",
"bytes": "10899"
}
],
"symlink_target": ""
} |
import logging
import os
from pyjade import Compiler as _Compiler, Parser, register_filter
from pyjade.runtime import attrs
from pyjade.exceptions import CurrentlyNotSupported
from pyjade.utils import process
from django.conf import settings
class Compiler(_Compiler):
    """pyjade Compiler specialization that emits Django template syntax.

    Jade constructs (blocks, assignments, mixins, code, attributes) are
    translated into Django template tags; the custom ``__pyjade_*`` tags
    are provided by ``pyjade.ext.django.templatetags`` (registered below).
    """
    # Django tags that require a matching {% end<tag> %} to be emitted.
    autocloseCode = 'if,ifchanged,ifequal,ifnotequal,for,block,filter,autoescape,with,trans,blocktrans,spaceless,comment,cache,localize,compress,verbatim'.split(',')
    useRuntime = True
    def __init__(self, node, **options):
        # A PYJADE dict in Django settings may override compiler options.
        if settings.configured:
            options.update(getattr(settings,'PYJADE',{}))
        super(Compiler, self).__init__(node, **options)
    def visitCodeBlock(self,block):
        # Emit a Django {% block %}; 'append'/'prepend' modes inject
        # {{block.super}} after/before the block body respectively.
        self.buffer('{%% block %s %%}'%block.name)
        if block.mode=='append': self.buffer('{{block.super}}')
        self.visitBlock(block)
        if block.mode=='prepend': self.buffer('{{block.super}}')
        self.buffer('{% endblock %}')
    def visitAssignment(self,assignment):
        # Jade assignment -> custom __pyjade_set tag.
        self.buffer('{%% __pyjade_set %s = %s %%}'%(assignment.name,assignment.val))
    def visitMixin(self,mixin):
        # Mixin definitions/calls map onto __pyjade_kwacro keyword macros.
        self.mixing += 1
        if not mixin.call:
            # Definition: wrap the mixin body in a macro tag pair.
            self.buffer('{%% __pyjade_kwacro %s %s %%}'%(mixin.name,mixin.args))
            self.visitBlock(mixin.block)
            self.buffer('{% end__pyjade_kwacro %}')
        elif mixin.block:
            raise CurrentlyNotSupported("The mixin blocks are not supported yet.")
        else:
            # Call site: expand a previously defined macro.
            self.buffer('{%% __pyjade_usekwacro %s %s %%}'%(mixin.name,mixin.args))
        self.mixing -= 1
    def visitCode(self,code):
        # Buffered code renders as {{ ... }} (force_escape unless escaping is
        # suppressed); statement code renders as {% ... %} and, when the tag
        # is in autocloseCode, gets a matching {% end<tag> %}.
        if code.buffer:
            val = code.val.lstrip()
            val = self.var_processor(val)
            self.buf.append('{{%s%s}}'%(val,'|force_escape' if code.escape else ''))
        else:
            self.buf.append('{%% %s %%}'%code.val)
        if code.block:
            self.visit(code.block)
        if not code.buffer:
            codeTag = code.val.strip().split(' ',1)[0]
            if codeTag in self.autocloseCode:
                self.buf.append('{%% end%s %%}'%codeTag)
    def attributes(self,attrs):
        # Attribute rendering is deferred to the runtime __pyjade_attrs tag.
        return "{%% __pyjade_attrs %s %%}"%attrs
# Register the pyjade template tags globally; the import location of
# add_to_builtins moved between Django versions.
try:
    from django.template.base import add_to_builtins
except ImportError: # Django < 1.8
    from django.template import add_to_builtins
add_to_builtins('pyjade.ext.django.templatetags')
from django.utils.translation import trans_real
# force_text replaced force_unicode in modern Django; alias whichever exists.
try:
    from django.utils.encoding import force_text as to_text
except ImportError:
    from django.utils.encoding import force_unicode as to_text
def decorate_templatize(func):
    """Wrap Django's ``templatize`` so ``.jade`` sources are compiled to
    Django template syntax before translatable strings are extracted.

    Non-Jade sources pass through to *func* unchanged.
    """
    def templatize(src, origin=None):
        src = to_text(src, settings.FILE_CHARSET)
        # BUGFIX: ``origin`` defaults to None; guard before calling
        # .endswith() so sources without an origin don't raise
        # AttributeError.
        if origin and origin.endswith(".jade"):
            html = process(src, compiler=Compiler)
        else:
            html = src
        return func(html, origin)
    return templatize
# Monkey-patch makemessages' string extraction so Jade templates are
# compiled before translation scanning.
trans_real.templatize = decorate_templatize(trans_real.templatize)
# Optional 'markdown' Jade filter; django.contrib.markup is not available
# in all Django versions, so registration is silently skipped when absent.
try:
    from django.contrib.markup.templatetags.markup import markdown
    @register_filter('markdown')
    def markdown_filter(x,y):
        return markdown(x)
except ImportError:
    pass
| {
"content_hash": "62540ec4258e4f7c4666e2b5fb6c1168",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 165,
"avg_line_length": 32.24742268041237,
"alnum_prop": 0.6313938618925832,
"repo_name": "huiyiqun/pyjade",
"id": "7452d8ddcfdca3142d823892b75f905f9d126ab0",
"size": "3128",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pyjade/ext/django/compiler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "11429"
},
{
"name": "Python",
"bytes": "127962"
},
{
"name": "Shell",
"bytes": "132"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from .models import iam_backend, User
class IamResponse(BaseResponse):
    """Mock handlers for AWS IAM API actions.

    Every handler follows the same shape: read the action's parameters
    from the request via ``self._get_param`` and friends, delegate state
    changes/lookups to ``iam_backend``, and render one of the module-level
    XML response templates.
    """

    def attach_role_policy(self):
        policy_arn = self._get_param('PolicyArn')
        role_name = self._get_param('RoleName')
        iam_backend.attach_role_policy(policy_arn, role_name)
        template = self.response_template(ATTACH_ROLE_POLICY_TEMPLATE)
        return template.render()

    def create_policy(self):
        description = self._get_param('Description')
        path = self._get_param('Path')
        policy_document = self._get_param('PolicyDocument')
        policy_name = self._get_param('PolicyName')
        policy = iam_backend.create_policy(
            description, path, policy_document, policy_name)
        template = self.response_template(CREATE_POLICY_TEMPLATE)
        return template.render(policy=policy)

    def list_attached_role_policies(self):
        marker = self._get_param('Marker')
        max_items = self._get_int_param('MaxItems', 100)
        path_prefix = self._get_param('PathPrefix', '/')
        role_name = self._get_param('RoleName')
        policies, marker = iam_backend.list_attached_role_policies(
            role_name, marker=marker, max_items=max_items, path_prefix=path_prefix)
        template = self.response_template(LIST_ATTACHED_ROLE_POLICIES_TEMPLATE)
        return template.render(policies=policies, marker=marker)

    def list_policies(self):
        marker = self._get_param('Marker')
        max_items = self._get_int_param('MaxItems', 100)
        only_attached = self._get_bool_param('OnlyAttached', False)
        path_prefix = self._get_param('PathPrefix', '/')
        scope = self._get_param('Scope', 'All')
        policies, marker = iam_backend.list_policies(
            marker, max_items, only_attached, path_prefix, scope)
        template = self.response_template(LIST_POLICIES_TEMPLATE)
        return template.render(policies=policies, marker=marker)

    def create_role(self):
        role_name = self._get_param('RoleName')
        path = self._get_param('Path')
        assume_role_policy_document = self._get_param(
            'AssumeRolePolicyDocument')
        role = iam_backend.create_role(
            role_name, assume_role_policy_document, path)
        template = self.response_template(CREATE_ROLE_TEMPLATE)
        return template.render(role=role)

    def get_role(self):
        role_name = self._get_param('RoleName')
        role = iam_backend.get_role(role_name)
        template = self.response_template(GET_ROLE_TEMPLATE)
        return template.render(role=role)

    def list_role_policies(self):
        role_name = self._get_param('RoleName')
        role_policies_names = iam_backend.list_role_policies(role_name)
        template = self.response_template(LIST_ROLE_POLICIES)
        return template.render(role_policies=role_policies_names)

    def put_role_policy(self):
        role_name = self._get_param('RoleName')
        policy_name = self._get_param('PolicyName')
        policy_document = self._get_param('PolicyDocument')
        iam_backend.put_role_policy(role_name, policy_name, policy_document)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name="PutRolePolicyResponse")

    def get_role_policy(self):
        role_name = self._get_param('RoleName')
        policy_name = self._get_param('PolicyName')
        policy_name, policy_document = iam_backend.get_role_policy(
            role_name, policy_name)
        template = self.response_template(GET_ROLE_POLICY_TEMPLATE)
        return template.render(role_name=role_name,
                               policy_name=policy_name,
                               policy_document=policy_document)

    def update_assume_role_policy(self):
        role_name = self._get_param('RoleName')
        role = iam_backend.get_role(role_name)
        role.assume_role_policy_document = self._get_param('PolicyDocument')
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name="UpdateAssumeRolePolicyResponse")

    def create_instance_profile(self):
        profile_name = self._get_param('InstanceProfileName')
        path = self._get_param('Path')
        # Profiles start with no attached roles; roles are added later via
        # add_role_to_instance_profile.
        profile = iam_backend.create_instance_profile(
            profile_name, path, role_ids=[])
        template = self.response_template(CREATE_INSTANCE_PROFILE_TEMPLATE)
        return template.render(profile=profile)

    def get_instance_profile(self):
        profile_name = self._get_param('InstanceProfileName')
        profile = iam_backend.get_instance_profile(profile_name)
        template = self.response_template(GET_INSTANCE_PROFILE_TEMPLATE)
        return template.render(profile=profile)

    def add_role_to_instance_profile(self):
        profile_name = self._get_param('InstanceProfileName')
        role_name = self._get_param('RoleName')
        iam_backend.add_role_to_instance_profile(profile_name, role_name)
        template = self.response_template(
            ADD_ROLE_TO_INSTANCE_PROFILE_TEMPLATE)
        return template.render()

    def remove_role_from_instance_profile(self):
        profile_name = self._get_param('InstanceProfileName')
        role_name = self._get_param('RoleName')
        iam_backend.remove_role_from_instance_profile(profile_name, role_name)
        template = self.response_template(
            REMOVE_ROLE_FROM_INSTANCE_PROFILE_TEMPLATE)
        return template.render()

    def list_roles(self):
        roles = iam_backend.get_roles()
        template = self.response_template(LIST_ROLES_TEMPLATE)
        return template.render(roles=roles)

    def list_instance_profiles(self):
        profiles = iam_backend.get_instance_profiles()
        template = self.response_template(LIST_INSTANCE_PROFILES_TEMPLATE)
        return template.render(instance_profiles=profiles)

    def list_instance_profiles_for_role(self):
        role_name = self._get_param('RoleName')
        profiles = iam_backend.get_instance_profiles_for_role(
            role_name=role_name)
        template = self.response_template(
            LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE)
        return template.render(instance_profiles=profiles)

    def upload_server_certificate(self):
        cert_name = self._get_param('ServerCertificateName')
        cert_body = self._get_param('CertificateBody')
        path = self._get_param('Path')
        private_key = self._get_param('PrivateKey')
        # BUGFIX: the chain is sent in the 'CertificateChain' parameter;
        # this previously read 'CertificateName', so the chain was always
        # the certificate's name instead of its PEM chain.
        cert_chain = self._get_param('CertificateChain')
        cert = iam_backend.upload_server_cert(
            cert_name, cert_body, private_key, cert_chain=cert_chain, path=path)
        template = self.response_template(UPLOAD_CERT_TEMPLATE)
        return template.render(certificate=cert)

    def list_server_certificates(self, marker=None):
        certs = iam_backend.get_all_server_certs(marker=marker)
        template = self.response_template(LIST_SERVER_CERTIFICATES_TEMPLATE)
        return template.render(server_certificates=certs)

    def get_server_certificate(self):
        cert_name = self._get_param('ServerCertificateName')
        cert = iam_backend.get_server_certificate(cert_name)
        template = self.response_template(GET_SERVER_CERTIFICATE_TEMPLATE)
        return template.render(certificate=cert)

    def create_group(self):
        group_name = self._get_param('GroupName')
        path = self._get_param('Path')
        group = iam_backend.create_group(group_name, path)
        template = self.response_template(CREATE_GROUP_TEMPLATE)
        return template.render(group=group)

    def get_group(self):
        group_name = self._get_param('GroupName')
        group = iam_backend.get_group(group_name)
        template = self.response_template(GET_GROUP_TEMPLATE)
        return template.render(group=group)

    def list_groups(self):
        groups = iam_backend.list_groups()
        template = self.response_template(LIST_GROUPS_TEMPLATE)
        return template.render(groups=groups)

    def list_groups_for_user(self):
        user_name = self._get_param('UserName')
        groups = iam_backend.get_groups_for_user(user_name)
        template = self.response_template(LIST_GROUPS_FOR_USER_TEMPLATE)
        return template.render(groups=groups)

    def put_group_policy(self):
        group_name = self._get_param('GroupName')
        policy_name = self._get_param('PolicyName')
        policy_document = self._get_param('PolicyDocument')
        iam_backend.put_group_policy(group_name, policy_name, policy_document)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name="PutGroupPolicyResponse")

    def list_group_policies(self):
        group_name = self._get_param('GroupName')
        marker = self._get_param('Marker')
        max_items = self._get_param('MaxItems')
        policies = iam_backend.list_group_policies(group_name,
                                                   marker=marker, max_items=max_items)
        template = self.response_template(LIST_GROUP_POLICIES_TEMPLATE)
        return template.render(name="ListGroupPoliciesResponse",
                               policies=policies,
                               marker=marker)

    def get_group_policy(self):
        group_name = self._get_param('GroupName')
        policy_name = self._get_param('PolicyName')
        policy_result = iam_backend.get_group_policy(group_name, policy_name)
        template = self.response_template(GET_GROUP_POLICY_TEMPLATE)
        return template.render(name="GetGroupPolicyResponse", **policy_result)

    def create_user(self):
        user_name = self._get_param('UserName')
        path = self._get_param('Path')
        user = iam_backend.create_user(user_name, path)
        template = self.response_template(USER_TEMPLATE)
        return template.render(action='Create', user=user)

    def get_user(self):
        user_name = self._get_param('UserName')
        if user_name:
            user = iam_backend.get_user(user_name)
        else:
            # When no user is specified, real IAM returns the caller; the
            # mock stands in a placeholder user.
            user = User(name='default_user')
        template = self.response_template(USER_TEMPLATE)
        return template.render(action='Get', user=user)

    def list_users(self):
        path_prefix = self._get_param('PathPrefix')
        marker = self._get_param('Marker')
        max_items = self._get_param('MaxItems')
        users = iam_backend.list_users(path_prefix, marker, max_items)
        template = self.response_template(LIST_USERS_TEMPLATE)
        return template.render(action='List', users=users)

    def create_login_profile(self):
        user_name = self._get_param('UserName')
        password = self._get_param('Password')
        iam_backend.create_login_profile(user_name, password)
        template = self.response_template(CREATE_LOGIN_PROFILE_TEMPLATE)
        return template.render(user_name=user_name)

    def add_user_to_group(self):
        group_name = self._get_param('GroupName')
        user_name = self._get_param('UserName')
        iam_backend.add_user_to_group(group_name, user_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='AddUserToGroup')

    def remove_user_from_group(self):
        group_name = self._get_param('GroupName')
        user_name = self._get_param('UserName')
        iam_backend.remove_user_from_group(group_name, user_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='RemoveUserFromGroup')

    def get_user_policy(self):
        user_name = self._get_param('UserName')
        policy_name = self._get_param('PolicyName')
        policy_document = iam_backend.get_user_policy(user_name, policy_name)
        template = self.response_template(GET_USER_POLICY_TEMPLATE)
        return template.render(
            user_name=user_name,
            policy_name=policy_name,
            policy_document=policy_document
        )

    def list_user_policies(self):
        user_name = self._get_param('UserName')
        policies = iam_backend.list_user_policies(user_name)
        template = self.response_template(LIST_USER_POLICIES_TEMPLATE)
        return template.render(policies=policies)

    def put_user_policy(self):
        user_name = self._get_param('UserName')
        policy_name = self._get_param('PolicyName')
        policy_document = self._get_param('PolicyDocument')
        iam_backend.put_user_policy(user_name, policy_name, policy_document)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='PutUserPolicy')

    def delete_user_policy(self):
        user_name = self._get_param('UserName')
        policy_name = self._get_param('PolicyName')
        iam_backend.delete_user_policy(user_name, policy_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='DeleteUserPolicy')

    def create_access_key(self):
        user_name = self._get_param('UserName')
        key = iam_backend.create_access_key(user_name)
        template = self.response_template(CREATE_ACCESS_KEY_TEMPLATE)
        return template.render(key=key)

    def list_access_keys(self):
        user_name = self._get_param('UserName')
        keys = iam_backend.get_all_access_keys(user_name)
        template = self.response_template(LIST_ACCESS_KEYS_TEMPLATE)
        return template.render(user_name=user_name, keys=keys)

    def delete_access_key(self):
        user_name = self._get_param('UserName')
        access_key_id = self._get_param('AccessKeyId')
        iam_backend.delete_access_key(access_key_id, user_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='DeleteAccessKey')

    def deactivate_mfa_device(self):
        user_name = self._get_param('UserName')
        serial_number = self._get_param('SerialNumber')
        iam_backend.deactivate_mfa_device(user_name, serial_number)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='DeactivateMFADevice')

    def enable_mfa_device(self):
        user_name = self._get_param('UserName')
        serial_number = self._get_param('SerialNumber')
        authentication_code_1 = self._get_param('AuthenticationCode1')
        authentication_code_2 = self._get_param('AuthenticationCode2')
        iam_backend.enable_mfa_device(
            user_name,
            serial_number,
            authentication_code_1,
            authentication_code_2
        )
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='EnableMFADevice')

    def list_mfa_devices(self):
        user_name = self._get_param('UserName')
        devices = iam_backend.list_mfa_devices(user_name)
        template = self.response_template(LIST_MFA_DEVICES_TEMPLATE)
        return template.render(user_name=user_name, devices=devices)

    def delete_user(self):
        user_name = self._get_param('UserName')
        iam_backend.delete_user(user_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='DeleteUser')

    def delete_login_profile(self):
        user_name = self._get_param('UserName')
        iam_backend.delete_login_profile(user_name)
        template = self.response_template(GENERIC_EMPTY_TEMPLATE)
        return template.render(name='DeleteLoginProfile')

    def generate_credential_report(self):
        # First call kicks off generation; subsequent calls report COMPLETE.
        if iam_backend.report_generated():
            template = self.response_template(CREDENTIAL_REPORT_GENERATED)
        else:
            template = self.response_template(CREDENTIAL_REPORT_GENERATING)
            iam_backend.generate_report()
        return template.render()

    def get_credential_report(self):
        report = iam_backend.get_credential_report()
        template = self.response_template(CREDENTIAL_REPORT)
        return template.render(report=report)
ATTACH_ROLE_POLICY_TEMPLATE = """<AttachRolePolicyResponse>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</AttachRolePolicyResponse>"""
CREATE_POLICY_TEMPLATE = """<CreatePolicyResponse>
<CreatePolicyResult>
<Policy>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate>
</Policy>
</CreatePolicyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreatePolicyResponse>"""
LIST_ATTACHED_ROLE_POLICIES_TEMPLATE = """<ListAttachedRolePoliciesResponse>
<ListAttachedRolePoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<AttachedPolicies>
{% for policy in policies %}
<member>
<PolicyName>{{ policy.name }}</PolicyName>
<PolicyArn>{{ policy.arn }}</PolicyArn>
</member>
{% endfor %}
</AttachedPolicies>
</ListAttachedRolePoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListAttachedRolePoliciesResponse>"""
LIST_POLICIES_TEMPLATE = """<ListPoliciesResponse>
<ListPoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<Policies>
{% for policy in policies %}
<member>
<Arn>{{ policy.arn }}</Arn>
<AttachmentCount>{{ policy.attachment_count }}</AttachmentCount>
<CreateDate>{{ policy.create_datetime.isoformat() }}</CreateDate>
<DefaultVersionId>{{ policy.default_version_id }}</DefaultVersionId>
<Path>{{ policy.path }}</Path>
<PolicyId>{{ policy.id }}</PolicyId>
<PolicyName>{{ policy.name }}</PolicyName>
<UpdateDate>{{ policy.update_datetime.isoformat() }}</UpdateDate>
</member>
{% endfor %}
</Policies>
</ListPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListPoliciesResponse>"""
GENERIC_EMPTY_TEMPLATE = """<{{ name }}Response>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ name }}Response>"""
CREATE_INSTANCE_PROFILE_TEMPLATE = """<CreateInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateInstanceProfileResult>
<InstanceProfile>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles/>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10.222Z</CreateDate>
</InstanceProfile>
</CreateInstanceProfileResult>
<ResponseMetadata>
<RequestId>974142ee-99f1-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</CreateInstanceProfileResponse>"""
GET_INSTANCE_PROFILE_TEMPLATE = """<GetInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetInstanceProfileResult>
<InstanceProfile>
<InstanceProfileId>{{ profile.id }}</InstanceProfileId>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:11:10Z</CreateDate>
</InstanceProfile>
</GetInstanceProfileResult>
<ResponseMetadata>
<RequestId>37289fda-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetInstanceProfileResponse>"""
CREATE_ROLE_TEMPLATE = """<CreateRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<CreateRoleResult>
<Role>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01.495Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</CreateRoleResult>
<ResponseMetadata>
<RequestId>4a93ceee-9966-11e1-b624-b1aEXAMPLE7c</RequestId>
</ResponseMetadata>
</CreateRoleResponse>"""
GET_ROLE_POLICY_TEMPLATE = """<GetRolePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetRolePolicyResult>
<PolicyName>{{ policy_name }}</PolicyName>
<RoleName>{{ role_name }}</RoleName>
<PolicyDocument>{{ policy_document }}</PolicyDocument>
</GetRolePolicyResult>
<ResponseMetadata>
<RequestId>7e7cd8bc-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetRolePolicyResponse>"""
GET_ROLE_TEMPLATE = """<GetRoleResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetRoleResult>
<Role>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-08T23:34:01Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</Role>
</GetRoleResult>
<ResponseMetadata>
<RequestId>df37e965-9967-11e1-a4c3-270EXAMPLE04</RequestId>
</ResponseMetadata>
</GetRoleResponse>"""
ADD_ROLE_TO_INSTANCE_PROFILE_TEMPLATE = """<AddRoleToInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>12657608-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</AddRoleToInstanceProfileResponse>"""
REMOVE_ROLE_FROM_INSTANCE_PROFILE_TEMPLATE = """<RemoveRoleFromInstanceProfileResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ResponseMetadata>
<RequestId>12657608-99f2-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</RemoveRoleFromInstanceProfileResponse>"""
LIST_ROLES_TEMPLATE = """<ListRolesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListRolesResult>
<IsTruncated>false</IsTruncated>
<Roles>
{% for role in roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
</ListRolesResult>
<ResponseMetadata>
<RequestId>20f7279f-99ee-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListRolesResponse>"""
LIST_ROLE_POLICIES = """<ListRolePoliciesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListRolePoliciesResult>
<PolicyNames>
{% for policy_name in role_policies %}
<member>{{ policy_name }}</member>
{% endfor %}
</PolicyNames>
<IsTruncated>false</IsTruncated>
</ListRolePoliciesResult>
<ResponseMetadata>
<RequestId>8c7e1816-99f0-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListRolePoliciesResponse>"""
LIST_INSTANCE_PROFILES_TEMPLATE = """<ListInstanceProfilesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<ListInstanceProfilesResult>
<IsTruncated>false</IsTruncated>
<InstanceProfiles>
{% for instance in instance_profiles %}
<member>
<Id>{{ instance.id }}</Id>
<Roles>
{% for role in instance.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_role_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ instance.name }}</InstanceProfileName>
<Path>{{ instance.path }}</Path>
<Arn>{{ instance.arn }}</Arn>
<CreateDate>2012-05-09T16:27:03Z</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
</ListInstanceProfilesResult>
<ResponseMetadata>
<RequestId>fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListInstanceProfilesResponse>"""
UPLOAD_CERT_TEMPLATE = """<UploadServerCertificateResponse>
<UploadServerCertificateResult>
<ServerCertificateMetadata>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</ServerCertificateMetadata>
</UploadServerCertificateResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</UploadServerCertificateResponse>"""
LIST_SERVER_CERTIFICATES_TEMPLATE = """<ListServerCertificatesResponse>
<ListServerCertificatesResult>
<IsTruncated>false</IsTruncated>
<ServerCertificateMetadataList>
{% for certificate in server_certificates %}
<member>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</member>
{% endfor %}
</ServerCertificateMetadataList>
</ListServerCertificatesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListServerCertificatesResponse>"""
GET_SERVER_CERTIFICATE_TEMPLATE = """<GetServerCertificateResponse>
<GetServerCertificateResult>
<ServerCertificate>
<ServerCertificateMetadata>
<ServerCertificateName>{{ certificate.cert_name }}</ServerCertificateName>
{% if certificate.path %}
<Path>{{ certificate.path }}</Path>
{% endif %}
<Arn>{{ certificate.arn }}</Arn>
<UploadDate>2010-05-08T01:02:03.004Z</UploadDate>
<ServerCertificateId>ASCACKCEVSQ6C2EXAMPLE</ServerCertificateId>
<Expiration>2012-05-08T01:02:03.004Z</Expiration>
</ServerCertificateMetadata>
<CertificateBody>{{ certificate.cert_body }}</CertificateBody>
</ServerCertificate>
</GetServerCertificateResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetServerCertificateResponse>"""
CREATE_GROUP_TEMPLATE = """<CreateGroupResponse>
<CreateGroupResult>
<Group>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</Group>
</CreateGroupResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateGroupResponse>"""
GET_GROUP_TEMPLATE = """<GetGroupResponse>
<GetGroupResult>
<Group>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</Group>
<Users>
{% for user in group.users %}
<member>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<UserId>{{ user.id }}</UserId>
<Arn>{{ user.arn }}</Arn>
</member>
{% endfor %}
</Users>
<IsTruncated>false</IsTruncated>
</GetGroupResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetGroupResponse>"""
LIST_GROUPS_TEMPLATE = """<ListGroupsResponse>
<ListGroupsResult>
<Groups>
{% for group in groups %}
<member>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</member>
{% endfor %}
</Groups>
<IsTruncated>false</IsTruncated>
</ListGroupsResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupsResponse>"""
LIST_GROUPS_FOR_USER_TEMPLATE = """<ListGroupsForUserResponse>
<ListGroupsForUserResult>
<Groups>
{% for group in groups %}
<member>
<Path>{{ group.path }}</Path>
<GroupName>{{ group.name }}</GroupName>
<GroupId>{{ group.id }}</GroupId>
<Arn>{{ group.arn }}</Arn>
</member>
{% endfor %}
</Groups>
<IsTruncated>false</IsTruncated>
</ListGroupsForUserResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupsForUserResponse>"""
LIST_GROUP_POLICIES_TEMPLATE = """<ListGroupPoliciesResponse>
<ListGroupPoliciesResult>
{% if marker is none %}
<IsTruncated>false</IsTruncated>
{% else %}
<IsTruncated>true</IsTruncated>
<Marker>{{ marker }}</Marker>
{% endif %}
<PolicyNames>
{% for policy in policies %}
<member>{{ policy }}</member>
{% endfor %}
</PolicyNames>
</ListGroupPoliciesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListGroupPoliciesResponse>"""
GET_GROUP_POLICY_TEMPLATE = """<GetGroupPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
<GetGroupPolicyResult>
<PolicyName>{{ policy_name }}</PolicyName>
<GroupName>{{ group_name }}</GroupName>
<PolicyDocument>{{ policy_document }}</PolicyDocument>
</GetGroupPolicyResult>
<ResponseMetadata>
<RequestId>7e7cd8bc-99ef-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</GetGroupPolicyResponse>"""
USER_TEMPLATE = """<{{ action }}UserResponse>
<{{ action }}UserResult>
<User>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<UserId>{{ user.id }}</UserId>
<Arn>{{ user.arn }}</Arn>
</User>
</{{ action }}UserResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ action }}UserResponse>"""
LIST_USERS_TEMPLATE = """<{{ action }}UsersResponse>
<{{ action }}UsersResult>
<Users>
{% for user in users %}
<member>
<UserId>{{ user.id }}</UserId>
<Path>{{ user.path }}</Path>
<UserName>{{ user.name }}</UserName>
<CreateDate>{{ user.created_iso_8601 }}</CreateDate>
<Arn>{{ user.arn }}</Arn>
</member>
{% endfor %}
</Users>
</{{ action }}UsersResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</{{ action }}UsersResponse>"""
CREATE_LOGIN_PROFILE_TEMPLATE = """
<CreateLoginProfileResponse>
<CreateLoginProfileResult>
<LoginProfile>
<UserName>{{ user_name }}</UserName>
<CreateDate>2011-09-19T23:00:56Z</CreateDate>
</LoginProfile>
</CreateLoginProfileResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateLoginProfileResponse>
"""
GET_USER_POLICY_TEMPLATE = """<GetUserPolicyResponse>
<GetUserPolicyResult>
<UserName>{{ user_name }}</UserName>
<PolicyName>{{ policy_name }}</PolicyName>
<PolicyDocument>
{{ policy_document }}
</PolicyDocument>
</GetUserPolicyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</GetUserPolicyResponse>"""
LIST_USER_POLICIES_TEMPLATE = """<ListUserPoliciesResponse>
<ListUserPoliciesResult>
<PolicyNames>
{% for policy in policies %}
<member>{{ policy }}</member>
{% endfor %}
</PolicyNames>
</ListUserPoliciesResult>
<IsTruncated>false</IsTruncated>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListUserPoliciesResponse>"""
CREATE_ACCESS_KEY_TEMPLATE = """<CreateAccessKeyResponse>
<CreateAccessKeyResult>
<AccessKey>
<UserName>{{ key.user_name }}</UserName>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status>
<SecretAccessKey>
{{ key.secret_access_key }}
</SecretAccessKey>
</AccessKey>
</CreateAccessKeyResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</CreateAccessKeyResponse>"""
LIST_ACCESS_KEYS_TEMPLATE = """<ListAccessKeysResponse>
<ListAccessKeysResult>
<UserName>{{ user_name }}</UserName>
<AccessKeyMetadata>
{% for key in keys %}
<member>
<UserName>{{ user_name }}</UserName>
<AccessKeyId>{{ key.access_key_id }}</AccessKeyId>
<Status>{{ key.status }}</Status>
</member>
{% endfor %}
</AccessKeyMetadata>
<IsTruncated>false</IsTruncated>
</ListAccessKeysResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListAccessKeysResponse>"""
CREDENTIAL_REPORT_GENERATING = """
<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>STARTED</state>
<description>No report exists. Starting a new report generation task</description>
</GenerateCredentialReportResult>
<ResponseMetadata>
<RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
</ResponseMetadata>
</GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT_GENERATED = """<GenerateCredentialReportResponse>
<GenerateCredentialReportResult>
<state>COMPLETE</state>
</GenerateCredentialReportResult>
<ResponseMetadata>
<RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
</ResponseMetadata>
</GenerateCredentialReportResponse>"""
CREDENTIAL_REPORT = """<GetCredentialReportResponse>
<GetCredentialReportResult>
<content>{{ report }}</content>
<GeneratedTime>2015-02-02T20:02:02Z</GeneratedTime>
<ReportFormat>text/csv</ReportFormat>
</GetCredentialReportResult>
<ResponseMetadata>
<RequestId>fa788a82-aa8a-11e4-a278-1786c418872b"</RequestId>
</ResponseMetadata>
</GetCredentialReportResponse>"""
LIST_INSTANCE_PROFILES_FOR_ROLE_TEMPLATE = """<ListInstanceProfilesForRoleResponse>
<ListInstanceProfilesForRoleResult>
<IsTruncated>false</IsTruncated>
<InstanceProfiles>
{% for profile in instance_profiles %}
<member>
<Id>{{ profile.id }}</Id>
<Roles>
{% for role in profile.roles %}
<member>
<Path>{{ role.path }}</Path>
<Arn>{{ role.arn }}</Arn>
<RoleName>{{ role.name }}</RoleName>
<AssumeRolePolicyDocument>{{ role.assume_policy_document }}</AssumeRolePolicyDocument>
<CreateDate>2012-05-09T15:45:35Z</CreateDate>
<RoleId>{{ role.id }}</RoleId>
</member>
{% endfor %}
</Roles>
<InstanceProfileName>{{ profile.name }}</InstanceProfileName>
<Path>{{ profile.path }}</Path>
<Arn>{{ profile.arn }}</Arn>
<CreateDate>2012-05-09T16:27:11Z</CreateDate>
</member>
{% endfor %}
</InstanceProfiles>
</ListInstanceProfilesForRoleResult>
<ResponseMetadata>
<RequestId>6a8c3992-99f4-11e1-a4c3-27EXAMPLE804</RequestId>
</ResponseMetadata>
</ListInstanceProfilesForRoleResponse>"""
LIST_MFA_DEVICES_TEMPLATE = """<ListMFADevicesResponse>
<ListMFADevicesResult>
<MFADevices>
{% for device in devices %}
<member>
<UserName>{{ user_name }}</UserName>
<SerialNumber>{{ device.serial_number }}</SerialNumber>
</member>
{% endfor %}
</MFADevices>
<IsTruncated>false</IsTruncated>
</ListMFADevicesResult>
<ResponseMetadata>
<RequestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</RequestId>
</ResponseMetadata>
</ListMFADevicesResponse>"""
| {
"content_hash": "e1edfcd6130ea976a42640a54c3d44f8",
"timestamp": "",
"source": "github",
"line_count": 991,
"max_line_length": 137,
"avg_line_length": 37.564076690211905,
"alnum_prop": 0.6559125342502552,
"repo_name": "heddle317/moto",
"id": "8e19b3aa7600c927829e001d206d9d1d7cef52b5",
"size": "37226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moto/iam/responses.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "Python",
"bytes": "2633276"
},
{
"name": "Ruby",
"bytes": "188"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import osf.models.base
class Migration(migrations.Migration):
    """Auto-generated schema migration.

    Creates the PreprintRequest and PreprintRequestAction models and
    aligns two NodeRequest fields (creator related_name, request_type
    choices) with the new preprint-request counterparts.

    NOTE(review): generated by `makemigrations`; field definitions must
    stay in sync with the model state at migration 0117 — do not edit by
    hand except to add documentation.
    """

    dependencies = [
        ('osf', '0116_merge_20180703_2258'),
    ]
    operations = [
        migrations.CreateModel(
            name='PreprintRequest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
                ('machine_state', models.CharField(choices=[('initial', 'Initial'), ('pending', 'Pending'), ('accepted', 'Accepted'), ('rejected', 'Rejected')], db_index=True, default='initial', max_length=15)),
                ('date_last_transitioned', models.DateTimeField(blank=True, db_index=True, null=True)),
                ('request_type', models.CharField(choices=[('access', 'Access'), ('withdrawal', 'Withdrawal')], max_length=31)),
                ('comment', models.TextField(blank=True, null=True)),
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submitted_preprintrequest', to=settings.AUTH_USER_MODEL)),
                ('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requests', to='osf.PreprintService')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PreprintRequestAction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
                ('trigger', models.CharField(choices=[('submit', 'Submit'), ('accept', 'Accept'), ('reject', 'Reject'), ('edit_comment', 'Edit_Comment')], max_length=31)),
                ('from_state', models.CharField(choices=[('initial', 'Initial'), ('pending', 'Pending'), ('accepted', 'Accepted'), ('rejected', 'Rejected')], max_length=31)),
                ('to_state', models.CharField(choices=[('initial', 'Initial'), ('pending', 'Pending'), ('accepted', 'Accepted'), ('rejected', 'Rejected')], max_length=31)),
                ('comment', models.TextField(blank=True)),
                ('is_deleted', models.BooleanField(default=False)),
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
                ('target', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='actions', to='osf.PreprintRequest')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AlterField(
            model_name='noderequest',
            name='creator',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submitted_noderequest', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='noderequest',
            name='request_type',
            field=models.CharField(choices=[('access', 'Access'), ('withdrawal', 'Withdrawal')], max_length=31),
        ),
    ]
| {
"content_hash": "1b714dedb8cacb50e18cd06350f96152",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 211,
"avg_line_length": 61.65625,
"alnum_prop": 0.6137861125190066,
"repo_name": "saradbowman/osf.io",
"id": "56c9d98231f36f21cc5bfe24df1aeeae23004557",
"size": "4020",
"binary": false,
"copies": "13",
"ref": "refs/heads/develop",
"path": "osf/migrations/0117_auto_20180625_0810.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "70340"
},
{
"name": "JavaScript",
"bytes": "2566970"
},
{
"name": "Python",
"bytes": "2154059"
}
],
"symlink_target": ""
} |
"""Test matrix ops via einsum"""
from iree.tf.support import tf_test_utils
from iree.tf.support import tf_utils
import tensorflow.compat.v2 as tf
VECTOR_DIM = 16
class EinsumVectorModule(tf.Module):
  """tf.Module exposing vector einsum variants for compiler testing.

  Each method wraps a single tf.einsum call with a fixed float32 input
  signature of length VECTOR_DIM, covering identity, reduction, elementwise
  multiply, inner product (implicit and explicit) and outer product.
  """

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_identity(self, x):
    """'i': no reduction — returns the input vector unchanged."""
    return tf.einsum('i', x)

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_sum(self, x):
    """'i ->': reduces the single axis, producing a scalar sum."""
    return tf.einsum('i ->', x)

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_mul(self, lhs, rhs):
    """'i, i -> i': elementwise multiplication of two vectors."""
    return tf.einsum('i, i -> i', lhs, rhs)

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_implicit_inner_product(self, lhs, rhs):
    """'i, i': repeated index with no output spec — implicit inner product."""
    return tf.einsum('i, i', lhs, rhs)

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_explicit_inner_product(self, lhs, rhs):
    """'i, i ->': inner product with the scalar output spelled out."""
    return tf.einsum('i, i ->', lhs, rhs)

  @tf.function(input_signature=[
      tf.TensorSpec([VECTOR_DIM], tf.float32),
      tf.TensorSpec([VECTOR_DIM], tf.float32),
  ])
  def einsum_outer_product(self, lhs, rhs):
    """'i, j -> ij': outer product, yielding a VECTOR_DIM x VECTOR_DIM matrix."""
    return tf.einsum('i, j -> ij', lhs, rhs)
class EinsumVectorTest(tf_test_utils.TracedModuleTestCase):
  """Runs each EinsumVectorModule function and compares backend results.

  NOTE(review): the inner trace functions appear to mirror the module method
  names (likely used by the tracing utilities for identification) — keep
  them in sync with EinsumVectorModule; confirm against tf_test_utils.
  """

  def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    # Compile once per test-case instance; compare_backends reuses it.
    self._modules = tf_test_utils.compile_tf_module(EinsumVectorModule)
  # yapf: disable
  def test_einsum_identity(self):
    def einsum_identity(module):
      module.einsum_identity(tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_identity, self._modules)

  def test_einsum_sum(self):
    def einsum_sum(module):
      module.einsum_sum(tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_sum, self._modules)

  def test_einsum_mul(self):
    def einsum_mul(module):
      module.einsum_mul(tf_utils.ndarange([VECTOR_DIM]),
                        tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_mul, self._modules)

  def test_einsum_implicit_inner_product(self):
    def einsum_implicit_inner_product(module):
      module.einsum_implicit_inner_product(tf_utils.ndarange([VECTOR_DIM]),
                                           tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_implicit_inner_product, self._modules)

  def test_einsum_explicit_inner_product(self):
    def einsum_explicit_inner_product(module):
      module.einsum_explicit_inner_product(tf_utils.ndarange([VECTOR_DIM]),
                                           tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_explicit_inner_product, self._modules)

  def test_einsum_outer_product(self):
    def einsum_outer_product(module):
      module.einsum_outer_product(tf_utils.ndarange([VECTOR_DIM]),
                                  tf_utils.ndarange([VECTOR_DIM]))
    self.compare_backends(einsum_outer_product, self._modules)
  # yapf: enable
if __name__ == "__main__":
  # Older TF releases require an explicit opt-in to v2 behavior; newer ones
  # no longer expose the switch, so probe for it before calling.
  enable_v2 = getattr(tf, "enable_v2_behavior", None)
  if enable_v2 is not None:
    enable_v2()
  tf.test.main()
| {
"content_hash": "2850917d35733d582dea7aa88fd0953e",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 75,
"avg_line_length": 32.707070707070706,
"alnum_prop": 0.6516368128474367,
"repo_name": "iree-org/iree",
"id": "228f26bda0d10b07117edfc9cc41665490ddf965",
"size": "3455",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "integrations/tensorflow/test/python/iree_tf_tests/uncategorized/einsum_vector_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "23010"
},
{
"name": "Batchfile",
"bytes": "353"
},
{
"name": "C",
"bytes": "3830546"
},
{
"name": "C++",
"bytes": "8161374"
},
{
"name": "CMake",
"bytes": "899403"
},
{
"name": "Dockerfile",
"bytes": "28245"
},
{
"name": "GLSL",
"bytes": "2629"
},
{
"name": "HTML",
"bytes": "31018"
},
{
"name": "Java",
"bytes": "31697"
},
{
"name": "JavaScript",
"bytes": "18714"
},
{
"name": "MLIR",
"bytes": "5606822"
},
{
"name": "NASL",
"bytes": "3852"
},
{
"name": "PowerShell",
"bytes": "7893"
},
{
"name": "Python",
"bytes": "1143963"
},
{
"name": "Shell",
"bytes": "248374"
},
{
"name": "Starlark",
"bytes": "600260"
}
],
"symlink_target": ""
} |
"""
oauthlib.oauth2.rfc6749.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import logging
from oauthlib import common
from .. import errors
from .base import GrantTypeBase
log = logging.getLogger(__name__)
class ImplicitGrant(GrantTypeBase):
    """`Implicit Grant`_

    The implicit grant type is used to obtain access tokens (it does not
    support the issuance of refresh tokens) and is optimized for public
    clients known to operate a particular redirection URI.  These clients
    are typically implemented in a browser using a scripting language
    such as JavaScript.

    Unlike the authorization code grant type, in which the client makes
    separate requests for authorization and for an access token, the
    client receives the access token as the result of the authorization
    request.

    The implicit grant type does not include client authentication, and
    relies on the presence of the resource owner and the registration of
    the redirection URI.  Because the access token is encoded into the
    redirection URI, it may be exposed to the resource owner and other
    applications residing on the same device::

        +----------+
        | Resource |
        |  Owner   |
        |          |
        +----------+
             ^
             |
            (B)
        +----|-----+          Client Identifier     +---------------+
        |         -+----(A)-- & Redirection URI --->|               |
        |  User-   |                                | Authorization |
        |  Agent  -|----(B)-- User authenticates -->|     Server    |
        |          |                                |               |
        |          |<---(C)--- Redirection URI ----<|               |
        |          |          with Access Token     +---------------+
        |          |            in Fragment
        |          |                                +---------------+
        |          |----(D)--- Redirection URI ---->|   Web-Hosted  |
        |          |          without Fragment      |     Client    |
        |          |                                |    Resource   |
        |     (F)  |<---(E)------- Script ---------<|               |
        |          |                                +---------------+
        +-|--------+
          |    |
         (A)  (G) Access Token
          |    |
          ^    v
        +---------+
        |         |
        |  Client |
        |         |
        +---------+

    Note: The lines illustrating steps (A) and (B) are broken into two
    parts as they pass through the user-agent.

    Figure 4: Implicit Grant Flow

    The flow illustrated in Figure 4 includes the following steps:

    (A)  The client initiates the flow by directing the resource owner's
         user-agent to the authorization endpoint.  The client includes
         its client identifier, requested scope, local state, and a
         redirection URI to which the authorization server will send the
         user-agent back once access is granted (or denied).

    (B)  The authorization server authenticates the resource owner (via
         the user-agent) and establishes whether the resource owner
         grants or denies the client's access request.

    (C)  Assuming the resource owner grants access, the authorization
         server redirects the user-agent back to the client using the
         redirection URI provided earlier.  The redirection URI includes
         the access token in the URI fragment.

    (D)  The user-agent follows the redirection instructions by making a
         request to the web-hosted client resource (which does not
         include the fragment per [RFC2616]).  The user-agent retains the
         fragment information locally.

    (E)  The web-hosted client resource returns a web page (typically an
         HTML document with an embedded script) capable of accessing the
         full redirection URI including the fragment retained by the
         user-agent, and extracting the access token (and other
         parameters) contained in the fragment.

    (F)  The user-agent executes the script provided by the web-hosted
         client resource locally, which extracts the access token.

    (G)  The user-agent passes the access token to the client.

    See `Section 10.3`_ and `Section 10.16`_ for important security considerations
    when using the implicit grant.

    .. _`Implicit Grant`: https://tools.ietf.org/html/rfc6749#section-4.2
    .. _`Section 10.3`: https://tools.ietf.org/html/rfc6749#section-10.3
    .. _`Section 10.16`: https://tools.ietf.org/html/rfc6749#section-10.16
    """

    response_types = ['token']
    grant_allows_refresh_token = False

    def create_authorization_response(self, request, token_handler):
        """Create an authorization response.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param token_handler: A token handler instance, for example of type
                              oauthlib.oauth2.BearerToken.

        The client constructs the request URI by adding the following
        parameters to the query component of the authorization endpoint URI
        using the "application/x-www-form-urlencoded" format, per `Appendix B`_:

        response_type
                REQUIRED. Value MUST be set to "token" for standard OAuth2 implicit flow
                or "id_token token" or just "id_token" for OIDC implicit flow

        client_id
                REQUIRED. The client identifier as described in `Section 2.2`_.

        redirect_uri
                OPTIONAL. As described in `Section 3.1.2`_.

        scope
                OPTIONAL. The scope of the access request as described by
                `Section 3.3`_.

        state
                RECOMMENDED.  An opaque value used by the client to maintain
                state between the request and callback.  The authorization
                server includes this value when redirecting the user-agent back
                to the client.  The parameter SHOULD be used for preventing
                cross-site request forgery as described in `Section 10.12`_.

        The authorization server validates the request to ensure that all
        required parameters are present and valid.  The authorization server
        MUST verify that the redirection URI to which it will redirect the
        access token matches a redirection URI registered by the client as
        described in `Section 3.1.2`_.

        .. _`Section 2.2`: https://tools.ietf.org/html/rfc6749#section-2.2
        .. _`Section 3.1.2`: https://tools.ietf.org/html/rfc6749#section-3.1.2
        .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
        .. _`Section 10.12`: https://tools.ietf.org/html/rfc6749#section-10.12
        .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B
        """
        # In the implicit flow authorization and token issuance are a single
        # step, so this simply delegates.
        return self.create_token_response(request, token_handler)

    def create_token_response(self, request, token_handler):
        """Return token or error embedded in the URI fragment.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param token_handler: A token handler instance, for example of type
                              oauthlib.oauth2.BearerToken.

        If the resource owner grants the access request, the authorization
        server issues an access token and delivers it to the client by adding
        the following parameters to the fragment component of the redirection
        URI using the "application/x-www-form-urlencoded" format, per
        `Appendix B`_:

        access_token
                REQUIRED.  The access token issued by the authorization server.

        token_type
                REQUIRED.  The type of the token issued as described in
                `Section 7.1`_.  Value is case insensitive.

        expires_in
                RECOMMENDED.  The lifetime in seconds of the access token.  For
                example, the value "3600" denotes that the access token will
                expire in one hour from the time the response was generated.
                If omitted, the authorization server SHOULD provide the
                expiration time via other means or document the default value.

        scope
                OPTIONAL, if identical to the scope requested by the client;
                otherwise, REQUIRED.  The scope of the access token as
                described by `Section 3.3`_.

        state
                REQUIRED if the "state" parameter was present in the client
                authorization request.  The exact value received from the
                client.

        The authorization server MUST NOT issue a refresh token.

        .. _`Appendix B`: https://tools.ietf.org/html/rfc6749#appendix-B
        .. _`Section 3.3`: https://tools.ietf.org/html/rfc6749#section-3.3
        .. _`Section 7.1`: https://tools.ietf.org/html/rfc6749#section-7.1
        """
        try:
            self.validate_token_request(request)

        # If the request fails due to a missing, invalid, or mismatching
        # redirection URI, or if the client identifier is missing or invalid,
        # the authorization server SHOULD inform the resource owner of the
        # error and MUST NOT automatically redirect the user-agent to the
        # invalid redirection URI.
        except errors.FatalClientError as e:
            log.debug('Fatal client error during validation of %r. %r.',
                      request, e)
            raise

        # If the resource owner denies the access request or if the request
        # fails for reasons other than a missing or invalid redirection URI,
        # the authorization server informs the client by adding the following
        # parameters to the fragment component of the redirection URI using the
        # "application/x-www-form-urlencoded" format, per Appendix B:
        # https://tools.ietf.org/html/rfc6749#appendix-B
        except errors.OAuth2Error as e:
            log.debug('Client error during validation of %r. %r.', request, e)
            return {'Location': common.add_params_to_uri(request.redirect_uri, e.twotuples,
                                                         fragment=True)}, None, 302

        # In OIDC implicit flow it is possible to have a request_type that does not include the access_token!
        # "id_token token" - return the access token and the id token
        # "id_token" - don't return the access token
        if "token" in request.response_type.split():
            token = token_handler.create_token(request, refresh_token=False)
        else:
            token = {}

        if request.state is not None:
            token['state'] = request.state

        for modifier in self._token_modifiers:
            token = modifier(token, token_handler, request)

        # In OIDC implicit flow it is possible to have a request_type that does
        # not include the access_token! In this case there is no need to save a token.
        if "token" in request.response_type.split():
            self.request_validator.save_token(token, request)

        return self.prepare_authorization_response(
            request, token, {}, None, 302)

    def validate_authorization_request(self, request):
        """
        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        """
        # Authorization and token validation coincide in the implicit flow.
        return self.validate_token_request(request)

    def validate_token_request(self, request):
        """Check the token request for normal and fatal errors.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request

        This method is very similar to validate_authorization_request in
        the AuthorizationCodeGrant but differ in a few subtle areas.

        A normal error could be a missing response_type parameter or the client
        attempting to access scope it is not allowed to ask authorization for.
        Normal errors can safely be included in the redirection URI and
        sent back to the client.

        Fatal errors occur when the client_id or redirect_uri is invalid or
        missing. These must be caught by the provider and handled, how this
        is done is outside of the scope of OAuthLib but showing an error
        page describing the issue is a good idea.
        """
        # First check for fatal errors

        # If the request fails due to a missing, invalid, or mismatching
        # redirection URI, or if the client identifier is missing or invalid,
        # the authorization server SHOULD inform the resource owner of the
        # error and MUST NOT automatically redirect the user-agent to the
        # invalid redirection URI.

        # First check duplicate parameters.
        # Accessing request.duplicate_params can raise ValueError when the
        # query string is unparseable, and its value is loop-invariant, so
        # fetch it once instead of re-evaluating it for every parameter.
        try:
            duplicate_params = request.duplicate_params
        except ValueError:
            raise errors.InvalidRequestFatalError(description='Unable to parse query string', request=request)
        for param in ('client_id', 'response_type', 'redirect_uri', 'scope', 'state'):
            if param in duplicate_params:
                raise errors.InvalidRequestFatalError(description='Duplicate %s parameter.' % param, request=request)

        # REQUIRED. The client identifier as described in Section 2.2.
        # https://tools.ietf.org/html/rfc6749#section-2.2
        if not request.client_id:
            raise errors.MissingClientIdError(request=request)

        if not self.request_validator.validate_client_id(request.client_id, request):
            raise errors.InvalidClientIdError(request=request)

        # OPTIONAL. As described in Section 3.1.2.
        # https://tools.ietf.org/html/rfc6749#section-3.1.2
        self._handle_redirects(request)

        # Then check for normal errors.

        request_info = self._run_custom_validators(request,
                                                   self.custom_validators.all_pre)

        # If the resource owner denies the access request or if the request
        # fails for reasons other than a missing or invalid redirection URI,
        # the authorization server informs the client by adding the following
        # parameters to the fragment component of the redirection URI using the
        # "application/x-www-form-urlencoded" format, per Appendix B.
        # https://tools.ietf.org/html/rfc6749#appendix-B

        # Note that the correct parameters to be added are automatically
        # populated through the use of specific exceptions

        # REQUIRED.
        if request.response_type is None:
            raise errors.MissingResponseTypeError(request=request)
        # Value MUST be one of our registered types: "token" by default or if using OIDC "id_token" or "id_token token"
        elif not set(request.response_type.split()).issubset(self.response_types):
            raise errors.UnsupportedResponseTypeError(request=request)

        log.debug('Validating use of response_type token for client %r (%r).',
                  request.client_id, request.client)
        if not self.request_validator.validate_response_type(request.client_id,
                                                             request.response_type,
                                                             request.client, request):
            log.debug('Client %s is not authorized to use response_type %s.',
                      request.client_id, request.response_type)
            raise errors.UnauthorizedClientError(request=request)

        # OPTIONAL. The scope of the access request as described by Section 3.3
        # https://tools.ietf.org/html/rfc6749#section-3.3
        self.validate_scopes(request)

        request_info.update({
            'client_id': request.client_id,
            'redirect_uri': request.redirect_uri,
            'response_type': request.response_type,
            'state': request.state,
            'request': request,
        })

        request_info = self._run_custom_validators(
            request,
            self.custom_validators.all_post,
            request_info
        )

        return request.scopes, request_info

    def _run_custom_validators(self,
                               request,
                               validations,
                               request_info=None):
        # Make a copy so we don't modify the existing request_info dict
        request_info = {} if request_info is None else request_info.copy()
        # For implicit grant, auth_validators and token_validators are
        # basically equivalent since the token is returned from the
        # authorization endpoint.
        for validator in validations:
            result = validator(request)
            if result is not None:
                request_info.update(result)
        return request_info
| {
"content_hash": "dc585568844eded1f1fd61f77c2822a2",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 119,
"avg_line_length": 44.819148936170215,
"alnum_prop": 0.6109660574412533,
"repo_name": "idan/oauthlib",
"id": "6110b6f3379114145d7b830bdaa77e405ea9eb0d",
"size": "16852",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "oauthlib/oauth2/rfc6749/grant_types/implicit.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1617"
},
{
"name": "Python",
"bytes": "661763"
}
],
"symlink_target": ""
} |
import os
from nipype.testing import (skipif)
import nipype.workflows.fmri.fsl as fsl_wf
import nipype.interfaces.fsl as fsl
import nipype.interfaces.utility as util
from nipype.interfaces.fsl import no_fsl, no_fsl_course_data
import nipype.pipeline.engine as pe
import warnings
import tempfile
import shutil
from nipype.workflows.dmri.fsl.epi import create_eddy_correct_pipeline
@skipif(no_fsl)
@skipif(no_fsl_course_data)
def test_create_eddy_correct_pipeline():
    """Check the nipype eddy-correction workflow against FSL's EddyCorrect node."""
    course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])
    dwi_file = os.path.join(course_dir, "fdt1/subj1/data.nii.gz")

    # Trim the DWI series to two volumes so the comparison stays cheap.
    roi_node = pe.Node(fsl.ExtractROI(t_min=0, t_size=2), name="trim_dwi")
    roi_node.inputs.in_file = dwi_file

    workflow_version = create_eddy_correct_pipeline("nipype_eddycorrect")
    workflow_version.inputs.inputnode.ref_num = 0

    # Creating fsl.EddyCorrect emits a warning here; suppress it
    # (presumably a deprecation notice — confirm against the interface).
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        reference_node = pe.Node(interface=fsl.EddyCorrect(), name="original_eddycorrect")
    reference_node.inputs.ref_num = 0

    # AssertEqual fails the pipeline run if the two outputs differ.
    compare_node = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test")

    wf = pe.Workflow(name="test_eddycorrect")
    wf.base_dir = tempfile.mkdtemp(prefix="nipype_test_eddycorrect_")
    wf.connect([
        (roi_node, reference_node, [("roi_file", "in_file")]),
        (roi_node, workflow_version, [("roi_file", "inputnode.in_file")]),
        (workflow_version, compare_node, [("outputnode.eddy_corrected", "volume1")]),
        (reference_node, compare_node, [("eddy_corrected", "volume2")]),
    ])
    wf.run(plugin='Linear')
    shutil.rmtree(wf.base_dir)
| {
"content_hash": "8f26dbd5a21088f7b1d9ca1e7ad1177d",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 96,
"avg_line_length": 37.4468085106383,
"alnum_prop": 0.6744318181818182,
"repo_name": "carolFrohlich/nipype",
"id": "f622b8304a8e9dab28db7abc92b08337e55dd1af",
"size": "1784",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nipype/workflows/dmri/fsl/tests/test_epi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "2320"
},
{
"name": "Matlab",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "5451077"
},
{
"name": "Shell",
"bytes": "3302"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
from tempest.services.volume.xml import availability_zone_client
class VolumeV2AvailabilityZoneClientXML(
        availability_zone_client.BaseVolumeAvailabilityZoneClientXML):
    """XML REST client for the volume v2 availability-zone API.

    Reuses the base availability-zone client; the only difference is the
    API version string used for request URLs.
    """

    def __init__(self, auth_provider):
        super(VolumeV2AvailabilityZoneClientXML, self).__init__(
            auth_provider)
        self.api_version = "v2"
| {
"content_hash": "af040309ca92171f6539055eac1bc165",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 70,
"avg_line_length": 31.272727272727273,
"alnum_prop": 0.7209302325581395,
"repo_name": "nikolay-fedotov/tempest",
"id": "68ca39bb655ae4ae909c3a12f145d5256591743b",
"size": "969",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tempest/services/volume/v2/xml/availability_zone_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
try:
from collections import namedtuple
except ImportError:
try:
from .ip_connection import namedtuple
except ValueError:
from ip_connection import namedtuple
try:
from .ip_connection import Device, IPConnection, Error
except ValueError:
from ip_connection import Device, IPConnection, Error
# Result tuple for get_identity(): UID, connected UID, position ('a'-'d'),
# hardware/firmware version triples and the numeric device identifier.
GetIdentity = namedtuple('Identity', ['uid', 'connected_uid', 'position', 'hardware_version', 'firmware_version', 'device_identifier'])
class BrickletIndustrialDigitalIn4(Device):
    """
    Device for controlling up to 4 optically coupled digital inputs
    """
    # Numeric device type reported by get_identity().
    DEVICE_IDENTIFIER = 223

    # Callback and function IDs used as message identifiers in
    # ipcon.send_request() calls below — do not renumber.
    CALLBACK_INTERRUPT = 9
    FUNCTION_GET_VALUE = 1
    FUNCTION_SET_GROUP = 2
    FUNCTION_GET_GROUP = 3
    FUNCTION_GET_AVAILABLE_FOR_GROUP = 4
    FUNCTION_SET_DEBOUNCE_PERIOD = 5
    FUNCTION_GET_DEBOUNCE_PERIOD = 6
    FUNCTION_SET_INTERRUPT = 7
    FUNCTION_GET_INTERRUPT = 8
    FUNCTION_GET_IDENTITY = 255

    def __init__(self, uid, ipcon):
        """
        Creates an object with the unique device ID *uid* and adds it to
        the IP Connection *ipcon*.
        """
        Device.__init__(self, uid, ipcon)

        self.api_version = (2, 0, 0)

        # Response-expected configuration per function ID; the
        # RESPONSE_EXPECTED_* constants are resolved through the Device base
        # class (not defined in this file).
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_VALUE] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_SET_GROUP] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_FALSE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_GROUP] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_AVAILABLE_FOR_GROUP] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_SET_DEBOUNCE_PERIOD] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_DEBOUNCE_PERIOD] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_SET_INTERRUPT] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_INTERRUPT] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE
        self.response_expected[BrickletIndustrialDigitalIn4.CALLBACK_INTERRUPT] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_FALSE
        self.response_expected[BrickletIndustrialDigitalIn4.FUNCTION_GET_IDENTITY] = BrickletIndustrialDigitalIn4.RESPONSE_EXPECTED_ALWAYS_TRUE

        # 'H H' — presumably the struct format for the interrupt callback
        # payload (two uint16 bitmasks); confirm against IPConnection.
        self.callback_formats[BrickletIndustrialDigitalIn4.CALLBACK_INTERRUPT] = 'H H'

    def get_value(self):
        """
        Returns the input value with a bitmask. The bitmask
        is 16 bit long, *true* refers to high and *false* refers to
        low.

        For example: The value 0b0000000000000011 means that pins 0-1
        are high and the other pins are low.

        If no groups are used (see :func:`SetGroup`), the pins correspond to the
        markings on the Digital In 4 Bricklet.

        If groups are used, the pins correspond to the element in the group.
        Element 1 in the group will get pins 0-3, element 2 pins 4-7, element 3
        pins 8-11 and element 4 pins 12-15.
        """
        return self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_VALUE, (), '', 'H')

    def set_group(self, group):
        """
        Sets a group of Digital In 4 Bricklets that should work together. You can
        find Bricklets that can be grouped together with :func:`GetAvailableForGroup`.

        The group consists of 4 elements. Element 1 in the group will get pins 0-3,
        element 2 pins 4-7, element 3 pins 8-11 and element 4 pins 12-15.

        Each element can either be one of the ports ('a' to 'd') or 'n' if it should
        not be used.

        For example: If you have two Digital In 4 Bricklets connected to port A and
        port B respectively, you could call with "['a', 'b', 'n', 'n']".

        Now the pins on the Digital In 4 on port A are assigned to 0-3 and the
        pins on the Digital In 4 on port B are assigned to 4-7. It is now possible
        to call :func:`GetValue` and read out two Bricklets at the same time.
        """
        self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_SET_GROUP, (group,), '4c', '')

    def get_group(self):
        """
        Returns the group as set by :func:`SetGroup`
        """
        return self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_GROUP, (), '', '4c')

    def get_available_for_group(self):
        """
        Returns a bitmask of ports that are available for grouping. For example the
        value 0b0101 means: Port *A* and Port *C* are connected to Bricklets that
        can be grouped together.
        """
        return self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_AVAILABLE_FOR_GROUP, (), '', 'B')

    def set_debounce_period(self, debounce):
        """
        Sets the debounce period of the :func:`Interrupt` callback in ms.

        For example: If you set this value to 100, you will get the interrupt
        maximal every 100ms. This is necessary if something that bounces is
        connected to the Digital In 4 Bricklet, such as a button.

        The default value is 100.
        """
        self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_SET_DEBOUNCE_PERIOD, (debounce,), 'I', '')

    def get_debounce_period(self):
        """
        Returns the debounce period as set by :func:`SetDebouncePeriod`.
        """
        return self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_DEBOUNCE_PERIOD, (), '', 'I')

    def set_interrupt(self, interrupt_mask):
        """
        Sets the pins on which an interrupt is activated with a bitmask.
        Interrupts are triggered on changes of the voltage level of the pin,
        i.e. changes from high to low and low to high.

        For example: An interrupt bitmask of 9 (0b0000000000001001) will
        enable the interrupt for pins 0 and 3.

        The interrupts use the grouping as set by :func:`SetGroup`.

        The interrupt is delivered with the callback :func:`Interrupt`.
        """
        self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_SET_INTERRUPT, (interrupt_mask,), 'H', '')

    def get_interrupt(self):
        """
        Returns the interrupt bitmask as set by :func:`SetInterrupt`.
        """
        return self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_INTERRUPT, (), '', 'H')

    def get_identity(self):
        """
        Returns the UID, the UID where the Bricklet is connected to,
        the position, the hardware and firmware version as well as the
        device identifier.

        The position can be 'a', 'b', 'c' or 'd'.

        The device identifiers can be found :ref:`here <device_identifier>`.

        .. versionadded:: 2.0.0~(Plugin)
        """
        return GetIdentity(*self.ipcon.send_request(self, BrickletIndustrialDigitalIn4.FUNCTION_GET_IDENTITY, (), '', '8s 8s c 3B 3B H'))

    def register_callback(self, id, callback):
        """
        Registers a callback with ID *id* to the function *callback*.
        """
        # `id` shadows the Python builtin, but renaming it would change the
        # keyword API for existing callers.
        self.registered_callbacks[id] = callback

IndustrialDigitalIn4 = BrickletIndustrialDigitalIn4 # for backward compatibility
| {
"content_hash": "6f11a85dd083574eb5dd693ce5649a6a",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 154,
"avg_line_length": 45.22485207100592,
"alnum_prop": 0.6760434384404029,
"repo_name": "DeathPoison/tinker-cnc",
"id": "599ba7772231cd437e2432090d16c5e75423cc8e",
"size": "8226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tinkerforge/bricklet_industrial_digital_in_4.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "515489"
}
],
"symlink_target": ""
} |
# Inline LLDB test driver for this directory.
import lldbsuite.test.lldbinline as lldbinline
# Wildcard import brings in test decorators, including swiftTest.
from lldbsuite.test.decorators import *
# Generate the test case class into this module from the companion
# source files in this directory; gated by the swiftTest decorator.
lldbinline.MakeInlineTest(__file__, globals(), decorators=[swiftTest])
| {
"content_hash": "63883dc22c4f5fabec62c65b7a4cee6c",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 70,
"avg_line_length": 39.75,
"alnum_prop": 0.8050314465408805,
"repo_name": "apple/swift-lldb",
"id": "d773710c0d159afd89966dd223beef20ec622a47",
"size": "613",
"binary": false,
"copies": "2",
"ref": "refs/heads/stable",
"path": "packages/Python/lldbsuite/test/lang/swift/po/val_types/TestSwiftPOValTypes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "130449"
},
{
"name": "C",
"bytes": "198536"
},
{
"name": "C++",
"bytes": "27687071"
},
{
"name": "CMake",
"bytes": "172176"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "106804"
},
{
"name": "Objective-C",
"bytes": "106821"
},
{
"name": "Objective-C++",
"bytes": "25658"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "4680483"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Swift",
"bytes": "260786"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
} |
from predicthq.endpoints.base import UserBaseEndpoint
from predicthq.endpoints.decorators import accepts, returns
from .schemas import (
SearchParams,
EventResultSet,
CountResultSet,
CalendarParams,
CalendarResultSet,
)
class EventsEndpoint(UserBaseEndpoint):
    """Client endpoints for the PredictHQ /v1/events API.

    Each public method takes the documented query parameters as keyword
    arguments. An optional ``config.verify_ssl`` entry (default ``True``)
    controls TLS certificate verification for the underlying HTTP call.
    """

    def _get_events_resource(self, path, params):
        """Issue a GET against ``v1/<path>`` with shared parameter handling.

        Pops the ``config.verify_ssl`` flag out of *params* (so it is not
        sent as a query parameter) and forwards the rest to the API.

        :param path: resource path below ``v1`` (e.g. ``"events/count"``).
        :param params: mutable dict of query parameters; modified in place.
        :returns: the decoded response from ``self.client.get``.
        """
        verify_ssl = params.pop("config.verify_ssl", True)
        return self.client.get(
            self.build_url("v1", path),
            params=params,
            verify=verify_ssl,
        )

    @accepts(SearchParams)
    @returns(EventResultSet)
    def search(self, **params):
        """Search for events matching *params*."""
        return self._get_events_resource("events", params)

    @accepts(SearchParams)
    @returns(CountResultSet)
    def count(self, **params):
        """Return aggregate counts for events matching *params*."""
        return self._get_events_resource("events/count", params)

    @accepts(CalendarParams)
    @returns(CalendarResultSet)
    def calendar(self, **params):
        """Return a calendar aggregation of events matching *params*."""
        return self._get_events_resource("events/calendar", params)
| {
"content_hash": "e177b46d6bfbfd467e66ede0c3f8043b",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 59,
"avg_line_length": 28.73170731707317,
"alnum_prop": 0.6179966044142614,
"repo_name": "predicthq/sdk-py",
"id": "51fb33a51758fe901ec18164538705d2d49d1415",
"size": "1178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "predicthq/endpoints/v1/events/endpoint.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "143018"
}
],
"symlink_target": ""
} |
"""
Common functionalities shared between different iLO modules.
"""
import tempfile
from oslo_config import cfg
from oslo_utils import importutils
import six.moves.urllib.parse as urlparse
from ironic.common import exception
from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LI
from ironic.common import images
from ironic.common import swift
from ironic.common import utils
from ironic.drivers.modules import deploy_utils
from ironic.openstack.common import log as logging
# proliantutils is an optional dependency: try_import returns None instead
# of raising ImportError when the library is not installed.
ilo_client = importutils.try_import('proliantutils.ilo.client')
ilo_error = importutils.try_import('proliantutils.exception')
# License tiers returned by get_ilo_license().
STANDARD_LICENSE = 1
ESSENTIALS_LICENSE = 2
ADVANCED_LICENSE = 3
# Configuration options registered below under the [ilo] group.
opts = [
    cfg.IntOpt('client_timeout',
               default=60,
               help='Timeout (in seconds) for iLO operations'),
    cfg.IntOpt('client_port',
               default=443,
               help='Port to be used for iLO operations'),
    cfg.StrOpt('swift_ilo_container',
               default='ironic_ilo_container',
               help='The Swift iLO container to store data.'),
    cfg.IntOpt('swift_object_expiry_timeout',
               default=900,
               help='Amount of time in seconds for Swift objects to '
                    'auto-expire.'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group='ilo')
LOG = logging.getLogger(__name__)
# driver_info keys every iLO-managed node must supply.
REQUIRED_PROPERTIES = {
    'ilo_address': _("IP address or hostname of the iLO. Required."),
    'ilo_username': _("username for the iLO with administrator privileges. "
                      "Required."),
    'ilo_password': _("password for ilo_username. Required.")
}
# driver_info keys that fall back to the [ilo] config group when absent
# (see parse_driver_info).
OPTIONAL_PROPERTIES = {
    'client_port': _("port to be used for iLO operations. Optional."),
    'client_timeout': _("timeout (in seconds) for iLO operations. Optional."),
}
CONSOLE_PROPERTIES = {
    'console_port': _("node's UDP port to connect to. Only required for "
                      "console access.")
}
CLEAN_PROPERTIES = {
    'ilo_change_password': _("new password for iLO. Required if the clean "
                             "step 'reset_ilo_credential' is enabled.")
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
# Boot mode assumed when the iLO cannot report one.
DEFAULT_BOOT_MODE = 'LEGACY'
# Two-way mapping between ironic's generic boot-mode names and the iLO's.
BOOT_MODE_GENERIC_TO_ILO = {'bios': 'legacy', 'uefi': 'uefi'}
BOOT_MODE_ILO_TO_GENERIC = dict((v, k)
                                for (k, v) in BOOT_MODE_GENERIC_TO_ILO.items())
def parse_driver_info(node):
    """Gets the driver specific Node info.

    This method validates whether the 'driver_info' property of the
    supplied node contains the required information for this driver.

    :param node: an ironic Node object.
    :returns: a dict containing information from driver_info (or where
        applicable, config values).
    :raises: InvalidParameterValue if any parameters are incorrect
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    """
    info = node.driver_info
    # Report every missing mandatory parameter in a single error.
    missing = [prop for prop in REQUIRED_PROPERTIES if prop not in info]
    if missing:
        raise exception.MissingParameterValue(_(
            "The following required iLO parameters are missing from the "
            "node's driver_info: %s") % missing)
    d_info = dict((prop, info[prop]) for prop in REQUIRED_PROPERTIES)
    # Optional parameters fall back to the [ilo] configuration group and
    # must be integers; collect all offenders before raising.
    invalid_ints = []
    for prop in OPTIONAL_PROPERTIES:
        raw = info.get(prop, CONF.ilo.get(prop))
        try:
            d_info[prop] = int(raw)
        except ValueError:
            invalid_ints.append(prop)
    # Console parameters are only validated when present and truthy.
    for prop in CONSOLE_PROPERTIES:
        raw = info.get(prop)
        if raw:
            try:
                d_info[prop] = int(raw)
            except ValueError:
                invalid_ints.append(prop)
    if invalid_ints:
        raise exception.InvalidParameterValue(_(
            "The following iLO parameters from the node's driver_info "
            "should be integers: %s") % invalid_ints)
    return d_info
def get_ilo_object(node):
    """Gets an IloClient object from proliantutils library.

    Given an ironic node object, this method gives back a IloClient object
    to do operations on the iLO.

    :param node: an ironic node object.
    :returns: an IloClient object.
    :raises: InvalidParameterValue on invalid inputs.
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    """
    # Validation of the credentials happens inside parse_driver_info.
    info = parse_driver_info(node)
    return ilo_client.IloClient(info['ilo_address'],
                                info['ilo_username'],
                                info['ilo_password'],
                                info['client_timeout'],
                                info['client_port'])
def get_ilo_license(node):
    """Gives the current installed license on the node.

    Given an ironic node object, this method queries the iLO
    for currently installed license and returns it back.

    :param node: an ironic node object.
    :returns: a constant defined in this module which
        refers to the current license installed on the node.
    :raises: InvalidParameterValue on invalid inputs.
    :raises: MissingParameterValue if some mandatory information
        is missing on the node
    :raises: IloOperationError if it failed to retrieve the
        installed licenses from the iLO.
    """
    ilo_object = get_ilo_object(node)
    try:
        license_info = ilo_object.get_all_licenses()
    except ilo_error.IloError as ilo_exception:
        raise exception.IloOperationError(operation=_('iLO license check'),
                                          error=str(ilo_exception))
    # Map the iLO's textual license name onto this module's constants;
    # anything unrecognized is treated as the standard license.
    license_type = license_info['LICENSE_TYPE']
    for suffix, constant in (("Advanced", ADVANCED_LICENSE),
                             ("Essentials", ESSENTIALS_LICENSE)):
        if license_type.endswith(suffix):
            return constant
    return STANDARD_LICENSE
def update_ipmi_properties(task):
    """Mirror the node's iLO credentials into its IPMI driver_info keys.

    :param task: a task from TaskManager.
    """
    info = task.node.driver_info
    # Copy each iLO credential to its IPMI counterpart.
    for ipmi_key, ilo_key in (('ipmi_address', 'ilo_address'),
                              ('ipmi_username', 'ilo_username'),
                              ('ipmi_password', 'ilo_password')):
        info[ipmi_key] = info.get(ilo_key)
    if 'console_port' in info:
        info['ipmi_terminal_port'] = info['console_port']
    # Reassign so the updated dict is recorded on the task's node.
    task.node.driver_info = info
def _get_floppy_image_name(node):
"""Returns the floppy image name for a given node.
:param node: the node for which image name is to be provided.
"""
return "image-%s" % node.uuid
def _prepare_floppy_image(task, params):
    """Prepares the floppy image for passing the parameters.
    This method prepares a temporary vfat filesystem image. Then it adds
    two files into the image - one containing the authentication token and
    the other containing the parameters to be passed to the ramdisk. Then it
    uploads the file to Swift in 'swift_ilo_container', setting it to
    auto-expire after 'swift_object_expiry_timeout' seconds. Then it returns
    the temp url for the Swift object.
    :param task: a TaskManager instance containing the node to act on.
    :param params: a dictionary containing 'parameter name'->'value' mapping
        to be passed to the deploy ramdisk via the floppy image.
    :raises: ImageCreationFailed, if it failed while creating the floppy image.
    :raises: SwiftOperationError, if any operation with Swift fails.
    :returns: the Swift temp url for the floppy image.
    """
    # The vfat image tempfile is deleted automatically when the 'with'
    # block exits, i.e. after the upload to Swift has completed.
    with tempfile.NamedTemporaryFile() as vfat_image_tmpfile_obj:
        files_info = {}
        token_tmpfile_obj = None
        vfat_image_tmpfile = vfat_image_tmpfile_obj.name
        # If auth_strategy is noauth, then no need to write token into
        # the image file.
        if task.context.auth_token:
            token_tmpfile_obj = tempfile.NamedTemporaryFile()
            token_tmpfile = token_tmpfile_obj.name
            utils.write_to_file(token_tmpfile, task.context.auth_token)
            # The file lands in the image under the name 'token'.
            files_info[token_tmpfile] = 'token'
        try:
            images.create_vfat_image(vfat_image_tmpfile, files_info=files_info,
                                     parameters=params)
        finally:
            # Close (and thereby delete) the token tempfile even if the
            # image creation failed.
            if token_tmpfile_obj:
                token_tmpfile_obj.close()
        container = CONF.ilo.swift_ilo_container
        object_name = _get_floppy_image_name(task.node)
        timeout = CONF.ilo.swift_object_expiry_timeout
        # Auto-expire the object so stale floppy images get cleaned up.
        object_headers = {'X-Delete-After': timeout}
        swift_api = swift.SwiftAPI()
        swift_api.create_object(container, object_name,
                                vfat_image_tmpfile,
                                object_headers=object_headers)
        temp_url = swift_api.get_temp_url(container, object_name, timeout)
        LOG.debug("Uploaded floppy image %(object_name)s to %(container)s "
                  "for deployment.",
                  {'object_name': object_name, 'container': container})
        return temp_url
def attach_vmedia(node, device, url):
    """Attaches the given url as virtual media on the node.

    :param node: an ironic node object.
    :param device: the virtual media device to attach
    :param url: the http/https url to attach as the virtual media device
    :raises: IloOperationError if insert virtual media failed.
    """
    ilo = get_ilo_object(node)
    try:
        ilo.insert_virtual_media(url, device=device)
        # Connect the media and keep it write-protected.
        ilo.set_vm_status(device=device, boot_option='CONNECT',
                          write_protect='YES')
    except ilo_error.IloError as exc:
        operation = _("Inserting virtual media %s") % device
        raise exception.IloOperationError(operation=operation,
                                          error=exc)
    LOG.info(_LI("Attached virtual media %s successfully."), device)
def set_boot_mode(node, boot_mode):
    """Sets the node to boot using boot_mode for the next boot.

    :param node: an ironic node object.
    :param boot_mode: Next boot mode.
    :raises: IloOperationError if setting boot mode failed.
    """
    ilo = get_ilo_object(node)
    try:
        pending = ilo.get_pending_boot_mode()
    except ilo_error.IloCommandNotSupportedError:
        # The iLO cannot report a pending boot mode; assume the default.
        pending = DEFAULT_BOOT_MODE
    if BOOT_MODE_ILO_TO_GENERIC[pending.lower()] == boot_mode:
        # The requested mode is already pending; nothing to do.
        LOG.info(_LI("Node %(uuid)s pending boot mode is %(boot_mode)s."),
                 {'uuid': node.uuid, 'boot_mode': boot_mode})
        return
    try:
        ilo.set_pending_boot_mode(
            BOOT_MODE_GENERIC_TO_ILO[boot_mode].upper())
    except ilo_error.IloError as exc:
        operation = _("Setting %s as boot mode") % boot_mode
        raise exception.IloOperationError(operation=operation,
                                          error=exc)
    LOG.info(_LI("Node %(uuid)s boot mode is set to %(boot_mode)s."),
             {'uuid': node.uuid, 'boot_mode': boot_mode})
def update_boot_mode(task):
    """Update instance_info with boot mode to be used for deploy.
    This method updates instance_info with boot mode to be used for
    deploy if node properties['capabilities'] do not have boot_mode.
    It sets the boot mode on the node.
    :param task: Task object.
    :raises: IloOperationError if setting boot mode failed.
    """
    node = task.node
    boot_mode = deploy_utils.get_boot_mode_for_deploy(node)
    if boot_mode is not None:
        # The boot mode is dictated by the node/instance configuration:
        # push it to the iLO and stop here.
        LOG.debug("Node %(uuid)s boot mode is being set to %(boot_mode)s",
                  {'uuid': node.uuid, 'boot_mode': boot_mode})
        set_boot_mode(node, boot_mode)
        return
    # No configured preference: fall back to what the iLO reports.
    LOG.debug("Check pending boot mode for node %s.", node.uuid)
    ilo_object = get_ilo_object(node)
    try:
        boot_mode = ilo_object.get_pending_boot_mode()
    except ilo_error.IloCommandNotSupportedError:
        # iLOs that cannot report a pending mode are treated as legacy.
        boot_mode = 'legacy'
    if boot_mode != 'UNKNOWN':
        boot_mode = BOOT_MODE_ILO_TO_GENERIC[boot_mode.lower()]
    if boot_mode == 'UNKNOWN':
        # NOTE(faizan) ILO will return this in remote cases and mostly on
        # the nodes which supports UEFI. Such nodes mostly comes with UEFI
        # as default boot mode. So we will try setting bootmode to UEFI
        # and if it fails then we fall back to BIOS boot mode.
        # NOTE(review): despite the comment above, a failure here re-raises
        # as IloOperationError rather than falling back to BIOS — confirm
        # whether the fallback was ever implemented.
        try:
            boot_mode = 'uefi'
            ilo_object.set_pending_boot_mode(
                BOOT_MODE_GENERIC_TO_ILO[boot_mode].upper())
        except ilo_error.IloError as ilo_exception:
            operation = _("Setting %s as boot mode") % boot_mode
            raise exception.IloOperationError(operation=operation,
                                              error=ilo_exception)
        LOG.debug("Node %(uuid)s boot mode is being set to %(boot_mode)s "
                  "as pending boot mode is unknown.",
                  {'uuid': node.uuid, 'boot_mode': boot_mode})
    # Record the resolved mode for later stages of the deploy.
    instance_info = node.instance_info
    instance_info['deploy_boot_mode'] = boot_mode
    node.instance_info = instance_info
    node.save()
def setup_vmedia_for_boot(task, boot_iso, parameters=None):
    """Sets up the node to boot from the given ISO image.

    Attaches *boot_iso* as a virtual CDROM and, when *parameters* are
    given, passes them to the ramdisk through a virtual floppy image.

    :param task: a TaskManager instance containing the node to act on.
    :param boot_iso: a bootable ISO image to attach to. Should be either
        of below:
        * A Swift object - It should be of format 'swift:<object-name>'.
          It is assumed that the image object is present in
          CONF.ilo.swift_ilo_container;
        * A Glance image - It should be format 'glance://<glance-image-uuid>'
          or just <glance-image-uuid>;
        * An HTTP(S) URL.
    :param parameters: the parameters to pass in the virtual floppy image
        in a dictionary. This is optional.
    :raises: ImageCreationFailed, if it failed while creating the floppy image.
    :raises: SwiftOperationError, if any operation with Swift fails.
    :raises: IloOperationError, if attaching virtual media failed.
    """
    LOG.info(_LI("Setting up node %s to boot from virtual media"),
             task.node.uuid)
    if parameters:
        floppy_url = _prepare_floppy_image(task, parameters)
        attach_vmedia(task.node, 'FLOPPY', floppy_url)
    resolved_url = None
    parsed = urlparse.urlparse(boot_iso)
    if parsed.scheme == 'swift':
        # Resolve the Swift object into a temp URL the iLO can fetch.
        resolved_url = swift.SwiftAPI().get_temp_url(
            CONF.ilo.swift_ilo_container, parsed.path,
            CONF.ilo.swift_object_expiry_timeout)
    elif service_utils.is_glance_image(boot_iso):
        resolved_url = images.get_temp_url_for_glance_image(task.context,
                                                            boot_iso)
    # Plain HTTP(S) URLs are attached as-is.
    attach_vmedia(task.node, 'CDROM', resolved_url or boot_iso)
def cleanup_vmedia_boot(task):
    """Cleans a node after a virtual media boot.

    This method cleans up a node after a virtual media boot. It deletes the
    floppy image if it exists in CONF.ilo.swift_ilo_container. It also
    ejects both virtual media cdrom and virtual media floppy.

    :param task: a TaskManager instance containing the node to act on.
    """
    node = task.node
    LOG.debug("Cleaning up node %s after virtual media boot", node.uuid)
    container = CONF.ilo.swift_ilo_container
    object_name = _get_floppy_image_name(node)
    try:
        swift.SwiftAPI().delete_object(container, object_name)
    except exception.SwiftOperationError as e:
        # Best effort: log and carry on so the media still gets ejected.
        LOG.exception(_LE("Error while deleting %(object_name)s from "
                          "%(container)s. Error: %(error)s"),
                      {'object_name': object_name, 'container': container,
                       'error': e})
    ilo = get_ilo_object(node)
    for device in ('FLOPPY', 'CDROM'):
        try:
            ilo.eject_virtual_media(device)
        except ilo_error.IloError as exc:
            # Again best effort: try the next device regardless.
            LOG.exception(_LE("Error while ejecting virtual media %(device)s "
                              "from node %(uuid)s. Error: %(error)s"),
                          {'device': device, 'uuid': node.uuid,
                           'error': exc})
def get_secure_boot_mode(task):
    """Retrieves current enabled state of UEFI secure boot on the node

    Returns the current enabled state of UEFI secure boot on the node.

    :param task: a task from TaskManager.
    :raises: MissingParameterValue if a required iLO parameter is missing.
    :raises: IloOperationError on an error from IloClient library.
    :raises: IloOperationNotSupported if UEFI secure boot is not supported.
    :returns: Boolean value indicating current state of UEFI secure boot
        on the node.
    """
    operation = _("Get secure boot mode for node %s.") % task.node.uuid
    ilo = get_ilo_object(task.node)
    enabled = False
    try:
        # Secure boot state is only queried when the node boots in UEFI.
        if ilo.get_current_boot_mode() == 'UEFI':
            enabled = ilo.get_secure_boot_mode()
    except ilo_error.IloCommandNotSupportedError as exc:
        raise exception.IloOperationNotSupported(operation=operation,
                                                 error=exc)
    except ilo_error.IloError as exc:
        raise exception.IloOperationError(operation=operation,
                                          error=exc)
    LOG.debug("Get secure boot mode for node %(node)s returned %(value)s",
              {'value': enabled, 'node': task.node.uuid})
    return enabled
def set_secure_boot_mode(task, flag):
    """Enable or disable UEFI Secure Boot for the next boot

    :param task: a task from TaskManager.
    :param flag: Boolean value. True if the secure boot to be
        enabled in next boot.
    :raises: IloOperationError on an error from IloClient library.
    :raises: IloOperationNotSupported if UEFI secure boot is not supported.
    """
    operation = (_("Setting secure boot to %(flag)s for node %(node)s.") %
                 {'flag': flag, 'node': task.node.uuid})
    ilo = get_ilo_object(task.node)
    try:
        ilo.set_secure_boot_mode(flag)
    except ilo_error.IloCommandNotSupportedError as exc:
        raise exception.IloOperationNotSupported(operation=operation,
                                                 error=exc)
    except ilo_error.IloError as exc:
        raise exception.IloOperationError(operation=operation,
                                          error=exc)
    else:
        LOG.debug(operation)
| {
"content_hash": "c454a7abf417d70058683cfc8514ef12",
"timestamp": "",
"source": "github",
"line_count": 512,
"max_line_length": 79,
"avg_line_length": 37.869140625,
"alnum_prop": 0.6383516426839961,
"repo_name": "supermari0/ironic",
"id": "1d1363d0df44ff8d3ead6ddd6b446e339ab2f669",
"size": "19996",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ironic/drivers/modules/ilo/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "2938734"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.test import override_settings
from django.urls import reverse
from ..models import Article
@override_settings(ROOT_URLCONF='forms_tests.urls')
class LiveWidgetTests(AdminSeleniumTestCase):
    # Selenium-driven widget tests; requires the forms_tests app.
    available_apps = ['forms_tests'] + AdminSeleniumTestCase.available_apps
    def test_textarea_trailing_newlines(self):
        """
        A roundtrip on a ModelForm doesn't alter the TextField value
        """
        article = Article.objects.create(content="\nTst\n")
        # Open the article's form and submit it without editing anything.
        self.selenium.get(self.live_server_url + reverse('article_form', args=[article.pk]))
        self.selenium.find_element_by_id('submit').submit()
        article = Article.objects.get(pk=article.pk)
        # The expected value has \r\n: browsers submit textarea content
        # with CRLF line endings, and the roundtrip preserves them.
        self.assertEqual(article.content, "\r\nTst\r\n")
| {
"content_hash": "c38c691782d8cd66d2691dbe958a298a",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 92,
"avg_line_length": 36.82608695652174,
"alnum_prop": 0.7166469893742621,
"repo_name": "sarthakmeh03/django",
"id": "f67954fd3a5fb4977ecae66797050943e5ee464a",
"size": "847",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/forms_tests/tests/test_widgets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52170"
},
{
"name": "HTML",
"bytes": "174451"
},
{
"name": "JavaScript",
"bytes": "251434"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11348046"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
"""
Tests the shell core module
:author: Thomas Calmant
"""
# Standard library
import os
import sys
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
# Tests
try:
import unittest2 as unittest
except ImportError:
import unittest
# Pelix
from pelix.framework import FrameworkFactory, create_framework, Bundle
import pelix.constants as constants
from pelix.ipopo.constants import use_ipopo
# Shell constants
from pelix.shell import SERVICE_SHELL, SERVICE_SHELL_COMMAND, \
SERVICE_SHELL_UTILS
import pelix.shell.beans as beans
# ------------------------------------------------------------------------------
# Module version, exposed both as a tuple and as a dotted string
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
class ShellUtilsTest(unittest.TestCase):
    """
    Tests the shell utility service (table rendering helpers)
    """
    def setUp(self):
        """
        Starts a framework and install the shell bundle
        """
        # Start the framework
        self.framework = FrameworkFactory.get_framework()
        self.framework.start()
        self.context = self.framework.get_bundle_context()
        # Install the bundle
        self.context.install_bundle("pelix.shell.core").start()
        # Get the utility service
        svc_ref = self.context.get_service_reference(SERVICE_SHELL_UTILS)
        self.utility = self.context.get_service(svc_ref)
    def tearDown(self):
        """
        Cleans up the framework
        """
        self.framework.stop()
        FrameworkFactory.delete_framework()
        # Drop references so each test starts from a fresh framework
        self.utility = None
        self.context = None
        self.framework = None
    def testTableSimple(self):
        """
        Tests a valid table creation
        """
        headers = ('ID', 'Name', 'Properties')
        lines = [(12, 'Toto', {'valid': True}),
                 (True, [1, 2, 3], (1, 2, 3))]
        # Test without prefix
        # NOTE: the expected string below is compared byte-for-byte with
        # make_table()'s output.
        result = """+------+-----------+-----------------+
| ID | Name | Properties |
+======+===========+=================+
| 12 | Toto | {'valid': True} |
+------+-----------+-----------------+
| True | [1, 2, 3] | (1, 2, 3) |
+------+-----------+-----------------+
"""
        self.assertEqual(self.utility.make_table(headers, lines),
                         result, "Different outputs")
        # Test with prefix
        result = """ +------+-----------+-----------------+
| ID | Name | Properties |
+======+===========+=================+
| 12 | Toto | {'valid': True} |
+------+-----------+-----------------+
| True | [1, 2, 3] | (1, 2, 3) |
+------+-----------+-----------------+
"""
        self.assertEqual(self.utility.make_table(headers, lines, ' '),
                         result, "Different outputs")
    def testTableEmpty(self):
        """
        Tests the creation of an empty table
        """
        headers = ('ID', 'Name', 'Properties')
        # An empty table still renders its header block
        result = """+----+------+------------+
| ID | Name | Properties |
+====+======+============+
"""
        self.assertEqual(self.utility.make_table(headers, []),
                         result, "Different outputs")
    def testTableBadCount(self):
        """
        Tests the creation of table with different headers/columns count
        """
        headers = ('ID', 'Name', 'Properties')
        # One row with too many cells, one row set with too few
        bad_columns_1 = [(1, 2, 3, 4)]
        bad_columns_2 = [(1, 2, 3),
                         (4, 5)]
        self.assertRaises(ValueError, self.utility.make_table,
                          headers, bad_columns_1,
                          "Too many columns accepted")
        self.assertRaises(ValueError, self.utility.make_table,
                          headers, bad_columns_2,
                          "Missing columns accepted")
    def testTableBadType(self):
        """
        Tests invalid types of line
        """
        headers = ('ID', 'Name', 'Properties')
        # Rows must be iterables of cells, not scalars or plain objects
        for bad_line in (None, 12, object()):
            self.assertRaises(ValueError, self.utility.make_table,
                              headers, [bad_line],
                              "Bad line type accepted")
# ------------------------------------------------------------------------------
class ShellCoreTest(unittest.TestCase):
    """
    Tests the shell core service (command registration and execution)
    """
    def setUp(self):
        """
        Starts a framework and install the shell bundle
        """
        # Start the framework
        self.framework = FrameworkFactory.get_framework()
        self.framework.start()
        self.context = self.framework.get_bundle_context()
        # Install the bundle
        self.context.install_bundle("pelix.shell.core").start()
        # Get the utility service
        svc_ref = self.context.get_service_reference(SERVICE_SHELL)
        self.shell = self.context.get_service(svc_ref)
        # Command flags: _flag is set to True by _command1 when it runs
        self._flag = False
    def tearDown(self):
        """
        Cleans up the framework
        """
        self.framework.stop()
        FrameworkFactory.delete_framework()
        self.shell = None
        self.context = None
        self.framework = None
        self._flag = False
    def _command1(self, io_handler):
        """
        Test command: records its execution in self._flag
        """
        self._flag = True
    def testRegister(self):
        """
        Tests registration method
        """
        # 1st registration
        self.assertTrue(self.shell.register_command("test", "command",
                                                    self._command1),
                        "Command not registered")
        # 2nd registration
        self.assertFalse(self.shell.register_command("test", "command",
                                                     self._command1),
                         "Command registered twice")
        # Invalid command
        for invalid in (None, "", " "):
            self.assertFalse(
                self.shell.register_command("test", invalid, self._command1),
                "Invalid command registered: '{0}'".format(invalid))
        # Invalid method
        self.assertFalse(self.shell.register_command("test", "invalid", None),
                         "Invalid method registered")
    def testExecute(self):
        """
        Tests the execute() method
        """
        # Registration
        self.shell.register_command("test", "command", self._command1)
        self.assertFalse(self._flag, "Bad flag value")
        # Call it (complete name)
        self.assertTrue(self.shell.execute("test.command"),
                        "Error in executing 'test.command'")
        self.assertTrue(self._flag, "Command not called")
        # Call it (simple name)
        self._flag = False
        self.assertTrue(self.shell.execute("command"),
                        "Error in executing 'command'")
        self.assertTrue(self._flag, "Command not called")
    def testExecuteInvalid(self):
        """
        Tests execution of empty or unknown commands
        """
        # Empty line
        for empty in (None, "", " "):
            self.assertFalse(self.shell.execute(empty),
                             "No error executing '{0}'".format(empty))
        # Unknown command
        for unknown in ("unknown", "test.unknown", "unknown.unknown"):
            self.assertFalse(self.shell.execute(unknown),
                             "No error executing unknown command")
    def testUnregister(self):
        """
        Tests command unregistration
        """
        # Registration
        self.shell.register_command("test", "command", self._command1)
        self.assertFalse(self._flag, "Bad flag value")
        # Call it (complete name)
        self.assertTrue(self.shell.execute("test.command"),
                        "Error in executing 'test.command'")
        self.assertTrue(self._flag, "Command not called")
        # Unregister the command
        self._flag = False
        self.assertTrue(self.shell.unregister("test", "command"),
                        "Failed unregistration")
        self.assertFalse(self.shell.unregister("test", "command"),
                         "Unregistered twice")
        # Check next call: the command must no longer be reachable
        self.assertFalse(self.shell.execute("test.command"),
                         "Succeeded executing 'test.command'")
        self.assertFalse(self._flag, "Command called")
    def testGetters(self):
        """
        Tests get_*() methods
        """
        # No exception here
        self.assertIsNotNone(self.shell.get_ps1(), "No PS1")
        self.assertIsNotNone(self.shell.get_banner(), "No banner")
        # Name spaces
        self.assertEqual(self.shell.get_namespaces(), [],
                         "Invalid name spaces")
        self.assertIn("help", self.shell.get_commands(None),
                      "No help in default commands")
        self.assertEqual(self.shell.get_commands("test"), [],
                         "Test commands should be []")
        # Register a command: its namespace must now be visible
        self.shell.register_command("test", "command", self._command1)
        self.assertEqual(self.shell.get_namespaces(), ['test'],
                         "Invalid name spaces")
        self.assertIn("command", self.shell.get_commands('test'),
                      "Registered command not in get_commands")
    def testMultiplePossibilities(self):
        """
        Tests the execution of multiple command possibilities
        """
        # Same command name in two different namespaces
        self.shell.register_command("test", "command", self._command1)
        self.shell.register_command("test2", "command", self._command1)
        # Call them (complete name)
        for name in ("test.command", "test2.command"):
            self._flag = False
            self.assertTrue(self.shell.execute(name),
                            "Error in executing '{0}'".format(name))
            self.assertTrue(self._flag, "Command not called")
        # Simple name must fail: the short form is ambiguous
        self._flag = False
        self.assertFalse(self.shell.execute('command'),
                         "Error in executing 'command'")
        self.assertFalse(self._flag, "Command called")
# ------------------------------------------------------------------------------
class ShellCommandTest(unittest.TestCase):
    """
    Tests shell command argument handling and whiteboard registration
    """
    def setUp(self):
        """
        Starts a framework and install the shell bundle
        """
        # Start the framework
        self.framework = FrameworkFactory.get_framework()
        self.framework.start()
        self.context = self.framework.get_bundle_context()
        # Install the bundle
        self.context.install_bundle("pelix.shell.core").start()
        # Get the utility service
        svc_ref = self.context.get_service_reference(SERVICE_SHELL)
        self.shell = self.context.get_service(svc_ref)
    def tearDown(self):
        """
        Cleans up the framework
        """
        self.framework.stop()
        FrameworkFactory.delete_framework()
        self.shell = None
        self.context = None
        self.framework = None
    def testPositional(self):
        """
        Tests positional arguments
        """
        def command(io_handler, arg1, arg2):
            """
            Sample command
            """
            return arg1, arg2
        # Register the command
        self.shell.register_command('test', 'command', command)
        # Valid execution: arguments arrive as strings
        session = beans.ShellSession(beans.IOHandler(sys.stdin, sys.stdout))
        self.assertTrue(self.shell.execute('test.command a 2', session))
        result = session.last_result
        self.assertEqual(result, ('a', '2'))
        # Invalid call: wrong number of positional arguments
        for invalid in ([1], (1, 2, 3)):
            args = ' '.join(str(arg) for arg in invalid)
            self.assertFalse(self.shell.execute(
                'test.command {0}'.format(args)), "Invalid call passed")
    def testKeywords(self):
        """
        Tests keyword arguments
        """
        def command(io_handler, arg1='15', **kwargs):
            """
            Sample command
            """
            return arg1, kwargs
        # Register the command
        self.shell.register_command('test', 'command', command)
        # Prepare the session
        session = beans.ShellSession(beans.IOHandler(sys.stdin, sys.stdout))
        # Valid execution: named argument plus extra keywords
        self.shell.execute('test.command arg1=12 a=2 b=abc', session)
        result = session.last_result
        self.assertEqual(result, ('12', {'a': '2', 'b': 'abc'}),
                         "Invalid result: {0}".format(result))
        # Positional value binds to arg1
        self.shell.execute('test.command 12', session)
        result = session.last_result
        self.assertEqual(result, ('12', {}),
                         "Invalid result: {0}".format(result))
        # Unknown keyword goes to kwargs; arg1 keeps its default
        self.shell.execute('test.command a=12', session)
        result = session.last_result
        self.assertEqual(result, ('15', {'a': '12'}),
                         "Invalid result: {0}".format(result))
        # First '=' sign is the assignment one,
        # shlex.split removes slashes
        self.shell.execute('test.command a=a=b b\=a=b', session)
        result = session.last_result
        self.assertEqual(result, ('15', {'a': 'a=b', 'b': 'a=b'}),
                         "Invalid result: {0}".format(result))
        # Invalid call (2 arguments)
        self.assertFalse(self.shell.execute('test.command 1 2'),
                         "Invalid call passed")
    def testWhiteboard(self):
        """
        Tests commands registered by a service
        """
        class CommandService(object):
            """
            Command service following the shell-command whiteboard pattern
            """
            def __init__(self):
                # Set to True when _command runs
                self.flag = False
            def get_namespace(self):
                return "test"
            def get_methods(self):
                return [("command", self._command)]
            def _command(self, io_handler):
                self.flag = True
        # Create the service object
        service = CommandService()
        # Check state: not callable before the service is registered
        self.assertFalse(self.shell.execute("test.command"),
                         "'test.command' can be called")
        self.assertFalse(service.flag, "Bad flag value")
        # Register the service
        svc_reg = self.context.register_service(SERVICE_SHELL_COMMAND,
                                                service, {})
        # Test execution
        self.assertTrue(self.shell.execute("test.command"),
                        "Error in executing 'test.command'")
        self.assertTrue(service.flag, "Command not called")
        service.flag = False
        # Unregister the service
        svc_reg.unregister()
        svc_reg = None
        # Check state: the command must disappear with the service
        self.assertFalse(self.shell.execute("test.command"),
                         "'test.command' can still be called")
        self.assertFalse(service.flag, "Bad flag value")
# ------------------------------------------------------------------------------


class ShellCoreCommandsTest(unittest.TestCase):
    """
    Tests the shell core commands: help, echo, variables (set/unset), run,
    bundle and service inspection, framework properties, environment
    variables and thread listing
    """
def setUp(self):
    """
    Starts a framework and install the shell bundle
    """
    # Start the framework with the core shell bundle pre-installed
    self.framework = create_framework(['pelix.shell.core'])
    self.framework.start()
    self.context = self.framework.get_bundle_context()

    # Retrieve the shell service for use by the tests
    svc_ref = self.context.get_service_reference(SERVICE_SHELL)
    self.shell = self.context.get_service(svc_ref)
def tearDown(self):
    """
    Cleans up the framework
    """
    self.framework.stop()
    FrameworkFactory.delete_framework()

    # Drop references so nothing leaks between tests
    self.shell = None
    self.context = None
    self.framework = None
def _make_session(self):
    """
    Prepares a ShellSession object for _run_command.

    Returns a (session, output) tuple; the StringIO output captures
    everything the shell writes during the session.
    """
    # String output
    str_output = StringIO()

    # Session bean (no input stream: commands are non-interactive here)
    session = beans.ShellSession(beans.IOHandler(None, str_output))
    return session, str_output
def _run_command(self, command, *args, **kwargs):
    """
    Runs the given command and returns the output stream. A keyword
    argument 'session' can be given to use a custom ShellSession.

    NOTE: 'session' and 'output' must be given together (both come from
    _make_session); if either is missing a fresh session is created.
    """
    # Format command
    if args:
        command = command.format(*args)

    try:
        # Get the given session and reset its captured output
        session = kwargs['session']
        str_output = kwargs['output']
        str_output.truncate(0)
        str_output.seek(0)
    except KeyError:
        # No session given
        str_output = StringIO()
        session = beans.ShellSession(beans.IOHandler(None, str_output))

    # Run command
    self.shell.execute(command, session)
    return str_output.getvalue()
def testHelp(self):
    """
    Tests the help command: global listing, per-namespace listing and
    per-command details must all name the relevant namespaces/commands
    """
    # Register some commands
    self.shell.register_command("test", "dummy", self.testHelp)

    # All commands
    output = self._run_command('help')
    for namespace in self.shell.get_namespaces():
        self.assertIn(namespace, output)
        for command in self.shell.get_commands(namespace):
            self.assertIn(command, output)

    # Namespace commands
    for namespace in self.shell.get_namespaces():
        output = self._run_command('help {0}', namespace)
        self.assertIn(namespace, output)
        for command in self.shell.get_commands(namespace):
            self.assertIn(command, output)

    # Commands
    for namespace in self.shell.get_namespaces():
        for command in self.shell.get_commands(namespace):
            output = self._run_command('help {0}', command)
            self.assertIn(namespace, output)
            self.assertIn(command, output)
def testEcho(self):
    """
    Tests the echo command: the argument must be printed back verbatim
    """
    echo_value = "Hello, World !"
    output = self._run_command("echo {0}", echo_value)
    self.assertEqual(output.strip(), echo_value)
def test_variables(self):
    """
    Tests the set and unset commands. Also tests the substitution of
    variables (echo $name)
    """
    var_name = 'toto'
    session, str_output = self._make_session()
    kwargs = {"session": session, "output": str_output}

    # No value set yet
    output = self._run_command("set", **kwargs)
    self.assertNotIn(var_name, output)
    self.assertRaises(KeyError, session.get, var_name)

    # Substituting an unset variable yields an empty string
    output = self._run_command("echo ${0}", var_name, **kwargs)
    self.assertEqual(output.strip(), "")

    old_value = None
    for value in ("Some value", "Another value"):
        # Set a value
        output = self._run_command("set {0}='{1}'".format(var_name, value),
                                   **kwargs)
        self.assertEqual(session.get(var_name), value)
        self.assertIn(var_name, output)
        self.assertIn(value, output)

        # 'set' without argument must list the current value only
        output = self._run_command("set", **kwargs)
        self.assertIn(var_name, output)
        self.assertIn(value, output)
        if old_value is not None:
            self.assertNotIn(old_value, output)

        # Test the output
        output = self._run_command("echo ${0}", var_name, **kwargs)
        self.assertEqual(output.strip(), value)
        old_value = value

    # Unset the value
    self._run_command("unset {0}", var_name, **kwargs)
    self.assertRaises(KeyError, session.get, var_name)
    output = self._run_command("echo ${0}", var_name, **kwargs)
    self.assertEqual(output.strip(), "")
    output = self._run_command("set", **kwargs)
    self.assertNotIn(var_name, output)
def test_run_file(self):
    """
    Tests the run shell command (executes a script file of shell commands)
    """
    # Check bad file error
    self.assertFalse(self.shell.execute("run __fake_file__"))

    # Compute test file name (script lives next to this test module)
    filename = os.path.join(os.path.dirname(__file__),
                            "rshell_starter.pelix")

    # Install iPOPO (the script instantiates a remote-shell component)
    self.context.install_bundle('pelix.ipopo.core').start()
    self.context.install_bundle('pelix.shell.ipopo').start()

    # Prepare a session, seeding the "port" variable read by the script
    port = 9001
    session = beans.ShellSession(beans.IOHandler(sys.stdin, sys.stdout),
                                 {"port": port})

    # Run the file a first time
    self.assertTrue(self.shell.execute("run '{0}'".format(filename),
                                       session))

    # Check the result: the script must have set session variables
    self.assertEqual(session.get("rshell.name"), "rshell")

    # Check the bundle
    rshell_bundle = int(session.get("rshell.bundle"))
    bundle = self.context.get_bundle(rshell_bundle)
    self.assertEqual(bundle.get_symbolic_name(), "pelix.shell.remote")

    # Check the instance properties
    with use_ipopo(self.context) as ipopo:
        details = ipopo.get_instance_details(session.get("rshell.name"))
        self.assertEqual(int(details["properties"]["pelix.shell.port"]),
                         port)

    # Run the file a second time: it must fail
    # (the component instance already exists)
    self.assertFalse(self.shell.execute("run '{0}'".format(filename),
                                        session))
def testBundlesInfo(self):
    """
    Tests the bd (bundle details) and bl (bundle list) commands
    """
    # Install a bundle with another prefix
    self.context.install_bundle("tests.interfaces")

    # List of bundles
    output = self._run_command('bl')

    # Ensure that all bundles have been listed
    for bundle in self.context.get_bundles():
        self.assertIn(str(bundle.get_bundle_id()), output)
        self.assertIn(bundle.get_symbolic_name(), output)
        self.assertIn(str(bundle.get_version()), output)

    # Test filter by name: all pelix bundles
    for prefix in ('pelix', 'tests', 'pelix.shell'):
        output = self._run_command('bl {0}', prefix)
        for bundle in self.context.get_bundles():
            name = bundle.get_symbolic_name()
            if name.startswith(prefix):
                self.assertIn(name, output)

    # Test bundle details: bd accepts either the ID or the symbolic name
    for bundle in self.context.get_bundles():
        for selector in (bundle.get_bundle_id(),
                         bundle.get_symbolic_name()):
            output = self._run_command('bd {0}', selector)
            self.assertIn(str(bundle.get_bundle_id()), output)
            self.assertIn(bundle.get_symbolic_name(), output)
            self.assertIn(str(bundle.get_version()), output)

    # Test invalid bundle
    output = self._run_command('bd {0}', -1)
    self.assertIn("Unknown bundle", output)
    output = self._run_command('bd aaa')
    self.assertIn("Unknown bundle", output)
def testBundlesCommands(self):
    """
    Tests the install, start, update, stop and uninstall commands,
    checking the bundle state after each transition
    """
    # Install a bundle
    output = self._run_command('install pelix.http.basic')

    # Find it (last installed bundle)
    bundle = self.context.get_bundles()[-1]
    bundle_id = bundle.get_bundle_id()

    # The bundle ID should have been printed
    # and the bundle must be installed or resolved
    self.assertIn(str(bundle_id), output)
    self.assertIn(bundle.get_state(), (Bundle.INSTALLED, Bundle.RESOLVED))

    # Start the bundle
    self._run_command('start {0}', bundle_id)
    self.assertEqual(bundle.get_state(), Bundle.ACTIVE)

    # Update the bundle (must stay active)
    self._run_command('update {0}', bundle_id)
    self.assertEqual(bundle.get_state(), Bundle.ACTIVE)

    # Stop it
    self._run_command('stop {0}', bundle_id)
    self.assertEqual(bundle.get_state(), Bundle.RESOLVED)

    # Uninstall it
    self._run_command('uninstall {0}', bundle_id)
    self.assertEqual(bundle.get_state(), Bundle.UNINSTALLED)
    self.assertNotIn(bundle, self.context.get_bundles())

    # Test invalid command arguments
    for command in ('update', 'stop', 'uninstall'):
        output = self._run_command('{0} aaa', command)
        self.assertIn("Invalid bundle ID", output)
        output = self._run_command('{0} {1}', command, -1)
        self.assertIn("Unknown bundle", output)
def testBundleNameStart(self):
    """
    Tests the start command given a bundle name instead of a bundle ID:
    the bundle must be installed and started in one step
    """
    # Install a bundle
    output = self._run_command('start pelix.http.basic')

    # Find it and check its state
    bundle = self.context.get_bundles()[-1]
    self.assertEqual(bundle.get_state(), Bundle.ACTIVE)

    # Check command output
    bundle_id = bundle.get_bundle_id()
    self.assertIn(str(bundle_id), output)
def testServicesInfo(self):
    """
    Tests the sl (service list) and sd (service details) commands
    """
    # Get all services references
    svc_refs = self.context.get_all_service_references(None, None)

    # Collect every specification provided by any service
    specs = set()
    for svc_ref in svc_refs:
        specs.update(svc_ref.get_property(constants.OBJECTCLASS))

    # List all services
    output = self._run_command('sl')

    # Check their presence
    for svc_ref in svc_refs:
        self.assertIn(str(svc_ref.get_property(constants.SERVICE_ID)),
                      output)
        for spec in svc_ref.get_property(constants.OBJECTCLASS):
            self.assertIn(spec, output)

    # Check the specification filter
    for spec in specs:
        output = self._run_command('sl {0}', spec)
        self.assertIn(spec, output)
        for svc_ref in svc_refs:
            svc_id = str(svc_ref.get_property(constants.SERVICE_ID))
            if spec in svc_ref.get_property(constants.OBJECTCLASS):
                self.assertIn(svc_id, output)

    # Check invalid filter
    output = self._run_command('sl <inexistent>')
    self.assertIn("No service provides", output)

    # Check details
    for svc_ref in svc_refs:
        svc_id = str(svc_ref.get_property(constants.SERVICE_ID))
        output = self._run_command('sd {0}', svc_id)
        self.assertIn(svc_id, output)
        self.assertIn(str(svc_ref.get_bundle()), output)
        for spec in svc_ref.get_property(constants.OBJECTCLASS):
            self.assertIn(spec, output)

    # Invalid IDs
    for invalid in (-1, '<invalid>', '-10'):
        output = self._run_command('sd {0}', invalid)
        self.assertIn('Service not found', output)
def testProperties(self):
    """
    Tests the properties and property commands by parsing the ASCII table
    printed by 'properties'
    """
    output = self._run_command("properties")

    # Extract all properties from the table rows
    props = {}
    for line in output.split('\n'):
        if line.startswith('|'):
            # Value line, name column
            values = line.split('|')
            name, value = values[1].strip(), values[2].strip()
            if name and name != 'Property Name':
                props[name] = value

    # Check their values against the framework
    for name, value in self.framework.get_properties().items():
        self.assertEqual(str(props[name]), value)

    # Check each property
    for name, value in props.items():
        output = self._run_command("property {0}", name)
        self.assertIn(value, output.strip())

    # Check invalid property: empty output expected
    output = self._run_command('property <<invalid>>')
    self.assertEqual("", output.strip())
def testEnvironment(self):
    """
    Tests the sysprops and sysprop commands by parsing the ASCII table
    printed by 'sysprops'
    """
    output = self._run_command("sysprops")

    # Extract all variables from the table rows
    props = {}
    for line in output.split('\n'):
        if line.startswith('|'):
            # Value line, name column (split on the second '|' only,
            # as values may themselves contain '|')
            idx_separator = line.find('|', 1)
            name = line[1:idx_separator].strip()
            value = line[idx_separator + 1:-1].strip()
            if name and name != 'Environment Variable':
                props[name] = value

    # Check their values (skip multi-line values, which break the table)
    for name, value in os.environ.items():
        if '\n' not in value and '\\n' not in value:
            self.assertEqual(str(props[name]), value.strip())

    # Check each variable
    for name, value in props.items():
        output = self._run_command("sysprop {0}", name)
        self.assertIn(value, output.strip())

    # Check invalid variable: empty output expected
    output = self._run_command('sysprop <<invalid>>')
    self.assertEqual("", output.strip())
def testThreads(self):
    """
    Tests the threads and thread commands
    """
    try:
        # Attribute presence check only: some interpreters lack it
        sys._current_frames
    except AttributeError:
        self.skipTest("sys._current_frames() isn't supported in this "
                      "interpreter")

    output = self._run_command('threads')

    # Get all thread IDs from the listing
    threads = []
    for line in output.split('\n'):
        if line.startswith('Thread ID:'):
            thread_id = int(line.split(':')[1].split('-')[0])
            threads.append(thread_id)

    # Check each thread (only checks the command runs without error)
    for thread_id in threads:
        output = self._run_command('thread {0}', thread_id)

    # Check invalid thread
    output = self._run_command('thread -1')
    self.assertIn("Unknown thread", output)
    output = self._run_command('thread aaa')
    self.assertIn("Invalid thread", output)
# ------------------------------------------------------------------------------

if __name__ == "__main__":
    # Run the tests when this module is executed directly
    unittest.main()
| {
"content_hash": "3bc4acd62a3c999c3f76157f1ba712b0",
"timestamp": "",
"source": "github",
"line_count": 890,
"max_line_length": 80,
"avg_line_length": 33.825842696629216,
"alnum_prop": 0.5383158943697061,
"repo_name": "tcalmant/ipopo",
"id": "407f24374463d2376f788b17ac7da051d09d46aa",
"size": "30159",
"binary": false,
"copies": "1",
"ref": "refs/heads/v1",
"path": "tests/shell/test_core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2183067"
}
],
"symlink_target": ""
} |
import ana
import simuvex
import claripy
import mulpyplexer
import logging
l = logging.getLogger('angr.path_group')
class CallReturn(simuvex.SimProcedure):
    """
    SimProcedure hooked at the fake return address installed by
    PathGroup.call; it only logs that a called function has returned.
    """

    # This procedure does not return to a caller
    NO_RET = True

    def run(self):
        l.info("A PathGroup.call-created path returned!")
        return
class PathGroup(ana.Storable):
    '''
    Path groups are the future.

    Path groups allow you to wrangle multiple paths in a slick way. Paths are
    organized into "stashes", which you can step forward, filter, merge, and
    move around as you wish. This allows you to, for example, step two
    different stashes of paths at different rates, then merge them together.

    Note that path groups are immutable by default (all operations will return
    new PathGroup objects). See the immutable argument to __init__.

    Stashes can be accessed as attributes (i.e., pg.active). A mulpyplexed
    stash can be retrieved by prepending the name with "mp_" (i.e.,
    pg.mp_active).
    '''

    # Sentinel stash names understood by _move(): ALL selects every stash
    # as a source; DROP discards matching paths instead of storing them.
    ALL = '_ALL'
    DROP = '_DROP'
def __init__(self, project, active_paths=None, stashes=None, hierarchy=None, veritesting=None,
             veritesting_options=None, immutable=None, resilience=None, save_unconstrained=None,
             save_unsat=None, strong_path_mapping=None):
    '''
    Initializes a new PathGroup.

    @param project: an angr.Project instance
    @param active_paths: active paths to seed the "active" stash with.
    @param stashes: a dictionary to use as the stash store
    @param hierarchy: a PathHierarchy object to use to track path reachability
    @param immutable: if True (the default), all operations will return a new
                      PathGroup. Otherwise, all operations will modify the
                      PathGroup (and return it, for consistency and chaining).
    '''
    self._project = project

    # Build a fresh hierarchy unless the caller supplied one
    if hierarchy is None:
        hierarchy = PathHierarchy(strong_path_mapping=strong_path_mapping)
    self._hierarchy = hierarchy

    # Behavior flags, each defaulting when the argument is None
    self._immutable = immutable if immutable is not None else True
    self._veritesting = veritesting if veritesting is not None else False
    self._resilience = resilience if resilience is not None else False
    self._veritesting_options = veritesting_options if veritesting_options is not None else { }

    # public options
    self.save_unconstrained = save_unconstrained if save_unconstrained is not None else False
    self.save_unsat = save_unsat if save_unsat is not None else False

    # Default stash layout, seeded with the given active paths
    if stashes is None:
        stashes = {
            'active': active_paths if active_paths is not None else [ ],
            'stashed': [ ],
            'pruned': [ ],
            'unsat': [ ],
            'errored': [ ],
            'deadended': [ ],
            'unconstrained': [ ]
        }
    self.stashes = stashes
@classmethod
def call(cls, project, target, args=(), start=None, prototype=None, **kwargs): #pylint:disable=unused-argument
    """"Calls" a function in the binary, returning a PathGroup with the call set up.

    :param project: :class:`angr.Project` instance
    :param target: address of function to call
    :param args: arguments to call the function with
    :param start: Optional: path (or paths) to start the call with
    :param prototype: Optional: A SimTypeFunction to typecheck arguments against
    :param **kwargs: other kwargs to pass to construct :class:`PathGroup`
    :return: a :class:`PathGroup` calling the function
    """
    # Hook a fake return address so a returning call lands in CallReturn
    fake_return_addr = project._extern_obj.get_pseudo_addr('FAKE_RETURN_ADDR')
    if not project.is_hooked(fake_return_addr):
        project.hook(fake_return_addr, CallReturn)
    cc = simuvex.DefaultCC[project.arch.name](project.arch)

    # Collect starting paths: a fresh path at target, or the given path(s)
    active_paths = []
    if start is None:
        active_paths.append(project.factory.path(addr=target))
    elif hasattr(start, '__iter__'):
        active_paths.extend(start)
    else:
        active_paths.append(start)

    ret_addr = claripy.BVV(fake_return_addr, project.arch.bits)

    def fix_arg(st, arg):
        # Converts a Python-level argument into something the calling
        # convention can place at the callsite, materializing strings and
        # lists on the state's simulated heap.
        if isinstance(arg, str):
            # store the string, nul-terminated, at the current heap location
            # then return a pointer to that location
            ptr = st.libc.heap_location
            st.memory.store(ptr, st.BVV(arg))
            st.memory.store(ptr + len(arg), st.BVV(0, 8))
            st.libc.heap_location += len(arg) + 1
            return ptr
        elif isinstance(arg, (tuple, list)):
            # fix the entries of the list, then store them in a
            # NULL-terminated array at the current heap location and return
            # a pointer to there
            # N.B.: uses host endianness to store entries!!! mostly useful for string arrays
            fixed_entries = [fix_arg(st, entry) for entry in arg]
            cur_ptr = start_ptr = st.libc.heap_location
            for entry in fixed_entries:
                st.memory.store(cur_ptr, entry, endness=st.arch.memory_endness)
                cur_ptr += entry.length
            # NULL terminator sized like the entries (or a word if empty)
            entry_length = fixed_entries[0].length if len(fixed_entries) > 0 else st.arch.bits
            st.memory.store(cur_ptr, st.se.BVV(0, entry_length))
            st.libc.heap_location = cur_ptr + entry_length
            return start_ptr
        elif isinstance(arg, (int, long)):
            # NOTE: 'long' makes this Python-2-only code
            return st.se.BVV(arg, st.arch.bits)
        elif isinstance(arg, StringSpec):
            # StringSpec is defined elsewhere in the project; presumably it
            # serializes itself into the state at ptr -- confirm at its def
            ptr = st.libc.heap_location
            arg.dump(st, ptr)
            st.libc.heap_location += len(arg)
            return ptr
        else:
            return arg

    # Set up the callsite on every starting path
    for p in active_paths:
        p.state.ip = target
        fixed_args = [fix_arg(p.state, arg) for arg in args]
        cc.setup_callsite(p.state, ret_addr, fixed_args)

    return cls(project, active_paths=active_paths, **kwargs)
#
# Util functions
#

def copy(self, stashes=None):
    """
    Returns a copy of this PathGroup, carrying over all configuration.

    @param stashes: optional stash dictionary for the copy; defaults to a
                    fresh copy of the current stashes.
    """
    if stashes is None:
        stashes = self._copy_stashes(immutable=True)
    return PathGroup(
        self._project,
        stashes=stashes,
        hierarchy=self._hierarchy,
        immutable=self._immutable,
        veritesting=self._veritesting,
        veritesting_options=self._veritesting_options,
        resilience=self._resilience,
        save_unconstrained=self.save_unconstrained,
        save_unsat=self.save_unsat)
def _copy_stashes(self, immutable=None):
    '''
    Returns a copy of the stashes (if immutable) or the stashes themselves
    (if not immutable). Used to abstract away immutability.
    '''
    # The explicit argument, when given, overrides the group's setting
    effective = immutable if immutable is not None else self._immutable
    if effective:
        return dict((name, list(paths)) for name, paths in self.stashes.items())
    return self.stashes
def _copy_paths(self, paths):
    '''
    Returns a copy of a list of paths (if immutable) or the paths themselves
    (if not immutable). Used to abstract away immutability.
    '''
    if not self._immutable:
        return paths
    return [ path.copy() for path in paths ]
def _successor(self, new_stashes):
    '''
    Creates a new PathGroup with the provided stashes (if immutable), or sets
    the stashes (if not immutable). Used to abstract away immutability.

    @param new_stashes: the stash dictionary for the successor group
    @returns a PathGroup
    '''
    # Discard anything routed to the DROP sentinel stash. Using the class
    # constant (instead of the previous hard-coded '_DROP' literal) keeps
    # the sentinel consistent with _move()/drop(); pop() also avoids the
    # separate membership test.
    new_stashes.pop(self.DROP, None)

    if not self._immutable:
        self.stashes = new_stashes
        return self
    else:
        return self.copy(stashes=new_stashes)
@staticmethod
def _condition_to_lambda(condition, default=False):
    '''
    Translates an integer, set, or list into a lambda that checks a path
    address against the given addresses.

    @param condition: an integer, set, or list to convert to a lambda
    @param default: the default return value of the lambda (in case condition
                    is None). Default: false.
    @returns a lambda that takes a path and returns True or False
    '''
    if condition is None:
        return lambda p: default

    # A single address becomes a one-element set
    if isinstance(condition, (int, long)):
        condition = { condition }

    if isinstance(condition, (tuple, set, list)):
        wanted = set(condition)
        return lambda p: p.addr in wanted

    # Already a callable: pass it through unchanged
    return condition
@staticmethod
def _filter_paths(filter_func, paths):
    '''
    Filters a sequence of paths according to a filter_func.

    @param filter_func: the filter function. Should take a path as input and
                        return a boolean. None matches every path.
    @param paths: a sequence of paths
    @returns a tuple, with the first element the matching paths and the second
             element the non-matching paths.
    '''
    l.debug("Filtering %d paths", len(paths))

    match, nomatch = [ ], [ ]
    for path in paths:
        if filter_func is None or filter_func(path):
            l.debug("... path %s matched!", path)
            match.append(path)
        else:
            l.debug("... path %s didn't match!", path)
            nomatch.append(path)

    l.debug("... returning %d matches and %d non-matches", len(match), len(nomatch))
    return match, nomatch
def _one_step(self, stash=None, selector_func=None, successor_func=None, check_func=None, **kwargs):
    '''
    Takes a single step in a given stash.

    @param stash: the name of the stash (default: 'active').
    @param successor_func: if provided, this function is called with the path as
                           its only argument. It should return the path's
                           successors. If this is None, path.successors is used,
                           instead.
    @param selector_func: if provided, should be a lambda that takes a Path and
                          returns a boolean. If True, the path will be stepped.
                          Otherwise, it will be kept as-is.
    @param check_func: if provided, this function will be called to decide whether
                       the current path is errored or not. Path.errored will not be
                       called anymore.
    @returns the successor PathGroup
    '''
    stash = 'active' if stash is None else stash
    new_stashes = self._copy_stashes()
    new_active = [ ]

    for a in self.stashes[stash]:
        try:
            # Unselected paths are kept as-is
            if selector_func is not None and not selector_func(a):
                new_active.append(a)
                continue

            has_stashed = False  # Whether this path has already been put into a stash
            successors = [ ]

            veritesting_worked = False
            if self._veritesting:
                veritesting = self._project.analyses.Veritesting(a, **self._veritesting_options)
                if veritesting.result['result'] and veritesting.result['final_path_group']:
                    # Veritesting produced its own path group: adopt its
                    # active paths as our successors and absorb its stashes
                    pg = veritesting.result['final_path_group']
                    pg.stash(from_stash='deviated', to_stash='active')
                    pg.stash(from_stash='successful', to_stash='active')
                    successors = pg.active
                    pg.drop(stash='active')
                    for s in pg.stashes:
                        if s not in new_stashes:
                            new_stashes[s] = []
                        new_stashes[s] += pg.stashes[s]
                    veritesting_worked = True

            if not veritesting_worked:
                # `check_func` will not be called for Veritesting, this is
                # intended so that we can avoid unnecessarily creating
                # Path._run
                if (check_func is not None and check_func(a)) or (check_func is None and a.errored):
                    # This path has error(s)!
                    if hasattr(a, "error") and isinstance(a.error, PathUnreachableError):
                        new_stashes['pruned'].append(a)
                    else:
                        if self._hierarchy:
                            self._hierarchy.unreachable(a)
                        new_stashes['errored'].append(a)
                    has_stashed = True
                else:
                    # Compute successors via the override or the path itself
                    if successor_func is not None:
                        successors = successor_func(a)
                    else:
                        successors = a.step(**kwargs)

                    # Optionally record unconstrained/unsat successors
                    if self.save_unconstrained:
                        if 'unconstrained' not in new_stashes:
                            new_stashes['unconstrained'] = [ ]
                        new_stashes['unconstrained'] += a.unconstrained_successors
                    if self.save_unsat:
                        if 'unsat' not in new_stashes:
                            new_stashes['unsat'] = [ ]
                        new_stashes['unsat'] += a.unsat_successors

            if self._hierarchy:
                self._hierarchy.add_successors(a, successors)

            if not has_stashed:
                # No successors means the path is finished
                if len(successors) == 0:
                    new_stashes['deadended'].append(a)
                else:
                    new_active.extend(successors)
        except (AngrError, simuvex.SimError, claripy.ClaripyError):
            # In resilient mode, analysis errors park the path in 'errored'
            # instead of aborting the whole step
            if not self._resilience:
                raise
            else:
                l.warning("PathGroup resilience squashed an exception", exc_info=True)
                new_stashes['errored'].append(a)

    new_stashes[stash] = new_active
    return self._successor(new_stashes)
@staticmethod
def _move(stashes, filter_func, src, dst):
    '''
    Moves all paths that match the filter_func from src to dst, mutating
    the given stash dictionary in place.

    @returns the (mutated) stashes dictionary
    '''
    if dst == PathGroup.ALL:
        raise AngrPathGroupError("Can't handle '_ALL' as a target stash.")
    if src == PathGroup.DROP:
        raise AngrPathGroupError("Can't handle '_DROP' as a source stash.")

    # ALL as a source means "every stash except the destination"
    if src == PathGroup.ALL:
        sources = [ name for name in stashes.keys() if name != dst ]
    else:
        sources = [ src ]

    matches = [ ]
    for name in sources:
        moved, kept = PathGroup._filter_paths(filter_func, stashes[name])
        stashes[name] = kept
        matches.extend(moved)

    # DROP as a destination silently discards the matches
    if dst != PathGroup.DROP:
        stashes.setdefault(dst, [ ]).extend(matches)
    return stashes
def __repr__(self):
    # Summarize only the non-empty stashes, e.g. "<PathGroup with 3 active>"
    counts = ', '.join("%d %s" % (len(paths), name)
                       for name, paths in self.stashes.items()
                       if len(paths) != 0)
    return "<PathGroup with " + counts + ">"
def __getattr__(self, k):
    '''
    Exposes stashes as attributes (pg.active); an "mp_" prefix wraps the
    stash in a mulpyplexer.MP (pg.mp_active).
    '''
    try:
        if k.startswith('mp_'):
            return mulpyplexer.MP(self.stashes[k[3:]])
        return self.stashes[k]
    except KeyError:
        # __getattr__ must raise AttributeError (not KeyError) for unknown
        # names, so hasattr(), copy.copy and pickle probing behave correctly
        # instead of crashing on the stray KeyError.
        raise AttributeError(k)
def __dir__(self):
    # Advertise regular attributes plus every stash name and its mp_ variant
    names = set(self.__dict__.keys())
    names.update(dir(super(PathGroup, self)))
    names.update(self.stashes.keys())
    names.update('mp_' + k for k in self.stashes.keys())
    return sorted(names)
#
# Interface
#

def apply(self, path_func=None, stash_func=None, stash=None):
    '''
    Applies a given function to a given stash.

    @param path_func: a function to apply to every path. Should take a path and
                      return a path. The returned path will take the place of the
                      old path. If the function *doesn't* return a path, the old
                      path will be used. If the function returns a list of paths,
                      they will replace the original paths.
    @param stash_func: a function to apply to the whole stash. Should take a
                       list of paths and return a list of paths. The resulting
                       list will replace the stash.

    If both path_func and stash_func are provided, path_func is applied first,
    then stash_func is applied on the results.

    @returns the resulting PathGroup
    '''
    stash = 'active' if stash is None else stash
    new_stashes = self._copy_stashes()
    new_paths = new_stashes[stash]

    if path_func is not None:
        new_new_paths = [ ]
        for p in new_paths:
            np = path_func(p)
            if isinstance(np, Path):
                # Replacement path
                new_new_paths.append(np)
            elif isinstance(np, (list, tuple, set)):
                # Multiple replacement paths
                new_new_paths.extend(np)
            else:
                # Not a path: keep the original
                new_new_paths.append(p)
        new_paths = new_new_paths

    if stash_func is not None:
        new_paths = stash_func(new_paths)

    new_stashes[stash] = new_paths
    return self._successor(new_stashes)
def split(self, stash_splitter=None, stash_ranker=None, path_ranker=None, limit=None, from_stash=None, to_stash=None):
    '''
    Split a stash of paths. The stash from_stash will be split into two
    stashes depending on the other options passed in. If to_stash is provided,
    the second stash will be written there.

    @param stash_splitter: a function that should take a list of paths and return
                           a tuple of two lists (the two resulting stashes).
    @param stash_ranker: a function that should take a list of paths and return
                         a sorted list of paths. This list will then be split
                         according to "limit".
    @param path_ranker: an alternative to stash_splitter. Paths will be sorted
                        with outputs of this function used as a key. The first
                        "limit" of them will be kept, the rest split off.
    @param limit: for use with path_ranker. The number of paths to keep. Default: 8
    @param from_stash: the stash to split (default: 'active')
    @param to_stash: the stash to write to (default: 'stashed')

    stash_splitter overrides stash_ranker, which in turn overrides path_ranker.
    If no functions are provided, the paths are simply split according to the limit.
    The sort done with path_ranker is ascending.

    @returns the resulting PathGroup
    '''
    limit = 8 if limit is None else limit
    from_stash = 'active' if from_stash is None else from_stash
    to_stash = 'stashed' if to_stash is None else to_stash

    new_stashes = self._copy_stashes()
    old_paths = new_stashes[from_stash]

    # Decide the keep/split partition by priority of the provided callbacks
    if stash_splitter is not None:
        keep, split = stash_splitter(old_paths)
    elif stash_ranker is not None:
        ranked_paths = stash_ranker(old_paths)
        keep, split = ranked_paths[:limit], ranked_paths[limit:]
    elif path_ranker is not None:
        ranked_paths = sorted(old_paths, key=path_ranker)
        keep, split = ranked_paths[:limit], ranked_paths[limit:]
    else:
        keep, split = old_paths[:limit], old_paths[limit:]

    new_stashes[from_stash] = keep
    # BUG FIX: the original conditional had its branches swapped
    # ("split if to_stash in new_stashes else new_stashes[to_stash] + split"),
    # which discarded the existing contents of an existing to_stash and
    # raised KeyError for a new one. Append to an existing stash, otherwise
    # create it.
    if to_stash in new_stashes:
        new_stashes[to_stash] = new_stashes[to_stash] + split
    else:
        new_stashes[to_stash] = split
    return self._successor(new_stashes)
def step(self, n=None, selector_func=None, step_func=None, stash=None,
         successor_func=None, until=None, check_func=None, **kwargs):
    '''
    Step a stash of paths forward.

    @param n: the number of times to step (default: 1 if "until" is not provided)
    @param selector_func: if provided, should be a lambda that takes a Path and
                          returns a boolean. If True, the path will be stepped.
                          Otherwise, it will be kept as-is.
    @param step_func: if provided, should be a lambda that takes a PathGroup and
                      returns a PathGroup. Will be called with the PathGroup at
                      at every step.
    @param stash: the name of the stash to step (default: 'active')
    @param successor_func: if provided, this function will be called with a path
                           to get its successors. Otherwise, path.successors will
                           be used.
    @param until: if provided, should be a lambda that takes a PathGroup and returns
                  True or False. Stepping will terminate when it is True.
    @param check_func: if provided, this function will be called to decide whether
                       the current path is errored or not. Path.errored will not be
                       called anymore.

    Additionally, you can pass in any of the following keyword args
    for project.factory.sim_run:

    @param jumpkind: the jumpkind of the previous exit
    @param addr an address: to execute at instead of the state's ip
    @param stmt_whitelist: a list of stmt indexes to which to confine execution
    @param last_stmt: a statement index at which to stop execution
    @param thumb: whether the block should be lifted in ARM's THUMB mode
    @param backup_state: a state to read bytes from instead of using project memory
    @param opt_level: the VEX optimization level to use
    @param insn_bytes: a string of bytes to use for the block instead of the project
    @param max_size: the maximum size of the block, in bytes
    @param num_inst: the maximum number of instructions
    @param traceflags: traceflags to be passed to VEX. Default: 0

    @returns the resulting PathGroup
    '''
    stash = 'active' if stash is None else stash
    # With an "until" predicate, step (practically) forever; otherwise once
    n = n if n is not None else 1 if until is None else 100000
    pg = self

    for i in range(n):
        l.debug("Round %d: stepping %s", i, pg)

        pg = pg._one_step(stash=stash, selector_func=selector_func, successor_func=successor_func, check_func=check_func, **kwargs)
        if step_func is not None:
            pg = step_func(pg)

        # Stop early if the stash ran dry or the predicate is satisfied
        if len(pg.stashes[stash]) == 0:
            l.debug("Out of paths in stash %s", stash)
            break

        if until is not None and until(pg):
            l.debug("Until function returned true")
            break

    return pg
def prune(self, filter_func=None, from_stash=None, to_stash=None):
    '''
    Prune unsatisfiable paths from a stash.

    @param filter_func: only prune paths that match this filter
    @param from_stash: prune paths from this stash (default: 'active')
    @param to_stash: put pruned paths in this stash (default: 'pruned')
    @returns the resulting PathGroup
    '''
    from_stash = from_stash if from_stash is not None else 'active'
    to_stash = to_stash if to_stash is not None else 'pruned'

    # Only filter-matched paths are candidates for pruning
    candidates, kept = self._filter_paths(filter_func, self.stashes[from_stash])
    new_stashes = self._copy_stashes()

    for path in candidates:
        if path.errored or not path.state.satisfiable():
            # Unsatisfiable or errored: move to the prune stash
            new_stashes.setdefault(to_stash, [ ]).append(path)
            if self._hierarchy:
                self._hierarchy.unreachable(path)
        else:
            # Still satisfiable: keep it active
            kept.append(path)

    new_stashes[from_stash] = kept
    return self._successor(new_stashes)
def move(self, from_stash, to_stash, filter_func=None):
    '''
    Move paths from one stash to another.

    @param from_stash: take matching paths from this stash.
    @param to_stash: put matching paths into this stash.
    @param filter_func: move only paths that match this filter. Should be a
                        function that takes a path and returns True or False.
                        Default: move all paths
    @returns the resulting PathGroup
    '''
    updated = self._copy_stashes()
    self._move(updated, filter_func, from_stash, to_stash)
    return self._successor(updated)
def stash(self, filter_func=None, from_stash=None, to_stash=None):
    '''
    Stash some paths. This is an alias for move(), with defaults for the
    stashes ('active' -> 'stashed').

    @param filter_func: stash paths that match this filter. Should be a function
                        that takes a path and returns True or False. Default:
                        stash all paths
    @param from_stash: take matching paths from this stash (default: 'active')
    @param to_stash: put matching paths into this stash: (default: 'stashed')
    @returns the resulting PathGroup
    '''
    source = from_stash if from_stash is not None else 'active'
    target = to_stash if to_stash is not None else 'stashed'
    return self.move(source, target, filter_func=filter_func)
def drop(self, filter_func=None, stash=None):
    '''
    Drop paths from a stash. An alias for move() targeting the DROP sink.

    @param filter_func: drop paths matching this filter; takes a path and
                        returns True or False. Default: drop all paths.
    @param stash: the stash to drop paths from (default: 'active')
    @returns the resulting PathGroup
    '''
    if stash is None:
        stash = 'active'
    # self.DROP is the group's sink stash for discarded paths.
    return self.move(stash, self.DROP, filter_func=filter_func)
def unstash(self, filter_func=None, to_stash=None, from_stash=None):
    '''
    Unstash some paths. An alias for move() with unstash-oriented defaults.

    @param filter_func: unstash paths matching this filter; takes a path and
                        returns True or False. Default: unstash all paths.
    @param from_stash: the stash to take paths from (default: 'stashed')
    @param to_stash: the stash that receives them (default: 'active')
    @returns the resulting PathGroup
    '''
    if from_stash is None:
        from_stash = 'stashed'
    if to_stash is None:
        to_stash = 'active'
    return self.move(from_stash, to_stash, filter_func=filter_func)
def merge(self, merge_func=None, stash=None):
    '''
    Merge the states in a given stash.

    @param stash: the stash whose paths should be merged (default: 'active')
    @param merge_func: if provided, called with the group of same-address
                       paths as arguments instead of path.merge; must return
                       the merged path.
    @returns the resulting PathGroup
    '''
    if stash is None:
        stash = 'active'

    remaining = self.stashes[stash]
    result = [ ]        # paths that will populate the stash afterwards
    merge_groups = [ ]  # groups of >1 paths sharing an address
    while remaining:
        first_addr = remaining[0].addr
        same, remaining = self._filter_paths(lambda p: p.addr == first_addr, remaining)
        if len(same) > 1:
            merge_groups.append(same)
        else:
            # A lone path at its address has nothing to merge with.
            result.extend(same)

    for group in merge_groups:
        try:
            if merge_func is None:
                merged = group[0].merge(*group[1:])
            else:
                merged = merge_func(*group)
            result.append(merged)
        except simuvex.SimMergeError:
            # Merging failed; keep the originals rather than losing paths.
            l.warning("SimMergeError while merging %d paths", len(group), exc_info=True)
            result.extend(group)

    stashes = self._copy_stashes()
    stashes[stash] = result
    return self._successor(stashes)
#
# Various canned functionality
#
def stash_not_addr(self, addr, from_stash=None, to_stash=None):
    '''
    Stash all paths that are not at address addr, moving them from stash
    from_stash to stash to_stash.
    '''
    return self.stash(lambda p: p.addr != addr, from_stash=from_stash, to_stash=to_stash)
def stash_addr(self, addr, from_stash=None, to_stash=None):
    '''
    Stash all paths at address addr, moving them from stash from_stash to
    stash to_stash.
    '''
    return self.stash(lambda p: p.addr == addr, from_stash=from_stash, to_stash=to_stash)
def stash_addr_past(self, addr, from_stash=None, to_stash=None):
    '''
    Stash all paths containing address addr in their backtrace, moving them
    from stash from_stash to stash to_stash.
    '''
    return self.stash(lambda p: addr in p.addr_backtrace, from_stash=from_stash, to_stash=to_stash)
def stash_not_addr_past(self, addr, from_stash=None, to_stash=None):
    '''
    Stash all paths not containing address addr in their backtrace, moving
    them from stash from_stash to stash to_stash.
    '''
    return self.stash(lambda p: addr not in p.addr_backtrace, from_stash=from_stash, to_stash=to_stash)
def stash_all(self, from_stash=None, to_stash=None):
    '''
    Stash all paths, moving them from stash from_stash to stash to_stash.
    '''
    return self.stash(lambda p: True, from_stash=from_stash, to_stash=to_stash)
def unstash_addr(self, addr, from_stash=None, to_stash=None):
    '''
    Unstash all paths at address addr.
    '''
    return self.unstash(lambda p: p.addr == addr, from_stash=from_stash, to_stash=to_stash)
def unstash_addr_past(self, addr, from_stash=None, to_stash=None):
    '''
    Unstash all paths containing address addr in their backtrace.
    '''
    return self.unstash(lambda p: addr in p.addr_backtrace, from_stash=from_stash, to_stash=to_stash)
def unstash_not_addr(self, addr, from_stash=None, to_stash=None):
    '''
    Unstash all paths that are not at address addr.
    '''
    return self.unstash(lambda p: p.addr != addr, from_stash=from_stash, to_stash=to_stash)
def unstash_not_addr_past(self, addr, from_stash=None, to_stash=None):
    '''
    Unstash all paths not containing address addr in their backtrace.
    '''
    return self.unstash(lambda p: addr not in p.addr_backtrace, from_stash=from_stash, to_stash=to_stash)
def unstash_all(self, from_stash=None, to_stash=None):
    '''
    Unstash all paths.
    '''
    return self.unstash(lambda p: True, from_stash=from_stash, to_stash=to_stash)
#
# High-level functionality
#
def explore(self, stash=None, n=None, find=None, avoid=None, num_find=None, found_stash=None, avoid_stash=None):
    '''
    A replacement for the Explorer surveyor. Steps stash "stash" forward
    (up to n times, or until num_find new paths are found), stashing paths
    that satisfy condition "find" into found_stash and paths that satisfy
    condition "avoid" into avoid_stash, pruning the found stash as it goes.
    '''
    find = self._condition_to_lambda(find)
    avoid = self._condition_to_lambda(avoid)
    if found_stash is None:
        found_stash = 'found'
    if avoid_stash is None:
        avoid_stash = 'avoid'
    if num_find is None:
        num_find = 1

    # Count what is already in the found stash so "num_find" means newly
    # discovered paths, not the total.
    already_found = 0
    if found_stash in self.stashes:
        already_found = len(self.stashes[found_stash])

    def explore_step(pg):
        pg = pg.stash(find, from_stash=stash, to_stash=found_stash)
        pg = pg.stash(avoid, from_stash=stash, to_stash=avoid_stash)
        return pg.prune(from_stash=found_stash)

    def enough_found(pg):
        return len(pg.stashes[found_stash]) >= already_found + num_find

    return self.step(n=n, step_func=explore_step, until=enough_found, stash=stash)
from .path_hierarchy import PathHierarchy
from .errors import PathUnreachableError, AngrError, AngrPathGroupError
from .path import Path
from .tablespecs import StringSpec
| {
"content_hash": "2b818b6faa7f51d47598a13c0ff7397e",
"timestamp": "",
"source": "github",
"line_count": 743,
"max_line_length": 289,
"avg_line_length": 43.17092866756393,
"alnum_prop": 0.5796233944382092,
"repo_name": "cureHsu/angr",
"id": "f695fb38f021534c9bb6ff8f14697e5734e23462",
"size": "32076",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "angr/path_group.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "824"
},
{
"name": "Makefile",
"bytes": "291"
},
{
"name": "Python",
"bytes": "767671"
}
],
"symlink_target": ""
} |
def checkbox_state(form, name):
    """Return 'ON' if checkbox *name* was submitted in *form*, else 'OFF'.

    *form* is any object with a ``getvalue(name)`` method (normally a
    ``cgi.FieldStorage``); an unchecked box is simply absent from the form
    data, which ``getvalue`` reports as None.
    """
    return "ON" if form.getvalue(name) else "OFF"


def main():
    """Emit a small HTML page reporting the state of each known checkbox."""
    # Imported lazily so this module stays importable (e.g. for tests) on
    # Pythons where the deprecated ``cgi`` module is unavailable.
    import cgi
    import cgitb
    # cgitb was imported but never enabled before; enable() shows tracebacks
    # in the browser instead of a blank 500 page.
    cgitb.enable()

    form = cgi.FieldStorage()
    print("Content-type:text/html\r\n\r\n")
    print("<html>")
    print("<head>")
    print("<title>Checkbox for CGI Program</title>")
    print("</head>")
    print("<body>")
    # One report line per checkbox; output matches the original per-field
    # hand-written blocks exactly.
    for name in ('maths', 'physics', 'computer'):
        print("<h2> CheckBox %s is : %s</h2>" % (name.capitalize(), checkbox_state(form, name)))
    print("</body>")
    print("</html>")


if __name__ == '__main__':
    main()
"content_hash": "656a90ec92e77b6a5afe9ab54d1d0321",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 61,
"avg_line_length": 26.92,
"alnum_prop": 0.6136701337295691,
"repo_name": "5610110083/Safety-in-residential-project",
"id": "326ee1b1930823f338a26c2efd7f05a8598a65af",
"size": "693",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cgi-bin/exam/Example14_4.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "130630"
},
{
"name": "Arduino",
"bytes": "45331"
},
{
"name": "Assembly",
"bytes": "337688"
},
{
"name": "Batchfile",
"bytes": "67280"
},
{
"name": "C",
"bytes": "31386611"
},
{
"name": "C#",
"bytes": "10401"
},
{
"name": "C++",
"bytes": "780076"
},
{
"name": "CMake",
"bytes": "107889"
},
{
"name": "CSS",
"bytes": "461723"
},
{
"name": "DIGITAL Command Language",
"bytes": "322017"
},
{
"name": "DTrace",
"bytes": "12419"
},
{
"name": "Emacs Lisp",
"bytes": "5297"
},
{
"name": "Forth",
"bytes": "212900"
},
{
"name": "Frege",
"bytes": "2320153"
},
{
"name": "GDB",
"bytes": "10566"
},
{
"name": "HTML",
"bytes": "1493042"
},
{
"name": "Inno Setup",
"bytes": "3960"
},
{
"name": "JavaScript",
"bytes": "1301174"
},
{
"name": "Lex",
"bytes": "9180"
},
{
"name": "Lua",
"bytes": "12941"
},
{
"name": "M4",
"bytes": "196634"
},
{
"name": "Makefile",
"bytes": "3105186"
},
{
"name": "PHP",
"bytes": "18667"
},
{
"name": "Pascal",
"bytes": "25226"
},
{
"name": "Perl",
"bytes": "3425327"
},
{
"name": "Prolog",
"bytes": "29177"
},
{
"name": "Protocol Buffer",
"bytes": "2764"
},
{
"name": "Python",
"bytes": "327971"
},
{
"name": "Roff",
"bytes": "779793"
},
{
"name": "Scheme",
"bytes": "4249"
},
{
"name": "Shell",
"bytes": "1957563"
},
{
"name": "TeX",
"bytes": "2582"
},
{
"name": "Visual Basic",
"bytes": "1074"
},
{
"name": "XS",
"bytes": "4319"
},
{
"name": "XSLT",
"bytes": "12579"
},
{
"name": "Yacc",
"bytes": "7701"
},
{
"name": "eC",
"bytes": "5158"
}
],
"symlink_target": ""
} |
"""Tests for google.protobuf.descriptor_database."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
import unittest
import warnings
from google.protobuf import unittest_pb2
from google.protobuf import descriptor_pb2
from google.protobuf.internal import factory_test2_pb2
from google.protobuf.internal import no_package_pb2
from google.protobuf.internal import testing_refleaks
from google.protobuf import descriptor_database
@testing_refleaks.TestCase
class DescriptorDatabaseTest(unittest.TestCase):
  """Tests DescriptorDatabase registration and file/symbol lookup."""

  def testAdd(self):
    """Adds file descriptors and verifies every lookup path finds them."""
    db = descriptor_database.DescriptorDatabase()
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
        no_package_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto)
    db.Add(file_desc_proto2)
    self.assertEqual(file_desc_proto, db.FindFileByName(
        'google/protobuf/internal/factory_test2.proto'))
    # Can find message type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message'))
    # Can find nested message type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'))
    # Can find enum type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum'))
    # Can find nested enum type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum'))
    # Can find field.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.list_field'))
    # Can find enum value.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum.FACTORY_2_VALUE_0'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.FACTORY_2_VALUE_0'))
    # No-package file: values resolve with a leading dot.
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        '.NO_PACKAGE_VALUE_0'))
    # Can find top level extension.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.another_field'))
    # Can find nested extension inside a message.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.one_more_field'))
    # Can find service.
    file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto2)
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        'protobuf_unittest.TestService'))
    # Non-existent field under a valid top level symbol can also be
    # found. The behavior is the same with protobuf C++.
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        'protobuf_unittest.TestAllTypes.none_field'))
    # A fully unknown symbol raises KeyError.
    with self.assertRaisesRegex(KeyError, r'\'protobuf_unittest\.NoneMessage\''):
      db.FindFileContainingSymbol('protobuf_unittest.NoneMessage')

  def testConflictRegister(self):
    """Registering the same descriptor under a new name warns but succeeds."""
    db = descriptor_database.DescriptorDatabase()
    unittest_fd = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    db.Add(unittest_fd)
    conflict_fd = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    conflict_fd.name = 'other_file2'
    with warnings.catch_warnings(record=True) as w:
      # Cause all warnings to always be triggered.
      warnings.simplefilter('always')
      db.Add(conflict_fd)
      self.assertTrue(len(w))
      self.assertIs(w[0].category, RuntimeWarning)
      self.assertIn('Conflict register for file "other_file2": ',
                    str(w[0].message))
      self.assertIn('already defined in file '
                    '"google/protobuf/unittest.proto"',
                    str(w[0].message))
# Allow running this test file directly, outside the normal test runner.
if __name__ == '__main__':
  unittest.main()
| {
"content_hash": "eb3ce0c6b03d2b732777e61cc6621898",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 81,
"avg_line_length": 44.45360824742268,
"alnum_prop": 0.7314471243042672,
"repo_name": "chromium/chromium",
"id": "3c086b92471ad54be55fcfbf86dfd169dca2d3ac",
"size": "5943",
"binary": false,
"copies": "17",
"ref": "refs/heads/main",
"path": "third_party/protobuf/python/google/protobuf/internal/descriptor_database_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import pygtk
pygtk.require('2.0')
import gtk
import gtksourceview2
import gobject
import mimetypes
import pango
import subprocess
import fcntl
import os
import sys
import re
import ast
import optparse
import source
def patch_key_event(event, keyname):
    """Rewrite *event* in place so it reads as a press of *keyname*.

    Resolves the named key to its keyval and a hardware keycode/group via
    the default keymap, then overwrites the event's fields accordingly.
    """
    keyval = int(gtk.gdk.keyval_from_name(keyname))
    entries = gtk.gdk.keymap_get_default().get_entries_for_keyval(keyval)
    keycode, group, level = entries[0]
    event.keyval = keyval
    event.hardware_keycode = keycode
    event.group = group
    event.state = gtk.gdk.KEY_PRESS_MASK
def set_non_blocking(stream):
    """Put *stream*'s underlying file descriptor into non-blocking mode.

    Used on gdb's stdout pipe so reads from the GUI main loop never stall.
    (Parameter renamed from ``file``, which shadowed the builtin; all
    callers in this file pass it positionally.)
    """
    fd = stream.fileno()
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
class SourceView(gtksourceview2.View):
    """Read-only source viewer with vi-style key remapping.

    Caches one Buffer per file path and paints two mark categories:
    'position' (current execution line, light blue background) and
    'breakpoint' (light red background).
    """
    __gsignals__ = {
        'key-press-event': 'override',
        'key-release-event': 'override',
        # Emitted with the new file path (or None) whenever the displayed
        # buffer switches to a different file.
        'file-changed': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                         (gobject.TYPE_STRING,)),
    }
    def __init__(self, *args, **kwargs):
        gtksourceview2.View.__init__(self, *args, **kwargs)
        #self.set_overwrite(True)
        #self.set_show_line_marks(True)
        self.set_editable(False)
        self.set_wrap_mode(gtk.WRAP_WORD)
        self.buffers = {}      # file path -> cached source.Buffer
        self.breakpoints = {}  # breakpoint id -> its source mark
        self.pos = None        # mark for the current execution position
        self.set_position(None)
        # Light blue background for the current-position line.
        c = gtk.gdk.Color(0xaaaa, 0xaaaa, 0xffff)
        self.set_mark_category_background('position', c)
        # Light red background for breakpoint lines.
        c = gtk.gdk.Color(0xffff, 0xaaaa, 0xaaaa)
        self.set_mark_category_background('breakpoint', c)
        self.langman = gtksourceview2.LanguageManager()
    def patch_key_event(self, event):
        # Remap vi-style movement keys onto the widget's native cursor keys
        # (module-level patch_key_event mutates the event in place).
        key = gtk.gdk.keyval_name(event.keyval)
        #print key
        if key == 'j':
            patch_key_event(event, 'Down')
        elif key == 'k':
            patch_key_event(event, 'Up')
        elif key == 'h':
            patch_key_event(event, 'Left')
        elif key == 'l':
            patch_key_event(event, 'Right')
        elif key == 'dollar':
            patch_key_event(event, 'End')
        elif key == 'asciicircum':
            patch_key_event(event, 'Home')
    def do_key_press_event(self, event):
        key = gtk.gdk.keyval_name(event.keyval)
        if key == 'w':
            # NOTE(review): cursor_word_forward/cursor_word_backward are not
            # defined in this file — presumably provided by a base class or
            # the source module; confirm.
            self.cursor_word_forward()
        elif key == 'b':
            self.cursor_word_backward()
        else:
            self.patch_key_event(event)
            return gtksourceview2.View.do_key_press_event(self, event)
        # 'w'/'b' were handled here; report the event as consumed.
        return True
    def do_key_release_event(self, event):
        self.patch_key_event(event)
        return gtksourceview2.View.do_key_release_event(self, event)
    def get_buffer(self, path=None):
        """Return the buffer for *path*, loading and caching it on first use.

        With no path, returns the currently displayed buffer.
        """
        if path is None:
            return gtksourceview2.View.get_buffer(self)
        try:
            buf = self.buffers[path]
        except KeyError:
            buf = source.Buffer()
            lang = self.langman.guess_language(path)
            buf.set_language(lang)
            buf.set_text(open(path, 'r').read())
            self.buffers[path] = buf
            buf.filepath = path
        return buf
    def hide_position(self):
        # Remove the current-position mark, if any.
        if self.pos:
            buf = self.get_buffer()
            buf.delete_mark(self.pos)
            self.pos = None
    def set_position(self, pos):
        """Show execution position *pos* as a (path, line) pair, or clear it."""
        self.hide_position()
        if pos is None:
            # No position: show an empty scratch buffer.
            self.set_show_line_numbers(False)
            buf = source.Buffer()
            buf.filepath = None
            self.set_buffer(buf)
            self.emit('file-changed', None)
        else:
            self.set_show_line_numbers(True)
            path, line = pos
            buf = self.get_buffer(path)
            it = buf.get_iter_at_line(line)
            self.pos = buf.create_source_mark('pos', 'position', it)
            if buf != self.get_buffer():
                # Switching files: move the cursor and swap the buffer in.
                buf.place_cursor(it)
                self.set_buffer(buf)
                self.emit('file-changed', path)
            self.scroll_mark_onscreen(self.pos)
    def add_breakpoint(self, id, pos):
        # Mark breakpoint *id* at (path, line) with the 'breakpoint' category.
        path, line = pos
        buf = self.get_buffer(path)
        it = buf.get_iter_at_line(line)
        mark = buf.create_source_mark(id, 'breakpoint', it)
        self.breakpoints[id] = mark
    def del_breakpoint(self, id):
        # Remove the mark for breakpoint *id* from whichever buffer owns it.
        mark = self.breakpoints[id]
        buf = mark.get_buffer()
        buf.delete_mark(mark)
    def goto(self, pos):
        """Jump the cursor to (path, line) without moving the position mark."""
        self.set_show_line_numbers(True)
        path, line = pos
        buf = self.get_buffer(path)
        if buf != self.get_buffer():
            self.set_buffer(buf)
            self.emit('file-changed', path)
        it = buf.get_iter_at_line(line)
        buf.place_cursor(it)
        mark = buf.get_mark('insert')
        self.scroll_mark_onscreen(mark)
def parse_value(text):
    """Parse a GDB/MI value string into Python data.

    GDB/MI writes results like ``bkpt={number="1",line="10"}`` where keys
    are bare ``name=`` tokens.  Each ``name=`` is rewritten into a quoted
    ``"name": `` key so the whole string becomes a Python literal that
    ``ast.literal_eval`` can evaluate safely.

    (Parameter renamed from ``str``, which shadowed the builtin; callers
    pass it positionally.)
    """
    text = re.sub(r'([\w-]+)=', r'"\1": ', text)
    return ast.literal_eval(text)
class GdbResponse:
    """One line of GDB/MI output split into an event tag plus parsed data.

    The first character of the line selects the record type: '(' prompt,
    '~'/'@'/'&' stream output, '^' command result, '*'/'=' async records;
    anything else is passed through raw.
    """
    def __init__(self, output):
        marker = output[0]
        self.event = marker
        if marker == '(':
            # Prompt line: keep the raw text.
            self.data = output
        elif marker in ('~', '@', '&'):
            # Stream records carry a quoted payload after the marker.
            self.data = parse_value(output[1:])
        elif marker in ('^', '*', '='):
            # Result/async records: "<event>,<key=val,...>".
            self.__parse_result(output)
        else:
            self.data = output[1:]

    def __parse_result(self, output):
        output = output.rstrip()
        head, comma, rest = output.partition(',')
        if comma:
            self.event = head
            self.data = parse_value('{' + rest + '}')
        else:
            # Bare result such as "^done" — no payload.
            self.event = output
            self.data = None
class GdbCommand:
    """A single GDB/MI command line plus its completion callbacks.

    handle_ok / handle_error are filled in by the caller; the dispatcher
    invokes them when the command's '^' result record arrives.
    """
    def __init__(self, *args):
        # The words are joined into the literal line written to gdb's stdin.
        self.cmd = ' '.join(args)
        self.handle_ok = None
        self.handle_error = None
class GdbDispatcher:
    """Serializes GDB/MI commands over a gdb subprocess.

    Commands are queued and written one at a time; output lines arriving on
    gdb's stdout (watched via the glib main loop) are parsed and routed to
    the pending command's callbacks, or to ``handle_event`` for records not
    tied to the pending command.  A command is considered finished once both
    its '^' result record and the following '(gdb)' prompt have been seen.
    """
    def __init__(self):
        self.commands = []
        # Pre-completed dummy command: it absorbs gdb's start-up '(gdb)'
        # prompt, whose arrival then dispatches the first real command.
        self.pending = GdbCommand('dummy')
        self.pending.prompted = False
        self.pending.returned = True
        self.handle_event = None     # callback(event, data) for async records
        self.status_changed = None   # callback(is_working) for UI updates
        self.gdb = subprocess.Popen(
            ['gdb', '--interpreter=mi2'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE)
        # Non-blocking reads so the GUI main loop never stalls on gdb.
        set_non_blocking(self.gdb.stdout)
        gobject.io_add_watch(self.gdb.stdout, gobject.IO_IN, self.__read_gdb)
    def queue(self, command):
        """Enqueue a GdbCommand; runs immediately if gdb is idle."""
        self.commands.append(command)
        if not self.pending:
            self.__change_status(is_working=True)
            self.__exec_next()
    def is_working(self):
        # True while a command is in flight.
        return self.pending is not None
    def __change_status(self, is_working):
        if self.status_changed:
            self.status_changed(is_working)
    def __read_gdb(self, gdbout, condition):
        # glib watch callback: echo each gdb line to our stdout and parse it.
        output = gdbout.readline()
        if output:
            sys.stdout.write(output)
            self.__parse_response(output)
        return True
    def __parse_response(self, output):
        response = GdbResponse(output)
        if response.event == '(':
            # '(gdb)' prompt: half of the completion handshake.
            if self.pending:
                self.pending.prompted = True
        elif response.event[0] == '^':
            # Result record for the pending command.
            self.pending.returned = True
            if response.event == '^error':
                if self.pending.handle_error:
                    self.pending.handle_error(response.data['msg'])
            elif self.pending.handle_ok:
                self.pending.handle_ok(response.event, response.data)
        elif self.handle_event:
            # Async/stream record not tied to the pending command.
            self.handle_event(response.event, response.data)
        # Command is done once both prompt and result have arrived.
        if self.pending and self.pending.prompted and self.pending.returned:
            self.pending = None
            self.__exec_next()
    def __exec_next(self):
        # Pop and send the next queued command, or report idle.
        if len(self.commands) == 0:
            self.__change_status(is_working=False)
            return
        self.pending = self.commands[0]
        self.pending.prompted = False
        self.pending.returned = False
        self.commands = self.commands[1:]
        self.gdb.stdin.write(self.pending.cmd)
        self.gdb.stdin.write('\n')
        print '>>>', self.pending.cmd
def parse_breakpoints(data):
    # NOTE(review): stub — dumps the raw payload and always returns an empty
    # breakpoint table; no caller is visible in this file.
    print data
    return {}
class MyDebugger:
    """Main application: a gdb/MI front-end window.

    Owns the GdbDispatcher, the SourceView, a status bar showing both gdb
    activity and inferior state, and a vi-like ':' command line.
    """
    # Inferior (debugged program) lifecycle states.
    NOT_LOADED = 0
    TERMINATED = 1
    STOPPED = 2
    RUNNING = 3
    # Status-bar label text per state.
    STATUS_TEXT = {
        NOT_LOADED: 'not loaded',
        TERMINATED: 'not running',
        STOPPED: 'stopped',
        RUNNING: 'running',
    }
    # Status-bar stock icon per state.
    STATUS_ICON = {
        NOT_LOADED: gtk.STOCK_INFO,
        TERMINATED: gtk.STOCK_MEDIA_STOP,
        STOPPED: gtk.STOCK_MEDIA_PAUSE,
        RUNNING: gtk.STOCK_MEDIA_PLAY,
    }
    def __init__(self):
        self.watch_for_cmd = False
        self.status = MyDebugger.NOT_LOADED
        self.gdb = GdbDispatcher()
        self.gdb.handle_event = self.__gdb_event
        self.gdb.status_changed = self.__update_gdb_status
        # Top-level window: source view above status bar above command line.
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.connect('destroy', lambda w,d=None: gtk.main_quit())
        self.window.set_default_size(640, 480)
        box = gtk.VBox(False, 0)
        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        self.view = SourceView()
        self.view.modify_font(pango.FontDescription("monospace"))
        self.view.connect('file-changed', self.__file_changed)
        self.view.connect('key_press_event', self.key_pressed)
        scroll.add(self.view)
        box.pack_start(scroll, True, True, 2)
        # Status bar: gdb activity (icon+label) then program state (icon+label).
        self.statusbar = gtk.Statusbar()
        self.statusbar.set_spacing(2)
        self.gdb_icon = gtk.Image()
        self.gdb_label = gtk.Label()
        self.gdb_label.set_width_chars(10)
        self.gdb_label.set_single_line_mode(True)
        self.gdb_label.set_alignment(0, 0.5)
        self.prog_icon = gtk.Image()
        self.prog_label = gtk.Label()
        self.prog_label.set_width_chars(20)
        self.prog_label.set_single_line_mode(True)
        self.prog_label.set_alignment(0, 0.5)
        self.statusbar.pack_start(self.gdb_icon, False, False, 0)
        self.statusbar.pack_start(self.gdb_label, False, False, 0)
        self.statusbar.pack_start(self.prog_icon, False, False, 0)
        self.statusbar.pack_start(self.prog_label, False, False, 0)
        self.__update_gdb_status(self.gdb.is_working())
        self.__update_prog_status()
        # Periodic blink of the gdb "working" icon.
        gobject.timeout_add(500, self.__timeout500)
        box.pack_start(self.statusbar, False, False, 0)
        # Hidden ':' command line, shown on demand.
        self.cmdline = gtk.Entry()
        self.cmdline.connect('activate', self.cmd_enter)
        self.cmdline.connect('changed', self.cmd_changed)
        box.pack_start(self.cmdline, False, False, 0)
        self.window.add(box)
        self.window.show_all()
        self.cmdline.hide()
        self.view.grab_focus()
    def key_pressed(self, widget, event, data=None):
        """Global key bindings: run/step/breakpoint toggles and evaluation."""
        key = gtk.gdk.keyval_name(event.keyval)
        if key == 'colon':
            # ':' opens the command line.
            self.cmdline.show()
            self.cmdline.grab_focus()
            self.cmdline.set_text(':')
            self.watch_for_cmd = True
        elif key == 'r':
            self.run()
        elif key == 'n':
            self.cmd('-exec-next')
        elif key == 's':
            self.cmd('-exec-step')
        elif key == 'f':
            self.cmd('-exec-finish')
        elif key == 'space':
            # Toggle a breakpoint on the cursor line.
            buf = self.view.get_buffer()
            path = buf.filepath
            mark = buf.get_insert()
            it = buf.get_iter_at_mark(mark)
            line = it.get_line()
            marks = buf.get_source_marks_at_line(line, 'breakpoint')
            if marks:
                for mark in marks:
                    id = mark.get_name()
                    self.delete_breakpoint(id)
            else:
                # gdb lines are 1-based; buffer lines are 0-based.
                where = '%s:%d' % (path, line+1)
                self.place_breakpoint(where)
        elif key == 'c':
            self.cmd('-exec-continue')
        elif key == 'p' or key == 'P':
            # Evaluate the selection, or the symbol ('p') / call ('P')
            # under the cursor.
            if self.view.get_buffer().get_has_selection():
                b, e = self.view.get_buffer().get_selection_bounds()
                exp = b.get_slice(e)
            else:
                if key == 'p':
                    exp = self.view.get_buffer().get_symbol_under_cursor()
                else:
                    exp = self.view.get_buffer().get_call_under_cursor()
            if exp:
                self.cmd('-data-evaluate-expression', exp, ok=self.__print)
            else:
                self.__msg('no expression')
        elif key == 'P':
            # NOTE(review): unreachable — 'P' is consumed by the branch above.
            pass
        else:
            # Not one of ours: let the widget handle it.
            return False
        return True
    def cmd_enter(self, widget, data=None):
        self.cmdline_close()
    def cmd_changed(self, widget, data=None):
        # Close the command line once its text is erased.
        if not self.watch_for_cmd:
            return
        cmd = self.cmdline.get_text()
        print '"' + cmd + '"'
        if not cmd:
            self.cmdline_close()
    def cmdline_close(self):
        self.cmdline.hide()
        self.view.grab_focus()
        self.watch_for_cmd = False
    def main(self):
        """Enter the GTK main loop."""
        gtk.main()
    def set_executable(self, path):
        """Load *path* into gdb as the program to debug."""
        self.cmd('-file-exec-and-symbols', path, ok=self.__loaded)
    def place_breakpoint(self, where):
        self.cmd('-break-insert', where, ok=self.__breakpoint_set)
    def delete_breakpoint(self, id):
        # Remove the view mark only after gdb confirms the deletion.
        def clean_view(event, data):
            self.view.del_breakpoint(id)
        self.cmd('-break-delete', id, ok=clean_view)
    def cmd(self, *args, **kwargs):
        """Queue a GDB/MI command; ``ok=`` is the success callback."""
        cmd = GdbCommand(*args)
        cmd.handle_ok = kwargs.get('ok', None)
        cmd.handle_error = self.__gdb_error
        self.gdb.queue(cmd)
    def __gdb_error(self, msg):
        # Surface gdb '^error' messages in a modal dialog.
        dialog = gtk.MessageDialog(self.window, type=gtk.MESSAGE_ERROR,
                buttons=gtk.BUTTONS_CLOSE, message_format=msg)
        dialog.set_title('GDB error')
        dialog.run()
        dialog.destroy()
    def __loaded(self, event, data):
        # Executable loaded: reset state and drop a breakpoint on main.
        self.status = MyDebugger.TERMINATED
        self.__update_prog_status()
        self.first_breakpoint = True
        self.view.set_position(None)
        self.place_breakpoint('main')
    def __breakpoint_set(self, event, data):
        id = data['bkpt']['number']
        try:
            path = data['bkpt']['fullname']
        except KeyError:
            # No source location known — report the raw address.
            msg = 'breakpoint %s set at %s'
            self.__msg(msg % (id, data['bkpt']['addr']))
        else:
            line = int(data['bkpt']['line'])-1
            self.view.add_breakpoint(id, (path, line))
            if self.first_breakpoint:
                # Jump the view to the very first breakpoint (set on main).
                self.view.goto((path, line))
                self.first_breakpoint = False
            msg = 'breakpoint %s set at %s:%s'
            self.__msg(msg % (id, data['bkpt']['file'], data['bkpt']['line']))
    def __gdb_event(self, event, data):
        """Handle async records: track run state and execution position."""
        if event == '*stopped':
            if data['reason'] == 'exited':
                self.__msg('program exited with code %s' % data['exit-code'])
                self.status = MyDebugger.TERMINATED
            elif data['reason'] == 'exited-normally':
                self.__msg('program exited normally')
                self.status = MyDebugger.TERMINATED
            elif data['reason'].startswith('exited-normally'):
                # NOTE(review): unreachable after the exact match above —
                # presumably startswith('exited') was intended (to catch
                # e.g. 'exited-signalled'); confirm.
                self.__msg('program exited with reason %s' % data['reason'])
                self.status = MyDebugger.TERMINATED
            else:
                # Any other stop (breakpoint, step end, ...) — show position.
                self.status = MyDebugger.STOPPED
            self.__update_prog_status()
            try:
                path = data['frame']['fullname']
            except KeyError:
                self.view.set_position(None)
            else:
                line = int(data['frame']['line'])-1
                self.view.set_position((path, line))
        elif event == '*running':
            self.status = MyDebugger.RUNNING
            self.__update_prog_status()
            self.view.hide_position()
    def __file_changed(self, widget, path):
        # Reflect the displayed file in the window title.
        path = 'no source' if not path else path
        title = 'mydbg: %s' % path
        self.window.set_title(title)
    def __update_gdb_status(self, is_working):
        work_icon = gtk.STOCK_MEDIA_RECORD
        gdb_status = 'working' if is_working else 'ready'
        self.gdb_label.set_text(gdb_status)
        if is_working:
            self.gdb_icon.set_from_stock(work_icon, gtk.ICON_SIZE_BUTTON)
        else:
            self.gdb_icon.clear()
    def __update_prog_status(self):
        prog_status = MyDebugger.STATUS_TEXT[self.status]
        prog_icon = MyDebugger.STATUS_ICON[self.status]
        self.prog_label.set_text('program %s' % prog_status)
        self.prog_icon.set_from_stock(prog_icon, gtk.ICON_SIZE_BUTTON)
    def __timeout500(self):
        # Blink the gdb activity icon every 500ms while a command is in flight.
        work_icon = gtk.STOCK_MEDIA_RECORD
        if self.gdb.is_working():
            icon, size = self.gdb_icon.get_stock()
            if icon == work_icon:
                self.gdb_icon.clear()
            else:
                self.gdb_icon.set_from_stock(work_icon, gtk.ICON_SIZE_BUTTON)
        return True
    def __msg(self, msg):
        # Replace the current status-bar message.
        ctx = self.statusbar.get_context_id('message')
        self.statusbar.pop(ctx)
        self.statusbar.push(ctx, msg)
    def run(self):
        self.cmd('-exec-run', ok=self.__started)
    def __started(self, event, data):
        self.__msg('program started')
    def __print(self, event, data):
        # Show an evaluated expression's value in the status bar.
        self.__msg(data['value'])
if __name__ == '__main__':
    usage = 'usage: %prog [options] [executable]'
    parser = optparse.OptionParser(usage=usage)
    (options, args) = parser.parse_args()
    if len(args) > 1:
        # parser.error() prints the message and exits on its own; the old
        # sys.exit(1) that followed it was unreachable.
        parser.error('too many arguments')
    dbg = MyDebugger()
    if len(args) > 0:
        # Optional positional argument: the executable to load at startup.
        dbg.set_executable(args[0])
    dbg.main()
| {
"content_hash": "23864dbc9b78acc6ae051e0b5febf229",
"timestamp": "",
"source": "github",
"line_count": 532,
"max_line_length": 71,
"avg_line_length": 27.774436090225564,
"alnum_prop": 0.6650649702219816,
"repo_name": "nailgun/mydbg",
"id": "11d0945b8e7e821f3c494baabb49414c620db6bd",
"size": "14776",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mydbg.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17436"
}
],
"symlink_target": ""
} |
"""Tests for tf.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import operator
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
from tensorflow.python.util import compat
class VariablesTestCase(test.TestCase):
def testInitialization(self):
with self.test_session():
var0 = variables.Variable(0.0)
self.assertEqual("Variable:0", var0.name)
self.assertEqual([], var0.get_shape())
self.assertEqual([], var0.get_shape())
self.assertEqual([], var0.shape)
var1 = variables.Variable(1.1)
self.assertEqual("Variable_1:0", var1.name)
self.assertEqual([], var1.get_shape())
self.assertEqual([], var1.get_shape())
self.assertEqual([], var1.shape)
with self.assertRaisesOpError("Attempting to use uninitialized value"):
var0.eval()
with self.assertRaisesOpError("Attempting to use uninitialized value"):
var1.eval()
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var0.eval())
self.assertAllClose(1.1, var1.eval())
def testInitializationOrder(self):
with self.test_session():
rnd = variables.Variable(random_ops.random_uniform([3, 6]), name="rnd")
self.assertEqual("rnd:0", rnd.name)
self.assertEqual([3, 6], rnd.get_shape())
self.assertEqual([3, 6], rnd.get_shape())
self.assertEqual([3, 6], rnd.shape)
dep = variables.Variable(rnd.initialized_value(), name="dep")
self.assertEqual("dep:0", dep.name)
self.assertEqual([3, 6], dep.get_shape())
self.assertEqual([3, 6], dep.get_shape())
self.assertEqual([3, 6], dep.shape)
# Currently have to set the shape manually for Add.
added_val = rnd.initialized_value() + dep.initialized_value() + 2.0
added_val.set_shape(rnd.get_shape())
depdep = variables.Variable(added_val, name="depdep")
self.assertEqual("depdep:0", depdep.name)
self.assertEqual([3, 6], depdep.get_shape())
self.assertEqual([3, 6], depdep.get_shape())
self.assertEqual([3, 6], depdep.shape)
variables.global_variables_initializer().run()
self.assertAllClose(rnd.eval(), dep.eval())
self.assertAllClose(rnd.eval() + dep.eval() + 2.0, depdep.eval())
def testIterable(self):
with self.assertRaisesRegexp(TypeError, "not iterable"):
for _ in variables.Variable(0.0):
pass
with self.assertRaisesRegexp(TypeError, "not iterable"):
for _ in variables.Variable([0.0, 1.0]):
pass
def testAssignments(self):
with self.test_session():
var = variables.Variable(0.0)
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var.eval())
self.assertAllClose(1.0, plus_one.eval())
self.assertAllClose(1.0, var.eval())
self.assertAllClose(-1.0, minus_one.eval())
self.assertAllClose(-1.0, var.eval())
self.assertAllClose(4.0, four.eval())
self.assertAllClose(4.0, var.eval())
def testResourceAssignments(self):
with self.test_session(use_gpu=True):
var = resource_variable_ops.ResourceVariable(0.0)
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.assertAllClose(0.0, var.eval())
plus_one.eval()
self.assertAllClose(1.0, var.eval())
minus_one.eval()
self.assertAllClose(-1.0, var.eval())
four.eval()
self.assertAllClose(4.0, var.eval())
def testZeroSizeStringAssign(self):
with self.test_session() as sess:
array = variables.Variable(
initial_value=array_ops.zeros((0,), dtype=dtypes.string),
name="foo",
trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES])
sess.run(variables.local_variables_initializer())
old_value = array.value()
copy_op = array.assign(old_value)
self.assertEqual([], list(sess.run(copy_op)))
def _countUpToTest(self, dtype):
with self.test_session():
zero = constant_op.constant(0, dtype=dtype)
var = variables.Variable(zero)
count_up_to = var.count_up_to(3)
variables.global_variables_initializer().run()
self.assertEqual(0, var.eval())
self.assertEqual(0, count_up_to.eval())
self.assertEqual(1, var.eval())
self.assertEqual(1, count_up_to.eval())
self.assertEqual(2, var.eval())
self.assertEqual(2, count_up_to.eval())
self.assertEqual(3, var.eval())
with self.assertRaisesOpError("Reached limit of 3"):
count_up_to.eval()
self.assertEqual(3, var.eval())
with self.assertRaisesOpError("Reached limit of 3"):
count_up_to.eval()
self.assertEqual(3, var.eval())
  def testCountUpToInt32(self):
    # Exercise the shared count_up_to scenario with an int32 variable.
    self._countUpToTest(dtypes.int32)
  def testCountUpToInt64(self):
    # Exercise the shared count_up_to scenario with an int64 variable.
    self._countUpToTest(dtypes.int64)
  def testControlDepsNone(self):
    """Control dependencies attach to ordinary ops but not to variables."""
    with self.test_session():
      c = constant_op.constant(1.0)
      with ops.control_dependencies([c]):
        # d get the control dep.
        d = constant_op.constant(2.0)
        # variables do not.
        var_x = variables.Variable(2.0)
      self.assertEqual([c.op], d.op.control_inputs)
      # Neither the initializer nor the read ops pick up the dependency.
      self.assertEqual([], var_x.initializer.control_inputs)
      self.assertEqual([], var_x.value().op.control_inputs)
      self.assertEqual([], var_x._ref().op.control_inputs)  # pylint: disable=protected-access
  def testControlFlow(self):
    """Variables created inside cond branches stay independently usable.

    Even though v1/v2 are created inside cond clauses that reference v0,
    they can be initialized and read without initializing v0.
    """
    with self.test_session() as sess:
      v0 = variables.Variable(0, name="v0")
      var_dict = {}
      # Call get_variable in each of the cond clauses.
      def var_in_then_clause():
        v1 = variables.Variable(1, name="v1")
        var_dict["v1"] = v1
        return v1 + v0
      def var_in_else_clause():
        v2 = variables.Variable(2, name="v2")
        var_dict["v2"] = v2
        return v2 + v0
      add = control_flow_ops.cond(
          math_ops.less(v0, 10), var_in_then_clause, var_in_else_clause)
      # Both closures ran during graph construction, so both entries exist.
      v1 = var_dict["v1"]
      v2 = var_dict["v2"]
      # We should be able to initialize and run v1 and v2 without initializing
      # v0, even if the variable was created with a control dep on v0.
      sess.run(v1.initializer)
      self.assertEqual([1], sess.run(v1))
      sess.run(v2.initializer)
      self.assertEqual([2], sess.run(v2))
      # v0 should still be uninitialized.
      with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"):
        sess.run(v0)
      # We should not be able to run 'add' yet.
      with self.assertRaisesRegexp(errors_impl.OpError, "uninitialized"):
        sess.run(add)
      # If we initialize v0 we should be able to run 'add'.
      sess.run(v0.initializer)
      sess.run(add)
def testControlFlowInitialization(self):
"""Expects an error if an initializer is in a control-flow scope."""
def cond(i, _):
return i < 10
def body(i, _):
zero = array_ops.zeros([], dtype=dtypes.int32)
v = variables.Variable(initial_value=zero)
return (i + 1, v.read_value())
with self.assertRaisesRegexp(ValueError, "inside a control-flow"):
control_flow_ops.while_loop(cond, body, [0, 0])
def testUseVariableAsTensor(self):
with self.test_session():
var_x = variables.Variable(2.0)
var_y = variables.Variable(3.0)
variables.global_variables_initializer().run()
self.assertAllClose(2.0, var_x.eval())
self.assertAllClose(3.0, var_y.eval())
self.assertAllClose(5.0, math_ops.add(var_x, var_y).eval())
def testZeroSizeVarSameAsConst(self):
with self.test_session():
zero_size_var = variables.Variable(array_ops.zeros([0, 2]))
zero_size_const = array_ops.ones([2, 0])
variable_mul = math_ops.matmul(zero_size_const, zero_size_var)
const_mul = math_ops.matmul(
zero_size_const, zero_size_const, transpose_b=True)
variables.global_variables_initializer().run()
variable_output = variable_mul.eval()
self.assertAllClose(const_mul.eval(), variable_output)
self.assertAllClose([[0., 0.], [0., 0.]], variable_output)
  def testCachingDevice(self):
    """caching_device redirects reads while the variable stays put."""
    with self.test_session():
      var = variables.Variable(2.0)
      # Without a caching device, reads share the variable's device.
      self.assertEqual(var.device, var.value().device)
      self.assertEqual(var.device, var.initialized_value().device)
      var_cached = variables.Variable(2.0, caching_device="/job:foo")
      # The variable itself is NOT placed on the caching device...
      self.assertFalse(var_cached.device.startswith("/job:foo"))
      # ...but its read (value) op is.
      self.assertTrue(var_cached.value().device.startswith("/job:foo"))
def testCollections(self):
with self.test_session():
var_x = variables.Variable(2.0)
var_y = variables.Variable(2.0, trainable=False)
var_z = variables.Variable(2.0, trainable=True)
var_t = variables.Variable(
2.0,
trainable=True,
collections=[
ops.GraphKeys.TRAINABLE_VARIABLES, ops.GraphKeys.GLOBAL_VARIABLES
])
self.assertEqual([var_x, var_y, var_z, var_t],
variables.global_variables())
self.assertEqual([var_x, var_z, var_t], variables.trainable_variables())
def testCollectionsWithScope(self):
with self.test_session():
with ops.name_scope("scope_1"):
var_x = variables.Variable(2.0)
with ops.name_scope("scope_2"):
var_y = variables.Variable(2.0)
self.assertEqual([var_x, var_y], variables.global_variables())
self.assertEqual([var_x], variables.global_variables("scope_1"))
self.assertEqual([var_y], variables.global_variables("scope_2"))
self.assertEqual([var_x, var_y], variables.trainable_variables())
self.assertEqual([var_x], variables.trainable_variables("scope_1"))
self.assertEqual([var_y], variables.trainable_variables("scope_2"))
  def testOperators(self):
    """Python operators on Variables build ops that match plain arithmetic."""
    with self.test_session():
      # Float variable: arithmetic, comparisons, unary ops, slicing, matmul.
      var_f = variables.Variable([2.0])
      add = var_f + 0.0
      radd = 1.0 + var_f
      sub = var_f - 1.0
      rsub = 1.0 - var_f
      mul = var_f * 10.0
      rmul = 10.0 * var_f
      div = var_f / 10.0
      rdiv = 10.0 / var_f
      lt = var_f < 3.0
      rlt = 3.0 < var_f
      le = var_f <= 2.0
      rle = 2.0 <= var_f
      gt = var_f > 3.0
      rgt = 3.0 > var_f
      ge = var_f >= 2.0
      rge = 2.0 >= var_f
      neg = -var_f
      abs_v = abs(var_f)
      # Integer variable: modulo in both operand orders.
      var_i = variables.Variable([20])
      mod = var_i % 7
      rmod = 103 % var_i
      # Boolean variable: bitwise logical operators.
      var_b = variables.Variable([True, False])
      and_v = operator.and_(var_b, [True, True])
      or_v = operator.or_(var_b, [False, True])
      xor_v = operator.xor(var_b, [False, False])
      invert_v = ~var_b
      # Slicing a matrix variable (empty slice along the second axis).
      rnd = np.random.rand(4, 4).astype("f")
      var_t = variables.Variable(rnd)
      slice_v = var_t[2, 0:0]
      # Matrix-multiplication dunders in both orders.
      var_m = variables.Variable([[2.0, 3.0]])
      matmul = var_m.__matmul__([[10.0], [20.0]])
      rmatmul = var_m.__rmatmul__([[10.0], [20.0]])
      variables.global_variables_initializer().run()
      # Evaluate everything against the hand-computed expected values.
      self.assertAllClose([2.0], add.eval())
      self.assertAllClose([3.0], radd.eval())
      self.assertAllClose([1.0], sub.eval())
      self.assertAllClose([-1.0], rsub.eval())
      self.assertAllClose([20.0], mul.eval())
      self.assertAllClose([20.0], rmul.eval())
      self.assertAllClose([0.2], div.eval())
      self.assertAllClose([5.0], rdiv.eval())
      self.assertAllClose([-2.0], neg.eval())
      self.assertAllClose([2.0], abs_v.eval())
      self.assertAllClose([True], lt.eval())
      self.assertAllClose([False], rlt.eval())
      self.assertAllClose([True], le.eval())
      self.assertAllClose([True], rle.eval())
      self.assertAllClose([False], gt.eval())
      self.assertAllClose([True], rgt.eval())
      self.assertAllClose([True], ge.eval())
      self.assertAllClose([True], rge.eval())
      self.assertAllClose([6], mod.eval())
      self.assertAllClose([3], rmod.eval())
      self.assertAllClose([True, False], and_v.eval())
      self.assertAllClose([True, True], or_v.eval())
      self.assertAllClose([True, False], xor_v.eval())
      self.assertAllClose([False, True], invert_v.eval())
      self.assertAllClose(rnd[2, 0:0], slice_v.eval())
      self.assertAllClose([[80.0]], matmul.eval())
      self.assertAllClose([[20.0, 30.0], [40.0, 60.0]], rmatmul.eval())
def testSession(self):
with self.test_session() as sess:
var = variables.Variable([1, 12])
variables.global_variables_initializer().run()
self.assertAllClose([1, 12], sess.run(var))
def testDevicePlacement(self):
with self.test_session() as sess:
with ops.device("/cpu:0"):
var = variables.Variable([1, 12])
init_value = var.initialized_value()
init_op = variables.global_variables_initializer()
self.assertEqual(var.op.device, init_value.device)
self.assertEqual(var.op.device, init_op.device)
sess.run(init_op)
def testColocation(self):
with ops.device("/job:ps"):
var = variables.Variable(0, name="v")
with ops.device("/job:worker/task:7"):
assign_op = var.assign(1)
self.assertDeviceEqual("/job:ps", assign_op.device)
self.assertEqual([b"loc:@v"], assign_op.op.colocation_groups())
  def testInitializerFunction(self):
    """A callable initial_value defers evaluation until initialization."""
    value = [[-42], [133.7]]
    shape = [2, 1]
    with self.test_session():
      initializer = lambda: constant_op.constant(value)
      v1 = variables.Variable(initializer, dtype=dtypes.float32)
      # The shape is inferred from the initializer's output.
      self.assertEqual(shape, v1.get_shape())
      self.assertEqual(shape, v1.shape)
      self.assertAllClose(value, v1.initial_value.eval())
      # Reading before running the initializer must fail.
      with self.assertRaises(errors_impl.FailedPreconditionError):
        v1.eval()
      # v2's initial value depends on v1 via initialized_value().
      v2 = variables.Variable(
          math_ops.negative(v1.initialized_value()), dtype=dtypes.float32)
      self.assertEqual(v1.get_shape(), v2.get_shape())
      self.assertEqual(v1.shape, v2.shape)
      self.assertAllClose(np.negative(value), v2.initial_value.eval())
      with self.assertRaises(errors_impl.FailedPreconditionError):
        v2.eval()
      variables.global_variables_initializer().run()
      self.assertAllClose(np.negative(value), v2.eval())
def testConstraintArg(self):
constraint = lambda x: x
v = variables.Variable(
lambda: constant_op.constant(1.),
constraint=constraint)
self.assertEqual(v.constraint, constraint)
constraint = 0
with self.assertRaises(ValueError):
v = variables.Variable(
lambda: constant_op.constant(1.),
constraint=constraint)
def testNoRefDataRace(self):
with self.test_session():
a = variables.Variable([1, 2, 3], dtype=dtypes.float32)
b = variables.Variable(a.initialized_value() + 2)
c = variables.Variable(b.initialized_value() + 2)
variables.global_variables_initializer().run()
self.assertAllEqual(a.eval(), [1, 2, 3])
self.assertAllEqual(b.eval(), [3, 4, 5])
self.assertAllEqual(c.eval(), [5, 6, 7])
  def testInitializerFunctionDevicePlacement(self):
    """Initializer ops are colocated with their variable, device scope or not."""
    with self.test_session():
      initializer = lambda: constant_op.constant(42.0)
      with ops.device("/cpu:100"):
        v1 = variables.Variable(initializer, dtype=dtypes.float32, name="v1")
      expected_device = "/device:CPU:100"
      expected_group_v1 = [b"loc:@v1"]
      self.assertEqual(expected_device, v1.op.device)
      self.assertEqual(expected_group_v1, v1.op.colocation_groups())
      # Every input to the initializer shares the variable's colocation group.
      for i in v1.initializer.inputs:
        self.assertEqual(expected_group_v1, i.op.colocation_groups())
      # Same check without an explicit device scope.
      v2 = variables.Variable(initializer, dtype=dtypes.float32, name="v2")
      expected_group_v2 = [b"loc:@v2"]
      self.assertEqual(expected_group_v2, v2.op.colocation_groups())
      for i in v2.initializer.inputs:
        self.assertEqual(expected_group_v2, i.op.colocation_groups())
def testLoad(self):
with self.test_session():
var = variables.Variable(np.zeros((5, 5), np.float32))
variables.global_variables_initializer().run()
var.load(np.ones((5, 5), np.float32))
self.assertAllClose(np.ones((5, 5), np.float32), var.eval())
def testRepr(self):
var = variables.Variable(np.zeros((5, 5), np.float32), name='noop')
self.assertEqual(
"<tf.Variable 'noop:0' shape=(5, 5) dtype=float32_ref>",
repr(var))
class IsInitializedTest(test.TestCase):
  """Tests for variables.report_uninitialized_variables()."""

  def testNoVars(self):
    # With no variables in the graph, the report is empty.
    with ops.Graph().as_default(), self.test_session() as sess:
      uninited = variables.report_uninitialized_variables()
      self.assertEqual(0, sess.run(uninited).size)

  def testAssertVariablesInitialized(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      v = variables.Variable([1, 2], name="v")
      w = variables.Variable([3, 4], name="w")
      _ = v, w
      uninited = variables.report_uninitialized_variables()
      # Before initialization both variable names are reported.
      self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited))
      variables.global_variables_initializer().run()
      self.assertEqual(0, sess.run(uninited).size)

  def testVariableList(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      v = variables.Variable([1, 2], name="v")
      w = variables.Variable([3, 4], name="w")
      uninited = variables.report_uninitialized_variables()
      self.assertAllEqual(np.array([b"v", b"w"]), sess.run(uninited))
      # Initializing variables one at a time shrinks the report accordingly.
      sess.run(w.initializer)
      self.assertAllEqual(np.array([b"v"]), sess.run(uninited))
      v.initializer.run()
      self.assertEqual(0, sess.run(uninited).size)

  def testZeroSizeVarInitialized(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      v = variables.Variable(array_ops.zeros([0, 2]), name="v")
      uninited = variables.report_uninitialized_variables()
      v.initializer.run()  # not strictly necessary
      self.assertEqual(0, sess.run(uninited).size)

  def testTrainingWithZeroSizeVar(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      a = variables.Variable(array_ops.zeros([0, 2]))
      b = variables.Variable(array_ops.ones([2, 2]))
      objective = math_ops.reduce_sum(b + math_ops.matmul(
          a, a, transpose_a=True))
      variables.global_variables_initializer().run()
      do_opt = gradient_descent.GradientDescentOptimizer(0.1).minimize(
          objective)
      sess.run([do_opt])
      # One gradient step of 0.1 on sum(b) moves each entry from 1.0 to 0.9.
      self.assertAllClose([[0.9, 0.9], [0.9, 0.9]], b.eval())
class ObsoleteIsInitializedTest(test.TestCase):
  """Tests for the older variables.assert_variables_initialized() API."""

  def testNoVars(self):
    # With no variables there is nothing to assert; the op is None.
    with ops.Graph().as_default():
      self.assertEqual(None, variables.assert_variables_initialized())

  def testVariables(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      v = variables.Variable([1, 2])
      w = variables.Variable([3, 4])
      _ = v, w
      inited = variables.assert_variables_initialized()
      # Running the assertion before init raises an op error.
      with self.assertRaisesOpError("Attempting to use uninitialized value"):
        sess.run(inited)
      variables.global_variables_initializer().run()
      sess.run(inited)

  def testVariableList(self):
    with ops.Graph().as_default(), self.test_session() as sess:
      v = variables.Variable([1, 2])
      w = variables.Variable([3, 4])
      # Assert only on v; initializing w alone must not satisfy it.
      inited = variables.assert_variables_initialized([v])
      with self.assertRaisesOpError("Attempting to use uninitialized value"):
        inited.op.run()
      sess.run(w.initializer)
      with self.assertRaisesOpError("Attempting to use uninitialized value"):
        inited.op.run()
      v.initializer.run()
      inited.op.run()
class PartitionedVariableTest(test.TestCase):
  """Tests for variables.PartitionedVariable construction and validation."""

  def testPartitionedVariable(self):
    with ops.Graph().as_default():
      v0 = variables.Variable([0])
      v1 = variables.Variable([1])
      # Both slices carry the same full name (v0.name): they are halves of
      # one logical [2]-shaped variable, at offsets 0 and 1.
      v0._set_save_slice_info(
          variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
      v1._set_save_slice_info(
          variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1]))
      partitions = [2]
      # Pass variable_list as [v1, v0] to ensure they are properly
      # re-sorted to [v0, v1] based on their slice info offsets.
      partitioned_variable = variables.PartitionedVariable(
          name="two_vars",
          shape=[2],
          dtype=v0.dtype,
          variable_list=[v1, v0],
          partitions=partitions)
      # Converting to a tensor concatenates the slices in offset order.
      concatenated = ops.convert_to_tensor(partitioned_variable)
      num_partitions = len(partitioned_variable)
      iterated_partitions = list(partitioned_variable)
      self.assertEqual(2, num_partitions)
      self.assertEqual([v0, v1], iterated_partitions)
      self.assertEqual([2], concatenated.get_shape())
      self.assertEqual([2], concatenated.shape)

  def testPartitionedVariableFailures(self):
    """Each invalid construction must raise ValueError with a clear message."""
    with ops.Graph().as_default():
      # An empty variable list is rejected.
      with self.assertRaisesRegexp(ValueError, "empty"):
        variables.PartitionedVariable(
            name="fail",
            shape=2,
            dtype=dtypes.int32,
            variable_list=[],
            partitions=[])
      # Every member variable must carry slice info.
      with self.assertRaisesRegexp(ValueError, "must have a save_slice_info"):
        v0 = variables.Variable([0])
        partitions = [1]
        variables.PartitionedVariable(
            name="two_vars",
            shape=[1],
            dtype=v0.dtype,
            variable_list=[v0],
            partitions=partitions)
      # The declared shape must match the slices' full shape ([2] vs [3]).
      with self.assertRaisesRegexp(ValueError, "full shapes must match"):
        v0 = variables.Variable([0])
        v1 = variables.Variable([1])
        v0._set_save_slice_info(
            variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
        v1._set_save_slice_info(
            variables.Variable.SaveSliceInfo(v0.name, [2], [1], [1]))
        partitions = [2]
        variables.PartitionedVariable(
            name="two_vars",
            shape=[3],
            dtype=v0.dtype,
            variable_list=[v1, v0],
            partitions=partitions)
      # Partition counts must be positive.
      with self.assertRaisesRegexp(ValueError, "must be positive"):
        v0 = variables.Variable([0])
        v0._set_save_slice_info(
            variables.Variable.SaveSliceInfo(v0.name, [2], [0], [1]))
        partitions = [0]
        variables.PartitionedVariable(
            name="two_vars",
            shape=[2],
            dtype=v0.dtype,
            variable_list=[v0],
            partitions=partitions)
class VariableContainerTest(test.TestCase):
  """Tests that ops.container() scopes set the variable 'container' attr."""

  def testContainer(self):
    with ops.Graph().as_default():
      v0 = variables.Variable([0])
      with ops.container("l1"):
        v1 = variables.Variable([1])
        with ops.container("l2"):
          v2 = variables.Variable([2])
          # A container given directly to the op overrides the scope ("l2").
          special_v = gen_state_ops._variable(
              shape=[1],
              dtype=dtypes.float32,
              name="VariableInL3",
              container="l3",
              shared_name="")
        # Back in the "l1" scope after the inner block exits.
        v3 = variables.Variable([3])
      # Outside all container scopes again.
      v4 = variables.Variable([4])
      self.assertEqual(compat.as_bytes(""), v0.op.get_attr("container"))
      self.assertEqual(compat.as_bytes("l1"), v1.op.get_attr("container"))
      self.assertEqual(compat.as_bytes("l2"), v2.op.get_attr("container"))
      self.assertEqual(compat.as_bytes("l3"), special_v.op.get_attr("container"))
      self.assertEqual(compat.as_bytes("l1"), v3.op.get_attr("container"))
      self.assertEqual(compat.as_bytes(""), v4.op.get_attr("container"))
if __name__ == "__main__":
  # Run all test cases in this module under the TensorFlow test runner.
  test.main()
| {
"content_hash": "194becf91fd0d21647d2c6fb7245e1a8",
"timestamp": "",
"source": "github",
"line_count": 655,
"max_line_length": 94,
"avg_line_length": 36.5587786259542,
"alnum_prop": 0.6347197861855842,
"repo_name": "mavenlin/tensorflow",
"id": "12421c1dcc351939ce8f85b6597705bbb7f6a055",
"size": "24635",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/variables_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7666"
},
{
"name": "C",
"bytes": "193501"
},
{
"name": "C++",
"bytes": "28519915"
},
{
"name": "CMake",
"bytes": "636307"
},
{
"name": "Go",
"bytes": "946452"
},
{
"name": "Java",
"bytes": "403360"
},
{
"name": "Jupyter Notebook",
"bytes": "1833674"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "38060"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "6715"
},
{
"name": "Protocol Buffer",
"bytes": "261095"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "25109562"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "371205"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from my_app.models import Article
# Register your models here.
@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    """Admin options for Article: list view shows title and both timestamps."""
    # Columns displayed in the change-list page.
    list_display = ('title', 'pub_date', 'update_time')
| {
"content_hash": "c056a1a89a1e7dba53645711d668be75",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 56,
"avg_line_length": 23.8,
"alnum_prop": 0.7521008403361344,
"repo_name": "wmh-demos/django-first-demo",
"id": "ee5d39ab5ef64d95d7bc0bc4c63bd274a2a4b25a",
"size": "238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "my_app/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "146"
},
{
"name": "Python",
"bytes": "6040"
}
],
"symlink_target": ""
} |
"""
This module defines constant variables used by the GUI. These variable should
never be modified by user.
Variables defining the style or theme of the GUI can be found in:
- style.py
- painterstyle.py
"""
import pygame.event as pygame_event
from pygame.font import get_fonts
from pygame.locals import USEREVENT, QUIT
try:
import numpy
HAS_NUMPY = True
except ImportError:
HAS_NUMPY = False
try:
import PIL
HAS_PIL = True
except ImportError:
HAS_PIL = False
CAN_SHADOWS = HAS_NUMPY and HAS_PIL
# fonts
AVAILABLE_FONTS = sorted(get_fonts())
# pygame events
EVENT_QUIT = pygame_event.Event(QUIT)
THORPY_EVENT = USEREVENT
# events types : these are the names of thorpy events.
# A ThorPy event has an attribute name, and the numbers below are these attributes, not pygame events!!!
# However, due to the working principle of Reactions, we follow the numbers after pygame.USEREVENT
EVENT_TIME = 1
EVENT_PRESS = 2 # posted when an element enter state pressed
# posted when sth has been inserted into an Inserter
EVENT_INSERT = 3
EVENT_SELECT = 4 # posted when sth has been selected into a DDL
# posted when mousewheel has been used on an element that handles it
EVENT_WHEEL = 5
EVENT_SLIDE = 6 # posted when a slider's dragger has been slided
EVENT_DONE = 7 # posted when a "Done" button has been clicked
EVENT_UNPRESS = 8
EVENT_HOVER = 9
EVENT_CANCEL = 10
EVENT_CHANGE_STATE = 11
# => post EVENT_PRESS. reacts to parameters.BUTTON_PRESS_EVENT
REAC_PRESSED = 0
# => post EVENT_UNPRESS. reacts to parameters.BUTTON_UNPRESS_EVENT
REAC_UNPRESS = 1
REAC_HOVER = 2 # => post EVENT_HOVER. reacts to pygame.MOUSEMOTION
REAC_TIME = 3 # => post nothing. Usually reacts to EVENT_TIME event
REAC_KEYPRESS = 4 # => post nothing. reacts to parameters.KEY_PRESS_EVENT
REAC_MOTION = 5 # => post nothing. reacts to pygame.MOUSEMOTION
REAC_SLIDER_PRESS = 7
REAC_SLIDER_UNPRESS = 8
REAC_SELECT = 9
REAC_PRESS_DDL = 10
REAC_WHEELUP = 11
REAC_WHEELDOWN = 12
REAC_HELP = 13
REAC_CLICKQUIT = 14
REAC_MOUSE_REPEAT = 15
REAC_PRESSED2 = 16
REAC_SLIDER_PRESS2 = 17
REAC_SLIDER_UNPRESS2 = 18
REAC_RIGHT_CLICK = 19
REAC_CHANGE_STATE = 20
REAC_USER = 100
# _states
STATE_NORMAL = 0 # should always stay 0 for compatibility reasons
STATE_PRESSED = 1
# colors
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
PINK = (255, 0, 255)
LIGHTBLUE = (150, 150, 255)
# colors (gray levels)
WHITE = (255, 255, 255)
ULTRABRIGHT = (240, 240, 240)
BRIGHT = (220, 220, 220)
BRAY = (200, 200, 200)
GRAY = (150, 150, 150)
MID = (127, 127, 127)
DARK = (50, 50, 50)
BLACK = (0, 0, 0)
# alphacolors
TRANSPARENT = (0, 0, 0, 0)
# cursors
CURSOR_NORMAL = 0
CURSOR_TEXT = 1
CURSOR_BROKEN = 2
CURSOR_BALL = 3
SYNTAX_BEG = "#SB"
SYNTAX_END = "#SE"
SYNTAX_FIRST = False
SYNTAX_LAST = False
DEFAULT_RANK = 0 | {
"content_hash": "41c9bd3cc6967d4f774ffdbea38757ef",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 104,
"avg_line_length": 26.174311926605505,
"alnum_prop": 0.706975113915177,
"repo_name": "YannThorimbert/Thorpy-1.4",
"id": "720cdba1c9b4b38aa99de955b0a4d7eb7df7f5b1",
"size": "2853",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "thorpy/miscgui/constants.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "388158"
}
],
"symlink_target": ""
} |
"""
Preprocess
----------
"""
import numpy as np
import sklearn.preprocessing as skl_preprocessing
import bottleneck as bn
import Orange.data
from Orange.data import Table
from . import impute, discretize
from ..misc.enum import Enum
__all__ = ["Continuize", "Discretize", "Impute", "SklImpute",
"Normalize", "Randomize", "RemoveNaNClasses",
"ProjectPCA", "ProjectCUR"]
class Preprocess:
    """
    A generic preprocessor class. All preprocessors need to inherit this
    class. Preprocessors can be instantiated without the data set to return
    data preprocessor, or can be given a data set to return the preprocessed
    data.
    Parameters
    ----------
    data : a data table (default=None)
        An optional data set to be preprocessed.
    """
    def __new__(cls, data=None, *args, **kwargs):
        self = super().__new__(cls)
        if isinstance(data, Orange.data.Storage):
            # Called with data: run __init__ on the remaining arguments and
            # return the preprocessed table directly. Python does not invoke
            # __init__ again, because the returned object is not an instance
            # of cls.
            self.__init__(*args, **kwargs)
            return self(data)
        else:
            # No data (or a non-Storage first argument): return the bare
            # instance; Python then calls __init__ with the original
            # arguments as usual.
            return self

    def __call__(self, data):
        # Subclasses implement the actual transformation here.
        raise NotImplementedError("Subclasses need to implement __call__")
class Continuize(Preprocess):
    """Preprocessor that continuizes discrete attributes in a data table."""
    MultinomialTreatment = Enum(
        "Indicators", "FirstAsBase", "FrequentAsBase",
        "Remove", "RemoveMultinomial", "ReportError", "AsOrdinal",
        "AsNormalizedOrdinal", "Leave"
    )
    (Indicators, FirstAsBase, FrequentAsBase, Remove, RemoveMultinomial,
     ReportError, AsOrdinal, AsNormalizedOrdinal, Leave) = MultinomialTreatment

    def __init__(self, zero_based=True, multinomial_treatment=Indicators):
        self.zero_based = zero_based
        self.multinomial_treatment = multinomial_treatment

    def __call__(self, data):
        from . import continuize
        # Build the continuized domain and convert the data into it.
        new_domain = continuize.DomainContinuizer(
            zero_based=self.zero_based,
            multinomial_treatment=self.multinomial_treatment)(data)
        return data.from_table(new_domain, data)
class Discretize(Preprocess):
    """
    Construct a discretizer, a preprocessor for discretization of
    continuous features.
    Parameters
    ----------
    method : discretization method (default: Orange.preprocess.discretize.Discretization)
    remove_const : bool (default=True)
        Determines whether the features with constant values are removed
        during discretization.
    """
    def __init__(self, method=None, remove_const=True,
                 discretize_classes=False, discretize_metas=False):
        self.method = method
        self.remove_const = remove_const
        self.discretize_classes = discretize_classes
        self.discretize_metas = discretize_metas

    def __call__(self, data):
        """
        Compute and apply discretization of the given data. Returns a new
        data table.
        Parameters
        ----------
        data : Orange.data.Table
            A data table to be discretized.
        """
        def transform(var):
            # Continuous variables are replaced by their discretized
            # counterparts; `method` is bound below, before any call happens.
            if var.is_continuous:
                new_var = method(data, var)
                if new_var is not None and \
                        (len(new_var.values) >= 2 or not self.remove_const):
                    return new_var
                else:
                    # None: discretization failed, or the result is constant
                    # and remove_const asked us to drop it.
                    return None
            else:
                # Non-continuous variables pass through unchanged.
                return var

        def discretized(vars, do_discretize):
            # Transform lazily, then drop the variables mapped to None.
            if do_discretize:
                vars = (transform(var) for var in vars)
                vars = [var for var in vars if var is not None]
            return vars

        method = self.method or discretize.EqualFreq()
        domain = Orange.data.Domain(
            discretized(data.domain.attributes, True),
            discretized(data.domain.class_vars, self.discretize_classes),
            discretized(data.domain.metas, self.discretize_metas))
        return data.from_table(domain, data)
class Impute(Preprocess):
    """
    Construct a imputer, a preprocessor for imputation of missing values in
    the data table.
    Parameters
    ----------
    method : imputation method (default: Orange.preprocess.impute.Average())
    """
    def __init__(self, method=None):
        # Fix: the previous default, `Orange.preprocess.impute.Average()`,
        # was created once at class-definition time and shared by every
        # Impute constructed without an explicit method (the classic
        # evaluated-at-def-time default-argument pitfall). `__call__`
        # already falls back to a fresh Average() when method is falsy,
        # so the observable behavior is unchanged.
        self.method = method

    def __call__(self, data):
        """
        Apply an imputation method to the given data set. Returns a new
        data table with missing values replaced by their imputations.
        Parameters
        ----------
        data : Orange.data.Table
            An input data table.
        """
        method = self.method or impute.Average()
        # Replace each attribute with its imputed counterpart.
        newattrs = [method(data, var) for var in data.domain.attributes]
        domain = Orange.data.Domain(
            newattrs, data.domain.class_vars, data.domain.metas)
        return data.from_table(domain, data)
class SklImpute(Preprocess):
    """Imputation backed by scikit-learn's Imputer (mean/median/most_frequent)."""
    __wraps__ = skl_preprocessing.Imputer

    def __init__(self, strategy='mean'):
        # Passed straight to sklearn.preprocessing.Imputer(strategy=...).
        self.strategy = strategy

    def __call__(self, data):
        from Orange.data.sql.table import SqlTable
        # SQL-backed tables cannot be fed to sklearn; fall back to the
        # pure-Orange imputer.
        if isinstance(data, SqlTable):
            return Impute()(data)
        # NOTE(review): the fitted imputer is stored on self, so calling the
        # same SklImpute on different tables overwrites this state.
        self.imputer = skl_preprocessing.Imputer(strategy=self.strategy)
        X = self.imputer.fit_transform(data.X)
        # Create new variables with appropriate `compute_value`, but
        # drop the ones which do not have valid `imputer.statistics_`
        # (i.e. all NaN columns). `sklearn.preprocessing.Imputer` already
        # drops them from the transformed X.
        features = [impute.Average()(data, var, value)
                    for var, value in zip(data.domain.attributes,
                                          self.imputer.statistics_)
                    if not np.isnan(value)]
        assert X.shape[1] == len(features)
        domain = Orange.data.Domain(features, data.domain.class_vars,
                                    data.domain.metas)
        new_data = Orange.data.Table(domain, X, data.Y, data.metas, W=data.W)
        new_data.attributes = getattr(data, 'attributes', {})
        return new_data
class RemoveConstant(Preprocess):
    """
    Construct a preprocessor that removes features with constant values
    from the data set.
    """
    def __call__(self, data):
        """
        Remove columns with constant values from the data set and return
        the resulting data table.
        Parameters
        ----------
        data : an input data set
        """
        # A column is kept when its NaN-ignoring min and max differ.
        varying = bn.nanmin(data.X, axis=0) != bn.nanmax(data.X, axis=0)
        kept = [attr
                for attr, keep in zip(data.domain.attributes, varying)
                if keep]
        domain = Orange.data.Domain(kept, data.domain.class_vars,
                                    data.domain.metas)
        return Orange.data.Table(domain, data)
class RemoveNaNClasses(Preprocess):
    """
    Construct preprocessor that removes examples with missing class
    from the data set.
    """
    def __call__(self, data):
        """
        Remove rows that contain NaN in any class variable from the data set
        and return the resulting data table.
        Parameters
        ----------
        data : an input data set
        Returns
        -------
        data : data set without rows with missing classes
        """
        if len(data.Y.shape) > 1:
            # Multiple class variables: a row is bad if any class is NaN.
            nan_cls = np.any(np.isnan(data.Y), axis=1)
        else:
            nan_cls = np.isnan(data.Y)
        # Keep the rows whose classes are all present. `~nan_cls` replaces
        # the non-idiomatic `nan_cls == False` comparison (identical result
        # for the boolean array np.isnan/np.any produce).
        return Table(data.domain, data, np.where(~nan_cls))
class Normalize(Preprocess):
    """
    Preprocessor that normalizes the continuous attributes of a data table.
    Parameters
    ----------
    zero_based : bool (default=True)
        Determines the value used as the "low" value of the variable.
        It determines the interval for normalized continuous variables
        (either [-1, 1] or [0, 1]).
    norm_type : NormTypes (default: Normalize.NormalizeBySD)
        Normalization type. With NormalizeBySD, values are standardized by
        subtracting the mean and dividing by the standard deviation
        (zero_based has no effect in this mode). With NormalizeBySpan,
        values are rescaled by subtracting the minimum and dividing by the
        span (max - min).
    transform_class : bool (default=False)
        If True the class is normalized as well.
    Examples
    --------
    >>> from Orange.data import Table
    >>> from Orange.preprocess import Normalize
    >>> data = Table("iris")
    >>> normalizer = Normalize(norm_type=Normalize.NormalizeBySpan)
    >>> normalized_data = normalizer(data)
    """
    NormTypes = Enum("NormalizeBySpan", "NormalizeBySD")
    (NormalizeBySpan, NormalizeBySD) = NormTypes

    def __init__(self,
                 zero_based=True,
                 norm_type=NormalizeBySD,
                 transform_class=False):
        self.zero_based = zero_based
        self.norm_type = norm_type
        self.transform_class = transform_class

    def __call__(self, data):
        """
        Normalize the given data and return the resulting new table.
        Parameters
        ----------
        data : Orange.data.Table
            A data table to be normalized.
        Returns
        -------
        data : Orange.data.Table
            Normalized data table.
        """
        from . import normalize
        # Build the normalizer from our settings and apply it in one go.
        return normalize.Normalizer(
            zero_based=self.zero_based,
            norm_type=self.norm_type,
            transform_class=self.transform_class)(data)
class Randomize(Preprocess):
    """
    Preprocessor that shuffles classes, attributes or metas of a data table.
    Parameters
    ----------
    rand_type : RandTypes (default: Randomize.RandomizeClasses)
        Which part to shuffle: RandomizeClasses shuffles the classes,
        RandomizeAttributes the attributes, RandomizeMetas the metas.
    Examples
    --------
    >>> from Orange.data import Table
    >>> from Orange.preprocess import Randomize
    >>> data = Table("iris")
    >>> randomizer = Randomize(Randomize.RandomizeClasses)
    >>> randomized_data = randomizer(data)
    """
    RandTypes = Enum("RandomizeClasses", "RandomizeAttributes",
                     "RandomizeMetas")
    (RandomizeClasses, RandomizeAttributes, RandomizeMetas) = RandTypes

    def __init__(self, rand_type=RandomizeClasses):
        self.rand_type = rand_type

    def __call__(self, data):
        """
        Return a new data table with the selected part shuffled.
        Parameters
        ----------
        data : Orange.data.Table
            A data table to be randomized.
        Returns
        -------
        data : Orange.data.Table
            Randomized data table.
        """
        # Work on a real copy so the caller's table is left untouched.
        shuffled = Table(data)
        shuffled.ensure_copy()
        if self.rand_type == Randomize.RandomizeClasses:
            self.randomize(shuffled.Y)
        elif self.rand_type == Randomize.RandomizeAttributes:
            self.randomize(shuffled.X)
        elif self.rand_type == Randomize.RandomizeMetas:
            self.randomize(shuffled.metas)
        else:
            raise TypeError('Unsupported type')
        return shuffled

    def randomize(self, table):
        # 2-D input: shuffle each column independently (iterating the
        # transpose yields column views, so shuffling is in place).
        if len(table.shape) > 1:
            for column in table.T:
                np.random.shuffle(column)
        else:
            np.random.shuffle(table)
class ProjectPCA(Preprocess):
    """Preprocessor projecting data onto its first principal components."""
    def __init__(self, n_components=None):
        # None lets the PCA projector keep all components.
        self.n_components = n_components

    def __call__(self, data):
        # Fit a PCA model on the data, then transform the same data with it.
        projector = Orange.projection.PCA(n_components=self.n_components)(data)
        return projector(data)
class ProjectCUR(Preprocess):
    """Preprocessor projecting data with a CUR matrix decomposition."""
    def __init__(self, rank=3, max_error=1):
        self.rank = rank
        self.max_error = max_error

    def __call__(self, data):
        # The effective rank cannot exceed min(n_rows, n_cols) - 1.
        effective_rank = min(self.rank, min(data.X.shape) - 1)
        projector = Orange.projection.CUR(
            rank=effective_rank, max_error=self.max_error,
            compute_U=False,
        )(data)
        return projector(data)
class PreprocessorList:
    """
    Store a list of preprocessors and on call apply them to the data set.
    Parameters
    ----------
    preprocessors : list
        A list of preprocessors.
    """
    def __init__(self, preprocessors):
        # Copy into a list so later mutation of the caller's iterable
        # does not affect this pipeline.
        self.preprocessors = list(preprocessors)

    def __call__(self, data):
        """
        Apply each stored preprocessor to the data, in order.
        Parameters
        ----------
        data : an input data table
        """
        result = data
        for preprocessor in self.preprocessors:
            result = preprocessor(result)
        return result
| {
"content_hash": "ea9d3cf46bfaf5b58a44b04df7464789",
"timestamp": "",
"source": "github",
"line_count": 424,
"max_line_length": 89,
"avg_line_length": 30.568396226415093,
"alnum_prop": 0.6014967980865674,
"repo_name": "qPCR4vir/orange3",
"id": "5bfe56dbed7c74157540fdaf261e4a5558a824b8",
"size": "12965",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Orange/preprocess/preprocess.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "20412"
},
{
"name": "C++",
"bytes": "1992"
},
{
"name": "GLSL",
"bytes": "75"
},
{
"name": "HTML",
"bytes": "3503"
},
{
"name": "JavaScript",
"bytes": "12007"
},
{
"name": "Jupyter Notebook",
"bytes": "6662"
},
{
"name": "NSIS",
"bytes": "20281"
},
{
"name": "Python",
"bytes": "4205054"
},
{
"name": "Shell",
"bytes": "48335"
}
],
"symlink_target": ""
} |
from api_v3.models import Profile
from .support import DjangoModelFactory, Faker
class ProfileFactory(DjangoModelFactory):
    """Factory producing ``Profile`` instances with faked personal data."""
    class Meta:
        # The Django model this factory instantiates.
        model = Profile
    # Each attribute below is a Faker provider evaluated per instance.
    email = Faker('email')
    first_name = Faker('first_name')
    last_name = Faker('last_name')
    # The 'job' provider is reused as a short placeholder biography.
    bio = Faker('job')
    # A random date within the last 30 days.
    last_login = Faker('past_date', start_date='-30d')
| {
"content_hash": "c6c5683b1a27bf1d8f7e3df6434ab998",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 54,
"avg_line_length": 24.571428571428573,
"alnum_prop": 0.6686046511627907,
"repo_name": "occrp/id-backend",
"id": "aed2cb6458da57ddbf7e685b9944f98da51e3ee5",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api_v3/factories/profile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "486"
},
{
"name": "Python",
"bytes": "241127"
}
],
"symlink_target": ""
} |
from ..interface import GreatFETInterface
class I2CDevice(GreatFETInterface):
    """Generic I2C device attached to a GreatFET I2C bus.

    Serves both as the base class for specific I2C device drivers and as
    a catch-all class for talking to devices that have no dedicated
    driver yet.
    """

    def __init__(self, bus, address, name='i2c device'):
        """Create a new generic I2C device.

        Args:
            bus -- An object representing the I2C bus on which this device
                   resides.
            address - The address for the given I2C device on the bus.
            name -- The display name for the given I2C device.
        """
        # Only 7-bit addressing is supported for now; reject anything
        # outside that range.  (This will have to change if 10-bit I2C
        # addresses are ever supported.)
        if not (0 <= address <= 127):
            raise ValueError("Tried to attach a device to an unsupported I2C address.")

        # Record the device parameters.
        self.bus = bus
        self.address = address
        self.name = name

        # Register this device with its parent bus.
        self.bus.attach_device(self)

    def transmit(self, data, receive_length):
        """Write *data* to the device, then read a response.

        Args:
            data -- The data to be sent to the given device.
            receive_length -- The number of bytes the I2C controller
                              should attempt to read back.
        """
        return self.bus.transmit(self.address, data, receive_length)

    def read(self, receive_length=0):
        """Read up to *receive_length* bytes from the device."""
        return self.bus.read(self.address, receive_length)

    def write(self, data):
        """Write *data* to the device over the I2C bus."""
        return self.bus.write(self.address, data)
| {
"content_hash": "473bc778715fff081539059dae2d24b6",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 87,
"avg_line_length": 32.417910447761194,
"alnum_prop": 0.5718232044198895,
"repo_name": "dominicgs/GreatFET-experimental",
"id": "35c66412a7eb3660b633bacca3f252b0c40a55e0",
"size": "2209",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "host/greatfet/interfaces/i2c_device.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "223018"
},
{
"name": "C++",
"bytes": "969"
},
{
"name": "CMake",
"bytes": "10105"
},
{
"name": "Python",
"bytes": "82219"
}
],
"symlink_target": ""
} |
import hashlib
import os.path
import re
import urllib.parse
import w3lib.url
def hash(url):
    """Return the SHA-1 hex digest of *url* (UTF-8 encoded)."""
    digest = hashlib.sha1(url.encode('utf-8'))
    return digest.hexdigest()
def hash_document(document):
    """Return the SHA-1 hex digest of a raw *document* (bytes)."""
    sha = hashlib.sha1()
    sha.update(document)
    return sha.hexdigest()
def remove_www(url):
    """Remove a leading ``www.`` from the host part of *url*.

    Args:
        url: An absolute URL; it must carry a scheme.

    Returns:
        The URL with any ``www.`` prefix stripped from its network
        location; all other components are preserved.

    Raises:
        ValueError: If *url* has no scheme.
    """
    scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
    # Fix: the original compared `scheme is ''`, which relies on CPython
    # string interning and raises a SyntaxWarning on modern interpreters;
    # test emptiness explicitly instead.
    if not scheme:
        raise ValueError('Domain has no scheme')
    if netloc.startswith('www.'):
        netloc = netloc[4:]
    url_without_www = urllib.parse.urlunsplit((scheme, netloc,
                                               path, qs, anchor))
    return url_without_www
def clean(url):
    """Normalise *url*: strip www, drop session-id params, canonicalise."""
    without_www = remove_www(url)
    # Remove session identifiers before canonicalising.
    without_sids = w3lib.url.url_query_cleaner(
        without_www, ('sid', 'SID'), remove=True)
    return w3lib.url.canonicalize_url(without_sids, keep_blank_values=False)
def is_url_absolute(url):
    """Return True when *url* carries a network location (host part)."""
    parsed = urllib.parse.urlparse(url)
    return bool(parsed.netloc)
def get_filename(url):
    """Return the capitalised stem of the file referenced by *url*."""
    url_path = urllib.parse.urlparse(url).path
    stem, _extension = os.path.splitext(os.path.basename(url_path))
    return stem.capitalize()
def domain(url):
    """Return the host of *url*, without ``www.`` prefix or port.

    Args:
        url: An absolute URL; it must carry a scheme.

    Returns:
        The network location, with any ``www.`` prefix and any ``:port``
        suffix removed.

    Raises:
        ValueError: If *url* has no scheme.
    """
    url = remove_www(url)
    scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
    # Fix: `scheme is ''` was an identity comparison against a string
    # literal (interning-dependent, SyntaxWarning on modern CPython).
    if not scheme:
        raise ValueError('Domain has no scheme: {0}'.format(url))
    # Strip an explicit port, if any.
    if ':' in netloc:
        netloc = netloc.split(':', 1)[0]
    return netloc
def is_same_domain(url1, url2):
    """Return True when both URLs resolve to the same host."""
    first = domain(url1)
    second = domain(url2)
    return first == second
def decode(url):
    """Percent-decode the path and query components of *url*."""
    scheme, netloc, path, qs, anchor = urllib.parse.urlsplit(url)
    decoded_path = urllib.parse.unquote(path)
    # The query uses application/x-www-form-urlencoded, where '+' means
    # a space, hence unquote_plus.
    decoded_qs = urllib.parse.unquote_plus(qs)
    return urllib.parse.urlunsplit(
        (scheme, netloc, decoded_path, decoded_qs, anchor))
def generate_regex(domain):
    """Compile a regex matching *domain*, with optional scheme/subdomains.

    NOTE(review): dots inside *domain* are NOT escaped, so they match any
    character — this mirrors the original behaviour; pass a pre-escaped
    string if exact-dot matching is required.
    """
    # Fix: raw string literals — '\/' in a plain string is an invalid
    # escape sequence (DeprecationWarning); the pattern itself is
    # byte-identical to the original.
    return re.compile(r'^(https?:\/\/)?([a-z0-9]+[.])*' + domain + r'.*$')
def load_urls_from_file(filepath):
    """Load urls from file, one per line,
    ignore lines with #, ignores duplicity.

    Args:
        filepath: Path of the file to read.

    Returns:
        list: The unique URLs found; empty when the file does not exist.
    """
    urls = set()
    if not os.path.isfile(filepath):
        # Fix: the original returned the (empty) set itself here while
        # every other path returned a list — keep the return type
        # consistent on all paths.
        return list(urls)
    with open(filepath) as url_file:
        for line in url_file:
            # Ignore all white characters
            url = line.strip()
            # Take url only if is not commented or blank.
            # (str.strip() never returns None, so no None check needed.)
            if not line.startswith("#") and url:
                urls.add(url)
    return list(urls)
def load_urls_from_text(text):
    """Load urls from text, one per line,
    ignore lines with #, ignores duplicity."""
    unique_urls = set()
    for raw_line in text.split('\n'):
        # Comment test is on the raw line, matching the original.
        if raw_line.startswith("#"):
            continue
        stripped = raw_line.strip()
        if stripped:
            unique_urls.add(stripped)
    return unique_urls
def domain_replace_dots(domain):
    """Return *domain* with every dot replaced by a hyphen."""
    return '-'.join(domain.split('.'))
| {
"content_hash": "4f8e868fa808e0158e564219084350e3",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 78,
"avg_line_length": 25.03937007874016,
"alnum_prop": 0.6094339622641509,
"repo_name": "UPOLSearch/UPOL-Search-Engine",
"id": "583469cc78aba75b71ebe7f569fa34040dcfc9ce",
"size": "3180",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "upol_search_engine/utils/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6869"
},
{
"name": "HTML",
"bytes": "34142"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "110269"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter options on two ``ActionNotificationPreference`` fields."""
    dependencies = [
        ('action_notifications', '0005_actionnotification_do_not_send_before'),
    ]
    operations = [
        # Restrict e-mail frequency to three cron-style presets.
        migrations.AlterField(
            model_name='actionnotificationpreference',
            name='email_notification_frequency',
            field=models.CharField(choices=[('* * * * *', 'Immediately'), ('*/30 * * * *', 'Every 30 minutes'), ('@daily', 'Daily')], default='@daily', max_length=64),
        ),
        # Document that enabling this flag overrides the other settings.
        migrations.AlterField(
            model_name='actionnotificationpreference',
            name='use_user_preference',
            field=models.BooleanField(default=False, help_text='Setting this true will cause frequency and is_email_separately to be ignored'),
        ),
    ]
| {
"content_hash": "e1dbae4a756c15e7974350d206c13a9e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 167,
"avg_line_length": 38.61904761904762,
"alnum_prop": 0.6214549938347719,
"repo_name": "burnsred/django-action-notifications",
"id": "83cc7cdebfb43e546a164fb2fe63a44cde73bf4e",
"size": "860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "action_notifications/migrations/0006_auto_20200416_1347.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41534"
},
{
"name": "Shell",
"bytes": "262"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.utils import six
from reviewboard.reviews.models import FileAttachmentComment
from reviewboard.webapi.resources import resources
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import (
review_reply_file_attachment_comment_item_mimetype,
review_reply_file_attachment_comment_list_mimetype)
from reviewboard.webapi.tests.mixins import (
BasicTestsMetaclass,
ReviewRequestChildItemMixin,
ReviewRequestChildListMixin)
from reviewboard.webapi.tests.mixins_comment import (
CommentReplyItemMixin,
CommentReplyListMixin)
from reviewboard.webapi.tests.urls import (
get_review_reply_file_attachment_comment_item_url,
get_review_reply_file_attachment_comment_list_url)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceListTests(CommentReplyListMixin, ReviewRequestChildListMixin,
                        BaseWebAPITestCase):
    """Testing the ReviewReplyFileAttachmentCommentResource list APIs."""
    fixtures = ['test_users']
    sample_api_url = ('review-requests/<id>/reviews/<id>/replies/<id>/'
                      'file-attachment-comments/')
    resource = resources.review_reply_file_attachment_comment
    # Build a published review with a file-attachment comment plus a reply,
    # and hand the reply's comment-list URL to the child-resource mixin.
    def setup_review_request_child_test(self, review_request):
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=self.user,
                                    publish=True)
        self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=self.user)
        return (get_review_reply_file_attachment_comment_list_url(reply),
                review_reply_file_attachment_comment_list_mimetype)
    # Check a serialized comment payload against its model instance.
    def compare_item(self, item_rsp, comment):
        self.assertEqual(item_rsp['id'], comment.pk)
        self.assertEqual(item_rsp['text'], comment.text)
        if comment.rich_text:
            self.assertEqual(item_rsp['text_type'], 'markdown')
        else:
            self.assertEqual(item_rsp['text_type'], 'plain')
    #
    # HTTP GET tests
    #
    # Create the fixture tree (review request -> review -> comment -> reply)
    # and optionally one reply comment for the generic GET list tests.
    def setup_basic_get_test(self, user, with_local_site, local_site_name,
                             populate_items):
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=user)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=user)
        if populate_items:
            items = [
                self.create_file_attachment_comment(reply, file_attachment,
                                                    reply_to=comment),
            ]
        else:
            items = []
        return (
            get_review_reply_file_attachment_comment_list_url(
                reply, local_site_name),
            review_reply_file_attachment_comment_list_mimetype,
            items)
    #
    # HTTP POST tests
    #
    # Create fixtures and the POST payload for the generic POST tests.
    def setup_basic_post_test(self, user, with_local_site, local_site_name,
                              post_valid_data):
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=user, publish=True)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=user)
        return (
            get_review_reply_file_attachment_comment_list_url(
                reply, local_site_name),
            review_reply_file_attachment_comment_item_mimetype,
            {
                'reply_to_id': comment.pk,
                'text': 'Test comment',
            },
            [reply, comment, file_attachment])
    # Verify that the POSTed reply comment was stored and serialized.
    def check_post_result(self, user, rsp, reply, comment, file_attachment):
        reply_comment = FileAttachmentComment.objects.get(
            pk=rsp['file_attachment_comment']['id'])
        self.assertEqual(reply_comment.text, 'Test comment')
        self.assertEqual(reply_comment.reply_to, comment)
        self.assertFalse(reply_comment.rich_text)
        self.compare_item(rsp['file_attachment_comment'], reply_comment)
    def test_post_with_inactive_file_attachment(self):
        """Testing the POST
        review-requests/<id>/reviews/<id>/replies/<id>/file-attachment-comments/
        API with inactive file attachment
        """
        review_request = self.create_review_request(submitter=self.user)
        file_attachment = self.create_file_attachment(review_request)
        review_request.publish(review_request.submitter)
        review = self.create_review(review_request, username='doc')
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=self.user)
        comments_url = get_review_reply_file_attachment_comment_list_url(reply)
        # Make the file attachment inactive.
        file_attachment = comment.file_attachment
        review_request = file_attachment.review_request.get()
        review_request.inactive_file_attachments.add(file_attachment)
        review_request.file_attachments.remove(file_attachment)
        # Now make the reply.
        rsp = self.api_post(
            comments_url,
            {
                'reply_to_id': comment.id,
                'text': 'Test comment',
            },
            expected_mimetype=(
                review_reply_file_attachment_comment_item_mimetype))
        self.assertEqual(rsp['stat'], 'ok')
        self.check_post_result(self.user, rsp, reply, comment, file_attachment)
    def test_post_with_http_303(self):
        """Testing the POST
        review-requests/<id>/reviews/<id>/replies/<id>/file-attachment-comments/
        API with second instance of same reply
        """
        review_request = self.create_review_request(submitter=self.user,
                                                    publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=self.user)
        self.create_file_attachment_comment(reply, file_attachment,
                                            reply_to=comment)
        # Now post another reply to the same comment in the same review.
        rsp = self.api_post(
            get_review_reply_file_attachment_comment_list_url(reply),
            {
                'reply_to_id': comment.pk,
                'text': 'Test comment'
            },
            expected_status=303,
            expected_mimetype=(
                review_reply_file_attachment_comment_item_mimetype))
        self.assertEqual(rsp['stat'], 'ok')
        self.check_post_result(self.user, rsp, reply, comment, file_attachment)
@six.add_metaclass(BasicTestsMetaclass)
class ResourceItemTests(CommentReplyItemMixin, ReviewRequestChildItemMixin,
                        BaseWebAPITestCase):
    """Testing the ReviewReplyFileAttachmentCommentResource item APIs."""
    fixtures = ['test_users']
    sample_api_url = ('review-requests/<id>/reviews/<id>/replies/<id>/'
                      'file-attachment-comments/<id>/')
    resource = resources.review_reply_file_attachment_comment
    # Build a review, comment, reply and reply comment, and hand the reply
    # comment's item URL to the child-resource mixin.
    def setup_review_request_child_test(self, review_request):
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=self.user,
                                    publish=True)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=self.user)
        reply_comment = self.create_file_attachment_comment(
            reply, file_attachment, reply_to=comment)
        return (
            get_review_reply_file_attachment_comment_item_url(
                reply, reply_comment.pk),
            review_reply_file_attachment_comment_item_mimetype)
    # Check a serialized comment payload against its model instance.
    def compare_item(self, item_rsp, comment):
        self.assertEqual(item_rsp['id'], comment.pk)
        self.assertEqual(item_rsp['text'], comment.text)
        if comment.rich_text:
            self.assertEqual(item_rsp['text_type'], 'markdown')
        else:
            self.assertEqual(item_rsp['text_type'], 'plain')
    #
    # HTTP DELETE tests
    #
    # Create the fixture tree and the item URL for the generic DELETE tests.
    def setup_basic_delete_test(self, user, with_local_site, local_site_name):
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=user, publish=True)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=user)
        reply_comment = self.create_file_attachment_comment(
            reply, file_attachment, reply_to=comment)
        return (
            get_review_reply_file_attachment_comment_item_url(
                reply, reply_comment.pk, local_site_name),
            [reply_comment, reply]
        )
    # Verify the reply comment is gone from the reply after DELETE.
    def check_delete_result(self, user, reply_comment, reply):
        self.assertNotIn(reply_comment, reply.file_attachment_comments.all())
    #
    # HTTP GET tests
    #
    # Create the fixture tree and the item URL for the generic GET tests.
    def setup_basic_get_test(self, user, with_local_site, local_site_name):
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=user, publish=True)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=user)
        reply_comment = self.create_file_attachment_comment(
            reply, file_attachment, reply_to=comment)
        return (
            get_review_reply_file_attachment_comment_item_url(
                reply, reply_comment.pk, local_site_name),
            review_reply_file_attachment_comment_item_mimetype,
            reply_comment
        )
    #
    # HTTP PUT tests
    #
    # Create fixtures and the PUT payload for the generic PUT tests.
    def setup_basic_put_test(self, user, with_local_site, local_site_name,
                             put_valid_data):
        review_request = self.create_review_request(
            with_local_site=with_local_site,
            submitter=user,
            publish=True)
        file_attachment = self.create_file_attachment(review_request)
        review = self.create_review(review_request, user=user, publish=True)
        comment = self.create_file_attachment_comment(review, file_attachment)
        reply = self.create_reply(review, user=user)
        reply_comment = self.create_file_attachment_comment(
            reply, file_attachment, reply_to=comment)
        return (
            get_review_reply_file_attachment_comment_item_url(
                reply, reply_comment.pk, local_site_name),
            review_reply_file_attachment_comment_item_mimetype,
            {
                'text': 'Test comment',
            },
            reply_comment,
            [])
    # Verify the PUT updated the stored comment text.
    def check_put_result(self, user, item_rsp, comment, *args):
        comment = FileAttachmentComment.objects.get(pk=comment.pk)
        self.assertEqual(item_rsp['id'], comment.pk)
        self.assertEqual(item_rsp['text'], 'Test comment')
        self.assertFalse(comment.rich_text)
        self.compare_item(item_rsp, comment)
| {
"content_hash": "23c88a94a160730d41fe10706e0b09f0",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 80,
"avg_line_length": 41.37152777777778,
"alnum_prop": 0.6287872429710449,
"repo_name": "bkochendorfer/reviewboard",
"id": "cf75dadb6bef9122c6784dff1b192f9f58bb804e",
"size": "11915",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "reviewboard/webapi/tests/test_review_reply_file_attachment_comment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "212721"
},
{
"name": "HTML",
"bytes": "179427"
},
{
"name": "JavaScript",
"bytes": "1463002"
},
{
"name": "Python",
"bytes": "3686542"
},
{
"name": "Shell",
"bytes": "20225"
}
],
"symlink_target": ""
} |
import os
import sys
import time
import base64
import requests
from magnolia.utility import *
from magnolia.utility import LOG as L
from magnolia.script import testcase
def find_all_files(directory):
    """Yield every directory path and file path under *directory*.

    Each walked directory is yielded first, followed by the full paths
    of the files it contains.
    """
    for current_root, _subdirs, filenames in os.walk(directory):
        yield current_root
        for filename in filenames:
            yield os.path.join(current_root, filename)
class TestCase_Base(testcase.TestCase_Base):
def arg_parse(self, parser):
super(TestCase_Base, self).arg_parse(parser)
parser.add_argument("-c", "--config", action='store', dest="config", help="Config File Name.")
parser.add_argument("-i", "--slack", action='store', dest="slack", help="Slack Serial.")
parser.add_argument("-u", "--user", action='store', dest="user", help="Jenkins User Name.")
parser.add_argument("-p", "--password", action='store', dest="password", help="Jenkins User Password.")
parser.add_argument("-q", "--quest", action='store', dest="quest", help="Quest ID Number.")
parser.add_argument("-t", "--timeout", action='store', dest="timeout", help="Timeout.")
parser.add_argument("--debug", action='store_true', default=False, dest="debug", help="Debug Flag.")
return parser
def message(self, msg, channel=None):
if self.get("args.debug"): pass
else: self.slack_message(msg, channel)
def invoke_job(self, job, token, timeout=300):
if self.get("args.debug"): return False
else: return self.invoke_jenkins_job(job, token, timeout)
def upload(self, name=None, size="360P", channel=None):
if not self.get("args.debug"):
if name == None: name = self.adb.get().TMP_PICTURE
fname = self.minicap_screenshot(name)
if self.adb.get().LOCATE == "V": self.picture_rotate(fname, "90")
self.picture_resize(fname, size)
self.slack_upload(fname, channel)
def upload_file(self, fname, channel=None):
if not self.get("args.debug"):
self.slack_upload(fname, channel)
def __capture(self, name=None, size="360P"):
if name == None: name = self.adb.get().TMP_PICTURE
fname = self.minicap_screenshot(name)
if self.adb.get().LOCATE == "V": self.picture_rotate(fname, "90")
self.picture_resize(fname, size)
return fname
def home(self):
self.tap_check("basic\\home"); self.sleep()
return self.wait("home")
def login(self):
self.adb.stop(self.get("kancolle.app")); self.sleep()
self.adb.invoke(self.get("kancolle.app")); self.sleep()
self.tap_wait("login\\music"); self.sleep()
self.wait("login"); self.sleep(4)
self.tap_check("login"); self.sleep(5)
return self.wait("home")
def expedition_result(self):
if self.search("home\\expedition"):
self.tap_check("home\\expedition"); time.sleep(9)
if self.wait("home\\expedition\\success", loop="10"):
self.message(self.get("bot.expedition_success"))
elif self.search("home\\expedition\\failed"):
self.message(self.get("bot.expedition_failed"))
self.tap_wait("basic\\next"); self.sleep()
self.upload()
self.tap_wait("basic\\next"); self.sleep(3)
self.invoke_job(self.get("expedition.jenkins_job"), self.get("expedition.jenkins_token"), 60)
return self.search("home\\expedition")
else:
return False
def initialize(self, form=None):
if self.adb.rotate() == 0 or (not self.search("home")):
self.assertTrue(self.login(), "Can't Login.")
while self.expedition_result(): self.sleep(1)
self.tap_check("home\\formation"); self.sleep(3)
if form == None: return self.home()
else: return self.formation(form)
def formation(self, formation):
self.tap_wait("formation\\change"); self.sleep()
if not self.search("formation\\deploy"): return False
if formation == None: return False
fleet = int(formation)
if self.adb.get().ROTATE == "0":
p = POINT(self.conversion_w(int(self.adb.get().FORMATION_X)) - (self.conversion_w(int(self.adb.get().FORMATION_WIDTH)) * fleet),
self.conversion_h(int(self.adb.get().FORMATION_Y)),
self.conversion_w(int(self.adb.get().FORMATION_WIDTH)),
self.conversion_h(int(self.adb.get().FORMATION_HEIGHT)))
else:
p = POINT(self.conversion_w(int(self.adb.get().FORMATION_X)),
self.conversion_h(int(self.adb.get().FORMATION_Y)) + (self.conversion_h(int(self.adb.get().FORMATION_HEIGHT)) * fleet),
self.conversion_w(int(self.adb.get().FORMATION_WIDTH)),
self.conversion_h(int(self.adb.get().FORMATION_HEIGHT)))
L.info(p);
if not self.search("formation\\fleet_1_focus"):
self.tap_check("formation\\fleet_1"); self.sleep()
self.tap_check("formation\\select", p); self.sleep()
self.wait("formation\\fleet_name"); self.sleep()
self.upload("formation_%s.png" % self.adb.get().SERIAL)
return self.home()
def attack(self, fleet, id):
if not self.search("home"): return False
self.tap_check("home\\attack"); self.sleep()
self.tap_wait("attack"); self.sleep(2)
self.search("attack\\icon"); self.sleep(2)
self.__attack_stage(id)
self.__attack_extra(id)
self.__attack_id(id)
if not self.search("attack\\decide"):
self.home(); return False
self.tap_check("attack\\decide"); self.sleep()
if not self.search(self.fleet_focus(fleet)):
self.tap_check(self.fleet(fleet)); self.sleep(2)
if self.search("attack\\rack"):
self.slack_message(self.get("bot.attack_rack")); self.home(); return True
if self.search("attack\\damage"):
self.slack_message(self.get("bot.attack_damage")); self.home(); return True
if self.tap_check("attack\\start"): self.sleep(7)
if self.search("attack\\unable"):
self.slack_message(self.get("bot.attack_failed"))
self.home(); return False
self.slack_message(self.get("bot.attack_success"))
return self.wait("attack\\compass_b")
def __attack_stage(self, id):
if int(id) > 30: self.tap("attack\\stage", _id="6"); self.sleep()
elif int(id) > 24: self.tap("attack\\stage", _id="5"); self.sleep()
elif int(id) > 18: self.tap("attack\\stage", _id="4"); self.sleep()
elif int(id) > 12: self.tap("attack\\stage", _id="3"); self.sleep()
elif int(id) > 6: self.tap("attack\\stage", _id="2"); self.sleep()
else: pass
def __attack_extra(self, id):
if id in ["5", "6", "11", "12", "17", "18"]:
self.tap("attack\\extra"); self.sleep()
def __attack_id(self, id):
self.tap("attack\\id", _id=id); self.sleep()
def battle_all_stage(self, formation, withdrawal=False):
if not self.search("attack\\compass_b"):
if self.search("home"): return True
else: return False
while not self.search("home"):
while not self.search("basic\\next"):
if self.tap("attack\\compass"): self.sleep(10)
if self.tap("attack\\formation\\%s" % formation): self.sleep(10)
if self.tap("attack\\night_battle\\start"): self.sleep(15)
#if self.search("home"):
# self.slack_message(self.get("bot.attack_return"))
# return self.search("home")
if self.search("attack\\get"):
self.tap("attack\\return"); self.sleep(5)
return self.search("home")
self.sleep(10)
if self.tap("basic\\next"): self.sleep(5)
nextstage = "attack\\charge"
if withdrawal or self.search("attack\\result_damage"):
nextstage = "attack\\withdrawal"
while self.tap("basic\\next"): self.sleep(5)
if self.search("home"): break
while not self.search(nextstage):
if self.search("basic\\next"):
self.upload("drop_%s.png" % self.adb.get().SERIAL)
self.tap("basic\\next"); self.sleep(5)
if self.search("home"):
self.slack_message(self.get("bot.attack_return"))
return True
self.tap(nextstage); time.sleep(5)
self.slack_message(self.get("bot.attack_return"))
return self.search("home")
def fleet(self, fleet):
return "basic\\fleet\\%s" % fleet
def fleet_focus(self, fleet):
return "basic\\fleet_focus\\%s" % fleet
def supply(self, fleet="1"):
if not self.search("home"): return False
self.tap_check("home\\supply"); self.sleep()
if not self.search(self.fleet_focus(fleet)):
self.tap(self.fleet(fleet)); self.sleep()
self.tap_wait("supply"); self.sleep(4)
return self.home()
def supply_and_docking(self, fleet):
if not self.search("home"): return False
self.tap_check("home\\supply"); self.sleep()
if not self.search(self.fleet_focus(fleet)):
self.tap_check(self.fleet(fleet)); self.sleep()
self.slack_message(self.get("bot.supply") % fleet)
self.tap("supply"); self.sleep(4)
self.tap_check("basic\\menu\\docking"); self.sleep()
while self.search("supply"):
self.tap_check("basic\\menu\\docking"); self.sleep()
self.wait("docking\\select"); self.sleep()
self.slack_message(self.get("bot.docking"))
for _ in range(3):
position = self.find("docking\\room")
if position == None: break
self.tap_check("docking\\room")
self.sleep(3); result = self.__docking()
self._tap(position, threshold=0.49); self.sleep()
if not result: break
self.sleep(3)
self.upload("docking_%s.png" % self.adb.get().SERIAL)
return True
def __docking(self):
if not self.search("docking\\next"):
return False
p = POINT(self.conversion_w(int(self.adb.get().DOCKING_X)),
self.conversion_h(int(self.adb.get().DOCKING_Y)),
self.conversion_w(int(self.adb.get().DOCKING_WIDTH)),
self.conversion_h(int(self.adb.get().DOCKING_HEIGHT)))
for po in range(7):
L.info(p); self.sleep(1)
self._tap(p, threshold=0.49); self.sleep(4)
if self.search("docking\\unable"):
self.sleep(4); self._tap(p, threshold=0.49); self.sleep(4)
elif self.search("docking\\start"):
if not self.search("docking\\time"):
self.tap_check("docking\\bucket"); self.sleep(4)
self.tap_check("docking\\start"); self.sleep(4)
if self.tap_check("docking\\yes"):
self.sleep(4); return True
if self.adb.get().LOCATE == "V":
p.x = int(p.x) - int(p.width)
if int(p.x) < 0: return False
else:
p.y = int(p.y) + int(p.height)
if int(p.y) > int(self.adb.get().HEIGHT): return False
return False
def expedition(self, fleet, id):
if not self.search("home"): return False
self.sleep(2)
self.tap_check("home\\attack"); self.sleep()
self.tap_wait("expedition"); self.sleep(4)
self.expedition_stage(id)
self.expedition_id(id); self.sleep()
if self.search("expedition\\done"):
self.message(self.get("bot.expedition_done") % self.get("args.fleet"))
self.home()
return False
self.tap_wait("expedition\\decide")
if not self.search("expedition\\fleet_focus", _id=fleet):
self.tap_wait("expedition\\fleet", _id=fleet)
self.sleep()
if self.search("expedition\\unable"):
self.message(self.get("bot.expedition_unable") % self.get("args.fleet"))
self.home()
return False
self.tap_wait("expedition\\start"); self.sleep()
if self.wait("expedition\\done"):
self.message(self.get("bot.expedition_start") % self.get("args.fleet"))
self.sleep(3)
self.wait("expedition\\icon"); self.upload("expedition_%s.png" % self.adb.get().SERIAL)
return True
else:
self.message(self.get("bot.expedition_unable") % self.get("args.fleet"))
self.home()
return False
def expedition_stage(self, id):
if int(id) > 32: self.tap_wait("expedition\\stage", _id="5")
elif int(id) > 24: self.tap_wait("expedition\\stage", _id="4")
elif int(id) > 16: self.tap_wait("expedition\\stage", _id="3")
elif int(id) > 8: self.tap_wait("expedition\\stage", _id="2")
else: pass
def expedition_id(self, id):
self.tap_wait("expedition\\id", _id=id)
def exercises(self):
if not self.search("home"): return False
self.sleep(2)
self.tap_check("home\\attack"); self.sleep()
self.tap_wait("exercises"); self.sleep(4)
if not self.search("exercises\\select"):
self.home(); return False
p = POINT(self.conversion_w(int(self.adb.get().EXERCISES_X)),
self.conversion_h(int(self.adb.get().EXERCISES_Y)),
self.conversion_w(int(self.adb.get().EXERCISES_WIDTH)),
self.conversion_h(int(self.adb.get().EXERCISES_HEIGHT)))
flag = True
for _ in xrange(5):
if self.search("exercises\\win", p):
L.info("I'm already fighting. I won.")
elif self.search("exercises\\lose", p):
L.info("I'm already fighting. I lost.")
else:
L.info(p);
self._tap(p, threshold=0.49); self.sleep(5)
fname = self.__capture("exercises_%s.png" % self.adb.get().SERIAL)
if self.search("exercises\\x"):
self.tap_wait("exercises\\decide"); self.sleep()
if self.search("exercises\\unable"):
self.tap_check("exercises\\return"); self.sleep()
self.tap_check("exercises\\x"); self.sleep()
self.home(); return False
self.upload_file(fname)
if self.tap_check("exercises\\start"):
self.message(self.get("bot.exercises_start")); self.sleep(5)
self.exercises_battle(); flag = False
break
self.sleep(1)
if self.adb.get().LOCATE == "V":
p.x = int(p.x) - int(p.width); L.info("Point : %s" % str(p))
else:
p.y = int(p.y) + int(p.height); L.info("Point : %s" % str(p))
if flag:
self.message(self.get("bot.exercises_result"))
self.upload()
self.home(); return False
self.sleep(3)
return self.home()
def exercises_battle(self):
self.tap_wait("attack\\formation\\1")
while not self.search("basic\\next"):
self.sleep(10)
if self.tap("attack\\night_battle\\start", count=10):
self.message(self.get("bot.night_battle_start"))
self.sleep()
self.sleep(2)
if self.search("attack\\result\\d"): self.message(self.get("bot.result_d"))
elif self.search("attack\\result\\c"): self.message(self.get("bot.result_c"))
elif self.search("attack\\result\\b"): self.message(self.get("bot.result_b"))
elif self.search("attack\\result\\a"): self.message(self.get("bot.result_a"))
else: self.message(self.get("bot.result_s"))
self.sleep(5)
while self.tap("basic\\next"): self.sleep(5)
return True
def quest_search_id(self, _id):
for f in find_all_files(self.get_base("quest")):
if _id in str(f):
dst = f.replace("%s\\" % self.get_base("quest"), "")
L.debug("%s -> %s" % (str(f), dst))
return dst
def quest_done(self):
if not self.search("quest\\mission"):
return False
self.tap("quest\\perform"); self.sleep(3)
while self.tap("quest\\done"):
self.sleep()
self.tap_wait("quest\\close"); time.sleep(4)
return True
def quest_check(self, target, crop_target, threshold=0.2, count=5, _id=None):
box_result = self.find(crop_target, None, count, id=None)
if box_result == None: return False
result = self.find(target, box_result, count, id=_id)
if result == None:
self._tap(box_result, threshold); self.sleep()
return True
def quest_remove(self, target, crop_target, threshold=0.2, count=5, _id=None):
box_result = self.find(crop_target, None, count, id=None)
if box_result == None: return False
result = self.find(target, box_result, count, id=_id)
if result != None:
self._tap(box_result, threshold); self.sleep()
return True
    def quest_open(self):
        """Open the quest screen from the home screen.

        Returns True when the quest mission list becomes visible, False when
        the home screen was not showing or the quest list failed to appear
        (in which case we back out to home first).
        """
        if not self.search("home"): return False
        self.tap("home\\quest"); self.sleep()
        self.tap_wait("quest"); self.sleep()
        # Claim any already-completed quests before inspecting the list.
        self.quest_done(); self.sleep()
        if not self.search("quest\\mission"):
            # Quest list did not appear -- return home and report failure.
            self.tap_wait("quest\\return"); self.sleep()
            self.wait("home")
            return False
        return True
def quest_search(self, _id, remove=False):
q_path = "quest\\%s" % self.quest_search_id(_id)
if "daily" in q_path: self.tap_check("quest\\daily")
elif "weekly" in q_path: self.tap_check("quest\\weekly")
if remove:
if not self.quest_remove("quest\\acceptance", q_path): return False
else:
if not self.quest_check("quest\\acceptance", q_path): return False
return True
    def quest_upload(self):
        """Screenshot the quest list, upload it, then return home.

        Returns True when we end up back on the home screen, False when the
        quest mission list was not visible to begin with.
        """
        if not self.search("quest\\mission", count=5):
            self.tap_wait("quest\\return"); self.sleep()
            self.wait("home")
            return False
        if not self.search("quest\\perform_select"):
            self.tap("quest\\perform"); self.sleep(4)
        # Upload the current screenshot under a quest-prefixed name.
        self.upload("quest_%s" % self.adb.get().TMP_PICTURE)
        self.tap_wait("quest\\return"); self.sleep()
        return self.wait("home")
def invoke_jenkins_job(self, job, token, timeout=300):
params = {
'token': token,
'delay': "%dsec" % timeout,
}
url = "%s/job/%s/build" % (self.get("jenkins.url"), job)
s = requests.Session(); s.auth = (self.get("args.user"), self.get("args.password"))
result = s.get(url, params=params)
L.debug("HTTP Status Code : %d" % result.status_code)
status = result.status_code == 201
return status
| {
"content_hash": "90c46223329638be1a2bb7be55798f53",
"timestamp": "",
"source": "github",
"line_count": 427,
"max_line_length": 141,
"avg_line_length": 44.78688524590164,
"alnum_prop": 0.5581468312068605,
"repo_name": "setsulla/stir",
"id": "5821efce173a9f73ca3a14983ca3d1903af3812e",
"size": "19124",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/magnolia/script/kancolle/testcase_base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "176459"
}
],
"symlink_target": ""
} |
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.contrib.auth import logout
class LogoutView(View):
    """Log the current user out and send them back to the home page."""
    def get(self, request):
        logout(request)
        home_url = reverse("home")
        return redirect(home_url)
| {
"content_hash": "ea61155aaa51288605bf04ec6998b54e",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 44,
"avg_line_length": 22.3125,
"alnum_prop": 0.5910364145658263,
"repo_name": "manducku/blogram",
"id": "64cf916275e1f1d6191f4107f3c11ad486947cc8",
"size": "357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blogram/users/views/logout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2000"
},
{
"name": "Makefile",
"bytes": "191"
},
{
"name": "Python",
"bytes": "10069"
},
{
"name": "Shell",
"bytes": "471"
}
],
"symlink_target": ""
} |
import os.path
import xml.etree.cElementTree as CET
from botocore.compat import XMLParseError
from burp import IBurpExtender
from burp import IScannerCheck
from burp import ITab
from javax.swing import JTextField
from javax.swing import JLabel
from javax.swing import JFrame
from javax.swing import JPanel
from javax.swing import JButton
from javax.swing import JCheckBox
from javax.swing.border import EmptyBorder
from java.awt import BorderLayout
from java.awt import GridLayout
from org.xml.sax import SAXException
from scan import BucketScan, CognitoScan, RUN_TESTS
SSL_VERIFICATION = True
class BurpExtender(IBurpExtender, IScannerCheck, ITab):
    """Burp Suite extension entry point (Jython).

    Registers a passive scanner that checks for exposed cloud-storage
    buckets and Cognito identity pools, and contributes a settings tab
    (credentials, wordlist, passive mode, SSL verification) to the UI.
    """
    def __init__(self):
        """Initialize placeholders; real wiring happens in
        registerExtenderCallbacks()/reload_config()."""
        self.ext_name = 'Cloud Storage Tester'
        self.callbacks = None
        # Swing widgets, created in build_gui().
        self.gui_elements = None
        self.aws_access_key_inpt = None
        self.aws_secret_key_inpt = None
        self.aws_session_token_inpt = None
        self.gs_access_key_inpt = None
        self.gs_secret_key_inpt = None
        self.wordlist_path_inpt = None
        self.passive_mode = None
        self.ssl_verification = None
        # Effective settings, populated from Burp storage in reload_config().
        self.aws_access_key = ''
        self.aws_secret_key = ''
        self.aws_session_token = ''
        self.gs_access_key = ''
        self.gs_secret_key = ''
        self.wordlist_path = ''
    def registerExtenderCallbacks(self, callbacks):
        """Burp entry hook: register the scanner, build and attach the
        settings tab, surface load errors, and load saved settings."""
        self.callbacks = callbacks
        self.callbacks.setExtensionName(self.ext_name)
        self.callbacks.registerScannerCheck(self)
        self.gui_elements = self.build_gui()
        callbacks.customizeUiComponent(self.gui_elements)
        callbacks.addSuiteTab(self)
        self.check_loading_issues()
        self.reload_config()
    def show_errors(self, label):
        """Display error messages."""
        top_label = JLabel(label, JLabel.CENTER)
        frame = JFrame(self.ext_name)
        frame.setSize(550, 300)
        frame.setLayout(GridLayout(1, 1))
        frame.add(top_label)
        frame.setLocationRelativeTo(None)
        frame.setVisible(True)
    def check_loading_issues(self):
        """Check for any loading issues."""
        missing_libs = []
        tips = []
        # HTML template; %s slots are filled with the collected lib names
        # and tips below.
        label = """<html>
            <body style='margin: 10px'>
            <b>The following dependencies could not be loaded successfully:</b><br>
            <ul><li>%s</li></ul><br>
            <b>Tips:</b><br>
            <ul><li>%s</li><br></ul>
            <b>For detailed information on how to load the plugin, see:</b><br>
            <ul>
                <li>
                    <a href='#'>https://github.com/VirtueSecurity/aws-extender#getting-started</a>
                </li>
            </ul>
            </body>
        </html>"""
        # RUN_TESTS is False when scan.py could not import boto/boto3.
        if not RUN_TESTS:
            missing_libs.append('boto/boto3')
            tips.append('Make sure that the boto/boto3 library is installed properly, and\
                the right path is specified in the "Folder for loading modules" setting.')
        try:
            CET.fromstring('<test></test>')
        except SAXException:
            # a workaround for "http://bugs.jython.org/issue1127"
            try:
                def xml_parser(**_):
                    # Stub parser whose feed() always raises, so callers fall
                    # back instead of using the broken Jython SAX parser.
                    class Parser(object):
                        def feed(*_):
                            raise XMLParseError
                        @staticmethod
                        def close(*_):
                            return None
                    return Parser()
                CET.XMLParser = xml_parser
            except TypeError:
                missing_libs.append('SAXParser')
                tips.append("""Run Burp Suite using the following command:
                    <br><code style='background: #f7f7f9; color: red'>$ java -classpath
                    xercesImpl.jar;burpsuite_pro.jar burp.StartBurp</code>""")
        if not missing_libs:
            return
        label %= ('</li><li>'.join(missing_libs), '</li><li>'.join(tips))
        self.show_errors(label)
    def build_gui(self):
        """Construct GUI elements."""
        panel = JPanel(BorderLayout(3, 3))
        panel.setBorder(EmptyBorder(160, 160, 160, 160))
        self.aws_access_key_inpt = JTextField(10)
        self.aws_secret_key_inpt = JTextField(10)
        self.aws_session_token_inpt = JTextField(10)
        self.gs_access_key_inpt = JTextField(10)
        self.gs_secret_key_inpt = JTextField(10)
        self.wordlist_path_inpt = JTextField(10)
        self.passive_mode = JCheckBox('Enabled')
        self.ssl_verification = JCheckBox('Enabled')
        save_btn = JButton('Save', actionPerformed=self.save_config)
        # Two parallel single-column grids: labels on the left, inputs on
        # the right; rows pair up by insertion order.
        labels = JPanel(GridLayout(0, 1))
        inputs = JPanel(GridLayout(0, 1))
        panel.add(labels, BorderLayout.WEST)
        panel.add(inputs, BorderLayout.CENTER)
        top_label = JLabel('<html><b>Settings</b><br><br></html>')
        top_label.setHorizontalAlignment(JLabel.CENTER)
        panel.add(top_label, BorderLayout.NORTH)
        labels.add(JLabel('AWS Access Key:'))
        inputs.add(self.aws_access_key_inpt)
        labels.add(JLabel('AWS Secret Key:'))
        inputs.add(self.aws_secret_key_inpt)
        labels.add(JLabel('AWS Session Key (optional):'))
        inputs.add(self.aws_session_token_inpt)
        labels.add(JLabel('GS Access Key:'))
        inputs.add(self.gs_access_key_inpt)
        labels.add(JLabel('GS Secret Key:'))
        inputs.add(self.gs_secret_key_inpt)
        labels.add(JLabel('Wordlist Filepath (optional):'))
        inputs.add(self.wordlist_path_inpt)
        labels.add(JLabel('Passive Mode:'))
        inputs.add(self.passive_mode)
        labels.add(JLabel('SSL Verification:'))
        inputs.add(self.ssl_verification)
        panel.add(save_btn, BorderLayout.SOUTH)
        return panel
    def save_config(self, _):
        """Save settings."""
        error_message = ''
        wordlist_path = self.wordlist_path_inpt.getText()
        save_setting = self.callbacks.saveExtensionSetting
        save_setting('aws_access_key', self.aws_access_key_inpt.getText())
        save_setting('aws_secret_key', self.aws_secret_key_inpt.getText())
        save_setting('aws_session_token', self.aws_session_token_inpt.getText())
        save_setting('gs_access_key', self.gs_access_key_inpt.getText())
        save_setting('gs_secret_key', self.gs_secret_key_inpt.getText())
        save_setting('wordlist_path', wordlist_path)
        if self.passive_mode.isSelected():
            save_setting('passive_mode', 'True')
        else:
            save_setting('passive_mode', '')
        # NOTE: SSL_VERIFICATION is stored inverted relative to passive_mode:
        # '' means enabled, 'False' means disabled (see reload_config).
        if self.ssl_verification.isSelected():
            save_setting('SSL_VERIFICATION', '')
        else:
            save_setting('SSL_VERIFICATION', 'False')
        if wordlist_path and not os.path.isfile(wordlist_path):
            error_message = 'Error: Invalid filepath for the "Wordlist Filepath" setting.'
            self.show_errors(error_message)
        self.reload_config()
    def reload_config(self):
        """Reload saved settings."""
        global RUN_TESTS
        global SSL_VERIFICATION
        load_setting = self.callbacks.loadExtensionSetting
        aws_access_key_val = load_setting('aws_access_key') or ''
        aws_secret_key_val = load_setting('aws_secret_key') or ''
        aws_session_token_val = load_setting('aws_session_token') or ''
        gs_access_key_val = load_setting('gs_access_key') or ''
        gs_secret_key_val = load_setting('gs_secret_key') or ''
        wordlist_path_val = load_setting('wordlist_path') or ''
        passive_mode_val = load_setting('passive_mode')
        passive_mode_val = True if passive_mode_val else False
        ssl_verification_val = load_setting('SSL_VERIFICATION')
        ssl_verification_val = False if ssl_verification_val == 'False' else True
        # NOTE(review): the globals are only ever set to False here, never
        # back to True -- re-enabling requires an extension reload; confirm
        # this is intended.
        if passive_mode_val:
            RUN_TESTS = False
        if not ssl_verification_val:
            SSL_VERIFICATION = False
        self.aws_access_key = aws_access_key_val
        self.aws_secret_key = aws_secret_key_val
        self.aws_session_token = aws_session_token_val
        self.gs_access_key = gs_access_key_val
        self.gs_secret_key = gs_secret_key_val
        self.wordlist_path = wordlist_path_val
        # Push the loaded values back into the settings tab widgets.
        self.aws_access_key_inpt.setText(aws_access_key_val)
        self.aws_secret_key_inpt.setText(aws_secret_key_val)
        self.aws_session_token_inpt.setText(aws_session_token_val)
        self.gs_access_key_inpt.setText(gs_access_key_val)
        self.gs_secret_key_inpt.setText(gs_secret_key_val)
        self.wordlist_path_inpt.setText(wordlist_path_val)
        self.passive_mode.setSelected(passive_mode_val)
        self.ssl_verification.setSelected(ssl_verification_val)
    def getTabCaption(self):
        """Return tab caption."""
        return self.ext_name
    def getUiComponent(self):
        """Return GUI elements."""
        return self.gui_elements
    def doPassiveScan(self, request_response):
        """Perform a passive scan."""
        scan_issues = []
        opts = {'aws_access_key': self.aws_access_key,
                'aws_secret_key': self.aws_secret_key,
                'aws_session_token': self.aws_session_token,
                'gs_access_key': self.gs_access_key,
                'gs_secret_key': self.gs_secret_key,
                'wordlist_path': self.wordlist_path}
        bucket_scan = BucketScan(request_response, self.callbacks, opts, SSL_VERIFICATION)
        bucket_issues = bucket_scan.check_buckets()
        cognito_scan = CognitoScan(request_response, self.callbacks)
        cognito_issues = cognito_scan.identify_identity_pools()
        scan_issues = bucket_issues + cognito_issues
        return scan_issues
    @staticmethod
    def doActiveScan(*_):
        # Passive-only extension: active scanning is a no-op.
        pass
    @staticmethod
    def consolidateDuplicateIssues(existing_issue, new_issue):
        """Eliminate duplicate issues."""
        if existing_issue.getIssueDetail() == new_issue.getIssueDetail():
            return -1
        return 0
| {
"content_hash": "99bd81f7299889fc1159217d107518d7",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 98,
"avg_line_length": 39.03112840466926,
"alnum_prop": 0.6067191705712291,
"repo_name": "VirtueSecurity/aws-extender",
"id": "270eddf7522f5e03fb7c1672ce857bd1abe212cc",
"size": "10069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46260"
}
],
"symlink_target": ""
} |
"""
:codeauthor: Rahul Handay <rahulha@saltstack.com>
"""
import salt.modules.pagerduty as pagerduty
import salt.utils.json
import salt.utils.pagerduty
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
class PagerdutyTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.modules.pagerduty
    """
    def setup_loader_modules(self):
        # The module only needs config.option; stub it to return None.
        salt_dunder = {"config.option": MagicMock(return_value=None)}
        return {pagerduty: {"__salt__": salt_dunder}}
    def _check_list_passthrough(self, list_func):
        """Patch the shared list_items helper and check that *list_func*
        returns its result unchanged."""
        with patch.object(salt.utils.pagerduty, "list_items", return_value="A"):
            self.assertEqual(list_func(), "A")
    def test_list_services(self):
        """
        Test for List services belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_services)
    def test_list_incidents(self):
        """
        Test for List incidents belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_incidents)
    def test_list_users(self):
        """
        Test for List users belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_users)
    def test_list_schedules(self):
        """
        Test for List schedules belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_schedules)
    def test_list_windows(self):
        """
        Test for List maintenance windows belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_windows)
    def test_list_policies(self):
        """
        Test for List escalation policies belonging to this account
        """
        self._check_list_passthrough(pagerduty.list_policies)
    def test_create_event(self):
        """
        Test for Create an event in PagerDuty. Designed for use in states.
        """
        loads_patch = patch.object(salt.utils.json, "loads", return_value=["A"])
        query_patch = patch.object(salt.utils.pagerduty, "query", return_value="A")
        with loads_patch, query_patch:
            self.assertListEqual(pagerduty.create_event(), ["A"])
| {
"content_hash": "50f057015e77664196d62666b8eea6b1",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 84,
"avg_line_length": 34.56944444444444,
"alnum_prop": 0.627159501807955,
"repo_name": "saltstack/salt",
"id": "494000e8a239e702dbc71f767dbab5d16600df1c",
"size": "2489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/modules/test_pagerduty.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
# Public API of this module (names exported by "from ... import *").
__all__ = [
    "Provider",
    "State",
    "LibcloudLBError",
    "LibcloudLBImmutableError",
    "OLD_CONSTANT_TO_NEW_MAPPING",
]
from libcloud.common.types import LibcloudError
class LibcloudLBError(LibcloudError):
    """Base exception for load-balancer errors."""
    pass
class LibcloudLBImmutableError(LibcloudLBError):
    """Raised for operations on a load balancer that cannot be modified."""
    pass
class Provider(object):
    """
    Defines for each of the supported providers

    Non-Dummy drivers are sorted in alphabetical order. Please preserve this
    ordering when adding new drivers.

    :cvar ALIYUN_SLB: Aliyun SLB loadbalancer driver
    """
    ALB = "alb"
    ALIYUN_SLB = "aliyun_slb"
    BRIGHTBOX = "brightbox"
    CLOUDSTACK = "cloudstack"
    DIMENSIONDATA = "dimensiondata"
    ELB = "elb"
    GCE = "gce"
    GOGRID = "gogrid"
    NINEFOLD = "ninefold"
    NTTCIS = "nttcis"
    RACKSPACE = "rackspace"
    SOFTLAYER = "softlayer"
    # Deprecated
    # Old region-specific Rackspace constants; both resolve to RACKSPACE via
    # OLD_CONSTANT_TO_NEW_MAPPING.
    RACKSPACE_US = "rackspace_us"
    RACKSPACE_UK = "rackspace_uk"
# Maps deprecated provider constants to their current replacement.
OLD_CONSTANT_TO_NEW_MAPPING = {
    Provider.RACKSPACE_US: Provider.RACKSPACE,
    Provider.RACKSPACE_UK: Provider.RACKSPACE,
}
class State(object):
    """
    Standard states for a loadbalancer

    :cvar RUNNING: loadbalancer is running and ready to use
    :cvar UNKNOWN: loadbalancer state is unknown
    """
    RUNNING = 0
    PENDING = 1
    UNKNOWN = 2
    ERROR = 3
    DELETED = 4
class MemberCondition(object):
    """
    Each member of a load balancer can have an associated condition
    which determines its role within the load balancer.
    """
    ENABLED = 0
    DISABLED = 1
    DRAINING = 2
| {
"content_hash": "41202531e4c0c0dc91db04c929e2cd23",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 76,
"avg_line_length": 20.207792207792206,
"alnum_prop": 0.6606683804627249,
"repo_name": "mistio/libcloud",
"id": "b97663f33ff7882ef3ea2ce60ae42ff879f20c6f",
"size": "2338",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "libcloud/loadbalancer/types.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9067225"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
} |
import os
from flask import Flask, g
def create_app(test_config=None):
    """Application factory for the label app.

    When *test_config* is given it overrides the instance configuration,
    the standard Flask pattern for tests; otherwise instance/config.py is
    loaded when present.
    """
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_mapping(
        SECRET_KEY='dev',
        DATABASE=os.path.join(app.instance_path, 'label.sqlite'),
    )
    if test_config is not None:
        # Explicit test configuration wins.
        app.config.from_mapping(test_config)
    else:
        # Pick up the deployment config, silently skipping if absent.
        app.config.from_pyfile('config.py', silent=True)
    # The instance folder must exist for the sqlite file; "already exists"
    # is fine.
    try:
        os.makedirs(app.instance_path)
    except OSError:
        pass
    from . import db
    db.init_app(app)
    from . import routes
    app.register_blueprint(routes.bp)
    return app
| {
"content_hash": "aedbd7d8b3ab8ac0e7390406c7493b22",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 66,
"avg_line_length": 24.65625,
"alnum_prop": 0.6400506970849176,
"repo_name": "ivankeller/set-game",
"id": "5c1f46f4829b4357195a9595b86047b472331f70",
"size": "789",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "labelcards/app/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2481"
},
{
"name": "Jupyter Notebook",
"bytes": "2821686"
},
{
"name": "Makefile",
"bytes": "233"
},
{
"name": "Python",
"bytes": "14905"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import sys
import signal
from tornado.ioloop import IOLoop
from ipykernel.kernelbase import Kernel
from pexpect import replwrap, EOF
__version__ = '0.2'
try:
from traitlets import Unicode
except ImportError:
from IPython.utils.traitlets import Unicode
class MPUnixInterpreter(replwrap.REPLWrapper):
    """
    Extension of replwrap to micropython for the unix port
    """
    def __init__(self, cmd, **kw):
        # ">>> " is the MicroPython REPL prompt; passing None as the
        # continuation prompt means _expect_prompt only matches the main one.
        self.prompt = '>>> '
        self.buffer = []
        self.output = ''
        super(MPUnixInterpreter, self).__init__(cmd, self.prompt, None, **kw)
    def run_command(self, command, timeout=-1):
        """Send a command to the REPL, wait for and return output.
        :param str command: The command to send. Trailing newlines are not needed.
            This should be a complete block of input that will trigger execution;
            if a continuation prompt is found after sending input, :exc:`ValueError`
            will be raised.
        :param int timeout: How long to wait for the next prompt. -1 means the
            default from the :class:`pexpect.spawn` object (default 30 seconds).
            None means to wait indefinitely.
        """
        # NOTE(review): despite the comment below, the command is sent as one
        # line -- cmdlines only ever holds [command] plus an optional trailing
        # "". Confirm whether per-line splitting (command.splitlines()) was
        # intended here.
        # Split up multiline commands and feed them in bit-by-bit
        cmdlines = [command]
        # splitlines ignores trailing newlines - add it back in manually
        if command.endswith('\n'):
            cmdlines.append('')
        # NOTE(review): unreachable -- cmdlines always contains at least
        # [command].
        if not cmdlines:
            raise ValueError("No command was given")
        res = []
        self.child.sendline(cmdlines[0])
        for line in cmdlines[1:]:
            self._expect_prompt(timeout=timeout)
            res.append(self.child.before)
            self.child.sendline(line)
        # Command was fully submitted, now wait for the next prompt
        if self._expect_prompt(timeout=timeout) == 1:
            # We got the continuation prompt - command was incomplete
            self.child.kill(signal.SIGINT)
            self._expect_prompt(timeout=1)
            raise ValueError("Continuation prompt found - input was incomplete:\n"
                + command)
        return u''.join(res + [self.child.before])
class MPKernelUnix(Kernel):
    """
    Kernel for the Unix Port of micropython
    """
    implementation = 'mpkernel'
    implementation_version = __version__
    banner = 'Welcome to the Unix port of MicroPython'
    language_info = {
        'name': 'micropython',
        'version': '3',
        'codemirror_mode': {
            'name': 'python',
            'version': 3
        },
        'mimetype': 'text/x-python',
        'file_extension': '.py',
        'pygments_lexer': 'python3',
    }
    def __init__(self, **kwargs):
        # Spawns the 'micropython' binary (must be on PATH) immediately.
        Kernel.__init__(self, **kwargs)
        self.micropython_exe = 'micropython'
        self.start_interpreter()
    def start_interpreter(self):
        """(Re)spawn the wrapped MicroPython REPL subprocess."""
        # Signal handlers are inherited by forked processes, we can't easily
        # reset it from the subprocess. Kernelapp ignores SIGINT except in
        # message handlers, we need to temporarily reset the SIGINT handler
        # so that bash and its children are interruptible.
        sig = signal.signal(signal.SIGINT, signal.SIG_DFL)
        try:
            self.interpreter = MPUnixInterpreter(self.micropython_exe)
        finally:
            signal.signal(signal.SIGINT, sig)
    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Execute *code* in the MicroPython REPL and build the Jupyter
        execute_reply message."""
        if not code.strip():
            return {
                'status': 'ok',
                'execution_count': self.execution_count,
                'payload': [],
                'user_expressions': {},
            }
        status = 'ok'
        traceback = None
        try:
            # compile the code then run an exec of that code object
            # NOTE(review): run_command always returns a string, so the
            # "is not None" test never takes the else branch -- confirm the
            # intended failure detection.
            compile_output = self.interpreter.run_command("c = compile({0!r}, 'mpkernel', 'exec')".format(code), timeout=5)
            if compile_output is not None:
                output = self.interpreter.run_command('exec(c)', timeout=5)
            else:
                raise Exception("Error in compile: ({})\n".format(compile_output))
        except KeyboardInterrupt:
            self.interpreter.child.sendintr()
            status = 'interrupted'
            self.interpreter._expect_prompt()
            output = self.interpreter.output
        except ValueError:
            # NOTE(review): this path restarts the interpreter but leaves
            # status as 'ok' -- verify that is intentional.
            output = self.interpreter.output + 'Incomplete input, restarting'
            self.start_interpreter()
        except EOF:
            output = self.interpreter.output + ' Restarting MPKernelUnix'
            self.start_interpreter()
            status = 'error'
            traceback = []
        if not self.interpreter.child.isalive():
            self.log.error("MPKernelUnix interpreter died")
            loop = IOLoop.current()
            loop.add_callback(loop.stop)
        if not silent:
            # Send output on stdout
            stream_content = {'name': 'stdout', 'text': output}
            self.send_response(self.iopub_socket, 'stream', stream_content)
        reply = {
            'status': status,
            'execution_count': self.execution_count,
        }
        if status == 'interrupted':
            pass
        elif status == 'error':
            # NOTE(review): 'ename'/'evalue' are literal placeholder strings,
            # not the actual exception name/value.
            err = {
                'ename': 'ename',
                'evalue': 'evalue',
                'traceback': traceback,
            }
            self.send_response(self.iopub_socket, 'error', err)
            reply.update(err)
        elif status == 'ok':
            reply.update({
                'payload': [],
                'user_expressions': {},
            })
        else:
            raise ValueError("Invalid status: %r" % status)
        return reply
| {
"content_hash": "d31b6d1368e765923484f401541d5c2a",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 123,
"avg_line_length": 35.64670658682635,
"alnum_prop": 0.5602217369393583,
"repo_name": "TDAbboud/mpkernel",
"id": "05fbce185588fecdb30d9d9824a1b4c3b8498558",
"size": "5999",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unix/unix.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "4701"
},
{
"name": "Python",
"bytes": "13971"
},
{
"name": "Shell",
"bytes": "590"
}
],
"symlink_target": ""
} |
from mozdns.views import MozdnsDeleteView
from mozdns.views import MozdnsCreateView
from mozdns.views import MozdnsDetailView
from mozdns.views import MozdnsUpdateView
from mozdns.views import MozdnsListView
from mozdns.srv.models import SRV
from mozdns.srv.forms import SRVForm
class SRVView(object):
    # Shared model/form/queryset configuration mixed into the concrete SRV
    # class-based views below.
    model = SRV
    form_class = SRVForm
    queryset = SRV.objects.all()
# Delete behavior comes from MozdnsDeleteView; SRVView supplies the model.
class SRVDeleteView(SRVView, MozdnsDeleteView):
    """SRV Delete View"""
# Detail behavior comes from MozdnsDetailView, with an SRV-specific template.
class SRVDetailView(SRVView, MozdnsDetailView):
    """SRV Detail View"""
    template_name = 'srv/srv_detail.html'
# Create behavior comes from MozdnsCreateView; SRVView supplies model/form.
class SRVCreateView(SRVView, MozdnsCreateView):
    """SRV Create View"""
# Update behavior comes from MozdnsUpdateView; SRVView supplies model/form.
class SRVUpdateView(SRVView, MozdnsUpdateView):
    """SRV Update View"""
# List behavior comes from MozdnsListView; SRVView supplies the queryset.
class SRVListView(SRVView, MozdnsListView):
    """SRV List View"""
| {
"content_hash": "c2856cff67fbf94105e9ebccfbc67181",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 47,
"avg_line_length": 23.352941176470587,
"alnum_prop": 0.7518891687657431,
"repo_name": "mozilla/inventory",
"id": "7f8eafdba6c55b7c3385087b2418689b681a0567",
"size": "794",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mozdns/srv/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5104"
},
{
"name": "CSS",
"bytes": "362837"
},
{
"name": "CoffeeScript",
"bytes": "9538"
},
{
"name": "HTML",
"bytes": "1195738"
},
{
"name": "JavaScript",
"bytes": "1300342"
},
{
"name": "Makefile",
"bytes": "14421"
},
{
"name": "PHP",
"bytes": "27273"
},
{
"name": "Python",
"bytes": "3642733"
},
{
"name": "Shell",
"bytes": "1783"
}
],
"symlink_target": ""
} |
import time
import datetime
import json
import platform
import lora_interface
#transmission_mode = 1 # Max range, slow data rate.
transmission_mode = 5 # Better reach, medium time on air. Test this mode because it doesn't mandate Low Data Rate Optimisation, which is not supported on Hope RF95.
transmission_channel = lora_interface.cvar.LORA_CH_10_868
transmission_power = "H"
receive_timeout = 10000
loop_delay = 10
device_address = 2 # My own address.
gateway_address = 1 # Address of gateway.
print("Calling setupLoRa...")
status = lora_interface.setupLoRa(device_address, transmission_mode, transmission_channel, transmission_power)
print("Status: " + str(status))
# Main loop: receive a message, sample the LoRa counters, report the device
# state to the gateway, then sleep and repeat.
while True:
    try:
        # Wait a while to receive a message.
        print("Calling receiveLoRaMessage to receive message...")
        msg = lora_interface.receiveLoRaMessage(receive_timeout)
        status = lora_interface.getLoRaStatus()
        print("Msg: " + msg + ", Status: " + str(status))
        # Sample every LoRa counter/diagnostic value.
        status = lora_interface.getLoRaStatus()
        setup_done = lora_interface.getLoRaSetupDone()
        send_count = lora_interface.getLoRaSendCount()
        receive_count = lora_interface.getLoRaReceiveCount()
        snr = lora_interface.getLoRaSNRValue()
        rssi = lora_interface.getLoRaRSSIValue()
        rssi_packet = lora_interface.getLoRaRSSIpacketValue()
        timestamp = datetime.datetime.now().isoformat()
        # Pack the device state into a "|"-separated record.
        fields = (device_address, gateway_address, status, setup_done,
                  send_count, receive_count, snr, rssi, rssi_packet,
                  timestamp)
        msg = "|".join(str(field) for field in fields)
        print("Calling sendLoRaMessage to send device state to LoRa gateway " + str(gateway_address) + "...\n" + msg)
        status = lora_interface.sendLoRaMessage(gateway_address, msg)
        print("Status: " + str(status))
        # Wait 10 seconds before sending the next message.
        time.sleep(loop_delay)
    except KeyboardInterrupt:
        # Stop the program when we press Ctrl-C.
        break
    except Exception as e:
        # For all other errors, we wait a while and resume.
        print("Exception: " + str(e))
        time.sleep(10)
        continue
| {
"content_hash": "d396b8122797d83412fcdb8830199ca3",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 164,
"avg_line_length": 37.921875,
"alnum_prop": 0.6254635352286774,
"repo_name": "lupyuen/RaspberryPiImage",
"id": "c934af50cf4608a16ab226e066f3a4aa4be62044",
"size": "2548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "home/pi/LoRa/test_lora_interface.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "82308"
},
{
"name": "C",
"bytes": "3197439"
},
{
"name": "C#",
"bytes": "33056"
},
{
"name": "C++",
"bytes": "1020255"
},
{
"name": "CSS",
"bytes": "208338"
},
{
"name": "CoffeeScript",
"bytes": "87200"
},
{
"name": "Eagle",
"bytes": "1632170"
},
{
"name": "Go",
"bytes": "3646"
},
{
"name": "Groff",
"bytes": "286691"
},
{
"name": "HTML",
"bytes": "41527"
},
{
"name": "JavaScript",
"bytes": "403603"
},
{
"name": "Makefile",
"bytes": "33808"
},
{
"name": "Objective-C",
"bytes": "69457"
},
{
"name": "Perl",
"bytes": "96047"
},
{
"name": "Processing",
"bytes": "1304"
},
{
"name": "Python",
"bytes": "13358098"
},
{
"name": "Shell",
"bytes": "68795"
},
{
"name": "TeX",
"bytes": "4317"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurlcustom import utils
def reverse(*args, **kwargs):
    """Locale-aware replacement for django's ``urlresolvers.reverse``.

    Pops a 'locale' entry from the reverse kwargs (defaulting to the active
    translation language), resolves the URL with the original reverse,
    strips the script prefix, and re-prefixes the path with the locale.
    """
    # NOTE(review): ``pop`` mutates kwargs['kwargs'] in place when the
    # caller passed one -- the caller's dict loses its 'locale' key.
    reverse_kwargs = kwargs.get('kwargs') or {}
    locale = utils.supported_language(reverse_kwargs.pop(
        'locale', translation.get_language()))
    url = django_reverse(*args, **kwargs)
    _, path = utils.strip_script_prefix(url)
    return utils.locale_url(path, locale)
# Holds the original (pre-patch) django reverse; set by patch_reverse().
django_reverse = None
def patch_reverse():
    """
    Monkey-patches the urlresolvers.reverse function. Will not patch twice.
    """
    global django_reverse
    # The identity check prevents double-patching, which would make
    # django_reverse point at our own wrapper and recurse.
    if urlresolvers.reverse is not reverse:
        django_reverse = urlresolvers.reverse
        urlresolvers.reverse = reverse
# Apply the patch at import time, but only for i18n-enabled projects.
if settings.USE_I18N:
    patch_reverse()
| {
"content_hash": "c45dd992cbd218c255fd1fcacc710938",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 75,
"avg_line_length": 30.5,
"alnum_prop": 0.7061790668348046,
"repo_name": "pyconjp/pyconjp-website",
"id": "63bd806cc59d7101b0e79d8a9d2fd1e408895ca6",
"size": "793",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "localeurlcustom/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "90583"
},
{
"name": "HTML",
"bytes": "320833"
},
{
"name": "JavaScript",
"bytes": "523940"
},
{
"name": "Makefile",
"bytes": "101"
},
{
"name": "Python",
"bytes": "1086795"
},
{
"name": "Shell",
"bytes": "6455"
}
],
"symlink_target": ""
} |
import functools
import inspect
import logging

from app.services import auth
class InjectPlugin(object):
    """Bottle plugin that injects common services (db, auth, logger) into
    route callbacks that declare them as parameters."""
    name = 'inject'
    api = 2
    def __init__(self, keyword='inject', **kwargs):
        self.keyword = keyword
    def setup(self, app):
        self.app = app
    def apply(self, callback, context):
        """Wrap *callback* so any recognised service name in its signature
        is supplied as a keyword argument; return it unwrapped otherwise."""
        args = inspect.getargspec(context.callback)[0]
        moreargs = {}
        if 'db' in args:
            # NOTE(review): db is injected as None -- presumably a
            # placeholder until a real connection is wired in; confirm.
            moreargs['db'] = None
        if 'auth' in args:
            moreargs['auth'] = auth
        if 'logger' in args:
            # Fix: 'logger' was an undefined name here, raising NameError
            # for any route that asked for a logger.
            moreargs['logger'] = logging.getLogger(__name__)
        if moreargs:
            # functools.wraps preserves the callback's name/docstring on
            # the wrapper.
            @functools.wraps(callback)
            def wrapper(*args, **kwargs):
                for k in moreargs:
                    kwargs[k] = moreargs[k]
                body = callback(*args, **kwargs)
                return body
            return wrapper
        else:
            return callback
| {
"content_hash": "42ae3bddae179990aaf9d1c660d7bd72",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 54,
"avg_line_length": 24.705882352941178,
"alnum_prop": 0.5107142857142857,
"repo_name": "kianby/sandbox",
"id": "53d90c49932813f6153613f4c927b7bcf6f893a8",
"size": "883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyserver/app/bottle/inject_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16703"
},
{
"name": "Go",
"bytes": "1990"
},
{
"name": "HTML",
"bytes": "45427"
},
{
"name": "JavaScript",
"bytes": "103973"
},
{
"name": "Pug",
"bytes": "477"
},
{
"name": "Python",
"bytes": "12257"
}
],
"symlink_target": ""
} |
'''
Regular Expression
Ref:
https://swtch.com/~rsc/regexp/regexp1.html
'''
| {
"content_hash": "d343faa937ee64be2aa4324e2a233390",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 46,
"avg_line_length": 11.571428571428571,
"alnum_prop": 0.654320987654321,
"repo_name": "oxnz/algorithms",
"id": "24615718dc305547e577640f04799a8d4561cee3",
"size": "632",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/algo/regex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "66689"
},
{
"name": "C++",
"bytes": "83605"
},
{
"name": "CMake",
"bytes": "2123"
},
{
"name": "Java",
"bytes": "11763"
},
{
"name": "Makefile",
"bytes": "7070"
},
{
"name": "Perl",
"bytes": "24488"
},
{
"name": "Python",
"bytes": "80441"
},
{
"name": "Shell",
"bytes": "4247"
}
],
"symlink_target": ""
} |
"""
Atmosphere allocation rest api.
"""
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import status
from core.models import Allocation, AllocationStrategy
from core.query import only_active_memberships
from api.v1.serializers import AllocationSerializer, AllocationResultSerializer
from api.v1.views.base import AuthAPIView
class AllocationList(AuthAPIView):
    """List all existing Allocations, or create a new one."""

    def get(self, request):
        """Return the serialized list of every Allocation."""
        allocations = Allocation.objects.all()
        payload = AllocationSerializer(allocations, many=True).data
        return Response(payload)

    def post(self, request):
        """Create a new Allocation from the request payload.

        Returns 201 with the saved record on success, otherwise 400
        with the serializer's validation errors.
        """
        serializer = AllocationSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class AllocationDetail(AuthAPIView):
    """Fetch or update a single Allocation by its primary key."""

    def get(self, request, allocation_id):
        """Return the Allocation matching ``allocation_id`` (404 if absent)."""
        allocation = get_object_or_404(Allocation, id=allocation_id)
        return Response(AllocationSerializer(allocation).data)

    def put(self, request, quota_id):
        """Fully replace the Allocation identified by ``quota_id``.

        NOTE(review): the URL kwarg is named ``quota_id`` here but
        ``allocation_id`` in ``get``; kept as-is because the URL router
        passes it by keyword.
        """
        return self._apply_update(quota_id, request.data, partial=False)

    def patch(self, request, quota_id):
        """Partially update the Allocation identified by ``quota_id``."""
        return self._apply_update(quota_id, request.data, partial=True)

    def _apply_update(self, allocation_id, data, partial):
        """Shared PUT/PATCH body: load, validate, save, and respond.

        Returns 200 with the updated record on success, otherwise 400
        with the serializer's validation errors.
        """
        allocation = get_object_or_404(Allocation, id=allocation_id)
        serializer = AllocationSerializer(allocation, data=data,
                                          partial=partial)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors,
                        status=status.HTTP_400_BAD_REQUEST)
class MonitoringList(AuthAPIView):
    """
    Runs the allocation engine and returns detailed monitoring information
    """

    def get(self, request):
        """Execute the allocation strategy for each of the requesting
        user's active memberships and return the serialized results.
        """
        results = []
        for membership in only_active_memberships(request.user):
            # One strategy per provider; looked up via the membership's
            # identity.  Raises DoesNotExist if no strategy is configured
            # for the provider (same behavior as the original code).
            strategy = AllocationStrategy.objects.get(
                provider=membership.identity.provider)
            results.append(strategy.execute(
                membership.identity, membership.allocation))
        payload = AllocationResultSerializer(results, many=True).data
        return Response(payload)
| {
"content_hash": "47a3962809c5b7b6018d0ac6b49151d5",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 79,
"avg_line_length": 30.314814814814813,
"alnum_prop": 0.6551618814905315,
"repo_name": "CCI-MOC/GUI-Backend",
"id": "b17ddc01d364db713079e23c8ca28850ba35fb0c",
"size": "3274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/v1/views/allocation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "11571"
},
{
"name": "Python",
"bytes": "2565922"
},
{
"name": "Ruby",
"bytes": "1345"
},
{
"name": "Shell",
"bytes": "42018"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.