sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def set_user_role(new_user_id, role_id, **kwargs):
"""
Apply `role_id` to `new_user_id`
Note this function returns the `Role` instance associated with `role_id`
"""
#check_perm(kwargs.get('user_id'), 'edit_role')
try:
_get_user(new_user_id)
role_i = _get_role(role_id)
roleuser_i = RoleUser(user_id=new_user_id, role_id=role_id)
role_i.roleusers.append(roleuser_i)
db.DBSession.flush()
except Exception as e: # Will occur if the foreign keys do not exist
log.exception(e)
raise ResourceNotFoundError("User or Role does not exist")
return role_i | Apply `role_id` to `new_user_id`
Note this function returns the `Role` instance associated with `role_id` | entailment |
def delete_user_role(deleted_user_id, role_id,**kwargs):
"""
Remove a user from a role
"""
#check_perm(kwargs.get('user_id'), 'edit_role')
try:
_get_user(deleted_user_id)
_get_role(role_id)
roleuser_i = db.DBSession.query(RoleUser).filter(RoleUser.user_id==deleted_user_id, RoleUser.role_id==role_id).one()
db.DBSession.delete(roleuser_i)
except NoResultFound:
raise ResourceNotFoundError("User Role does not exist")
return 'OK' | Remove a user from a role | entailment |
def set_role_perm(role_id, perm_id,**kwargs):
"""
Insert a permission into a role
"""
#check_perm(kwargs.get('user_id'), 'edit_perm')
_get_perm(perm_id)
role_i = _get_role(role_id)
roleperm_i = RolePerm(role_id=role_id, perm_id=perm_id)
role_i.roleperms.append(roleperm_i)
db.DBSession.flush()
return role_i | Insert a permission into a role | entailment |
def delete_role_perm(role_id, perm_id,**kwargs):
"""
Remove a permission from a role
"""
#check_perm(kwargs.get('user_id'), 'edit_perm')
_get_perm(perm_id)
_get_role(role_id)
try:
roleperm_i = db.DBSession.query(RolePerm).filter(RolePerm.role_id==role_id, RolePerm.perm_id==perm_id).one()
db.DBSession.delete(roleperm_i)
except NoResultFound:
raise ResourceNotFoundError("Role Perm does not exist")
return 'OK' | Remove a permission from a role | entailment |
def update_role(role,**kwargs):
"""
Update the role.
Used to add permissions and users to a role.
"""
#check_perm(kwargs.get('user_id'), 'edit_role')
try:
role_i = db.DBSession.query(Role).filter(Role.id==role.id).one()
role_i.name = role.name
role_i.code = role.code
except NoResultFound:
raise ResourceNotFoundError("Role (role_id=%s) does not exist"%(role.id))
for perm in role.permissions:
_get_perm(perm.id)
roleperm_i = RolePerm(role_id=role.id,
perm_id=perm.id
)
db.DBSession.add(roleperm_i)
for user in role.users:
_get_user(user.id)
roleuser_i = RoleUser(user_id=user.id,
perm_id=perm.id
)
db.DBSession.add(roleuser_i)
db.DBSession.flush()
return role_i | Update the role.
Used to add permissions and users to a role. | entailment |
def get_all_users(**kwargs):
"""
Get the username & ID of all users.
Use the the filter if it has been provided
The filter has to be a list of values
"""
users_qry = db.DBSession.query(User)
filter_type = kwargs.get('filter_type')
filter_value = kwargs.get('filter_value')
if filter_type is not None:
# Filtering the search of users
if filter_type == "id":
if isinstance(filter_value, str):
# Trying to read a csv string
log.info("[HB.users] Getting user by Filter ID : %s", filter_value)
filter_value = eval(filter_value)
if type(filter_value) is int:
users_qry = users_qry.filter(User.id==filter_value)
else:
users_qry = users_qry.filter(User.id.in_(filter_value))
elif filter_type == "username":
if isinstance(filter_value, str):
# Trying to read a csv string
log.info("[HB.users] Getting user by Filter Username : %s", filter_value)
filter_value = filter_value.split(",")
for i, em in enumerate(filter_value):
log.info("[HB.users] >>> Getting user by single Username : %s", em)
filter_value[i] = em.strip()
if isinstance(filter_value, str):
users_qry = users_qry.filter(User.username==filter_value)
else:
users_qry = users_qry.filter(User.username.in_(filter_value))
else:
raise Exception("Filter type '{}' not allowed".format(filter_type))
else:
log.info('[HB.users] Getting All Users')
rs = users_qry.all()
return rs | Get the username & ID of all users.
Use the the filter if it has been provided
The filter has to be a list of values | entailment |
def get_role(role_id,**kwargs):
"""
Get a role by its ID.
"""
try:
role = db.DBSession.query(Role).filter(Role.id==role_id).one()
return role
except NoResultFound:
raise HydraError("Role not found (role_id={})".format(role_id)) | Get a role by its ID. | entailment |
def get_user_roles(uid,**kwargs):
"""
Get the roles for a user.
@param user_id
"""
try:
user_roles = db.DBSession.query(Role).filter(Role.id==RoleUser.role_id,
RoleUser.user_id==uid).all()
return user_roles
except NoResultFound:
raise HydraError("Roles not found for user (user_id={})".format(uid)) | Get the roles for a user.
@param user_id | entailment |
def get_user_permissions(uid, **kwargs):
"""
Get the roles for a user.
@param user_id
"""
try:
_get_user(uid)
user_perms = db.DBSession.query(Perm).filter(Perm.id==RolePerm.perm_id,
RolePerm.role_id==Role.id,
Role.id==RoleUser.role_id,
RoleUser.user_id==uid).all()
return user_perms
except:
raise HydraError("Permissions not found for user (user_id={})".format(uid)) | Get the roles for a user.
@param user_id | entailment |
def get_role_by_code(role_code,**kwargs):
"""
Get a role by its code
"""
try:
role = db.DBSession.query(Role).filter(Role.code==role_code).one()
return role
except NoResultFound:
raise ResourceNotFoundError("Role not found (role_code={})".format(role_code)) | Get a role by its code | entailment |
def get_perm(perm_id,**kwargs):
"""
Get all permissions
"""
try:
perm = db.DBSession.query(Perm).filter(Perm.id==perm_id).one()
return perm
except NoResultFound:
raise ResourceNotFoundError("Permission not found (perm_id={})".format(perm_id)) | Get all permissions | entailment |
def get_perm_by_code(perm_code,**kwargs):
"""
Get a permission by its code
"""
try:
perm = db.DBSession.query(Perm).filter(Perm.code==perm_code).one()
return perm
except NoResultFound:
raise ResourceNotFoundError("Permission not found (perm_code={})".format(perm_code)) | Get a permission by its code | entailment |
def _create_dataframe(cls, value):
"""
Builds a dataframe from the value
"""
try:
ordered_jo = json.loads(six.text_type(value), object_pairs_hook=collections.OrderedDict)
#Pandas does not maintain the order of dicts, so we must break the dict
#up and put it into the dataframe manually to maintain the order.
cols = list(ordered_jo.keys())
if len(cols) == 0:
raise ValueError("Dataframe has no columns")
#Assume all sub-dicts have the same set of keys
if isinstance(ordered_jo[cols[0]], list):
index = range(len(ordered_jo[cols[0]]))
else:
index = list(ordered_jo[cols[0]].keys())
data = []
for c in cols:
if isinstance(ordered_jo[c], list):
data.append(ordered_jo[c])
else:
data.append(list(ordered_jo[c].values()))
# This goes in 'sideways' (cols=index, index=cols), so it needs to be transposed after to keep
# the correct structure
# We also try to coerce the data to a regular numpy array first. If the shape is correct
# this is a much faster way of creating the DataFrame instance.
try:
np_data = np.array(data)
except ValueError:
np_data = None
if np_data is not None and np_data.shape == (len(cols), len(index)):
df = pd.DataFrame(np_data, columns=index, index=cols).transpose()
else:
# TODO should these heterogenous structure be supported?
# See https://github.com/hydraplatform/hydra-base/issues/72
df = pd.DataFrame(data, columns=index, index=cols).transpose()
except ValueError as e:
""" Raised on scalar types used as pd.DataFrame values
in absence of index arg
"""
raise HydraError(str(e))
except AssertionError as e:
log.warning("An error occurred creating the new data frame: %s. Defaulting to a simple read_json"%(e))
df = pd.read_json(value).fillna(0)
return df | Builds a dataframe from the value | entailment |
def parse_value(self):
"""
Turn the value of an incoming dataset into a hydra-friendly value.
"""
try:
if self.value is None:
log.warning("Cannot parse dataset. No value specified.")
return None
# attr_data.value is a dictionary but the keys have namespaces which must be stripped
data = six.text_type(self.value)
if data.upper().strip() in ("NULL", ""):
return "NULL"
data = data[0:100]
log.info("[Dataset.parse_value] Parsing %s (%s)", data, type(data))
return HydraObjectFactory.valueFromDataset(self.type, self.value, self.get_metadata_as_dict())
except Exception as e:
log.exception(e)
raise HydraError("Error parsing value %s: %s"%(self.value, e)) | Turn the value of an incoming dataset into a hydra-friendly value. | entailment |
def get_metadata_as_dict(self, user_id=None, source=None):
"""
Convert a metadata json string into a dictionary.
Args:
user_id (int): Optional: Insert user_id into the metadata if specified
source (string): Optional: Insert source (the name of the app typically) into the metadata if necessary.
Returns:
dict: THe metadata as a python dictionary
"""
if self.metadata is None or self.metadata == "":
return {}
metadata_dict = self.metadata if isinstance(self.metadata, dict) else json.loads(self.metadata)
# These should be set on all datasets by default, but we don't enforce this rigidly
metadata_keys = [m.lower() for m in metadata_dict]
if user_id is not None and 'user_id' not in metadata_keys:
metadata_dict['user_id'] = six.text_type(user_id)
if source is not None and 'source' not in metadata_keys:
metadata_dict['source'] = six.text_type(source)
return { k : six.text_type(v) for k, v in metadata_dict.items() } | Convert a metadata json string into a dictionary.
Args:
user_id (int): Optional: Insert user_id into the metadata if specified
source (string): Optional: Insert source (the name of the app typically) into the metadata if necessary.
Returns:
dict: THe metadata as a python dictionary | entailment |
def add_resourcegroup(group, network_id,**kwargs):
"""
Add a new group to a network.
"""
group_i = ResourceGroup()
group_i.name = group.name
group_i.description = group.description
group_i.status = group.status
group_i.network_id = network_id
db.DBSession.add(group_i)
db.DBSession.flush()
return group_i | Add a new group to a network. | entailment |
def delete_resourcegroup(group_id,**kwargs):
"""
Add a new group to a scenario.
"""
group_i = _get_group(group_id)
#This should cascaded to delete all the group items.
db.DBSession.delete(group_i)
db.DBSession.flush()
return 'OK' | Add a new group to a scenario. | entailment |
def update_resourcegroup(group,**kwargs):
"""
Add a new group to a network.
"""
group_i = _get_group(group.id)
group_i.name = group.name
group_i.description = group.description
group_i.status = group.status
db.DBSession.flush()
return group_i | Add a new group to a network. | entailment |
def create_sqlite_backup_db(audit_tables):
"""
return an inspector object
"""
#we always want to create a whole new DB, so delete the old one first
#if it exists.
try:
Popen("rm %s"%(config.get('sqlite', 'backup_url')), shell=True)
logging.warn("Old sqlite backup DB removed")
except Exception as e:
logging.warn(e)
try:
aux_dir = config.get('DEFAULT', 'hydra_aux_dir')
os.mkdir(aux_dir)
logging.warn("%s created", aux_dir)
except Exception as e:
logging.warn(e)
try:
backup_dir = config.get('db', 'export_target')
os.mkdir(backup_dir)
logging.warn("%s created", backup_dir)
except Exception as e:
logging.warn(e)
db = create_engine(sqlite_engine, echo=True)
db.connect()
metadata = MetaData(db)
for main_audit_table in audit_tables:
cols = []
for c in main_audit_table.columns:
col = c.copy()
if col.type.python_type == Decimal:
col.type = DECIMAL()
cols.append(col)
Table(main_audit_table.name, metadata, *cols, sqlite_autoincrement=True)
metadata.create_all(db) | return an inspector object | entailment |
def _is_admin(user_id):
"""
Is the specified user an admin
"""
user = get_session().query(User).filter(User.id==user_id).one()
if user.is_admin():
return True
else:
return False | Is the specified user an admin | entailment |
def set_metadata(self, metadata_dict):
"""
Set the metadata on a dataset
**metadata_dict**: A dictionary of metadata key-vals.
Transforms this dict into an array of metadata objects for
storage in the DB.
"""
if metadata_dict is None:
return
existing_metadata = []
for m in self.metadata:
existing_metadata.append(m.key)
if m.key in metadata_dict:
if m.value != metadata_dict[m.key]:
m.value = metadata_dict[m.key]
for k, v in metadata_dict.items():
if k not in existing_metadata:
m_i = Metadata(key=str(k),value=str(v))
self.metadata.append(m_i)
metadata_to_delete = set(existing_metadata).difference(set(metadata_dict.keys()))
for m in self.metadata:
if m.key in metadata_to_delete:
get_session().delete(m) | Set the metadata on a dataset
**metadata_dict**: A dictionary of metadata key-vals.
Transforms this dict into an array of metadata objects for
storage in the DB. | entailment |
def check_user(self, user_id):
"""
Check whether this user can read this dataset
"""
if self.hidden == 'N':
return True
for owner in self.owners:
if int(owner.user_id) == int(user_id):
if owner.view == 'Y':
return True
return False | Check whether this user can read this dataset | entailment |
def get_network(self):
"""
Get the network that this resource attribute is in.
"""
ref_key = self.ref_key
if ref_key == 'NETWORK':
return self.network
elif ref_key == 'NODE':
return self.node.network
elif ref_key == 'LINK':
return self.link.network
elif ref_key == 'GROUP':
return self.group.network
elif ref_key == 'PROJECT':
return None | Get the network that this resource attribute is in. | entailment |
def check_read_permission(self, user_id, do_raise=True):
"""
Check whether this user can read this resource attribute
"""
return self.get_resource().check_read_permission(user_id, do_raise=do_raise) | Check whether this user can read this resource attribute | entailment |
def check_write_permission(self, user_id, do_raise=True):
"""
Check whether this user can write this node
"""
return self.get_resource().check_write_permission(user_id, do_raise=do_raise) | Check whether this user can write this node | entailment |
def add_link(self, name, desc, layout, node_1, node_2):
"""
Add a link to a network. Links are what effectively
define the network topology, by associating two already
existing nodes.
"""
existing_link = get_session().query(Link).filter(Link.name==name, Link.network_id==self.id).first()
if existing_link is not None:
raise HydraError("A link with name %s is already in network %s"%(name, self.id))
l = Link()
l.name = name
l.description = desc
l.layout = json.dumps(layout) if layout is not None else None
l.node_a = node_1
l.node_b = node_2
get_session().add(l)
self.links.append(l)
return l | Add a link to a network. Links are what effectively
define the network topology, by associating two already
existing nodes. | entailment |
def add_node(self, name, desc, layout, node_x, node_y):
"""
Add a node to a network.
"""
existing_node = get_session().query(Node).filter(Node.name==name, Node.network_id==self.id).first()
if existing_node is not None:
raise HydraError("A node with name %s is already in network %s"%(name, self.id))
node = Node()
node.name = name
node.description = desc
node.layout = str(layout) if layout is not None else None
node.x = node_x
node.y = node_y
#Do not call save here because it is likely that we may want
#to bulk insert nodes, not one at a time.
get_session().add(node)
self.nodes.append(node)
return node | Add a node to a network. | entailment |
def add_group(self, name, desc, status):
"""
Add a new group to a network.
"""
existing_group = get_session().query(ResourceGroup).filter(ResourceGroup.name==name, ResourceGroup.network_id==self.id).first()
if existing_group is not None:
raise HydraError("A resource group with name %s is already in network %s"%(name, self.id))
group_i = ResourceGroup()
group_i.name = name
group_i.description = desc
group_i.status = status
get_session().add(group_i)
self.resourcegroups.append(group_i)
return group_i | Add a new group to a network. | entailment |
def check_read_permission(self, user_id, do_raise=True):
"""
Check whether this user can read this network
"""
if _is_admin(user_id):
return True
if int(self.created_by) == int(user_id):
return True
for owner in self.owners:
if int(owner.user_id) == int(user_id):
if owner.view == 'Y':
break
else:
if do_raise is True:
raise PermissionError("Permission denied. User %s does not have read"
" access on network %s" %
(user_id, self.id))
else:
return False
return True | Check whether this user can read this network | entailment |
def check_share_permission(self, user_id):
"""
Check whether this user can write this project
"""
if _is_admin(user_id):
return
if int(self.created_by) == int(user_id):
return
for owner in self.owners:
if owner.user_id == int(user_id):
if owner.view == 'Y' and owner.share == 'Y':
break
else:
raise PermissionError("Permission denied. User %s does not have share"
" access on network %s" %
(user_id, self.id)) | Check whether this user can write this project | entailment |
def check_read_permission(self, user_id, do_raise=True):
"""
Check whether this user can read this link
"""
return self.network.check_read_permission(user_id, do_raise=do_raise) | Check whether this user can read this link | entailment |
def check_write_permission(self, user_id, do_raise=True):
"""
Check whether this user can write this link
"""
return self.network.check_write_permission(user_id, do_raise=do_raise) | Check whether this user can write this link | entailment |
def get_items(self, scenario_id):
"""
Get all the items in this group, in the given scenario
"""
items = get_session().query(ResourceGroupItem)\
.filter(ResourceGroupItem.group_id==self.id).\
filter(ResourceGroupItem.scenario_id==scenario_id).all()
return items | Get all the items in this group, in the given scenario | entailment |
def set_ref(self, ref_key, ref_id):
"""
Using a ref key and ref id set the
reference to the appropriate resource type.
"""
if ref_key == 'NETWORK':
self.network_id = ref_id
elif ref_key == 'NODE':
self.node_id = ref_id
elif ref_key == 'LINK':
self.link_id = ref_id
elif ref_key == 'GROUP':
self.group_id = ref_id
elif ref_key == 'SCENARIO':
self.scenario_id = ref_id
elif ref_key == 'PROJECT':
self.project_id = ref_id
else:
raise HydraError("Ref Key %s not recognised."%ref_key) | Using a ref key and ref id set the
reference to the appropriate resource type. | entailment |
def get_ref_id(self):
"""
Return the ID of the resource to which this not is attached
"""
if self.ref_key == 'NETWORK':
return self.network_id
elif self.ref_key == 'NODE':
return self.node_id
elif self.ref_key == 'LINK':
return self.link_id
elif self.ref_key == 'GROUP':
return self.group_id
elif self.ref_key == 'SCENARIO':
return self.scenario_id
elif self.ref_key == 'PROJECT':
return self.project_id | Return the ID of the resource to which this not is attached | entailment |
def get_ref(self):
"""
Return the ID of the resource to which this not is attached
"""
if self.ref_key == 'NETWORK':
return self.network
elif self.ref_key == 'NODE':
return self.node
elif self.ref_key == 'LINK':
return self.link
elif self.ref_key == 'GROUP':
return self.group
elif self.ref_key == 'SCENARIO':
return self.scenario
elif self.ref_key == 'PROJECT':
return self.project | Return the ID of the resource to which this not is attached | entailment |
def permissions(self):
"""Return a set with all permissions granted to the user."""
perms = set()
for r in self.roles:
perms = perms | set(r.permissions)
return perms | Return a set with all permissions granted to the user. | entailment |
def roles(self):
"""Return a set with all roles granted to the user."""
roles = []
for ur in self.roleusers:
roles.append(ur.role)
return set(roles) | Return a set with all roles granted to the user. | entailment |
def is_admin(self):
"""
Check that the user has a role with the code 'admin'
"""
for ur in self.roleusers:
if ur.role.code == 'admin':
return True
return False | Check that the user has a role with the code 'admin' | entailment |
def _check_dimension(typeattr, unit_id=None):
"""
Check that the unit and dimension on a type attribute match.
Alternatively, pass in a unit manually to check against the dimension
of the type attribute
"""
if unit_id is None:
unit_id = typeattr.unit_id
dimension_id = _get_attr(typeattr.attr_id).dimension_id
if unit_id is not None and dimension_id is None:
# First error case
unit_dimension_id = units.get_dimension_by_unit_id(unit_id).id
raise HydraError("Unit %s (abbreviation=%s) has dimension_id %s(name=%s), but attribute has no dimension"%
(unit_id, units.get_unit(unit_id).abbreviation,
unit_dimension_id, units.get_dimension(unit_dimension_id, do_accept_dimension_id_none=True).name))
elif unit_id is not None and dimension_id is not None:
unit_dimension_id = units.get_dimension_by_unit_id(unit_id).id
if unit_dimension_id != dimension_id:
# Only error case
raise HydraError("Unit %s (abbreviation=%s) has dimension_id %s(name=%s), but attribute has dimension_id %s(name=%s)"%
(unit_id, units.get_unit(unit_id).abbreviation,
unit_dimension_id, units.get_dimension(unit_dimension_id, do_accept_dimension_id_none=True).name,
dimension_id, units.get_dimension(dimension_id, do_accept_dimension_id_none=True).name)) | Check that the unit and dimension on a type attribute match.
Alternatively, pass in a unit manually to check against the dimension
of the type attribute | entailment |
def get_types_by_attr(resource, template_id=None):
"""
Using the attributes of the resource, get all the
types that this resource matches.
@returns a dictionary, keyed on the template name, with the
value being the list of type names which match the resources
attributes.
"""
resource_type_templates = []
#Create a list of all of this resources attributes.
attr_ids = []
for res_attr in resource.attributes:
attr_ids.append(res_attr.attr_id)
all_resource_attr_ids = set(attr_ids)
all_types = db.DBSession.query(TemplateType).options(joinedload_all('typeattrs')).filter(TemplateType.resource_type==resource.ref_key)
if template_id is not None:
all_types = all_types.filter(TemplateType.template_id==template_id)
all_types = all_types.all()
#tmpl type attrs must be a subset of the resource's attrs
for ttype in all_types:
type_attr_ids = []
for typeattr in ttype.typeattrs:
type_attr_ids.append(typeattr.attr_id)
if set(type_attr_ids).issubset(all_resource_attr_ids):
resource_type_templates.append(ttype)
return resource_type_templates | Using the attributes of the resource, get all the
types that this resource matches.
@returns a dictionary, keyed on the template name, with the
value being the list of type names which match the resources
attributes. | entailment |
def _get_attr_by_name_and_dimension(name, dimension_id):
"""
Search for an attribute with the given name and dimension_id.
If such an attribute does not exist, create one.
"""
attr = db.DBSession.query(Attr).filter(Attr.name==name, Attr.dimension_id==dimension_id).first()
if attr is None:
# In this case the attr does not exists so we must create it
attr = Attr()
attr.dimension_id = dimension_id
attr.name = name
log.debug("Attribute not found, creating new attribute: name:%s, dimen:%s",
attr.name, attr.dimension_id)
db.DBSession.add(attr)
return attr | Search for an attribute with the given name and dimension_id.
If such an attribute does not exist, create one. | entailment |
def get_template_as_json(template_id, **kwargs):
"""
Get a template (including attribute and dataset definitions) as a JSON
string. This is just a wrapper around the get_template_as_dict function.
"""
user_id = kwargs['user_id']
return json.dumps(get_template_as_dict(template_id, user_id=user_id)) | Get a template (including attribute and dataset definitions) as a JSON
string. This is just a wrapper around the get_template_as_dict function. | entailment |
def get_template_as_xml(template_id,**kwargs):
"""
Turn a template into an xml template
"""
template_xml = etree.Element("template_definition")
template_i = db.DBSession.query(Template).filter(
Template.id==template_id).options(
#joinedload_all('templatetypes.typeattrs.default_dataset.metadata')
joinedload('templatetypes').joinedload('typeattrs').joinedload('default_dataset').joinedload('metadata')
).one()
template_name = etree.SubElement(template_xml, "template_name")
template_name.text = template_i.name
template_description = etree.SubElement(template_xml, "template_description")
template_description.text = template_i.description
resources = etree.SubElement(template_xml, "resources")
for type_i in template_i.templatetypes:
xml_resource = etree.SubElement(resources, "resource")
resource_type = etree.SubElement(xml_resource, "type")
resource_type.text = type_i.resource_type
name = etree.SubElement(xml_resource, "name")
name.text = type_i.name
description = etree.SubElement(xml_resource, "description")
description.text = type_i.description
alias = etree.SubElement(xml_resource, "alias")
alias.text = type_i.alias
if type_i.layout is not None and type_i.layout != "":
layout = _get_layout_as_etree(type_i.layout)
xml_resource.append(layout)
for type_attr in type_i.typeattrs:
attr = _make_attr_element_from_typeattr(xml_resource, type_attr)
resources.append(xml_resource)
xml_string = etree.tostring(template_xml, encoding="unicode")
return xml_string | Turn a template into an xml template | entailment |
def import_template_json(template_json_string,allow_update=True, **kwargs):
"""
Add the template, type and typeattrs described
in a JSON file.
Delete type, typeattr entries in the DB that are not in the XML file
The assumption is that they have been deleted and are no longer required.
The allow_update indicates whether an existing template of the same name should
be updated, or whether it should throw an 'existing name' error.
"""
user_id = kwargs.get('user_id')
try:
template_dict = json.loads(template_json_string)
except:
raise HydraError("Unable to parse JSON string. Plese ensure it is JSON compatible.")
return import_template_dict(template_dict, allow_update=allow_update, user_id=user_id) | Add the template, type and typeattrs described
in a JSON file.
Delete type, typeattr entries in the DB that are not in the XML file
The assumption is that they have been deleted and are no longer required.
The allow_update indicates whether an existing template of the same name should
be updated, or whether it should throw an 'existing name' error. | entailment |
def import_template_xml(template_xml, allow_update=True, **kwargs):
"""
Add the template, type and typeattrs described
in an XML file.
Delete type, typeattr entries in the DB that are not in the XML file
The assumption is that they have been deleted and are no longer required.
"""
template_xsd_path = config.get('templates', 'template_xsd_path')
xmlschema_doc = etree.parse(template_xsd_path)
xmlschema = etree.XMLSchema(xmlschema_doc)
xml_tree = etree.fromstring(template_xml)
xmlschema.assertValid(xml_tree)
template_name = xml_tree.find('template_name').text
template_description = xml_tree.find('template_description').text
template_layout = None
if xml_tree.find('layout') is not None and \
xml_tree.find('layout').text is not None:
layout = xml_tree.find('layout')
layout_string = get_etree_layout_as_dict(layout)
template_layout = json.dumps(layout_string)
try:
tmpl_i = db.DBSession.query(Template).filter(Template.name==template_name).options(joinedload_all('templatetypes.typeattrs.attr')).one()
if allow_update == False:
raise HydraError("Existing Template Found with name %s"%(template_name,))
else:
log.debug("Existing template found. name=%s", template_name)
tmpl_i.layout = template_layout
tmpl_i.description = template_description
except NoResultFound:
log.debug("Template not found. Creating new one. name=%s", template_name)
tmpl_i = Template(name=template_name, description=template_description, layout=template_layout)
db.DBSession.add(tmpl_i)
types = xml_tree.find('resources')
#Delete any types which are in the DB but no longer in the XML file
type_name_map = {r.name:r.id for r in tmpl_i.templatetypes}
attr_name_map = {}
for type_i in tmpl_i.templatetypes:
for attr in type_i.typeattrs:
attr_name_map[attr.attr.name] = (attr.id, attr.type_id)
existing_types = set([r.name for r in tmpl_i.templatetypes])
new_types = set([r.find('name').text for r in types.findall('resource')])
types_to_delete = existing_types - new_types
for type_to_delete in types_to_delete:
type_id = type_name_map[type_to_delete]
try:
type_i = db.DBSession.query(TemplateType).filter(TemplateType.id==type_id).one()
log.debug("Deleting type %s", type_i.name)
db.DBSession.delete(type_i)
except NoResultFound:
pass
#Add or update types.
for resource in types.findall('resource'):
type_name = resource.find('name').text
#check if the type is already in the DB. If not, create a new one.
type_is_new = False
if type_name in existing_types:
type_id = type_name_map[type_name]
type_i = db.DBSession.query(TemplateType).filter(TemplateType.id==type_id).options(joinedload_all('typeattrs.attr')).one()
else:
log.debug("Type %s not found, creating new one.", type_name)
type_i = TemplateType()
type_i.name = type_name
tmpl_i.templatetypes.append(type_i)
type_is_new = True
if resource.find('alias') is not None:
type_i.alias = resource.find('alias').text
if resource.find('description') is not None:
type_i.description = resource.find('description').text
if resource.find('type') is not None:
type_i.resource_type = resource.find('type').text
if resource.find('layout') is not None and \
resource.find('layout').text is not None:
layout = resource.find('layout')
layout_string = get_etree_layout_as_dict(layout)
type_i.layout = json.dumps(layout_string)
#delete any TypeAttrs which are in the DB but not in the XML file
existing_attrs = []
if not type_is_new:
for r in tmpl_i.templatetypes:
if r.name == type_name:
for typeattr in r.typeattrs:
existing_attrs.append(typeattr.attr.name)
existing_attrs = set(existing_attrs)
template_attrs = set([r.find('name').text for r in resource.findall('attribute')])
attrs_to_delete = existing_attrs - template_attrs
for attr_to_delete in attrs_to_delete:
attr_id, type_id = attr_name_map[attr_to_delete]
try:
attr_i = db.DBSession.query(TypeAttr).filter(TypeAttr.attr_id==attr_id, TypeAttr.type_id==type_id).options(joinedload_all('attr')).one()
db.DBSession.delete(attr_i)
log.debug("Attr %s in type %s deleted",attr_i.attr.name, attr_i.templatetype.name)
except NoResultFound:
log.debug("Attr %s not found in type %s"%(attr_id, type_id))
continue
#Add or update type typeattrs
for attribute in resource.findall('attribute'):
new_typeattr = parse_xml_typeattr(type_i, attribute)
db.DBSession.flush()
return tmpl_i | Add the template, type and typeattrs described
in an XML file.
Delete type, typeattr entries in the DB that are not in the XML file
The assumption is that they have been deleted and are no longer required. | entailment |
def apply_template_to_network(template_id, network_id, **kwargs):
"""
For each node and link in a network, check whether it matches
a type in a given template. If so, assign the type to the node / link.
"""
net_i = db.DBSession.query(Network).filter(Network.id==network_id).one()
#There should only ever be one matching type, but if there are more,
#all we can do is pick the first one.
try:
network_type_id = db.DBSession.query(TemplateType.id).filter(TemplateType.template_id==template_id,
TemplateType.resource_type=='NETWORK').one()
assign_type_to_resource(network_type_id.id, 'NETWORK', network_id,**kwargs)
except NoResultFound:
log.debug("No network type to set.")
pass
for node_i in net_i.nodes:
templates = get_types_by_attr(node_i, template_id)
if len(templates) > 0:
assign_type_to_resource(templates[0].id, 'NODE', node_i.id,**kwargs)
for link_i in net_i.links:
templates = get_types_by_attr(link_i, template_id)
if len(templates) > 0:
assign_type_to_resource(templates[0].id, 'LINK', link_i.id,**kwargs)
for group_i in net_i.resourcegroups:
templates = get_types_by_attr(group_i, template_id)
if len(templates) > 0:
assign_type_to_resource(templates[0].id, 'GROUP', group_i.id,**kwargs)
db.DBSession.flush() | For each node and link in a network, check whether it matches
a type in a given template. If so, assign the type to the node / link. | entailment |
def set_network_template(template_id, network_id, **kwargs):
"""
Apply an existing template to a network. Used when a template has changed, and additional attributes
must be added to the network's elements.
"""
resource_types = []
#There should only ever be one matching type, but if there are more,
#all we can do is pick the first one.
try:
network_type = db.DBSession.query(ResourceType).filter(ResourceType.ref_key=='NETWORK',
ResourceType.network_id==network_id,
ResourceType.type_id==TemplateType.type_id,
TemplateType.template_id==template_id).one()
resource_types.append(network_type)
except NoResultFound:
log.debug("No network type to set.")
pass
node_types = db.DBSession.query(ResourceType).filter(ResourceType.ref_key=='NODE',
ResourceType.node_id==Node.node_id,
Node.network_id==network_id,
ResourceType.type_id==TemplateType.type_id,
TemplateType.template_id==template_id).all()
link_types = db.DBSession.query(ResourceType).filter(ResourceType.ref_key=='LINK',
ResourceType.link_id==Link.link_id,
Link.network_id==network_id,
ResourceType.type_id==TemplateType.type_id,
TemplateType.template_id==template_id).all()
group_types = db.DBSession.query(ResourceType).filter(ResourceType.ref_key=='GROUP',
ResourceType.group_id==ResourceGroup.group_id,
ResourceGroup.network_id==network_id,
ResourceType.type_id==TemplateType.type_id,
TemplateType.template_id==template_id).all()
resource_types.extend(node_types)
resource_types.extend(link_types)
resource_types.extend(group_types)
assign_types_to_resources(resource_types)
log.debug("Finished setting network template") | Apply an existing template to a network. Used when a template has changed, and additional attributes
must be added to the network's elements. | entailment |
def remove_template_from_network(network_id, template_id, remove_attrs, **kwargs):
"""
Remove all resource types in a network relating to the specified
template.
remove_attrs
Flag to indicate whether the attributes associated with the template
types should be removed from the resources in the network. These will
only be removed if they are not shared with another template on the network
"""
try:
network = db.DBSession.query(Network).filter(Network.id==network_id).one()
except NoResultFound:
raise HydraError("Network %s not found"%network_id)
try:
template = db.DBSession.query(Template).filter(Template.id==template_id).one()
except NoResultFound:
raise HydraError("Template %s not found"%template_id)
type_ids = [tmpltype.id for tmpltype in template.templatetypes]
node_ids = [n.id for n in network.nodes]
link_ids = [l.id for l in network.links]
group_ids = [g.id for g in network.resourcegroups]
if remove_attrs == 'Y':
#find the attributes to remove
resource_attrs_to_remove = _get_resources_to_remove(network, template)
for n in network.nodes:
resource_attrs_to_remove.extend(_get_resources_to_remove(n, template))
for l in network.links:
resource_attrs_to_remove.extend(_get_resources_to_remove(l, template))
for g in network.resourcegroups:
resource_attrs_to_remove.extend(_get_resources_to_remove(g, template))
for ra in resource_attrs_to_remove:
db.DBSession.delete(ra)
resource_types = db.DBSession.query(ResourceType).filter(
and_(or_(
ResourceType.network_id==network_id,
ResourceType.node_id.in_(node_ids),
ResourceType.link_id.in_(link_ids),
ResourceType.group_id.in_(group_ids),
), ResourceType.type_id.in_(type_ids))).all()
for resource_type in resource_types:
db.DBSession.delete(resource_type)
db.DBSession.flush() | Remove all resource types in a network relating to the specified
template.
remove_attrs
Flag to indicate whether the attributes associated with the template
types should be removed from the resources in the network. These will
only be removed if they are not shared with another template on the network | entailment |
def _get_resources_to_remove(resource, template):
"""
Given a resource and a template being removed, identify the resource attribtes
which can be removed.
"""
type_ids = [tmpltype.id for tmpltype in template.templatetypes]
node_attr_ids = dict([(ra.attr_id, ra) for ra in resource.attributes])
attrs_to_remove = []
attrs_to_keep = []
for nt in resource.types:
if nt.templatetype.id in type_ids:
for ta in nt.templatetype.typeattrs:
if node_attr_ids.get(ta.attr_id):
attrs_to_remove.append(node_attr_ids[ta.attr_id])
else:
for ta in nt.templatetype.typeattrs:
if node_attr_ids.get(ta.attr_id):
attrs_to_keep.append(node_attr_ids[ta.attr_id])
#remove any of the attributes marked for deletion as they are
#marked for keeping based on being in another type.
final_attrs_to_remove = set(attrs_to_remove) - set(attrs_to_keep)
return list(final_attrs_to_remove) | Given a resource and a template being removed, identify the resource attribtes
which can be removed. | entailment |
def get_matching_resource_types(resource_type, resource_id,**kwargs):
"""
Get the possible types of a resource by checking its attributes
against all available types.
@returns A list of TypeSummary objects.
"""
resource_i = None
if resource_type == 'NETWORK':
resource_i = db.DBSession.query(Network).filter(Network.id==resource_id).one()
elif resource_type == 'NODE':
resource_i = db.DBSession.query(Node).filter(Node.id==resource_id).one()
elif resource_type == 'LINK':
resource_i = db.DBSession.query(Link).filter(Link.id==resource_id).one()
elif resource_type == 'GROUP':
resource_i = db.DBSession.query(ResourceGroup).filter(ResourceGroup.id==resource_id).one()
matching_types = get_types_by_attr(resource_i)
return matching_types | Get the possible types of a resource by checking its attributes
against all available types.
@returns A list of TypeSummary objects. | entailment |
def assign_types_to_resources(resource_types,**kwargs):
"""
Assign new types to list of resources.
This function checks if the necessary
attributes are present and adds them if needed. Non existing attributes
are also added when the type is already assigned. This means that this
function can also be used to update resources, when a resource type has
changed.
"""
#Remove duplicate values from types by turning it into a set
type_ids = list(set([rt.type_id for rt in resource_types]))
db_types = db.DBSession.query(TemplateType).filter(TemplateType.id.in_(type_ids)).options(joinedload_all('typeattrs')).all()
types = {}
for db_type in db_types:
if types.get(db_type.id) is None:
types[db_type.id] = db_type
log.debug("Retrieved all the appropriate template types")
res_types = []
res_attrs = []
res_scenarios = []
net_id = None
node_ids = []
link_ids = []
grp_ids = []
for resource_type in resource_types:
ref_id = resource_type.ref_id
ref_key = resource_type.ref_key
if resource_type.ref_key == 'NETWORK':
net_id = ref_id
elif resource_type.ref_key == 'NODE':
node_ids.append(ref_id)
elif resource_type.ref_key == 'LINK':
link_ids.append(ref_id)
elif resource_type.ref_key == 'GROUP':
grp_ids.append(ref_id)
if net_id:
net = db.DBSession.query(Network).filter(Network.id==net_id).one()
nodes = _get_nodes(node_ids)
links = _get_links(link_ids)
groups = _get_groups(grp_ids)
for resource_type in resource_types:
ref_id = resource_type.ref_id
ref_key = resource_type.ref_key
type_id = resource_type.type_id
if ref_key == 'NETWORK':
resource = net
elif ref_key == 'NODE':
resource = nodes[ref_id]
elif ref_key == 'LINK':
resource = links[ref_id]
elif ref_key == 'GROUP':
resource = groups[ref_id]
ra, rt, rs= set_resource_type(resource, type_id, types)
if rt is not None:
res_types.append(rt)
if len(ra) > 0:
res_attrs.extend(ra)
if len(rs) > 0:
res_scenarios.extend(rs)
log.debug("Retrieved all the appropriate resources")
if len(res_types) > 0:
new_types = db.DBSession.execute(ResourceType.__table__.insert(), res_types)
if len(res_attrs) > 0:
new_res_attrs = db.DBSession.execute(ResourceAttr.__table__.insert(), res_attrs)
new_ras = db.DBSession.query(ResourceAttr).filter(and_(ResourceAttr.id>=new_res_attrs.lastrowid, ResourceAttr.id<(new_res_attrs.lastrowid+len(res_attrs)))).all()
ra_map = {}
for ra in new_ras:
ra_map[(ra.ref_key, ra.attr_id, ra.node_id, ra.link_id, ra.group_id, ra.network_id)] = ra.id
for rs in res_scenarios:
rs['resource_attr_id'] = ra_map[(rs['ref_key'], rs['attr_id'], rs['node_id'], rs['link_id'], rs['group_id'], rs['network_id'])]
if len(res_scenarios) > 0:
new_scenarios = db.DBSession.execute(ResourceScenario.__table__.insert(), res_scenarios)
#Make DBsession 'dirty' to pick up the inserts by doing a fake delete.
db.DBSession.query(ResourceAttr).filter(ResourceAttr.attr_id==None).delete()
ret_val = [t for t in types.values()]
return ret_val | Assign new types to list of resources.
This function checks if the necessary
attributes are present and adds them if needed. Non existing attributes
are also added when the type is already assigned. This means that this
function can also be used to update resources, when a resource type has
changed. | entailment |
def check_type_compatibility(type_1_id, type_2_id):
"""
When applying a type to a resource, it may be the case that the resource already
has an attribute specified in the new type, but the template which defines this
pre-existing attribute has a different unit specification to the new template.
This function checks for any situations where different types specify the same
attributes, but with different units.
"""
errors = []
type_1 = db.DBSession.query(TemplateType).filter(TemplateType.id==type_1_id).options(joinedload_all('typeattrs')).one()
type_2 = db.DBSession.query(TemplateType).filter(TemplateType.id==type_2_id).options(joinedload_all('typeattrs')).one()
template_1_name = type_1.template.name
template_2_name = type_2.template.name
type_1_attrs=set([t.attr_id for t in type_1.typeattrs])
type_2_attrs=set([t.attr_id for t in type_2.typeattrs])
shared_attrs = type_1_attrs.intersection(type_2_attrs)
if len(shared_attrs) == 0:
return []
type_1_dict = {}
for t in type_1.typeattrs:
if t.attr_id in shared_attrs:
type_1_dict[t.attr_id]=t
for ta in type_2.typeattrs:
type_2_unit_id = ta.unit_id
type_1_unit_id = type_1_dict[ta.attr_id].unit_id
fmt_dict = {
'template_1_name': template_1_name,
'template_2_name': template_2_name,
'attr_name': ta.attr.name,
'type_1_unit_id': type_1_unit_id,
'type_2_unit_id': type_2_unit_id,
'type_name' : type_1.name
}
if type_1_unit_id is None and type_2_unit_id is not None:
errors.append("Type %(type_name)s in template %(template_1_name)s"
" stores %(attr_name)s with no units, while template"
"%(template_2_name)s stores it with unit %(type_2_unit_id)s"%fmt_dict)
elif type_1_unit_id is not None and type_2_unit_id is None:
errors.append("Type %(type_name)s in template %(template_1_name)s"
" stores %(attr_name)s in %(type_1_unit_id)s."
" Template %(template_2_name)s stores it with no unit."%fmt_dict)
elif type_1_unit_id != type_2_unit_id:
errors.append("Type %(type_name)s in template %(template_1_name)s"
" stores %(attr_name)s in %(type_1_unit_id)s, while"
" template %(template_2_name)s stores it in %(type_2_unit_id)s"%fmt_dict)
return errors | When applying a type to a resource, it may be the case that the resource already
has an attribute specified in the new type, but the template which defines this
pre-existing attribute has a different unit specification to the new template.
This function checks for any situations where different types specify the same
attributes, but with different units. | entailment |
def assign_type_to_resource(type_id, resource_type, resource_id,**kwargs):
"""Assign new type to a resource. This function checks if the necessary
attributes are present and adds them if needed. Non existing attributes
are also added when the type is already assigned. This means that this
function can also be used to update resources, when a resource type has
changed.
"""
if resource_type == 'NETWORK':
resource = db.DBSession.query(Network).filter(Network.id==resource_id).one()
elif resource_type == 'NODE':
resource = db.DBSession.query(Node).filter(Node.id==resource_id).one()
elif resource_type == 'LINK':
resource = db.DBSession.query(Link).filter(Link.id==resource_id).one()
elif resource_type == 'GROUP':
resource = db.DBSession.query(ResourceGroup).filter(ResourceGroup.id==resource_id).one()
res_attrs, res_type, res_scenarios = set_resource_type(resource, type_id, **kwargs)
type_i = db.DBSession.query(TemplateType).filter(TemplateType.id==type_id).one()
if resource_type != type_i.resource_type:
raise HydraError("Cannot assign a %s type to a %s"%
(type_i.resource_type,resource_type))
if res_type is not None:
db.DBSession.bulk_insert_mappings(ResourceType, [res_type])
if len(res_attrs) > 0:
db.DBSession.bulk_insert_mappings(ResourceAttr, res_attrs)
if len(res_scenarios) > 0:
db.DBSession.bulk_insert_mappings(ResourceScenario, res_scenarios)
#Make DBsession 'dirty' to pick up the inserts by doing a fake delete.
db.DBSession.query(Attr).filter(Attr.id==None).delete()
db.DBSession.flush()
return db.DBSession.query(TemplateType).filter(TemplateType.id==type_id).one() | Assign new type to a resource. This function checks if the necessary
attributes are present and adds them if needed. Non existing attributes
are also added when the type is already assigned. This means that this
function can also be used to update resources, when a resource type has
changed. | entailment |
def set_resource_type(resource, type_id, types={}, **kwargs):
"""
Set this resource to be a certain type.
Type objects (a dictionary keyed on type_id) may be
passed in to save on loading.
This function does not call save. It must be done afterwards.
New resource attributes are added to the resource if the template
requires them. Resource attributes on the resource but not used by
the template are not removed.
@returns list of new resource attributes
,new resource type object
"""
ref_key = resource.ref_key
existing_attr_ids = []
for res_attr in resource.attributes:
existing_attr_ids.append(res_attr.attr_id)
if type_id in types:
type_i = types[type_id]
else:
type_i = db.DBSession.query(TemplateType).filter(TemplateType.id==type_id).options(joinedload_all('typeattrs')).one()
type_attrs = dict()
for typeattr in type_i.typeattrs:
type_attrs[typeattr.attr_id]={
'is_var':typeattr.attr_is_var,
'default_dataset_id': typeattr.default_dataset.id if typeattr.default_dataset else None}
# check if attributes exist
missing_attr_ids = set(type_attrs.keys()) - set(existing_attr_ids)
# add attributes if necessary
new_res_attrs = []
#This is a dict as the length of the list may not match the new_res_attrs
#Keyed on attr_id, as resource_attr_id doesn't exist yet, and there should only
#be one attr_id per template.
new_res_scenarios = {}
for attr_id in missing_attr_ids:
ra_dict = dict(
ref_key = ref_key,
attr_id = attr_id,
attr_is_var = type_attrs[attr_id]['is_var'],
node_id = resource.id if ref_key == 'NODE' else None,
link_id = resource.id if ref_key == 'LINK' else None,
group_id = resource.id if ref_key == 'GROUP' else None,
network_id = resource.id if ref_key == 'NETWORK' else None,
)
new_res_attrs.append(ra_dict)
if type_attrs[attr_id]['default_dataset_id'] is not None:
if hasattr(resource, 'network'):
for s in resource.network.scenarios:
if new_res_scenarios.get(attr_id) is None:
new_res_scenarios[attr_id] = {}
new_res_scenarios[attr_id][s.id] = dict(
dataset_id = type_attrs[attr_id]['default_dataset_id'],
scenario_id = s.id,
#Not stored in the DB, but needed to connect the RA ID later.
attr_id = attr_id,
ref_key = ref_key,
node_id = ra_dict['node_id'],
link_id = ra_dict['link_id'],
group_id = ra_dict['group_id'],
network_id = ra_dict['network_id'],
)
resource_type = None
for rt in resource.types:
if rt.type_id == type_i.id:
break
else:
errors = check_type_compatibility(rt.type_id, type_i.id)
if len(errors) > 0:
raise HydraError("Cannot apply type %s to resource as it "
"conflicts with type %s. Errors are: %s"
%(type_i.name, resource.get_name(),
rt.templatetype.name, ','.join(errors)))
else:
# add type to tResourceType if it doesn't exist already
resource_type = dict(
node_id = resource.id if ref_key == 'NODE' else None,
link_id = resource.id if ref_key == 'LINK' else None,
group_id = resource.id if ref_key == 'GROUP' else None,
network_id = resource.id if ref_key == 'NETWORK' else None,
ref_key = ref_key,
type_id = type_id,
)
return new_res_attrs, resource_type, new_res_scenarios | Set this resource to be a certain type.
Type objects (a dictionary keyed on type_id) may be
passed in to save on loading.
This function does not call save. It must be done afterwards.
New resource attributes are added to the resource if the template
requires them. Resource attributes on the resource but not used by
the template are not removed.
@returns list of new resource attributes
,new resource type object | entailment |
def remove_type_from_resource( type_id, resource_type, resource_id,**kwargs):
"""
Remove a resource type trom a resource
"""
node_id = resource_id if resource_type == 'NODE' else None
link_id = resource_id if resource_type == 'LINK' else None
group_id = resource_id if resource_type == 'GROUP' else None
resourcetype = db.DBSession.query(ResourceType).filter(
ResourceType.type_id==type_id,
ResourceType.ref_key==resource_type,
ResourceType.node_id == node_id,
ResourceType.link_id == link_id,
ResourceType.group_id == group_id).one()
db.DBSession.delete(resourcetype)
db.DBSession.flush()
return 'OK' | Remove a resource type trom a resource | entailment |
def add_template(template, **kwargs):
"""
Add template and a type and typeattrs.
"""
tmpl = Template()
tmpl.name = template.name
if template.description:
tmpl.description = template.description
if template.layout:
tmpl.layout = get_layout_as_string(template.layout)
db.DBSession.add(tmpl)
if template.templatetypes is not None:
types = template.templatetypes
for templatetype in types:
ttype = _update_templatetype(templatetype)
tmpl.templatetypes.append(ttype)
db.DBSession.flush()
return tmpl | Add template and a type and typeattrs. | entailment |
def update_template(template,**kwargs):
"""
Update template and a type and typeattrs.
"""
tmpl = db.DBSession.query(Template).filter(Template.id==template.id).one()
tmpl.name = template.name
if template.description:
tmpl.description = template.description
#Lazy load the rest of the template
for tt in tmpl.templatetypes:
for ta in tt.typeattrs:
ta.attr
if template.layout:
tmpl.layout = get_layout_as_string(template.layout)
type_dict = dict([(t.id, t) for t in tmpl.templatetypes])
existing_templatetypes = []
if template.types is not None or template.templatetypes is not None:
types = template.types if template.types is not None else template.templatetypes
for templatetype in types:
if templatetype.id is not None:
type_i = type_dict[templatetype.id]
_update_templatetype(templatetype, type_i)
existing_templatetypes.append(type_i.id)
else:
#Give it a template ID if it doesn't have one
templatetype.template_id = template.id
new_templatetype_i = _update_templatetype(templatetype)
existing_templatetypes.append(new_templatetype_i.id)
for tt in tmpl.templatetypes:
if tt.id not in existing_templatetypes:
delete_templatetype(tt.id)
db.DBSession.flush()
return tmpl | Update template and a type and typeattrs. | entailment |
def delete_template(template_id,**kwargs):
"""
Delete a template and its type and typeattrs.
"""
try:
tmpl = db.DBSession.query(Template).filter(Template.id==template_id).one()
except NoResultFound:
raise ResourceNotFoundError("Template %s not found"%(template_id,))
db.DBSession.delete(tmpl)
db.DBSession.flush()
return 'OK' | Delete a template and its type and typeattrs. | entailment |
def get_templates(load_all=True, **kwargs):
"""
Get all templates.
Args:
load_all Boolean: Returns just the template entry or the full template structure (template types and type attrs)
Returns:
List of Template objects
"""
if load_all is False:
templates = db.DBSession.query(Template).all()
else:
templates = db.DBSession.query(Template).options(joinedload_all('templatetypes.typeattrs')).all()
return templates | Get all templates.
Args:
load_all Boolean: Returns just the template entry or the full template structure (template types and type attrs)
Returns:
List of Template objects | entailment |
def get_template(template_id,**kwargs):
"""
Get a specific resource template template, by ID.
"""
try:
tmpl_i = db.DBSession.query(Template).filter(Template.id==template_id).options(joinedload_all('templatetypes.typeattrs.default_dataset.metadata')).one()
#Load the attributes.
for tmpltype_i in tmpl_i.templatetypes:
for typeattr_i in tmpltype_i.typeattrs:
typeattr_i.attr
return tmpl_i
except NoResultFound:
raise HydraError("Template %s not found"%template_id) | Get a specific resource template template, by ID. | entailment |
def get_template_by_name(name,**kwargs):
"""
Get a specific resource template, by name.
"""
try:
tmpl_i = db.DBSession.query(Template).filter(Template.name == name).options(joinedload_all('templatetypes.typeattrs.default_dataset.metadata')).one()
return tmpl_i
except NoResultFound:
log.info("%s is not a valid identifier for a template",name)
raise HydraError('Template "%s" not found'%name) | Get a specific resource template, by name. | entailment |
def add_templatetype(templatetype,**kwargs):
"""
Add a template type with typeattrs.
"""
type_i = _update_templatetype(templatetype)
db.DBSession.flush()
return type_i | Add a template type with typeattrs. | entailment |
def update_templatetype(templatetype,**kwargs):
"""
Update a resource type and its typeattrs.
New typeattrs will be added. typeattrs not sent will be ignored.
To delete typeattrs, call delete_typeattr
"""
tmpltype_i = db.DBSession.query(TemplateType).filter(TemplateType.id == templatetype.id).one()
_update_templatetype(templatetype, tmpltype_i)
db.DBSession.flush()
return tmpltype_i | Update a resource type and its typeattrs.
New typeattrs will be added. typeattrs not sent will be ignored.
To delete typeattrs, call delete_typeattr | entailment |
def _set_typeattr(typeattr, existing_ta = None):
"""
Add or updsate a type attribute.
If an existing type attribute is provided, then update.
Checks are performed to ensure that the dimension provided on the
type attr (not updateable) is the same as that on the referring attribute.
The unit provided (stored on tattr) must conform to the dimension stored
on the referring attribute (stored on tattr).
This is done so that multiple tempaltes can all use the same attribute,
but specify different units.
If no attr_id is provided, but an attr_name and dimension are provided,
then a new attribute can be created (or retrived) and used. I.e., no
attribute ID must be specified if attr_name and dimension are specified.
***WARNING***
Setting attribute ID to null means a new type attribute (and even a new attr)
may be added, None are removed or replaced. To remove other type attrs, do it
manually using delete_typeattr
"""
if existing_ta is None:
ta = TypeAttr(attr_id=typeattr.attr_id)
else:
ta = existing_ta
ta.unit_id = typeattr.unit_id
ta.type_id = typeattr.type_id
ta.data_type = typeattr.data_type
if hasattr(typeattr, 'default_dataset_id') and typeattr.default_dataset_id is not None:
ta.default_dataset_id = typeattr.default_dataset_id
ta.description = typeattr.description
ta.properties = typeattr.get_properties()
ta.attr_is_var = typeattr.is_var if typeattr.is_var is not None else 'N'
ta.data_restriction = _parse_data_restriction(typeattr.data_restriction)
if typeattr.dimension_id is None:
# All right. Check passed
pass
else:
if typeattr.attr_id is not None and typeattr.attr_id > 0:
# Getting the passed attribute, so we need to check consistency between attr dimension id and typeattr dimension id
attr = ta.attr
if attr is not None and attr.dimension_id is not None and attr.dimension_id != typeattr.dimension_id or \
attr is not None and attr.dimension_id is not None:
# In this case there is an inconsistency between attr.dimension_id and typeattr.dimension_id
raise HydraError("Cannot set a dimension on type attribute which "
"does not match its attribute. Create a new attribute if "
"you want to use attribute %s with dimension_id %s"%
(attr.name, typeattr.dimension_id))
elif typeattr.attr_id is None and typeattr.name is not None:
# Getting/creating the attribute by typeattr dimension id and typeattr name
# In this case the dimension_id "null"/"not null" status is ininfluent
attr = _get_attr_by_name_and_dimension(typeattr.name, typeattr.dimension_id)
ta.attr_id = attr.id
ta.attr = attr
_check_dimension(ta)
if existing_ta is None:
log.debug("Adding ta to DB")
db.DBSession.add(ta)
return ta | Add or updsate a type attribute.
If an existing type attribute is provided, then update.
Checks are performed to ensure that the dimension provided on the
type attr (not updateable) is the same as that on the referring attribute.
The unit provided (stored on tattr) must conform to the dimension stored
on the referring attribute (stored on tattr).
This is done so that multiple tempaltes can all use the same attribute,
but specify different units.
If no attr_id is provided, but an attr_name and dimension are provided,
then a new attribute can be created (or retrived) and used. I.e., no
attribute ID must be specified if attr_name and dimension are specified.
***WARNING***
Setting attribute ID to null means a new type attribute (and even a new attr)
may be added, None are removed or replaced. To remove other type attrs, do it
manually using delete_typeattr | entailment |
def _update_templatetype(templatetype, existing_tt=None):
"""
Add or update a templatetype. If an existing template type is passed in,
update that one. Otherwise search for an existing one. If not found, add.
"""
if existing_tt is None:
if "id" in templatetype and templatetype.id is not None:
tmpltype_i = db.DBSession.query(TemplateType).filter(TemplateType.id == templatetype.id).one()
else:
tmpltype_i = TemplateType()
else:
tmpltype_i = existing_tt
tmpltype_i.template_id = templatetype.template_id
tmpltype_i.name = templatetype.name
tmpltype_i.description = templatetype.description
tmpltype_i.alias = templatetype.alias
if templatetype.layout is not None:
tmpltype_i.layout = get_layout_as_string(templatetype.layout)
tmpltype_i.resource_type = templatetype.resource_type
ta_dict = {}
for t in tmpltype_i.typeattrs:
ta_dict[t.attr_id] = t
existing_attrs = []
if templatetype.typeattrs is not None:
for typeattr in templatetype.typeattrs:
if typeattr.attr_id in ta_dict:
ta = _set_typeattr(typeattr, ta_dict[typeattr.attr_id])
existing_attrs.append(ta.attr_id)
else:
ta = _set_typeattr(typeattr)
tmpltype_i.typeattrs.append(ta)
existing_attrs.append(ta.attr_id)
log.debug("Deleting any type attrs not sent")
for ta in ta_dict.values():
if ta.attr_id not in existing_attrs:
delete_typeattr(ta)
if existing_tt is None:
db.DBSession.add(tmpltype_i)
return tmpltype_i | Add or update a templatetype. If an existing template type is passed in,
update that one. Otherwise search for an existing one. If not found, add. | entailment |
def delete_templatetype(type_id,template_i=None, **kwargs):
"""
Delete a template type and its typeattrs.
"""
try:
tmpltype_i = db.DBSession.query(TemplateType).filter(TemplateType.id == type_id).one()
except NoResultFound:
raise ResourceNotFoundError("Template Type %s not found"%(type_id,))
if template_i is None:
template_i = db.DBSession.query(Template).filter(Template.id==tmpltype_i.template_id).one()
template_i.templatetypes.remove(tmpltype_i)
db.DBSession.delete(tmpltype_i)
db.DBSession.flush() | Delete a template type and its typeattrs. | entailment |
def get_templatetype(type_id,**kwargs):
"""
Get a specific resource type by ID.
"""
templatetype = db.DBSession.query(TemplateType).filter(
TemplateType.id==type_id).options(
joinedload_all("typeattrs")).one()
return templatetype | Get a specific resource type by ID. | entailment |
def get_templatetype_by_name(template_id, type_name,**kwargs):
"""
Get a specific resource type by name.
"""
try:
templatetype = db.DBSession.query(TemplateType).filter(TemplateType.id==template_id, TemplateType.name==type_name).one()
except NoResultFound:
raise HydraError("%s is not a valid identifier for a type"%(type_name))
return templatetype | Get a specific resource type by name. | entailment |
def add_typeattr(typeattr,**kwargs):
"""
Add an typeattr to an existing type.
"""
tmpltype = get_templatetype(typeattr.type_id, user_id=kwargs.get('user_id'))
ta = _set_typeattr(typeattr)
tmpltype.typeattrs.append(ta)
db.DBSession.flush()
return ta | Add an typeattr to an existing type. | entailment |
def delete_typeattr(typeattr,**kwargs):
"""
Remove an typeattr from an existing type
"""
tmpltype = get_templatetype(typeattr.type_id, user_id=kwargs.get('user_id'))
ta = db.DBSession.query(TypeAttr).filter(TypeAttr.type_id == typeattr.type_id,
TypeAttr.attr_id == typeattr.attr_id).one()
tmpltype.typeattrs.remove(ta)
db.DBSession.flush()
return 'OK' | Remove an typeattr from an existing type | entailment |
def validate_attr(resource_attr_id, scenario_id, template_id=None):
"""
Check that a resource attribute satisfies the requirements of all the types of the
resource.
"""
rs = db.DBSession.query(ResourceScenario).\
filter(ResourceScenario.resource_attr_id==resource_attr_id,
ResourceScenario.scenario_id==scenario_id).options(
joinedload_all("resourceattr")).options(
joinedload_all("dataset")
).one()
error = None
try:
_do_validate_resourcescenario(rs, template_id)
except HydraError as e:
error = JSONObject(dict(
ref_key = rs.resourceattr.ref_key,
ref_id = rs.resourceattr.get_resource_id(),
ref_name = rs.resourceattr.get_resource().get_name(),
resource_attr_id = rs.resource_attr_id,
attr_id = rs.resourceattr.attr.id,
attr_name = rs.resourceattr.attr.name,
dataset_id = rs.dataset_id,
scenario_id=scenario_id,
template_id=template_id,
error_text=e.args[0]))
return error | Check that a resource attribute satisfies the requirements of all the types of the
resource. | entailment |
def validate_attrs(resource_attr_ids, scenario_id, template_id=None):
"""
Check that multiple resource attribute satisfy the requirements of the types of resources to
which the they are attached.
"""
multi_rs = db.DBSession.query(ResourceScenario).\
filter(ResourceScenario.resource_attr_id.in_(resource_attr_ids),\
ResourceScenario.scenario_id==scenario_id).\
options(joinedload_all("resourceattr")).\
options(joinedload_all("dataset")).all()
errors = []
for rs in multi_rs:
try:
_do_validate_resourcescenario(rs, template_id)
except HydraError as e:
error = dict(
ref_key = rs.resourceattr.ref_key,
ref_id = rs.resourceattr.get_resource_id(),
ref_name = rs.resourceattr.get_resource().get_name(),
resource_attr_id = rs.resource_attr_id,
attr_id = rs.resourceattr.attr.id,
attr_name = rs.resourceattr.attr.name,
dataset_id = rs.dataset_id,
scenario_id = scenario_id,
template_id = template_id,
error_text = e.args[0])
errors.append(error)
return errors | Check that multiple resource attribute satisfy the requirements of the types of resources to
which the they are attached. | entailment |
def _do_validate_resourcescenario(resourcescenario, template_id=None):
"""
Perform a check to ensure a resource scenario's datasets are correct given what the
definition of that resource (its type) specifies.
"""
res = resourcescenario.resourceattr.get_resource()
types = res.types
dataset = resourcescenario.dataset
if len(types) == 0:
return
if template_id is not None:
if template_id not in [r.templatetype.template_id for r in res.types]:
raise HydraError("Template %s is not used for resource attribute %s in scenario %s"%\
(template_id, resourcescenario.resourceattr.attr.name,
resourcescenario.scenario.name))
#Validate against all the types for the resource
for resourcetype in types:
#If a specific type has been specified, then only validate
#against that type and ignore all the others
if template_id is not None:
if resourcetype.templatetype.template_id != template_id:
continue
#Identify the template types for the template
tmpltype = resourcetype.templatetype
for ta in tmpltype.typeattrs:
#If we find a template type which mactches the current attribute.
#we can do some validation.
if ta.attr_id == resourcescenario.resourceattr.attr_id:
if ta.data_restriction:
log.debug("Validating against %s", ta.data_restriction)
validation_dict = eval(ta.data_restriction)
dataset_util.validate_value(validation_dict, dataset.get_val()) | Perform a check to ensure a resource scenario's datasets are correct given what the
definition of that resource (its type) specifies. | entailment |
def validate_network(network_id, template_id, scenario_id=None):
"""
Given a network, scenario and template, ensure that all the nodes, links & groups
in the network have the correct resource attributes as defined by the types in the template.
Also ensure valid entries in tresourcetype.
This validation will not fail if a resource has more than the required type, but will fail if
it has fewer or if any attribute has a conflicting dimension or unit.
"""
network = db.DBSession.query(Network).filter(Network.id==network_id).options(noload('scenarios')).first()
if network is None:
raise HydraError("Could not find network %s"%(network_id))
resource_scenario_dict = {}
if scenario_id is not None:
scenario = db.DBSession.query(Scenario).filter(Scenario.id==scenario_id).first()
if scenario is None:
raise HydraError("Could not find scenario %s"%(scenario_id,))
for rs in scenario.resourcescenarios:
resource_scenario_dict[rs.resource_attr_id] = rs
template = db.DBSession.query(Template).filter(Template.id == template_id).options(joinedload_all('templatetypes')).first()
if template is None:
raise HydraError("Could not find template %s"%(template_id,))
resource_type_defs = {
'NETWORK' : {},
'NODE' : {},
'LINK' : {},
'GROUP' : {},
}
for tt in template.templatetypes:
resource_type_defs[tt.resource_type][tt.id] = tt
errors = []
#Only check if there are type definitions for a network in the template.
if resource_type_defs.get('NETWORK'):
net_types = resource_type_defs['NETWORK']
errors.extend(_validate_resource(network, net_types, resource_scenario_dict))
#check all nodes
if resource_type_defs.get('NODE'):
node_types = resource_type_defs['NODE']
for node in network.nodes:
errors.extend(_validate_resource(node, node_types, resource_scenario_dict))
#check all links
if resource_type_defs.get('LINK'):
link_types = resource_type_defs['LINK']
for link in network.links:
errors.extend(_validate_resource(link, link_types, resource_scenario_dict))
#check all groups
if resource_type_defs.get('GROUP'):
group_types = resource_type_defs['GROUP']
for group in network.resourcegroups:
errors.extend(_validate_resource(group, group_types, resource_scenario_dict))
return errors | Given a network, scenario and template, ensure that all the nodes, links & groups
in the network have the correct resource attributes as defined by the types in the template.
Also ensure valid entries in tresourcetype.
This validation will not fail if a resource has more than the required type, but will fail if
it has fewer or if any attribute has a conflicting dimension or unit. | entailment |
def get_network_as_xml_template(network_id,**kwargs):
"""
Turn an existing network into an xml template
using its attributes.
If an optional scenario ID is passed in, default
values will be populated from that scenario.
"""
template_xml = etree.Element("template_definition")
net_i = db.DBSession.query(Network).filter(Network.id==network_id).one()
template_name = etree.SubElement(template_xml, "template_name")
template_name.text = "TemplateType from Network %s"%(net_i.name)
layout = _get_layout_as_etree(net_i.layout)
resources = etree.SubElement(template_xml, "resources")
if net_i.attributes:
net_resource = etree.SubElement(resources, "resource")
resource_type = etree.SubElement(net_resource, "type")
resource_type.text = "NETWORK"
resource_name = etree.SubElement(net_resource, "name")
resource_name.text = net_i.name
layout = _get_layout_as_etree(net_i.layout)
if layout is not None:
net_resource.append(layout)
for net_attr in net_i.attributes:
_make_attr_element_from_resourceattr(net_resource, net_attr)
resources.append(net_resource)
existing_types = {'NODE': [], 'LINK': [], 'GROUP': []}
for node_i in net_i.nodes:
node_attributes = node_i.attributes
attr_ids = [res_attr.attr_id for res_attr in node_attributes]
if len(attr_ids) > 0 and attr_ids not in existing_types['NODE']:
node_resource = etree.Element("resource")
resource_type = etree.SubElement(node_resource, "type")
resource_type.text = "NODE"
resource_name = etree.SubElement(node_resource, "name")
resource_name.text = node_i.node_name
layout = _get_layout_as_etree(node_i.layout)
if layout is not None:
node_resource.append(layout)
for node_attr in node_attributes:
_make_attr_element_from_resourceattr(node_resource, node_attr)
existing_types['NODE'].append(attr_ids)
resources.append(node_resource)
for link_i in net_i.links:
link_attributes = link_i.attributes
attr_ids = [link_attr.attr_id for link_attr in link_attributes]
if len(attr_ids) > 0 and attr_ids not in existing_types['LINK']:
link_resource = etree.Element("resource")
resource_type = etree.SubElement(link_resource, "type")
resource_type.text = "LINK"
resource_name = etree.SubElement(link_resource, "name")
resource_name.text = link_i.link_name
layout = _get_layout_as_etree(link_i.layout)
if layout is not None:
link_resource.append(layout)
for link_attr in link_attributes:
_make_attr_element_from_resourceattr(link_resource, link_attr)
existing_types['LINK'].append(attr_ids)
resources.append(link_resource)
for group_i in net_i.resourcegroups:
group_attributes = group_i.attributes
attr_ids = [group_attr.attr_id for group_attr in group_attributes]
if len(attr_ids) > 0 and attr_ids not in existing_types['GROUP']:
group_resource = etree.Element("resource")
resource_type = etree.SubElement(group_resource, "type")
resource_type.text = "GROUP"
resource_name = etree.SubElement(group_resource, "name")
resource_name.text = group_i.group_name
for group_attr in group_attributes:
_make_attr_element_from_resourceattr(group_resource, group_attr)
existing_types['GROUP'].append(attr_ids)
resources.append(group_resource)
xml_string = etree.tostring(template_xml, encoding="unicode")
return xml_string | Turn an existing network into an xml template
using its attributes.
If an optional scenario ID is passed in, default
values will be populated from that scenario. | entailment |
def _make_attr_element_from_typeattr(parent, type_attr_i):
"""
General function to add an attribute element to a resource element.
resource_attr_i can also e a type_attr if being called from get_tempalte_as_xml
"""
attr = _make_attr_element(parent, type_attr_i.attr)
if type_attr_i.unit_id is not None:
attr_unit = etree.SubElement(attr, 'unit')
attr_unit.text = units.get_unit(type_attr_i.unit_id).abbreviation
attr_is_var = etree.SubElement(attr, 'is_var')
attr_is_var.text = type_attr_i.attr_is_var
if type_attr_i.data_type is not None:
attr_data_type = etree.SubElement(attr, 'data_type')
attr_data_type.text = type_attr_i.data_type
if type_attr_i.data_restriction is not None:
attr_data_restriction = etree.SubElement(attr, 'restrictions')
attr_data_restriction.text = type_attr_i.data_restriction
return attr | General function to add an attribute element to a resource element.
resource_attr_i can also e a type_attr if being called from get_tempalte_as_xml | entailment |
def _make_attr_element_from_resourceattr(parent, resource_attr_i):
"""
General function to add an attribute element to a resource element.
"""
attr = _make_attr_element(parent, resource_attr_i.attr)
attr_is_var = etree.SubElement(attr, 'is_var')
attr_is_var.text = resource_attr_i.attr_is_var
return attr | General function to add an attribute element to a resource element. | entailment |
def _make_attr_element(parent, attr_i):
"""
create an attribute element from an attribute DB object
"""
attr = etree.SubElement(parent, "attribute")
attr_name = etree.SubElement(attr, 'name')
attr_name.text = attr_i.name
attr_desc = etree.SubElement(attr, 'description')
attr_desc.text = attr_i.description
attr_dimension = etree.SubElement(attr, 'dimension')
attr_dimension.text = units.get_dimension(attr_i.dimension_id, do_accept_dimension_id_none=True).name
return attr | create an attribute element from an attribute DB object | entailment |
def get_etree_layout_as_dict(layout_tree):
"""
Convert something that looks like this:
<layout>
<item>
<name>color</name>
<value>red</value>
</item>
<item>
<name>shapefile</name>
<value>blah.shp</value>
</item>
</layout>
Into something that looks like this:
{
'color' : ['red'],
'shapefile' : ['blah.shp']
}
"""
layout_dict = dict()
for item in layout_tree.findall('item'):
name = item.find('name').text
val_element = item.find('value')
value = val_element.text.strip()
if value == '':
children = val_element.getchildren()
value = etree.tostring(children[0], pretty_print=True, encoding="unicode")
layout_dict[name] = value
return layout_dict | Convert something that looks like this:
<layout>
<item>
<name>color</name>
<value>red</value>
</item>
<item>
<name>shapefile</name>
<value>blah.shp</value>
</item>
</layout>
Into something that looks like this:
{
'color' : ['red'],
'shapefile' : ['blah.shp']
} | entailment |
def _get_layout_as_etree(layout_dict):
"""
Convert something that looks like this:
{
'color' : ['red'],
'shapefile' : ['blah.shp']
}
Into something that looks like this:
<layout>
<item>
<name>color</name>
<value>red</value>
</item>
<item>
<name>shapefile</name>
<value>blah.shp</value>
</item>
</layout>
"""
if layout_dict is None:
return None
layout = etree.Element("layout")
layout_dict = eval(layout_dict)
for k, v in layout_dict.items():
item = etree.SubElement(layout, "item")
name = etree.SubElement(item, "name")
name.text = k
value = etree.SubElement(item, "value")
value.text = str(v)
return layout | Convert something that looks like this:
{
'color' : ['red'],
'shapefile' : ['blah.shp']
}
Into something that looks like this:
<layout>
<item>
<name>color</name>
<value>red</value>
</item>
<item>
<name>shapefile</name>
<value>blah.shp</value>
</item>
</layout> | entailment |
def valueFromDataset(cls, datatype, value, metadata=None, tmap=None):
"""
Return the value contained by dataset argument, after casting to
correct type and performing type-specific validation
"""
if tmap is None:
tmap = typemap
obj = cls.fromDataset(datatype, value, metadata=metadata, tmap=tmap)
return obj.value | Return the value contained by dataset argument, after casting to
correct type and performing type-specific validation | entailment |
def fromDataset(datatype, value, metadata=None, tmap=None):
"""
Return a representation of dataset argument as an instance
of the class corresponding to its datatype
"""
if tmap is None:
tmap = typemap
return tmap[datatype.upper()].fromDataset(value, metadata=metadata) | Return a representation of dataset argument as an instance
of the class corresponding to its datatype | entailment |
def exists_dimension(dimension_name,**kwargs):
"""
Given a dimension returns True if it exists, False otherwise
"""
try:
dimension = db.DBSession.query(Dimension).filter(Dimension.name==dimension_name).one()
# At this point the dimension exists
return True
except NoResultFound:
# The dimension does not exist
raise False | Given a dimension returns True if it exists, False otherwise | entailment |
def _parse_unit(measure_or_unit_abbreviation):
"""
Helper function that extracts constant factors from unit specifications.
This allows to specify units similar to this: 10^6 m^3.
Return a couple (unit, factor)
"""
try:
float(measure_or_unit_abbreviation[0])
# The measure contains the values and the unit_abbreviation
factor, unit_abbreviation = measure_or_unit_abbreviation.split(' ', 1)
return unit_abbreviation, float(factor)
except ValueError:
# The measure just contains the unit_abbreviation
return measure_or_unit_abbreviation, 1.0 | Helper function that extracts constant factors from unit specifications.
This allows to specify units similar to this: 10^6 m^3.
Return a couple (unit, factor) | entailment |
def convert_units(values, source_measure_or_unit_abbreviation, target_measure_or_unit_abbreviation,**kwargs):
"""
Convert a value from one unit to another one.
Example::
>>> cli = PluginLib.connect()
>>> cli.service.convert_units(20.0, 'm', 'km')
0.02
Parameters:
values: single measure or an array of measures
source_measure_or_unit_abbreviation: A measure in the source unit, or just the abbreviation of the source unit, from which convert the provided measure value/values
target_measure_or_unit_abbreviation: A measure in the target unit, or just the abbreviation of the target unit, into which convert the provided measure value/values
Returns:
Always a list
"""
if numpy.isscalar(values):
# If it is a scalar, converts to an array
values = [values]
float_values = [float(value) for value in values]
values_to_return = convert(float_values, source_measure_or_unit_abbreviation, target_measure_or_unit_abbreviation)
return values_to_return | Convert a value from one unit to another one.
Example::
>>> cli = PluginLib.connect()
>>> cli.service.convert_units(20.0, 'm', 'km')
0.02
Parameters:
values: single measure or an array of measures
source_measure_or_unit_abbreviation: A measure in the source unit, or just the abbreviation of the source unit, from which convert the provided measure value/values
target_measure_or_unit_abbreviation: A measure in the target unit, or just the abbreviation of the target unit, into which convert the provided measure value/values
Returns:
Always a list | entailment |
def convert(values, source_measure_or_unit_abbreviation, target_measure_or_unit_abbreviation):
"""
Convert a value or a list of values from an unit to another one.
The two units must represent the same physical dimension.
"""
source_dimension = get_dimension_by_unit_measure_or_abbreviation(source_measure_or_unit_abbreviation)
target_dimension = get_dimension_by_unit_measure_or_abbreviation(target_measure_or_unit_abbreviation)
if source_dimension == target_dimension:
source=JSONObject({})
target=JSONObject({})
source.unit_abbreviation, source.factor = _parse_unit(source_measure_or_unit_abbreviation)
target.unit_abbreviation, target.factor = _parse_unit(target_measure_or_unit_abbreviation)
source.unit_data = get_unit_by_abbreviation(source.unit_abbreviation)
target.unit_data = get_unit_by_abbreviation(target.unit_abbreviation)
source.conv_factor = JSONObject({'lf': source.unit_data.lf, 'cf': source.unit_data.cf})
target.conv_factor = JSONObject({'lf': target.unit_data.lf, 'cf': target.unit_data.cf})
if isinstance(values, float):
# If values is a float => returns a float
return (source.conv_factor.lf / target.conv_factor.lf * (source.factor * values)
+ (source.conv_factor.cf - target.conv_factor.cf)
/ target.conv_factor.lf) / target.factor
elif isinstance(values, list):
# If values is a list of floats => returns a list of floats
return [(source.conv_factor.lf / target.conv_factor.lf * (source.factor * value)
+ (source.conv_factor.cf - target.conv_factor.cf)
/ target.conv_factor.lf) / target.factor for value in values]
else:
raise HydraError("Unit conversion: dimensions are not consistent.") | Convert a value or a list of values from an unit to another one.
The two units must represent the same physical dimension. | entailment |
def get_empty_dimension(**kwargs):
"""
Returns a dimension object initialized with empty values
"""
dimension = JSONObject(Dimension())
dimension.id = None
dimension.name = ''
dimension.description = ''
dimension.project_id = None
dimension.units = []
return dimension | Returns a dimension object initialized with empty values | entailment |
def get_dimension(dimension_id, do_accept_dimension_id_none=False,**kwargs):
"""
Given a dimension id returns all its data
"""
if do_accept_dimension_id_none == True and dimension_id is None:
# In this special case, the method returns a dimension with id None
return get_empty_dimension()
try:
dimension = db.DBSession.query(Dimension).filter(Dimension.id==dimension_id).one()
#lazy load units
dimension.units
return JSONObject(dimension)
except NoResultFound:
# The dimension does not exist
raise ResourceNotFoundError("Dimension %s not found"%(dimension_id)) | Given a dimension id returns all its data | entailment |
def get_dimensions(**kwargs):
"""
Returns a list of objects describing all the dimensions with all the units.
"""
dimensions_list = db.DBSession.query(Dimension).options(load_only("id")).all()
return_list = []
for dimension in dimensions_list:
return_list.append(get_dimension(dimension.id))
return return_list | Returns a list of objects describing all the dimensions with all the units. | entailment |
def get_dimension_by_name(dimension_name,**kwargs):
"""
Given a dimension name returns all its data. Used in convert functions
"""
try:
if dimension_name is None:
dimension_name = ''
dimension = db.DBSession.query(Dimension).filter(func.lower(Dimension.name)==func.lower(dimension_name.strip())).one()
return get_dimension(dimension.id)
except NoResultFound:
# The dimension does not exist
raise ResourceNotFoundError("Dimension %s not found"%(dimension_name)) | Given a dimension name returns all its data. Used in convert functions | entailment |
def get_unit(unit_id, **kwargs):
"""
Returns a single unit
"""
try:
unit = db.DBSession.query(Unit).filter(Unit.id==unit_id).one()
return JSONObject(unit)
except NoResultFound:
# The dimension does not exist
raise ResourceNotFoundError("Unit %s not found"%(unit_id)) | Returns a single unit | entailment |
def get_units(**kwargs):
"""
Returns all the units
"""
units_list = db.DBSession.query(Unit).all()
units = []
for unit in units_list:
new_unit = JSONObject(unit)
units.append(new_unit)
return units | Returns all the units | entailment |
def get_dimension_by_unit_measure_or_abbreviation(measure_or_unit_abbreviation,**kwargs):
"""
Return the physical dimension a given unit abbreviation of a measure, or the measure itself, refers to.
The search key is the abbreviation or the full measure
"""
unit_abbreviation, factor = _parse_unit(measure_or_unit_abbreviation)
units = db.DBSession.query(Unit).filter(Unit.abbreviation==unit_abbreviation).all()
if len(units) == 0:
raise HydraError('Unit %s not found.'%(unit_abbreviation))
elif len(units) > 1:
raise HydraError('Unit %s has multiple dimensions not found.'%(unit_abbreviation))
else:
dimension = db.DBSession.query(Dimension).filter(Dimension.id==units[0].dimension_id).one()
return str(dimension.name) | Return the physical dimension a given unit abbreviation of a measure, or the measure itself, refers to.
The search key is the abbreviation or the full measure | entailment |
def get_dimension_by_unit_id(unit_id, do_accept_unit_id_none=False, **kwargs):
"""
Return the physical dimension a given unit id refers to.
if do_accept_unit_id_none is False, it raises an exception if unit_id is not valid or None
if do_accept_unit_id_none is True, and unit_id is None, the function returns a Dimension with id None (unit_id can be none in some cases)
"""
if do_accept_unit_id_none == True and unit_id is None:
# In this special case, the method returns a dimension with id None
return get_empty_dimension()
try:
dimension = db.DBSession.query(Dimension).join(Unit).filter(Unit.id==unit_id).filter().one()
return get_dimension(dimension.id)
except NoResultFound:
# The dimension does not exist
raise ResourceNotFoundError("Unit %s not found"%(unit_id)) | Return the physical dimension a given unit id refers to.
if do_accept_unit_id_none is False, it raises an exception if unit_id is not valid or None
if do_accept_unit_id_none is True, and unit_id is None, the function returns a Dimension with id None (unit_id can be none in some cases) | entailment |
def get_unit_by_abbreviation(unit_abbreviation, **kwargs):
"""
Returns a single unit by abbreviation. Used as utility function to resolve string to id
"""
try:
if unit_abbreviation is None:
unit_abbreviation = ''
unit_i = db.DBSession.query(Unit).filter(Unit.abbreviation==unit_abbreviation.strip()).one()
return JSONObject(unit_i)
except NoResultFound:
# The dimension does not exist
raise ResourceNotFoundError("Unit '%s' not found"%(unit_abbreviation)) | Returns a single unit by abbreviation. Used as utility function to resolve string to id | entailment |
def add_dimension(dimension,**kwargs):
"""
Add the dimension defined into the object "dimension" to the DB
If dimension["project_id"] is None it means that the dimension is global, otherwise is property of a project
If the dimension exists emits an exception
"""
if numpy.isscalar(dimension):
# If it is a scalar, converts to an Object
dimension = {'name': dimension}
new_dimension = Dimension()
new_dimension.name = dimension["name"]
if "description" in dimension and dimension["description"] is not None:
new_dimension.description = dimension["description"]
if "project_id" in dimension and dimension["project_id"] is not None:
new_dimension.project_id = dimension["project_id"]
# Save on DB
db.DBSession.add(new_dimension)
db.DBSession.flush()
# Load all the record
db_dimension = db.DBSession.query(Dimension).filter(Dimension.id==new_dimension.id).one()
return JSONObject(db_dimension) | Add the dimension defined into the object "dimension" to the DB
If dimension["project_id"] is None it means that the dimension is global, otherwise is property of a project
If the dimension exists emits an exception | entailment |
def update_dimension(dimension,**kwargs):
"""
Update a dimension in the DB.
Raises and exception if the dimension does not exist.
The key is ALWAYS the name and the name itself is not modificable
"""
db_dimension = None
dimension = JSONObject(dimension)
try:
db_dimension = db.DBSession.query(Dimension).filter(Dimension.id==dimension.id).filter().one()
if "description" in dimension and dimension["description"] is not None:
db_dimension.description = dimension["description"]
if "project_id" in dimension and dimension["project_id"] is not None and dimension["project_id"] != "" and dimension["project_id"].isdigit():
db_dimension.project_id = dimension["project_id"]
except NoResultFound:
raise ResourceNotFoundError("Dimension (ID=%s) does not exist"%(dimension.id))
db.DBSession.flush()
return JSONObject(db_dimension) | Update a dimension in the DB.
Raises and exception if the dimension does not exist.
The key is ALWAYS the name and the name itself is not modificable | entailment |
def delete_dimension(dimension_id,**kwargs):
"""
Delete a dimension from the DB. Raises and exception if the dimension does not exist
"""
try:
dimension = db.DBSession.query(Dimension).filter(Dimension.id==dimension_id).one()
db.DBSession.query(Unit).filter(Unit.dimension_id==dimension.id).delete()
db.DBSession.delete(dimension)
db.DBSession.flush()
return True
except NoResultFound:
raise ResourceNotFoundError("Dimension (dimension_id=%s) does not exist"%(dimension_id)) | Delete a dimension from the DB. Raises and exception if the dimension does not exist | entailment |
def bulk_add_dimensions(dimension_list, **kwargs):
"""
Save all the dimensions contained in the passed list.
"""
added_dimensions = []
for dimension in dimension_list:
added_dimensions.append(add_dimension(dimension, **kwargs))
return JSONObject({"dimensions": added_dimensions}) | Save all the dimensions contained in the passed list. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.