| sentence1 (string, lengths 52 to 3.87M) | sentence2 (string, lengths 1 to 47.2k) | label (1 class: "entailment") |
|---|---|---|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def change_registration(registered_object, unregistered_object):
if registered_object and registered_object.uuid == self.parent_uuid:
self.__parent = registered_object
if self.persistent_object_context:
self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration)
self.__parent = self.persistent_object_context.get_registered_object(self.parent_uuid)
|
Override from PersistentObject.
|
entailment
|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def register():
if self.__source is not None and self.__target is not None:
assert not self.__binding
self.__binding = Binding.PropertyBinding(self.__source, self.source_property)
self.__binding.target_setter = self.__set_target_from_source
# while reading, the data item in the display data channel will not be connected;
# we still set its value here. when the data item becomes valid, it will update.
self.__binding.update_target_direct(self.__binding.get_target_value())
def source_registered(source):
self.__source = source
register()
def target_registered(target):
self.__target = target
def property_changed(target, property_name):
if property_name == self.target_property:
self.__set_source_from_target(getattr(target, property_name))
assert self.__target_property_changed_listener is None
self.__target_property_changed_listener = target.property_changed_event.listen(functools.partial(property_changed, target))
register()
def unregistered(item=None):
if not item or item == self.__source:
self.__source = None
if not item or item == self.__target:
self.__target = None
if self.__binding:
self.__binding.close()
self.__binding = None
if self.__target_property_changed_listener:
self.__target_property_changed_listener.close()
self.__target_property_changed_listener = None
def change_registration(registered_object, unregistered_object):
if registered_object and registered_object.uuid == self.source_uuid:
source_registered(registered_object)
if registered_object and registered_object.uuid == self.target_uuid:
target_registered(registered_object)
if unregistered_object and unregistered_object in (self.__source, self.__target):
unregistered(unregistered_object)
if self.persistent_object_context:
self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration)
source = self.persistent_object_context.get_registered_object(self.source_uuid)
target = self.persistent_object_context.get_registered_object(self.target_uuid)
if source:
source_registered(source)
if target:
target_registered(target)
else:
unregistered()
|
Override from PersistentObject.
|
entailment
|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def detach():
for listener in self.__interval_mutated_listeners:
listener.close()
self.__interval_mutated_listeners = list()
def reattach():
detach()
interval_descriptors = list()
if self.__source:
for region in self.__source.graphics:
if isinstance(region, Graphics.IntervalGraphic):
interval_descriptor = {"interval": region.interval, "color": "#F00"}
interval_descriptors.append(interval_descriptor)
self.__interval_mutated_listeners.append(region.property_changed_event.listen(lambda k: reattach()))
if self.__target:
self.__target.interval_descriptors = interval_descriptors
def item_inserted(key, value, before_index):
if key == "graphics" and self.__target:
reattach()
def item_removed(key, value, index):
if key == "graphics" and self.__target:
reattach()
def source_registered(source):
self.__source = source
self.__item_inserted_event_listener = self.__source.item_inserted_event.listen(item_inserted)
self.__item_removed_event_listener = self.__source.item_removed_event.listen(item_removed)
reattach()
def target_registered(target):
self.__target = target
reattach()
def unregistered(source=None):
if self.__item_inserted_event_listener:
self.__item_inserted_event_listener.close()
self.__item_inserted_event_listener = None
if self.__item_removed_event_listener:
self.__item_removed_event_listener.close()
self.__item_removed_event_listener = None
if self.persistent_object_context:
self.persistent_object_context.subscribe(self.source_uuid, source_registered, unregistered)
self.persistent_object_context.subscribe(self.target_uuid, target_registered, unregistered)
else:
unregistered()
|
Override from PersistentObject.
|
entailment
|
def __display_for_tree_node(self, tree_node):
""" Return the text display for the given tree node. Based on number of keys associated with tree node. """
keys = tree_node.keys
if len(keys) == 1:
return "{0} ({1})".format(tree_node.keys[-1], tree_node.count)
elif len(keys) == 2:
months = (_("January"), _("February"), _("March"), _("April"), _("May"), _("June"), _("July"), _("August"),
_("September"), _("October"), _("November"), _("December"))
return "{0} ({1})".format(months[max(min(tree_node.keys[1]-1, 11), 0)], tree_node.count)
else:
weekdays = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"), _("Friday"), _("Saturday"), _("Sunday"))
date = datetime.date(tree_node.keys[0], tree_node.keys[1], tree_node.keys[2])
return "{0} - {1} ({2})".format(tree_node.keys[2], weekdays[date.weekday()], tree_node.count)
|
Return the text display for the given tree node. Based on number of keys associated with tree node.
|
entailment
|
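Worked examples of the three display branches above, using a minimal stand-in node (the `FakeNode` namedtuple and the counts are illustrative; the real code also routes month and weekday names through gettext's `_()`):

```python
import datetime
from collections import namedtuple

FakeNode = namedtuple("FakeNode", ["keys", "count"])  # hypothetical stand-in for the real tree node

def display_for(node):
    months = ("January", "February", "March", "April", "May", "June",
              "July", "August", "September", "October", "November", "December")
    weekdays = ("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday")
    if len(node.keys) == 1:    # year level
        return "{0} ({1})".format(node.keys[-1], node.count)
    elif len(node.keys) == 2:  # month level
        return "{0} ({1})".format(months[max(min(node.keys[1] - 1, 11), 0)], node.count)
    else:                      # day level
        date = datetime.date(node.keys[0], node.keys[1], node.keys[2])
        return "{0} - {1} ({2})".format(node.keys[2], weekdays[date.weekday()], node.count)

print(display_for(FakeNode([2023], 42)))        # "2023 (42)"
print(display_for(FakeNode([2023, 5], 7)))      # "May (7)"
print(display_for(FakeNode([2023, 5, 17], 3)))  # "17 - Wednesday (3)"
```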
def __insert_child(self, parent_tree_node, index, tree_node):
"""
Called from the root tree node when a new node is inserted into tree. This method creates properties
to represent the node for display and inserts it into the item model controller.
"""
# manage the item model
parent_item = self.__mapping[id(parent_tree_node)]
self.item_model_controller.begin_insert(index, index, parent_item.row, parent_item.id)
properties = {
"display": self.__display_for_tree_node(tree_node),
"tree_node": tree_node # used for removal and other lookup
}
item = self.item_model_controller.create_item(properties)
parent_item.insert_child(index, item)
self.__mapping[id(tree_node)] = item
self.item_model_controller.end_insert()
|
Called from the root tree node when a new node is inserted into tree. This method creates properties
to represent the node for display and inserts it into the item model controller.
|
entailment
|
def __remove_child(self, parent_tree_node, index):
"""
Called from the root tree node when a node is removed from the tree. This method removes the corresponding
item from the item model controller.
"""
# get parent and item
parent_item = self.__mapping[id(parent_tree_node)]
# manage the item model
self.item_model_controller.begin_remove(index, index, parent_item.row, parent_item.id)
child_item = parent_item.children[index]
parent_item.remove_child(child_item)
self.__mapping.pop(id(child_item.data["tree_node"]))
self.item_model_controller.end_remove()
|
Called from the root tree node when a node is removed from the tree. This method removes the corresponding
item from the item model controller.
|
entailment
|
def update_all_nodes(self):
""" Update all tree item displays if needed. Usually for count updates. """
item_model_controller = self.item_model_controller
if item_model_controller:
if self.__node_counts_dirty:
for item in self.__mapping.values():
if "tree_node" in item.data: # don't update the root node
tree_node = item.data["tree_node"]
item.data["display"] = self.__display_for_tree_node(tree_node)
item_model_controller.data_changed(item.row, item.parent.row, item.parent.id)
self.__node_counts_dirty = False
|
Update all tree item displays if needed. Usually for count updates.
|
entailment
|
def date_browser_selection_changed(self, selected_indexes):
"""
Called to handle selection changes in the tree widget.
This method should be connected to the on_selection_changed event. This method builds a list
of keys represented by all selected items. It then provides date_filter to filter data items
based on the list of keys. It then sets the filter into the document controller.
:param selected_indexes: The selected indexes
:type selected_indexes: list of (index, parent_row, parent_id) tuples
"""
partial_date_filters = list()
for index, parent_row, parent_id in selected_indexes:
item_model_controller = self.item_model_controller
tree_node = item_model_controller.item_value("tree_node", index, parent_id)
partial_date_filters.append(ListModel.PartialDateFilter("created_local", *tree_node.keys))
if len(partial_date_filters) > 0:
self.__date_filter = ListModel.OrFilter(partial_date_filters)
else:
self.__date_filter = None
self.__update_filter()
|
Called to handle selection changes in the tree widget.
This method should be connected to the on_selection_changed event. This method builds a list
of keys represented by all selected items. It then provides date_filter to filter data items
based on the list of keys. It then sets the filter into the document controller.
:param selected_indexes: The selected indexes
:type selected_indexes: list of (index, parent_row, parent_id) tuples
|
entailment
|
def text_filter_changed(self, text):
"""
Called to handle changes to the text filter.
:param text: The text for the filter.
"""
text = text.strip() if text else None
if text is not None:
self.__text_filter = ListModel.TextFilter("text_for_filter", text)
else:
self.__text_filter = None
self.__update_filter()
|
Called to handle changes to the text filter.
:param text: The text for the filter.
|
entailment
|
def __update_filter(self):
"""
Create a combined filter. Set the resulting filter into the document controller.
"""
filters = list()
if self.__date_filter:
filters.append(self.__date_filter)
if self.__text_filter:
filters.append(self.__text_filter)
self.document_controller.display_filter = ListModel.AndFilter(filters)
|
Create a combined filter. Set the resulting filter into the document controller.
|
entailment
|
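The date and text filters are combined with a logical AND, and an AndFilter with no sub-filters passes every item, so clearing both filters removes the display filter entirely. A minimal sketch of that composition pattern with plain predicate objects (these classes are illustrative stand-ins, not the actual ListModel API):

```python
# Illustrative AND/OR filter composition; stand-ins only, not the real ListModel classes.
class AndFilter:
    def __init__(self, filters):
        self.filters = list(filters)

    def matches(self, item) -> bool:
        # An empty AND filter matches everything, so clearing both the date and
        # text filters effectively removes the display filter.
        return all(f.matches(item) for f in self.filters)

class OrFilter:
    def __init__(self, filters):
        self.filters = list(filters)

    def matches(self, item) -> bool:
        return any(f.matches(item) for f in self.filters)

class TextFilter:
    def __init__(self, key, text):
        self.key, self.text = key, text

    def matches(self, item) -> bool:
        return self.text.lower() in str(item.get(self.key, "")).lower()

items = [{"text_for_filter": "STEM image 001"}, {"text_for_filter": "EELS spectrum"}]
combined = AndFilter([OrFilter([TextFilter("text_for_filter", "stem")])])
print([item for item in items if combined.matches(item)])  # keeps only the STEM item
```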
def __get_keys(self):
""" Return the keys associated with this node by adding its key and then adding parent keys recursively. """
keys = list()
tree_node = self
while tree_node is not None and tree_node.key is not None:
keys.insert(0, tree_node.key)
tree_node = tree_node.parent
return keys
|
Return the keys associated with this node by adding its key and then adding parent keys recursively.
|
entailment
|
def insert_value(self, keys, value):
"""
Insert a value (data item) into this tree node and then its
children. This will be called in response to a new data item being
inserted into the document. Also updates the tree node's cumulative
child count.
"""
self.count += 1
if not self.key:
self.__value_reverse_mapping[value] = keys
if len(keys) == 0:
self.values.append(value)
else:
key = keys[0]
index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed))
if index == len(self.children) or self.children[index].key != key:
new_tree_node = TreeNode(key, list(), reversed=self.reversed)
new_tree_node.child_inserted = self.child_inserted
new_tree_node.child_removed = self.child_removed
new_tree_node.tree_node_updated = self.tree_node_updated
new_tree_node.__set_parent(self)
self.children.insert(index, new_tree_node)
if self.child_inserted:
self.child_inserted(self, index, new_tree_node)
child = self.children[index]
child.insert_value(keys[1:], value)
if self.tree_node_updated:
self.tree_node_updated(child)
|
Insert a value (data item) into this tree node and then its
children. This will be called in response to a new data item being
inserted into the document. Also updates the tree node's cumulative
child count.
|
entailment
|
def remove_value(self, keys, value):
"""
Remove a value (data item) from this tree node and its children.
Also updates the tree node's cumulative child count.
"""
self.count -= 1
if not self.key:
keys = self.__value_reverse_mapping[value]
del self.__value_reverse_mapping[value]
if len(keys) == 0:
self.values.remove(value)
else:
key = keys[0]
index = bisect.bisect_left(self.children, TreeNode(key, reversed=self.reversed))
assert index != len(self.children) and self.children[index].key == key
self.children[index].remove_value(keys[1:], value)
if self.tree_node_updated:
self.tree_node_updated(self.children[index])
if self.children[index].count == 0:
del self.children[index]
if self.child_removed:
self.child_removed(self, index)
|
Remove a value (data item) from this tree node and its children.
Also updates the tree node's cumulative child count.
|
entailment
|
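Both insert_value and remove_value rely on the children list staying sorted by key so bisect.bisect_left can find a child's slot in O(log n); this presumes TreeNode instances compare by key (honoring the `reversed` flag). A simplified sketch of that invariant, ascending order only:

```python
import bisect

# Simplified stand-in illustrating why bisect works in insert_value/remove_value:
# children stay sorted by key, so bisect_left locates the slot in O(log n).
class KeyNode:
    def __init__(self, key):
        self.key = key
        self.children = []

    def __lt__(self, other):
        return self.key < other.key

    def __eq__(self, other):
        return self.key == other.key

root = KeyNode(None)
for key in [2021, 2023, 2022]:
    probe = KeyNode(key)
    index = bisect.bisect_left(root.children, probe)
    if index == len(root.children) or root.children[index].key != key:
        root.children.insert(index, probe)
print([child.key for child in root.children])  # [2021, 2022, 2023]
```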
def label_storm_objects(data, method, min_intensity, max_intensity, min_area=1, max_area=100, max_range=1,
increment=1, gaussian_sd=0):
"""
From a 2D grid or time series of 2D grids, this method labels storm objects with either the Enhanced Watershed
or Hysteresis methods.
Args:
data: the gridded data to be labeled. Should be a 2D numpy array in (y, x) coordinate order or a 3D numpy array
in (time, y, x) coordinate order
method: "ew" or "watershed" for Enhanced Watershed or "hyst" for hysteresis
min_intensity: Minimum intensity threshold for gridpoints contained within any objects
max_intensity: For watershed, any points above max_intensity are considered as the same value as max intensity.
For hysteresis, all objects have to contain at least 1 pixel that equals or exceeds this value
min_area: (default 1) The minimum area of any object in pixels.
max_area: (default 100) The area threshold in pixels at which the enhanced watershed ends growth. Object area
may exceed this threshold if the pixels at the last watershed level exceed the object area.
max_range: Maximum difference between the maximum and minimum value in an enhanced watershed object before
growth is stopped.
increment: Discretization increment for the enhanced watershed
gaussian_sd: Standard deviation of Gaussian filter applied to data
Returns:
label_grid: an ndarray with the same shape as data in which each pixel is labeled with a positive integer value.
"""
if method.lower() in ["ew", "watershed"]:
labeler = EnhancedWatershed(min_intensity, increment, max_intensity, max_area, max_range)
else:
labeler = Hysteresis(min_intensity, max_intensity)
if len(data.shape) == 2:
label_grid = labeler.label(gaussian_filter(data, gaussian_sd))
label_grid[data < min_intensity] = 0
if min_area > 1:
label_grid = labeler.size_filter(label_grid, min_area)
else:
label_grid = np.zeros(data.shape, dtype=int)
for t in range(data.shape[0]):
label_grid[t] = labeler.label(gaussian_filter(data[t], gaussian_sd))
label_grid[t][data[t] < min_intensity] = 0
if min_area > 1:
label_grid[t] = labeler.size_filter(label_grid[t], min_area)
return label_grid
|
From a 2D grid or time series of 2D grids, this method labels storm objects with either the Enhanced Watershed
or Hysteresis methods.
Args:
data: the gridded data to be labeled. Should be a 2D numpy array in (y, x) coordinate order or a 3D numpy array
in (time, y, x) coordinate order
method: "ew" or "watershed" for Enhanced Watershed or "hyst" for hysteresis
min_intensity: Minimum intensity threshold for gridpoints contained within any objects
max_intensity: For watershed, any points above max_intensity are considered as the same value as max intensity.
For hysteresis, all objects have to contain at least 1 pixel that equals or exceeds this value
min_area: (default 1) The minimum area of any object in pixels.
max_area: (default 100) The area threshold in pixels at which the enhanced watershed ends growth. Object area
may exceed this threshold if the pixels at the last watershed level exceed the object area.
max_range: Maximum difference between the maximum and minimum value in an enhanced watershed object before
growth is stopped.
increment: Discretization increment for the enhanced watershed
gaussian_sd: Standard deviation of Gaussian filter applied to data
Returns:
label_grid: an ndarray with the same shape as data in which each pixel is labeled with a positive integer value.
|
entailment
|
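A minimal usage sketch for the labeling step on a synthetic 2D field, assuming label_storm_objects (defined above) and its EnhancedWatershed/Hysteresis dependencies are in scope; the import path is commented out because it is an assumption, and the thresholds are arbitrary example values for a reflectivity-like field.

```python
import numpy as np

# Hypothetical import path; adjust to wherever label_storm_objects is defined.
# from hagelslag.processing.tracker import label_storm_objects

# Synthetic "reflectivity" field with two blobs above the 35 dBZ threshold.
y, x = np.mgrid[0:100, 0:100]
data = (50 * np.exp(-((x - 30) ** 2 + (y - 40) ** 2) / 50.0)
        + 45 * np.exp(-((x - 70) ** 2 + (y - 60) ** 2) / 80.0))

label_grid = label_storm_objects(data, method="hyst",
                                 min_intensity=35, max_intensity=45,
                                 min_area=4, gaussian_sd=1)
print(np.unique(label_grid))  # 0 = background, 1..N = storm object labels
```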
def extract_storm_objects(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, obj_buffer=0):
"""
After storms are labeled, this method extracts the storm objects from the grid and places them into STObjects.
The STObjects contain intensity, location, and shape information about each storm at each timestep.
Args:
label_grid: 2D or 3D array output by label_storm_objects.
data: 2D or 3D array used as input to label_storm_objects.
x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
y_grid: 2D array of y-coordinate data.
times: List or array of time values, preferably as integers
dx: grid spacing in same units as x_grid and y_grid.
dt: period elapsed between times
obj_buffer: number of extra pixels beyond bounding box of object to store in each STObject
Returns:
storm_objects: list of lists containing STObjects identified at each time.
"""
storm_objects = []
if len(label_grid.shape) == 3:
ij_grid = np.indices(label_grid.shape[1:])
for t, time in enumerate(times):
storm_objects.append([])
object_slices = list(find_objects(label_grid[t], label_grid[t].max()))
if len(object_slices) > 0:
for o, obj_slice in enumerate(object_slices):
if obj_buffer > 0:
obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
np.minimum(osl.stop + obj_buffer, label_grid.shape[l + 1]))
for l, osl in enumerate(obj_slice)]
else:
obj_slice_buff = obj_slice
storm_objects[-1].append(STObject(data[t][obj_slice_buff],
np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
x_grid[obj_slice_buff],
y_grid[obj_slice_buff],
ij_grid[0][obj_slice_buff],
ij_grid[1][obj_slice_buff],
time,
time,
dx=dx,
step=dt))
if t > 0:
dims = storm_objects[-1][-1].timesteps[0].shape
storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
else:
ij_grid = np.indices(label_grid.shape)
storm_objects.append([])
object_slices = list(find_objects(label_grid, label_grid.max()))
if len(object_slices) > 0:
for o, obj_slice in enumerate(object_slices):
if obj_buffer > 0:
obj_slice_buff = [slice(np.maximum(0, osl.start - obj_buffer),
np.minimum(osl.stop + obj_buffer, label_grid.shape[l]))
for l, osl in enumerate(obj_slice)]
else:
obj_slice_buff = obj_slice
storm_objects[-1].append(STObject(data[obj_slice_buff],
np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
x_grid[obj_slice_buff],
y_grid[obj_slice_buff],
ij_grid[0][obj_slice_buff],
ij_grid[1][obj_slice_buff],
times[0],
times[0],
dx=dx,
step=dt))
return storm_objects
|
After storms are labeled, this method extracts the storm objects from the grid and places them into STObjects.
The STObjects contain intensity, location, and shape information about each storm at each timestep.
Args:
label_grid: 2D or 3D array output by label_storm_objects.
data: 2D or 3D array used as input to label_storm_objects.
x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
y_grid: 2D array of y-coordinate data.
times: List or array of time values, preferably as integers
dx: grid spacing in same units as x_grid and y_grid.
dt: period elapsed between times
obj_buffer: number of extra pixels beyond bounding box of object to store in each STObject
Returns:
storm_objects: list of lists containing STObjects identified at each time.
|
entailment
|
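The obj_buffer handling above pads each object's bounding-box slice by a fixed number of pixels while clamping to the grid edges. A standalone illustration of that slice arithmetic using scipy's label and find_objects:

```python
import numpy as np
from scipy.ndimage import find_objects, label

# Pad an object's bounding-box slice by obj_buffer pixels, clamped to the grid edges,
# mirroring the buffered-slice construction in extract_storm_objects.
grid = np.zeros((10, 10), dtype=int)
grid[1:4, 1:4] = 1
obj_slice = find_objects(label(grid)[0])[0]          # (slice(1, 4), slice(1, 4))
obj_buffer = 2
obj_slice_buff = tuple(slice(max(0, osl.start - obj_buffer),
                             min(osl.stop + obj_buffer, grid.shape[l]))
                       for l, osl in enumerate(obj_slice))
print(obj_slice_buff)  # (slice(0, 6), slice(0, 6))
```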
def extract_storm_patches(label_grid, data, x_grid, y_grid, times, dx=1, dt=1, patch_radius=16):
"""
After storms are labeled, this method extracts boxes of equal size centered on each storm from the grid and places
them into STObjects. The STObjects contain intensity, location, and shape information about each storm
at each timestep.
Args:
label_grid: 2D or 3D array output by label_storm_objects.
data: 2D or 3D array used as input to label_storm_objects.
x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
y_grid: 2D array of y-coordinate data.
times: List or array of time values, preferably as integers
dx: grid spacing in same units as x_grid and y_grid.
dt: period elapsed between times
patch_radius: Number of grid points from center of mass to extract
Returns:
storm_objects: list of lists containing STObjects identified at each time.
"""
storm_objects = []
if len(label_grid.shape) == 3:
ij_grid = np.indices(label_grid.shape[1:])
for t, time in enumerate(times):
storm_objects.append([])
# object_slices = find_objects(label_grid[t], label_grid[t].max())
centers = list(center_of_mass(data[t], labels=label_grid[t], index=np.arange(1, label_grid[t].max() + 1)))
if len(centers) > 0:
for o, center in enumerate(centers):
int_center = np.round(center).astype(int)
obj_slice_buff = [slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
slice(int_center[1] - patch_radius, int_center[1] + patch_radius)]
storm_objects[-1].append(STObject(data[t][obj_slice_buff],
np.where(label_grid[t][obj_slice_buff] == o + 1, 1, 0),
x_grid[obj_slice_buff],
y_grid[obj_slice_buff],
ij_grid[0][obj_slice_buff],
ij_grid[1][obj_slice_buff],
time,
time,
dx=dx,
step=dt))
if t > 0:
dims = storm_objects[-1][-1].timesteps[0].shape
storm_objects[-1][-1].estimate_motion(time, data[t - 1], dims[1], dims[0])
else:
ij_grid = np.indices(label_grid.shape)
storm_objects.append([])
centers = list(center_of_mass(data, labels=label_grid, index=np.arange(1, label_grid.max() + 1)))
if len(centers) > 0:
for o, center in enumerate(centers):
int_center = np.round(center).astype(int)
obj_slice_buff = (slice(int_center[0] - patch_radius, int_center[0] + patch_radius),
slice(int_center[1] - patch_radius, int_center[1] + patch_radius))
storm_objects[-1].append(STObject(data[obj_slice_buff],
np.where(label_grid[obj_slice_buff] == o + 1, 1, 0),
x_grid[obj_slice_buff],
y_grid[obj_slice_buff],
ij_grid[0][obj_slice_buff],
ij_grid[1][obj_slice_buff],
times[0],
times[0],
dx=dx,
step=dt))
return storm_objects
|
After storms are labeled, this method extracts boxes of equal size centered on each storm from the grid and places
them into STObjects. The STObjects contain intensity, location, and shape information about each storm
at each timestep.
Args:
label_grid: 2D or 3D array output by label_storm_objects.
data: 2D or 3D array used as input to label_storm_objects.
x_grid: 2D array of x-coordinate data, preferably on a uniform spatial grid with units of length.
y_grid: 2D array of y-coordinate data.
times: List or array of time values, preferably as integers
dx: grid spacing in same units as x_grid and y_grid.
dt: period elapsed between times
patch_radius: Number of grid points from center of mass to extract
Returns:
storm_objects: list of lists containing STObjects identified at each time.
|
entailment
|
def track_storms(storm_objects, times, distance_components, distance_maxima, distance_weights, tracked_objects=None):
"""
Given the output of extract_storm_objects, this method tracks storms through time and merges individual
STObjects into a set of tracks.
Args:
storm_objects: list of list of STObjects that have not been tracked.
times: List of times associated with each set of STObjects
distance_components: list of function objects that make up components of distance function
distance_maxima: array of maximum values for each distance for normalization purposes
distance_weights: weight given to each component of the distance function. Should add to 1.
tracked_objects: List of STObjects that have already been tracked.
Returns:
tracked_objects:
"""
obj_matcher = ObjectMatcher(distance_components, distance_weights, distance_maxima)
if tracked_objects is None:
tracked_objects = []
for t, time in enumerate(times):
past_time_objects = []
for obj in tracked_objects:
if obj.end_time == time - obj.step:
past_time_objects.append(obj)
if len(past_time_objects) == 0:
tracked_objects.extend(storm_objects[t])
elif len(past_time_objects) > 0 and len(storm_objects[t]) > 0:
assignments = obj_matcher.match_objects(past_time_objects, storm_objects[t], times[t-1], times[t])
unpaired = list(range(len(storm_objects[t])))
for pair in assignments:
past_time_objects[pair[0]].extend(storm_objects[t][pair[1]])
unpaired.remove(pair[1])
if len(unpaired) > 0:
for up in unpaired:
tracked_objects.append(storm_objects[t][up])
return tracked_objects
|
Given the output of extract_storm_objects, this method tracks storms through time and merges individual
STObjects into a set of tracks.
Args:
storm_objects: list of list of STObjects that have not been tracked.
times: List of times associated with each set of STObjects
distance_components: list of function objects that make up components of distance function
distance_maxima: array of maximum values for each distance for normalization purposes
distance_weights: weight given to each component of the distance function. Should add to 1.
tracked_objects: List of STObjects that have already been tracked.
Returns:
tracked_objects:
|
entailment
|
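An end-to-end sketch of how label_storm_objects, extract_storm_objects, and track_storms fit together, assuming they are importable from one module (paths commented out as they are assumptions) and using two of the distance components defined later in this section; the thresholds, maxima, and weights are example values only.

```python
import numpy as np

# Hypothetical import paths; the functions themselves are the ones shown in this section.
# from hagelslag.processing.tracker import (label_storm_objects,
#                                           extract_storm_objects, track_storms)
# from hagelslag.processing.ObjectMatcher import shifted_centroid_distance, max_intensity

times = np.arange(0, 60, 5)                        # e.g. minutes between radar scans
data = np.random.random((times.size, 100, 100)) * 60
x_grid, y_grid = np.meshgrid(np.arange(100), np.arange(100))

label_grid = label_storm_objects(data, "hyst", min_intensity=35, max_intensity=50, min_area=4)
storm_objs = extract_storm_objects(label_grid, data, x_grid, y_grid, times, dx=1, dt=5)
tracks = track_storms(storm_objs, times,
                      distance_components=[shifted_centroid_distance, max_intensity],
                      distance_maxima=np.array([24, 10]),
                      distance_weights=np.array([0.8, 0.2]))   # weights sum to 1
print(len(tracks), "tracks")
```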
def peirce_skill_score(self):
"""
Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
"""
n = float(self.table.sum())
nf = self.table.sum(axis=1)
no = self.table.sum(axis=0)
correct = float(self.table.trace())
return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)
|
Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
|
entailment
|
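A quick standalone check of the expression above on a 2x2 contingency table (rows = forecast category, columns = observed category): in the two-category case the multiclass Peirce Skill Score reduces to hit rate minus false alarm rate.

```python
import numpy as np

table = np.array([[30.0, 10.0],   # forecast "yes": 30 hits, 10 false alarms
                  [ 5.0, 55.0]])  # forecast "no":   5 misses, 55 correct negatives
n = table.sum()
nf = table.sum(axis=1)            # forecast marginals
no = table.sum(axis=0)            # observed marginals
correct = table.trace()
pss = (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)

pod = table[0, 0] / (table[0, 0] + table[1, 0])   # hits / observed yes
pofd = table[0, 1] / (table[0, 1] + table[1, 1])  # false alarms / observed no
assert np.isclose(pss, pod - pofd)
print(round(pss, 4))  # ~0.7033
```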
def gerrity_score(self):
"""
Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.
:return:
"""
k = self.table.shape[0]
n = float(self.table.sum())
p_o = self.table.sum(axis=0) / n
p_sum = np.cumsum(p_o)[:-1]
a = (1.0 - p_sum) / p_sum
s = np.zeros(self.table.shape, dtype=float)
for (i, j) in np.ndindex(*s.shape):
if i == j:
s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k-1]))
elif i < j:
s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k-1]))
else:
s[i, j] = s[j, i]
return np.sum(self.table / float(self.table.sum()) * s)
|
Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.
:return:
|
entailment
|
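As a sanity check on the scoring-matrix construction, the Gerrity score is known to coincide with the Peirce Skill Score when there are only two categories; reusing the 2x2 table from the previous check reproduces the same value (~0.703).

```python
import numpy as np

# Same example table as the Peirce Skill Score check (rows = forecast, columns = observed).
table = np.array([[30.0, 10.0],
                  [ 5.0, 55.0]])
k = table.shape[0]
n = table.sum()
p_o = table.sum(axis=0) / n                       # observed relative frequencies
a = (1.0 - np.cumsum(p_o)[:-1]) / np.cumsum(p_o)[:-1]
s = np.zeros(table.shape)
for i, j in np.ndindex(*s.shape):
    if i == j:
        s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k - 1]))
    elif i < j:
        s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k - 1]))
    else:
        s[i, j] = s[j, i]
gerrity = np.sum(table / n * s)
print(round(gerrity, 4))  # ~0.7033, matching the 2x2 Peirce Skill Score
```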
def centroid_distance(item_a, time_a, item_b, time_b, max_value):
"""
Euclidean distance between the centroids of item_a and item_b.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
ax, ay = item_a.center_of_mass(time_a)
bx, by = item_b.center_of_mass(time_b)
return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
|
Euclidean distance between the centroids of item_a and item_b.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
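Every distance component below follows the same normalization pattern: compute a raw difference, clamp it at max_value, and divide by max_value so each component lies in [0, 1] before total_cost_function applies its weights. A two-line illustration:

```python
import numpy as np

# Common pattern shared by all distance components: clamp, then normalize to [0, 1].
def normalized(raw_distance, max_value):
    return np.minimum(raw_distance, max_value) / float(max_value)

print(normalized(12.0, 24.0))  # 0.5  (well inside the cutoff)
print(normalized(80.0, 24.0))  # 1.0  (saturates at the cutoff, effectively "no match")
```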
def shifted_centroid_distance(item_a, time_a, item_b, time_b, max_value):
"""
Centroid distance with motion corrections.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
ax, ay = item_a.center_of_mass(time_a)
bx, by = item_b.center_of_mass(time_b)
if time_a < time_b:
bx = bx - item_b.u
by = by - item_b.v
else:
ax = ax - item_a.u
ay = ay - item_a.v
return np.minimum(np.sqrt((ax - bx) ** 2 + (ay - by) ** 2), max_value) / float(max_value)
|
Centroid distance with motion corrections.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def closest_distance(item_a, time_a, item_b, time_b, max_value):
"""
Euclidean distance between the pixels in item_a and item_b closest to each other.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
return np.minimum(item_a.closest_distance(time_a, item_b, time_b), max_value) / float(max_value)
|
Euclidean distance between the pixels in item_a and item_b closest to each other.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def ellipse_distance(item_a, time_a, item_b, time_b, max_value):
"""
Calculate differences in the properties of ellipses fitted to each object.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
ts = np.array([0, np.pi])
ell_a = item_a.get_ellipse_model(time_a)
ell_b = item_b.get_ellipse_model(time_b)
ends_a = ell_a.predict_xy(ts)
ends_b = ell_b.predict_xy(ts)
distances = np.sqrt((ends_a[:, 0:1] - ends_b[:, 0:1].T) ** 2 + (ends_a[:, 1:] - ends_b[:, 1:].T) ** 2)
return np.minimum(distances[0, 1], max_value) / float(max_value)
|
Calculate differences in the properties of ellipses fitted to each object.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def nonoverlap(item_a, time_a, item_b, time_b, max_value):
"""
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
return np.minimum(1 - item_a.count_overlap(time_a, item_b, time_b), max_value) / float(max_value)
|
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def max_intensity(item_a, time_a, item_b, time_b, max_value):
"""
RMS difference in maximum intensity
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
intensity_a = item_a.max_intensity(time_a)
intensity_b = item_b.max_intensity(time_b)
diff = np.sqrt((intensity_a - intensity_b) ** 2)
return np.minimum(diff, max_value) / float(max_value)
|
RMS difference in maximum intensity
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def area_difference(item_a, time_a, item_b, time_b, max_value):
"""
RMS Difference in object areas.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
size_a = item_a.size(time_a)
size_b = item_b.size(time_b)
diff = np.sqrt((size_a - size_b) ** 2)
return np.minimum(diff, max_value) / float(max_value)
|
RMS Difference in object areas.
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def mean_minimum_centroid_distance(item_a, item_b, max_value):
"""
RMS difference in the minimum distances from the centroids of one track to the centroids of another track
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
centroids_a = np.array([item_a.center_of_mass(t) for t in item_a.times])
centroids_b = np.array([item_b.center_of_mass(t) for t in item_b.times])
distance_matrix = (centroids_a[:, 0:1] - centroids_b.T[0:1]) ** 2 + (centroids_a[:, 1:] - centroids_b.T[1:]) ** 2
mean_min_distances = np.sqrt(distance_matrix.min(axis=0).mean() + distance_matrix.min(axis=1).mean())
return np.minimum(mean_min_distances, max_value) / float(max_value)
|
RMS difference in the minimum distances from the centroids of one track to the centroids of another track
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def mean_min_time_distance(item_a, item_b, max_value):
"""
Calculate the mean time difference among the time steps in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
times_a = item_a.times.reshape((item_a.times.size, 1))
times_b = item_b.times.reshape((1, item_b.times.size))
distance_matrix = (times_a - times_b) ** 2
mean_min_distances = np.sqrt(distance_matrix.min(axis=0).mean() + distance_matrix.min(axis=1).mean())
return np.minimum(mean_min_distances, max_value) / float(max_value)
|
Calculate the mean time difference among the time steps in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def start_centroid_distance(item_a, item_b, max_value):
"""
Distance between the centroids of the first step in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
start_a = item_a.center_of_mass(item_a.times[0])
start_b = item_b.center_of_mass(item_b.times[0])
start_distance = np.sqrt((start_a[0] - start_b[0]) ** 2 + (start_a[1] - start_b[1]) ** 2)
return np.minimum(start_distance, max_value) / float(max_value)
|
Distance between the centroids of the first step in each object.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def start_time_distance(item_a, item_b, max_value):
"""
Absolute difference between the starting times of each item.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
start_time_diff = np.abs(item_a.times[0] - item_b.times[0])
return np.minimum(start_time_diff, max_value) / float(max_value)
|
Absolute difference between the starting times of each item.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def duration_distance(item_a, item_b, max_value):
"""
Absolute difference in the duration of two items
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
duration_a = item_a.times.size
duration_b = item_b.times.size
return np.minimum(np.abs(duration_a - duration_b), max_value) / float(max_value)
|
Absolute difference in the duration of two items
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def mean_area_distance(item_a, item_b, max_value):
"""
Absolute difference in the means of the areas of each track over time.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
mean_area_a = np.mean([item_a.size(t) for t in item_a.times])
mean_area_b = np.mean([item_b.size(t) for t in item_b.times])
return np.abs(mean_area_a - mean_area_b) / float(max_value)
|
Absolute difference in the means of the areas of each track over time.
Args:
item_a: STObject from the first set in TrackMatcher
item_b: STObject from the second set in TrackMatcher
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
|
entailment
|
def match_objects(self, set_a, set_b, time_a, time_b):
"""
Match two sets of objects at particular times.
Args:
set_a: list of STObjects
set_b: list of STObjects
time_a: time at which set_a is being evaluated for matching
time_b: time at which set_b is being evaluated for matching
Returns:
List of tuples containing (set_a index, set_b index) for each match
"""
costs = self.cost_matrix(set_a, set_b, time_a, time_b) * 100
min_row_costs = costs.min(axis=1)
min_col_costs = costs.min(axis=0)
good_rows = np.where(min_row_costs < 100)[0]
good_cols = np.where(min_col_costs < 100)[0]
assignments = []
if len(good_rows) > 0 and len(good_cols) > 0:
munk = Munkres()
initial_assignments = munk.compute(costs[tuple(np.meshgrid(good_rows, good_cols, indexing='ij'))].tolist())
initial_assignments = [(good_rows[x[0]], good_cols[x[1]]) for x in initial_assignments]
for a in initial_assignments:
if costs[a[0], a[1]] < 100:
assignments.append(a)
return assignments
|
Match two sets of objects at particular times.
Args:
set_a: list of STObjects
set_b: list of STObjects
time_a: time at which set_a is being evaluated for matching
time_b: time at which set_b is being evaluated for matching
Returns:
List of tuples containing (set_a index, set_b index) for each match
|
entailment
|
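The x100 scaling plus the <100 cutoff above means any pairing whose weighted distance reaches 1.0 is treated as infeasible, and the Hungarian assignment only runs on rows and columns that have at least one feasible partner. The same gating, sketched with scipy.optimize.linear_sum_assignment standing in for the munkres package used by the original code (a substitution for illustration only):

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

costs = np.array([[0.2, 1.0, 1.0],
                  [1.0, 0.4, 1.0]]) * 100           # 100 == "infeasible" pairing
good_rows = np.where(costs.min(axis=1) < 100)[0]
good_cols = np.where(costs.min(axis=0) < 100)[0]
sub = costs[np.ix_(good_rows, good_cols)]           # restrict to feasible rows/columns
row_ind, col_ind = linear_sum_assignment(sub)       # Hungarian assignment
assignments = [(good_rows[r], good_cols[c])
               for r, c in zip(row_ind, col_ind) if sub[r, c] < 100]
print(assignments)  # [(0, 0), (1, 1)]
```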
def cost_matrix(self, set_a, set_b, time_a, time_b):
"""
Calculates the costs (distances) between the items in set a and set b at the specified times.
Args:
set_a: List of STObjects
set_b: List of STObjects
time_a: time at which objects in set_a are evaluated
time_b: time at which objects in set_b are evaluated
Returns:
A numpy array with shape [len(set_a), len(set_b)] containing the cost matrix between the items in set a
and the items in set b.
"""
costs = np.zeros((len(set_a), len(set_b)))
for a, item_a in enumerate(set_a):
for b, item_b in enumerate(set_b):
costs[a, b] = self.total_cost_function(item_a, item_b, time_a, time_b)
return costs
|
Calculates the costs (distances) between the items in set a and set b at the specified times.
Args:
set_a: List of STObjects
set_b: List of STObjects
time_a: time at which objects in set_a are evaluated
time_b: time at which objects in set_b are evaluated
Returns:
A numpy array with shape [len(set_a), len(set_b)] containing the cost matrix between the items in set a
and the items in set b.
|
entailment
|
def total_cost_function(self, item_a, item_b, time_a, time_b):
"""
Calculate total cost function between two items.
Args:
item_a: STObject
item_b: STObject
time_a: Timestep in item_a at which cost function is evaluated
time_b: Timestep in item_b at which cost function is evaluated
Returns:
The total weighted distance between item_a and item_b
"""
distances = np.zeros(len(self.weights))
for c, component in enumerate(self.cost_function_components):
distances[c] = component(item_a, time_a, item_b, time_b, self.max_values[c])
total_distance = np.sum(self.weights * distances)
return total_distance
|
Calculate total cost function between two items.
Args:
item_a: STObject
item_b: STObject
time_a: Timestep in item_a at which cost function is evaluated
time_b: Timestep in item_b at which cost function is evaluated
Returns:
The total weighted distance between item_a and item_b
|
entailment
|
def match_tracks(self, set_a, set_b, closest_matches=False):
"""
Find the optimal set of matching assignments between set a and set b. This function supports optimal 1:1
matching using the Munkres method and matching from every object in set a to the closest object in set b.
In this situation set b accepts multiple matches from set a.
Args:
set_a:
set_b:
closest_matches:
Returns:
"""
costs = self.track_cost_matrix(set_a, set_b) * 100
min_row_costs = costs.min(axis=1)
min_col_costs = costs.min(axis=0)
good_rows = np.where(min_row_costs < 100)[0]
good_cols = np.where(min_col_costs < 100)[0]
assignments = []
if len(good_rows) > 0 and len(good_cols) > 0:
if closest_matches:
b_matches = costs[tuple(np.meshgrid(good_rows, good_cols, indexing='ij'))].argmin(axis=1)
a_matches = np.arange(b_matches.size)
initial_assignments = [(good_rows[a_matches[x]], good_cols[b_matches[x]])
for x in range(b_matches.size)]
else:
munk = Munkres()
initial_assignments = munk.compute(costs[tuple(np.meshgrid(good_rows, good_cols, indexing='ij'))].tolist())
initial_assignments = [(good_rows[x[0]], good_cols[x[1]]) for x in initial_assignments]
for a in initial_assignments:
if costs[a[0], a[1]] < 100:
assignments.append(a)
return assignments
|
Find the optimal set of matching assignments between set a and set b. This function supports optimal 1:1
matching using the Munkres method and matching from every object in set a to the closest object in set b.
In this situation set b accepts multiple matches from set a.
Args:
set_a:
set_b:
closest_matches:
Returns:
|
entailment
|
def match(self, set_a, set_b):
"""
For each step in each track from set_a, identify all steps in all tracks from set_b that meet all
cost function criteria
Args:
set_a: List of STObjects
set_b: List of STObjects
Returns:
track_pairings: pandas.DataFrame
"""
track_step_matches = [[] for _ in range(len(set_a))]
costs = self.cost_matrix(set_a, set_b)
valid_costs = np.all(costs < 1, axis=2)
set_a_matches, set_b_matches = np.where(valid_costs)
s = 0
track_pairings = pd.DataFrame(index=np.arange(costs.shape[0]),
columns=["Track", "Step", "Time", "Matched", "Pairings"], dtype=object)
set_b_info = []
for trb, track_b in enumerate(set_b):
for t, time in enumerate(track_b.times):
set_b_info.append((trb, t))
set_b_info_arr = np.array(set_b_info, dtype=int)
for tr, track_a in enumerate(set_a):
for t, time in enumerate(track_a.times):
track_pairings.loc[s, ["Track", "Step", "Time"]] = [tr, t, time]
track_pairings.loc[s, "Matched"] = 1 if np.count_nonzero(set_a_matches == s) > 0 else 0
if track_pairings.loc[s, "Matched"] == 1:
track_pairings.loc[s, "Pairings"] = set_b_info_arr[set_b_matches[set_a_matches == s]]
else:
track_pairings.loc[s, "Pairings"] = np.array([])
s += 1
return track_pairings
|
For each step in each track from set_a, identify all steps in all tracks from set_b that meet all
cost function criteria
Args:
set_a: List of STObjects
set_b: List of STObjects
Returns:
track_pairings: pandas.DataFrame
|
entailment
|
def variable_specifier(self) -> dict:
"""Return the variable specifier for this variable.
The specifier can be used to lookup the value of this variable in a computation context.
"""
if self.value_type is not None:
return {"type": "variable", "version": 1, "uuid": str(self.uuid), "x-name": self.name, "x-value": self.value}
else:
return self.specifier
|
Return the variable specifier for this variable.
The specifier can be used to lookup the value of this variable in a computation context.
|
entailment
|
def bound_variable(self):
"""Return an object with a value property and a changed_event.
The value property returns the value of the variable. The changed_event is fired
whenever the value changes.
"""
class BoundVariable:
def __init__(self, variable):
self.__variable = variable
self.changed_event = Event.Event()
self.needs_rebind_event = Event.Event()
def property_changed(key):
if key == "value":
self.changed_event.fire()
self.__variable_property_changed_listener = variable.property_changed_event.listen(property_changed)
@property
def value(self):
return self.__variable.value
def close(self):
self.__variable_property_changed_listener.close()
self.__variable_property_changed_listener = None
return BoundVariable(self)
|
Return an object with a value property and a changed_event.
The value property returns the value of the variable. The changed_event is fired
whenever the value changes.
|
entailment
|
def resolve_object_specifier(self, object_specifier, secondary_specifier=None, property_name=None, objects_model=None):
"""Resolve the object specifier.
First lookup the object specifier in the enclosing computation. If it's not found,
then lookup in the computation's context. Otherwise it should be a value type variable.
In that case, return the bound variable.
"""
variable = self.__computation().resolve_variable(object_specifier)
if not variable:
return self.__context.resolve_object_specifier(object_specifier, secondary_specifier, property_name, objects_model)
elif variable.specifier is None:
return variable.bound_variable
return None
|
Resolve the object specifier.
First lookup the object specifier in the enclosing computation. If it's not found,
then lookup in the computation's context. Otherwise it should be a value type variable.
In that case, return the bound variable.
|
entailment
|
def parse_names(cls, expression):
"""Return the list of identifiers used in the expression."""
names = set()
try:
ast_node = ast.parse(expression, "ast")
class Visitor(ast.NodeVisitor):
def visit_Name(self, node):
names.add(node.id)
Visitor().visit(ast_node)
except Exception:
pass
return names
|
Return the list of identifiers used in the expression.
|
entailment
|
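A standalone illustration of what parse_names collects: every ast.Name node, so called function names appear alongside plain variable names, while attribute names do not.

```python
import ast

# Collect every ast.Name identifier in an expression, as parse_names does.
names = set()

class Visitor(ast.NodeVisitor):
    def visit_Name(self, node):
        names.add(node.id)

Visitor().visit(ast.parse("gaussian_filter(a, sigma) + b.real", "ast"))
print(sorted(names))  # ['a', 'b', 'gaussian_filter', 'sigma']
```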
def bind(self, context) -> None:
"""Bind a context to this computation.
The context allows the computation to convert object specifiers to actual objects.
"""
# make a computation context based on the enclosing context.
self.__computation_context = ComputationContext(self, context)
# re-bind is not valid. be careful to set the computation after the data item is already in document.
for variable in self.variables:
assert variable.bound_item is None
for result in self.results:
assert result.bound_item is None
# bind the variables
for variable in self.variables:
self.__bind_variable(variable)
# bind the results
for result in self.results:
self.__bind_result(result)
|
Bind a context to this computation.
The context allows the computation to convert object specifiers to actual objects.
|
entailment
|
def unbind(self):
"""Unlisten and close each bound item."""
for variable in self.variables:
self.__unbind_variable(variable)
for result in self.results:
self.__unbind_result(result)
|
Unlisten and close each bound item.
|
entailment
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especilizada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
retorno = super(ClienteSATLocal, self).ativar_sat(
tipo_certificado, cnpj, codigo_uf)
return RespostaAtivarSAT.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: A SAT response specialized for ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
|
entailment
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
comunicar_certificado_icpbrasil(certificado)
return RespostaSAT.comunicar_certificado_icpbrasil(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
retorno = super(ClienteSATLocal, self).enviar_dados_venda(dados_venda)
return RespostaEnviarDadosVenda.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: A SAT response specialized for ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
|
entailment
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
retorno = super(ClienteSATLocal, self).\
cancelar_ultima_venda(chave_cfe, dados_cancelamento)
return RespostaCancelarUltimaVenda.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: A SAT response specialized for ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
|
entailment
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).consultar_sat()
return RespostaSAT.consultar_sat(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
retorno = super(ClienteSATLocal, self).consultar_status_operacional()
return RespostaConsultarStatusOperacional.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: A SAT response specialized for ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
|
entailment
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
consultar_numero_sessao(numero_sessao)
return RespostaConsultarNumeroSessao.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: A SAT response whose type depends on the session queried.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
configurar_interface_de_rede(configuracao)
return RespostaSAT.configurar_interface_de_rede(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
associar_assinatura(sequencia_cnpj, assinatura_ac)
# (!) response based on the rule text in effect until 2016-12-31
return RespostaSAT.associar_assinatura(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def atualizar_software_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).atualizar_software_sat()
return RespostaSAT.atualizar_software_sat(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.atualizar_software_sat`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def extrair_logs(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: Uma resposta SAT especializada em ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
"""
retorno = super(ClienteSATLocal, self).extrair_logs()
return RespostaExtrairLogs.analisar(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.extrair_logs`.
:return: A SAT response specialized for ``ExtrairLogs``.
:rtype: satcfe.resposta.extrairlogs.RespostaExtrairLogs
|
entailment
|
def bloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).bloquear_sat()
return RespostaSAT.bloquear_sat(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def desbloquear_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).desbloquear_sat()
return RespostaSAT.desbloquear_sat(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.desbloquear_sat`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def trocar_codigo_de_ativacao(self, novo_codigo_ativacao,
opcao=constantes.CODIGO_ATIVACAO_REGULAR,
codigo_emergencia=None):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).trocar_codigo_de_ativacao(
novo_codigo_ativacao, opcao=opcao,
codigo_emergencia=codigo_emergencia)
return RespostaSAT.trocar_codigo_de_ativacao(retorno)
|
Overrides :meth:`~satcfe.base.FuncoesSAT.trocar_codigo_de_ativacao`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def documento(self, *args, **kwargs):
"""Resulta no documento XML como string, que pode ou não incluir a
declaração XML no início do documento.
"""
forcar_unicode = kwargs.pop('forcar_unicode', False)
incluir_xml_decl = kwargs.pop('incluir_xml_decl', True)
doc = ET.tostring(self._xml(*args, **kwargs),
encoding='utf-8').decode('utf-8')
if forcar_unicode:
if incluir_xml_decl:
doc = u'{}\n{}'.format(constantes.XML_DECL_UNICODE, doc)
else:
if incluir_xml_decl:
doc = '{}\n{}'.format(constantes.XML_DECL, unidecode(doc))
else:
doc = unidecode(doc)
return doc
|
Return the XML document as a string, which may or may not include the
XML declaration at the beginning of the document.
|
entailment
|
def start(self, skip_choose=False, fixed_workspace_dir=None):
"""
Start the application.
Looks for workspace_location persistent string. If it doesn't find it, uses a default
workspace location.
Then checks to see if that workspace exists. If not and if skip_choose has not been
set to True, asks the user for a workspace location. User may choose new folder or
existing location. This works by putting up the dialog which will either call start
again or exit.
Creates workspace in location if it doesn't exist.
Migrates database to latest version.
Creates document model, resources path, etc.
"""
logging.getLogger("migration").setLevel(logging.INFO)
if fixed_workspace_dir:
workspace_dir = fixed_workspace_dir
else:
documents_dir = self.ui.get_document_location()
workspace_dir = os.path.join(documents_dir, "Nion Swift Libraries")
workspace_dir = self.ui.get_persistent_string("workspace_location", workspace_dir)
welcome_message_enabled = fixed_workspace_dir is None
profile, is_created = Profile.create_profile(pathlib.Path(workspace_dir), welcome_message_enabled, skip_choose)
if not profile:
self.choose_library()
return True
self.workspace_dir = workspace_dir
DocumentModel.DocumentModel.computation_min_period = 0.1
document_model = DocumentModel.DocumentModel(profile=profile)
document_model.create_default_data_groups()
document_model.start_dispatcher()
# parse the hardware aliases file
alias_path = os.path.join(self.workspace_dir, "aliases.ini")
HardwareSource.parse_hardware_aliases_config_file(alias_path)
# create the document controller
document_controller = self.create_document_controller(document_model, "library")
if self.__resources_path is not None:
document_model.create_sample_images(self.__resources_path)
workspace_history = self.ui.get_persistent_object("workspace_history", list())
if workspace_dir in workspace_history:
workspace_history.remove(workspace_dir)
workspace_history.insert(0, workspace_dir)
self.ui.set_persistent_object("workspace_history", workspace_history)
self.ui.set_persistent_string("workspace_location", workspace_dir)
if welcome_message_enabled:
logging.info("Welcome to Nion Swift.")
if is_created and len(document_model.display_items) > 0:
document_controller.selected_display_panel.set_display_panel_display_item(document_model.display_items[0])
document_controller.selected_display_panel.perform_action("set_fill_mode")
return True
|
Start the application.
Looks for workspace_location persistent string. If it doesn't find it, uses a default
workspace location.
Then checks to see if that workspace exists. If not and if skip_choose has not been
set to True, asks the user for a workspace location. User may choose new folder or
existing location. This works by putting up the dialog which will either call start
again or exit.
Creates workspace in location if it doesn't exist.
Migrates database to latest version.
Creates document model, resources path, etc.
|
entailment
|
def sort_by_date_key(data_item):
""" A sort key to for the created field of a data item. The sort by uuid makes it determinate. """
return data_item.title + str(data_item.uuid) if data_item.is_live else str(), data_item.date_for_sorting, str(data_item.uuid)
|
A sort key for the created field of a data item. Sorting by uuid makes it deterministic.
|
entailment
|
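
A rough usage sketch for the key function above; `SimpleNamespace` stands in for a real data item, so the attribute names are the only thing carried over from the original code.

import datetime
import uuid
from types import SimpleNamespace

def sort_by_date_key(data_item):
    return (data_item.title + str(data_item.uuid) if data_item.is_live else str(),
            data_item.date_for_sorting, str(data_item.uuid))

items = [
    SimpleNamespace(title="b", uuid=uuid.uuid4(), is_live=False,
                    date_for_sorting=datetime.datetime(2020, 1, 2)),
    SimpleNamespace(title="a", uuid=uuid.uuid4(), is_live=True,
                    date_for_sorting=datetime.datetime(2020, 1, 1)),
]

# live items sort after non-live ones because their first tuple element is non-empty
for item in sorted(items, key=sort_by_date_key):
    print(item.title, item.date_for_sorting.date())
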
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
data_item = self.__class__()
# data format (temporary until moved to buffered data source)
data_item.large_format = self.large_format
data_item.set_data_and_metadata(copy.deepcopy(self.data_and_metadata), self.data_modified)
# metadata
data_item.created = self.created
data_item.timezone = self.timezone
data_item.timezone_offset = self.timezone_offset
data_item.metadata = self.metadata
data_item.title = self.title
data_item.caption = self.caption
data_item.description = self.description
data_item.session_id = self.session_id
data_item.session_data = copy.deepcopy(self.session_data)
return data_item
|
Return a new library item which is a copy of this one with any dynamic behavior made static.
|
entailment
|
def set_r_value(self, r_var: str, *, notify_changed=True) -> None:
"""Used to signal changes to the ref var, which are kept in document controller. ugh."""
self.r_var = r_var
self._description_changed()
if notify_changed: # set to False to set the r-value at startup; avoid marking it as a change
self.__notify_description_changed()
|
Used to signal changes to the ref var, which are kept in document controller. ugh.
|
entailment
|
def set_data_and_metadata(self, data_and_metadata, data_modified=None):
"""Sets the underlying data and data-metadata to the data_and_metadata.
Note: this does not make a copy of the data.
"""
self.increment_data_ref_count()
try:
if data_and_metadata:
data = data_and_metadata.data
data_shape_and_dtype = data_and_metadata.data_shape_and_dtype
intensity_calibration = data_and_metadata.intensity_calibration
dimensional_calibrations = data_and_metadata.dimensional_calibrations
metadata = data_and_metadata.metadata
timestamp = data_and_metadata.timestamp
data_descriptor = data_and_metadata.data_descriptor
timezone = data_and_metadata.timezone or Utility.get_local_timezone()
timezone_offset = data_and_metadata.timezone_offset or Utility.TimezoneMinutesToStringConverter().convert(Utility.local_utcoffset_minutes())
new_data_and_metadata = DataAndMetadata.DataAndMetadata(self.__load_data, data_shape_and_dtype, intensity_calibration, dimensional_calibrations, metadata, timestamp, data, data_descriptor, timezone, timezone_offset)
else:
new_data_and_metadata = None
self.__set_data_metadata_direct(new_data_and_metadata, data_modified)
if self.__data_and_metadata is not None:
if self.persistent_object_context and not self.persistent_object_context.is_write_delayed(self):
self.persistent_object_context.write_external_data(self, "data", self.__data_and_metadata.data)
self.__data_and_metadata.unloadable = True
finally:
self.decrement_data_ref_count()
|
Sets the underlying data and data-metadata to the data_and_metadata.
Note: this does not make a copy of the data.
|
entailment
|
def color_map_data(self) -> typing.Optional[numpy.ndarray]:
"""Return the color map data as a uint8 ndarray with shape (256, 3)."""
if self.display_data_shape is None: # is there display data?
return None
else:
return self.__color_map_data if self.__color_map_data is not None else ColorMaps.get_color_map_data_by_id("grayscale")
|
Return the color map data as a uint8 ndarray with shape (256, 3).
|
entailment
|
def get_calculated_display_values(self, immediate: bool=False) -> DisplayValues:
"""Return the display values.
Return the current (possibly uncalculated) display values unless 'immediate' is specified.
If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values
avoids calculation except in cases where the display values haven't already been calculated.
"""
if not immediate or not self.__is_master or not self.__last_display_values:
if not self.__current_display_values and self.__data_item:
self.__current_display_values = DisplayValues(self.__data_item.xdata, self.sequence_index, self.collection_index, self.slice_center, self.slice_width, self.display_limits, self.complex_display_type, self.__color_map_data)
def finalize(display_values):
self.__last_display_values = display_values
self.display_values_changed_event.fire()
self.__current_display_values.on_finalize = finalize
return self.__current_display_values
return self.__last_display_values
|
Return the display values.
Return the current (possibly uncalculated) display values unless 'immediate' is specified.
If 'immediate', return the existing (calculated) values if they exist. Using the 'immediate' values
avoids calculation except in cases where the display values haven't already been calculated.
|
entailment
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
if display_ref_count == 0:
self.__is_master = True
if self.__data_item:
for _ in range(amount):
self.__data_item.increment_data_ref_count()
|
Increment display reference count to indicate this library item is currently displayed.
|
entailment
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
if self.__display_ref_count == 0:
self.__is_master = False
if self.__data_item:
for _ in range(amount):
self.__data_item.decrement_data_ref_count()
|
Decrement display reference count to indicate this library item is no longer displayed.
|
entailment
|
def auto_display_limits(self):
"""Calculate best display limits and set them."""
display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
data = display_data_and_metadata.data if display_data_and_metadata else None
if data is not None:
# The old algorithm was a problem during EELS where the signal data
# is a small percentage of the overall data and was falling outside
# the included range. This is the new simplified algorithm. Future
# feature may allow user to select more complex algorithms.
mn, mx = numpy.nanmin(data), numpy.nanmax(data)
self.display_limits = mn, mx
|
Calculate best display limits and set them.
|
entailment
|
def snapshot(self):
"""Return a new library item which is a copy of this one with any dynamic behavior made static."""
display_item = self.__class__()
display_item.display_type = self.display_type
# metadata
display_item._set_persistent_property_value("title", self._get_persistent_property_value("title"))
display_item._set_persistent_property_value("caption", self._get_persistent_property_value("caption"))
display_item._set_persistent_property_value("description", self._get_persistent_property_value("description"))
display_item._set_persistent_property_value("session_id", self._get_persistent_property_value("session_id"))
display_item._set_persistent_property_value("calibration_style_id", self._get_persistent_property_value("calibration_style_id"))
display_item._set_persistent_property_value("display_properties", self._get_persistent_property_value("display_properties"))
display_item.created = self.created
for graphic in self.graphics:
display_item.add_graphic(copy.deepcopy(graphic))
for display_data_channel in self.display_data_channels:
display_item.append_display_data_channel(copy.deepcopy(display_data_channel))
# this goes after the display data channels so that the layers don't get adjusted
display_item._set_persistent_property_value("display_layers", self._get_persistent_property_value("display_layers"))
return display_item
|
Return a new library item which is a copy of this one with any dynamic behavior made static.
|
entailment
|
def increment_display_ref_count(self, amount: int=1):
"""Increment display reference count to indicate this library item is currently displayed."""
display_ref_count = self.__display_ref_count
self.__display_ref_count += amount
for display_data_channel in self.display_data_channels:
display_data_channel.increment_display_ref_count(amount)
|
Increment display reference count to indicate this library item is currently displayed.
|
entailment
|
def decrement_display_ref_count(self, amount: int=1):
"""Decrement display reference count to indicate this library item is no longer displayed."""
assert not self._closed
self.__display_ref_count -= amount
for display_data_channel in self.display_data_channels:
display_data_channel.decrement_display_ref_count(amount)
|
Decrement display reference count to indicate this library item is no longer displayed.
|
entailment
|
def remove_graphic(self, graphic: Graphics.Graphic, *, safe: bool=False) -> typing.Optional[typing.Sequence]:
"""Remove a graphic, but do it through the container, so dependencies can be tracked."""
return self.remove_model_item(self, "graphics", graphic, safe=safe)
|
Remove a graphic, but do it through the container, so dependencies can be tracked.
|
entailment
|
def dimensional_shape(self) -> typing.Optional[typing.Tuple[int, ...]]:
"""Shape of the underlying data, if only one."""
if not self.__data_and_metadata:
return None
return self.__data_and_metadata.dimensional_shape
|
Shape of the underlying data, if only one.
|
entailment
|
def view_to_intervals(self, data_and_metadata: DataAndMetadata.DataAndMetadata, intervals: typing.List[typing.Tuple[float, float]]) -> None:
"""Change the view to encompass the channels and data represented by the given intervals."""
left = None
right = None
for interval in intervals:
left = min(left, interval[0]) if left is not None else interval[0]
right = max(right, interval[1]) if right is not None else interval[1]
left = left if left is not None else 0.0
right = right if right is not None else 1.0
extra = (right - left) * 0.5
left_channel = int(max(0.0, left - extra) * data_and_metadata.data_shape[-1])
right_channel = int(min(1.0, right + extra) * data_and_metadata.data_shape[-1])
self.set_display_property("left_channel", left_channel)
self.set_display_property("right_channel", right_channel)
data_min = numpy.amin(data_and_metadata.data[..., left_channel:right_channel])
data_max = numpy.amax(data_and_metadata.data[..., left_channel:right_channel])
if data_min > 0 and data_max > 0:
self.set_display_property("y_min", 0.0)
self.set_display_property("y_max", data_max * 1.2)
elif data_min < 0 and data_max < 0:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", 0.0)
else:
self.set_display_property("y_min", data_min * 1.2)
self.set_display_property("y_max", data_max * 1.2)
|
Change the view to encompass the channels and data represented by the given intervals.
|
entailment
|
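
The channel arithmetic in `view_to_intervals` can be isolated into a small helper: the union of the intervals is padded by half its width on each side, clamped to the [0, 1] range, and scaled by the channel count of the last data dimension. A minimal standalone sketch:

def intervals_to_channels(intervals, channel_count):
    left = min(i[0] for i in intervals) if intervals else 0.0
    right = max(i[1] for i in intervals) if intervals else 1.0
    extra = (right - left) * 0.5              # pad by half the union width on each side
    left_channel = int(max(0.0, left - extra) * channel_count)
    right_channel = int(min(1.0, right + extra) * channel_count)
    return left_channel, right_channel

print(intervals_to_channels([(0.2, 0.3), (0.4, 0.5)], 1024))  # -> (51, 665)
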
def write_local_file(fp, name_bytes, writer, dt):
"""
Writes a zip file local file header structure at the current file position.
Returns data_len, crc32 for the data.
:param fp: the file pointer to which to write the header
:param name_bytes: the name of the file, as bytes
:param writer: a function taking an fp parameter to do the writing, returns crc32
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x04034b50)) # local file header
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second) // 2
fp.write(struct.pack('H', msdos_time)) # modification time (MS-DOS format)
fp.write(struct.pack('H', msdos_date)) # modification date (MS-DOS format)
crc32_pos = fp.tell()
fp.write(struct.pack('I', 0)) # crc32 placeholder
data_len_pos = fp.tell()
fp.write(struct.pack('I', 0)) # compressed length placeholder
fp.write(struct.pack('I', 0)) # uncompressed length placeholder
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(name_bytes)
data_start_pos = fp.tell()
crc32 = writer(fp)
data_end_pos = fp.tell()
data_len = data_end_pos - data_start_pos
fp.seek(crc32_pos)
fp.write(struct.pack('I', crc32)) # crc32
fp.seek(data_len_pos)
fp.write(struct.pack('I', data_len)) # compressed length
fp.write(struct.pack('I', data_len)) # uncompressed length
fp.seek(data_end_pos)
return data_len, crc32
|
Writes a zip file local file header structure at the current file position.
Returns data_len, crc32 for the data.
:param fp: the file pointer to which to write the header
:param name_bytes: the name of the file, as bytes
:param writer: a function taking an fp parameter to do the writing, returns crc32
:param dt: the datetime to write to the archive
|
entailment
|
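
The two 16-bit date/time fields written above follow the MS-DOS convention used throughout the zip format: the date packs (year - 1980), month, and day, and the time packs hour, minute, and seconds divided by two. A standalone sketch of that packing, shown here outside the header writer:

import datetime
import struct

def msdos_date_time(dt):
    msdos_date = ((dt.year - 1980) << 9) | (dt.month << 5) | dt.day
    msdos_time = (dt.hour << 11) | (dt.minute << 5) | (dt.second // 2)  # two-second resolution
    return msdos_date, msdos_time

dt = datetime.datetime(2024, 7, 15, 13, 45, 30)
d, t = msdos_date_time(dt)
print(hex(d), hex(t))            # 0x58ef 0x6daf
print(struct.pack('<HH', t, d))  # time then date, in the order the header writes them
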
def write_directory_data(fp, offset, name_bytes, data_len, crc32, dt):
"""
Write a zip file directory entry at the current file position
:param fp: the file pointer to which to write the header
:param offset: the offset of the associated local file header
:param name_bytes: the name of the file, as bytes
:param data_len: the length of data that will be written to the archive
:param crc32: the crc32 of the data to be written
:param dt: the datetime to write to the archive
"""
fp.write(struct.pack('I', 0x02014b50)) # central directory header
fp.write(struct.pack('H', 10)) # made by version (default)
fp.write(struct.pack('H', 10)) # extract version (default)
fp.write(struct.pack('H', 0)) # general purpose bits
fp.write(struct.pack('H', 0)) # compression method
msdos_date = int(dt.year - 1980) << 9 | int(dt.month) << 5 | int(dt.day)
msdos_time = int(dt.hour) << 11 | int(dt.minute) << 5 | int(dt.second) // 2
fp.write(struct.pack('H', msdos_time)) # modification time (MS-DOS format)
fp.write(struct.pack('H', msdos_date)) # modification date (MS-DOS format)
fp.write(struct.pack('I', crc32)) # crc32
fp.write(struct.pack('I', data_len)) # compressed length
fp.write(struct.pack('I', data_len)) # uncompressed length
fp.write(struct.pack('H', len(name_bytes))) # name length
fp.write(struct.pack('H', 0)) # extra length
fp.write(struct.pack('H', 0)) # comments length
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # internal file attributes
fp.write(struct.pack('I', 0)) # external file attributes
fp.write(struct.pack('I', offset)) # relative offset of file header
fp.write(name_bytes)
|
Write a zip file directory entry at the current file position
:param fp: the file pointer to which to write the header
:param offset: the offset of the associated local file header
:param name_bytes: the name of the file, as bytes
:param data_len: the length of data that will be written to the archive
:param crc32: the crc32 of the data to be written
:param dt: the datetime to write to the archive
|
entailment
|
def write_end_of_directory(fp, dir_size, dir_offset, count):
"""
Write zip file end of directory header at the current file position
:param fp: the file pointer to which to write the header
:param dir_size: the total size of the directory
:param dir_offset: the start of the first directory header
:param count: the count of files
"""
fp.write(struct.pack('I', 0x06054b50)) # end of central directory signature
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', 0)) # disk number
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('H', count)) # number of files
fp.write(struct.pack('I', dir_size)) # central directory size
fp.write(struct.pack('I', dir_offset)) # central directory offset
fp.write(struct.pack('H', 0)) # comment length
|
Write zip file end of directory header at the current file position
:param fp: the file pointer to which to write the header
:param dir_size: the total size of the directory
:param dir_offset: the start of the first directory header
:param count: the count of files
|
entailment
|
def write_zip_fp(fp, data, properties, dir_data_list=None):
"""
Write custom zip file of data and properties to fp
:param fp: the file pointer to which to write the header
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
:param dir_data_list: optional list of directory header information structures
If dir_data_list is specified, data should be None and properties should
be specified. Then the existing data structure will be left alone and only
the directory headers and end of directory header will be written.
Otherwise, if both data and properties are specified, both are written
out in full.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
assert data is not None or properties is not None
# dir_data_list has the format: local file record offset, name, data length, crc32
dir_data_list = list() if dir_data_list is None else dir_data_list
dt = datetime.datetime.now()
if data is not None:
offset_data = fp.tell()
def write_data(fp):
numpy_start_pos = fp.tell()
numpy.save(fp, data)
numpy_end_pos = fp.tell()
fp.seek(numpy_start_pos)
data_c = numpy.require(data, dtype=data.dtype, requirements=["C_CONTIGUOUS"])
header_data = fp.read((numpy_end_pos - numpy_start_pos) - data_c.nbytes) # read the header
data_crc32 = binascii.crc32(data_c.data, binascii.crc32(header_data)) & 0xFFFFFFFF
fp.seek(numpy_end_pos)
return data_crc32
data_len, crc32 = write_local_file(fp, b"data.npy", write_data, dt)
dir_data_list.append((offset_data, b"data.npy", data_len, crc32))
if properties is not None:
json_str = str()
try:
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Geometry.IntPoint) or isinstance(obj, Geometry.IntSize) or isinstance(obj, Geometry.IntRect) or isinstance(obj, Geometry.FloatPoint) or isinstance(obj, Geometry.FloatSize) or isinstance(obj, Geometry.FloatRect):
return tuple(obj)
else:
return json.JSONEncoder.default(self, obj)
json_io = io.StringIO()
json.dump(properties, json_io, cls=JSONEncoder)
json_str = json_io.getvalue()
except Exception as e:
# catch exceptions to avoid corrupt zip files
import traceback
logging.error("Exception writing zip file %s" + str(e))
traceback.print_exc()
traceback.print_stack()
def write_json(fp):
json_bytes = bytes(json_str, 'ISO-8859-1')
fp.write(json_bytes)
return binascii.crc32(json_bytes) & 0xFFFFFFFF
offset_json = fp.tell()
json_len, json_crc32 = write_local_file(fp, b"metadata.json", write_json, dt)
dir_data_list.append((offset_json, b"metadata.json", json_len, json_crc32))
dir_offset = fp.tell()
for offset, name_bytes, data_len, crc32 in dir_data_list:
write_directory_data(fp, offset, name_bytes, data_len, crc32, dt)
dir_size = fp.tell() - dir_offset
write_end_of_directory(fp, dir_size, dir_offset, len(dir_data_list))
fp.truncate()
|
Write custom zip file of data and properties to fp
:param fp: the file pointer to which to write the header
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
:param dir_data_list: optional list of directory header information structures
If dir_data_list is specified, data should be None and properties should
be specified. Then the existing data structure will be left alone and only
the directory headers and end of directory header will be written.
Otherwise, if both data and properties are specified, both are written
out in full.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
entailment
|
def write_zip(file_path, data, properties):
"""
Write custom zip file to the file path
:param file_path: the file to which to write the zip file
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
See write_zip_fp.
"""
with open(file_path, "w+b") as fp:
write_zip_fp(fp, data, properties)
|
Write custom zip file to the file path
:param file_path: the file to which to write the zip file
:param data: the data to write to the file; may be None
:param properties: the properties to write to the file; may be None
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
See write_zip_fp.
|
entailment
|
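
A hedged usage sketch for `write_zip`: write a small array plus a properties dict, then confirm the result is an ordinary stored (uncompressed) zip archive by reading it back with Python's standard `zipfile` module. The commented import path is an assumption; adjust it to wherever this module actually lives in your installation.

import os
import tempfile
import zipfile

import numpy

# from nion.swift.model.NDataHandler import write_zip  # assumed module path

path = os.path.join(tempfile.mkdtemp(), "example.ndata")
write_zip(path, numpy.arange(12).reshape(3, 4), {"title": "example", "version": 1})

with zipfile.ZipFile(path) as zf:
    print(zf.namelist())              # expected: ['data.npy', 'metadata.json']
    print(zf.read("metadata.json"))   # the JSON-encoded properties
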
def parse_zip(fp):
"""
Parse the zip file headers at fp
:param fp: the file pointer from which to parse the zip file
:return: A tuple of local files, directory headers, and end of central directory
The local files are a dictionary where the keys are the local file offsets and the
values are each a tuple consisting of the name, data position, data length, and crc32.
The directory headers are a dictionary where the keys are the names of the files
and the values are a tuple consisting of the directory header position, and the
associated local file position.
The end of central directory is a tuple consisting of the location of the end of
central directory header and the location of the first directory header.
This method will seek to location 0 of fp and leave fp at end of file.
"""
local_files = {}
dir_files = {}
eocd = None
fp.seek(0)
while True:
pos = fp.tell()
signature = struct.unpack('I', fp.read(4))[0]
if signature == 0x04034b50:
fp.seek(pos + 14)
crc32 = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 18)
data_len = struct.unpack('I', fp.read(4))[0]
fp.seek(pos + 26)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
name_bytes = fp.read(name_len)
fp.seek(extra_len, os.SEEK_CUR)
data_pos = fp.tell()
fp.seek(data_len, os.SEEK_CUR)
local_files[pos] = (name_bytes, data_pos, data_len, crc32)
elif signature == 0x02014b50:
fp.seek(pos + 28)
name_len = struct.unpack('H', fp.read(2))[0]
extra_len = struct.unpack('H', fp.read(2))[0]
comment_len = struct.unpack('H', fp.read(2))[0]
fp.seek(pos + 42)
pos2 = struct.unpack('I', fp.read(4))[0]
name_bytes = fp.read(name_len)
fp.seek(pos + 46 + name_len + extra_len + comment_len)
dir_files[name_bytes] = (pos, pos2)
elif signature == 0x06054b50:
fp.seek(pos + 16)
pos2 = struct.unpack('I', fp.read(4))[0]
eocd = (pos, pos2)
break
else:
raise IOError()
return local_files, dir_files, eocd
|
Parse the zip file headers at fp
:param fp: the file pointer from which to parse the zip file
:return: A tuple of local files, directory headers, and end of central directory
The local files are a dictionary where the keys are the local file offsets and the
values are each a tuple consisting of the name, data position, data length, and crc32.
The directory headers are a dictionary where the keys are the names of the files
and the values are a tuple consisting of the directory header position, and the
associated local file position.
The end of central directory is a tuple consisting of the location of the end of
central directory header and the location of the first directory header.
This method will seek to location 0 of fp and leave fp at end of file.
|
entailment
|
def read_data(fp, local_files, dir_files, name_bytes):
"""
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name of the data file to read, as bytes
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
fp.seek(local_files[dir_files[name_bytes][1]][1])
return numpy.load(fp)
return None
|
Read a numpy data array from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name of the data file to read, as bytes
:return: the numpy data array, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
|
entailment
|
def read_json(fp, local_files, dir_files, name_bytes):
"""
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name of the json file to read, as bytes
:return: the json properties as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
"""
if name_bytes in dir_files:
json_pos = local_files[dir_files[name_bytes][1]][1]
json_len = local_files[dir_files[name_bytes][1]][2]
fp.seek(json_pos)
json_properties = fp.read(json_len)
return json.loads(json_properties.decode("utf-8"))
return None
|
Read json properties from the zip file
:param fp: a file pointer
:param local_files: the local files structure
:param dir_files: the directory headers
:param name_bytes: the name of the json file to read, as bytes
:return: the json properties as a dictionary, if found
The file pointer will be at a location following the
local file entry after this method.
The local_files and dir_files should be passed from
the results of parse_zip.
|
entailment
|
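
A minimal read-back sketch combining `parse_zip`, `read_data`, and `read_json`; the commented import path is an assumption, and `example.ndata` is a file written as in the `write_zip` sketch above.

# from nion.swift.model.NDataHandler import parse_zip, read_data, read_json  # assumed module path

with open("example.ndata", "rb") as fp:
    local_files, dir_files, eocd = parse_zip(fp)
    properties = read_json(fp, local_files, dir_files, b"metadata.json")
    data = read_data(fp, local_files, dir_files, b"data.npy")

print(properties)
print(None if data is None else data.shape)
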
def rewrite_zip(file_path, properties):
"""
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
# check to make sure directory has two files, named data.npy and metadata.json, and that data.npy is first
# TODO: check compression, etc.
if len(dir_files) == 2 and b"data.npy" in dir_files and b"metadata.json" in dir_files and dir_files[b"data.npy"][1] == 0:
fp.seek(dir_files[b"metadata.json"][1])
dir_data_list = list()
local_file_pos = dir_files[b"data.npy"][1]
local_file = local_files[local_file_pos]
dir_data_list.append((local_file_pos, b"data.npy", local_file[2], local_file[3]))
write_zip_fp(fp, None, properties, dir_data_list)
else:
data = None
if b"data.npy" in dir_files:
fp.seek(local_files[dir_files[b"data.npy"][1]][1])
data = numpy.load(fp)
fp.seek(0)
write_zip_fp(fp, data, properties)
|
Rewrite the json properties in the zip file
:param file_path: the file path to the zip file
:param properties: the updated properties to write to the zip file
This method will attempt to keep the data file within the zip
file intact without rewriting it. However, if the data file is not the
first item in the zip file, this method will rewrite it.
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
entailment
|
def is_matching(cls, file_path):
"""
Return whether the given absolute file path is an ndata file.
"""
if file_path.endswith(".ndata") and os.path.exists(file_path):
try:
with open(file_path, "r+b") as fp:
local_files, dir_files, eocd = parse_zip(fp)
contains_data = b"data.npy" in dir_files
contains_metadata = b"metadata.json" in dir_files
file_count = contains_data + contains_metadata # use fact that True is 1, False is 0
# TODO: make sure ndata isn't compressed, or handle it
if len(dir_files) != file_count or file_count == 0:
return False
return True
except Exception as e:
logging.error("Exception parsing ndata file: %s", file_path)
logging.error(str(e))
return False
|
Return whether the given absolute file path is an ndata file.
|
entailment
|
def write_data(self, data, file_datetime):
"""
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
"""
with self.__lock:
assert data is not None
absolute_file_path = self.__file_path
#logging.debug("WRITE data file %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
properties = self.read_properties() if os.path.exists(absolute_file_path) else dict()
write_zip(absolute_file_path, data, properties)
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
Write data to the ndata file specified by reference.
:param data: the numpy array data to write
:param file_datetime: the datetime for the file
|
entailment
|
def write_properties(self, properties, file_datetime):
"""
Write properties to the ndata file specified by reference.
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("WRITE properties %s for %s", absolute_file_path, key)
make_directory_if_needed(os.path.dirname(absolute_file_path))
exists = os.path.exists(absolute_file_path)
if exists:
rewrite_zip(absolute_file_path, Utility.clean_dict(properties))
else:
write_zip(absolute_file_path, None, Utility.clean_dict(properties))
# convert to utc time.
tz_minutes = Utility.local_utcoffset_minutes(file_datetime)
timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
os.utime(absolute_file_path, (time.time(), timestamp))
|
Write properties to the ndata file specified by reference.
:param properties: the dict to write to the file
:param file_datetime: the datetime for the file
The properties param must not change during this method. Callers should
take care to ensure this does not happen.
|
entailment
|
def read_properties(self):
"""
Read properties from the ndata file.
:return: a dict of the properties
"""
with self.__lock:
absolute_file_path = self.__file_path
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
properties = read_json(fp, local_files, dir_files, b"metadata.json")
return properties
|
Read properties from the ndata file.
:return: a dict of the properties
|
entailment
|
def read_data(self):
"""
Read data from the ndata file.
:return: a numpy array of the data; may be None
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("READ data file %s", absolute_file_path)
with open(absolute_file_path, "rb") as fp:
local_files, dir_files, eocd = parse_zip(fp)
return read_data(fp, local_files, dir_files, b"data.npy")
return None
|
Read data from the ndata file.
:return: a numpy array of the data; may be None
|
entailment
|
def remove(self):
"""
Remove the ndata file.
"""
with self.__lock:
absolute_file_path = self.__file_path
#logging.debug("DELETE data file %s", absolute_file_path)
if os.path.isfile(absolute_file_path):
os.remove(absolute_file_path)
|
Remove the ndata file.
|
entailment
|
def build_menu(self, display_type_menu, document_controller, display_panel):
"""Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items switch to an empty display panel or a browser display panel.
After that, each display controller factory is given a chance to add to the menu. The display
controllers (for instance, a scan acquisition controller) may add their own menu items.
"""
dynamic_live_actions = list()
def switch_to_display_content(display_panel_type):
self.switch_to_display_content(document_controller, display_panel, display_panel_type, display_panel.display_item)
empty_action = display_type_menu.add_menu_item(_("Clear Display Panel"), functools.partial(switch_to_display_content, "empty-display-panel"))
display_type_menu.add_separator()
data_item_display_action = display_type_menu.add_menu_item(_("Display Item"), functools.partial(switch_to_display_content, "data-display-panel"))
thumbnail_browser_action = display_type_menu.add_menu_item(_("Thumbnail Browser"), functools.partial(switch_to_display_content, "thumbnail-browser-display-panel"))
grid_browser_action = display_type_menu.add_menu_item(_("Grid Browser"), functools.partial(switch_to_display_content, "browser-display-panel"))
display_type_menu.add_separator()
display_panel_type = display_panel.display_panel_type
empty_action.checked = display_panel_type == "empty" and display_panel.display_panel_controller is None
data_item_display_action.checked = display_panel_type == "data_item"
thumbnail_browser_action.checked = display_panel_type == "horizontal"
grid_browser_action.checked = display_panel_type == "grid"
dynamic_live_actions.append(empty_action)
dynamic_live_actions.append(data_item_display_action)
dynamic_live_actions.append(thumbnail_browser_action)
dynamic_live_actions.append(grid_browser_action)
for factory in self.__display_controller_factories.values():
dynamic_live_actions.extend(factory.build_menu(display_type_menu, display_panel))
return dynamic_live_actions
|
Build the dynamic menu for the selected display panel.
The user accesses this menu by right-clicking on the display panel.
The basic menu items switch to an empty display panel or a browser display panel.
After that, each display controller factory is given a chance to add to the menu. The display
controllers (for instance, a scan acquisition controller) may add their own menu items.
|
entailment
|
def persistent_object_context_changed(self):
""" Override from PersistentObject. """
super().persistent_object_context_changed()
def source_registered(source):
self.__source = source
def source_unregistered(source=None):
pass
def reference_registered(property_name, reference):
self.__referenced_objects[property_name] = reference
def reference_unregistered(property_name, reference=None):
pass
if self.persistent_object_context:
self.persistent_object_context.subscribe(self.source_uuid, source_registered, source_unregistered)
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
self.persistent_object_context.subscribe(uuid.UUID(value["uuid"]), functools.partial(reference_registered, property_name), functools.partial(reference_unregistered, property_name))
else:
source_unregistered()
for property_name, value in self.__properties.items():
if isinstance(value, dict) and value.get("type") in {"data_item", "display_item", "data_source", "graphic", "structure"} and "uuid" in value:
reference_unregistered(property_name)
|
Override from PersistentObject.
|
entailment
|
def ativar_sat(self, tipo_certificado, cnpj, codigo_uf):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: Uma resposta SAT especializada em ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
"""
resp = self._http_post('ativarsat',
tipo_certificado=tipo_certificado,
cnpj=cnpj,
codigo_uf=codigo_uf)
conteudo = resp.json()
return RespostaAtivarSAT.analisar(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.ativar_sat`.
:return: A SAT response specialized for ``AtivarSAT``.
:rtype: satcfe.resposta.ativarsat.RespostaAtivarSAT
|
entailment
|
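
These hub-client methods all share one shape: POST to a named endpoint, decode the JSON body, and hand the `retorno` field to the matching response parser. The sketch below shows that pattern with plain `requests`; the base URL, payload encoding, and endpoint layout are assumptions for illustration, not the actual SATHub wire format.

import requests

BASE_URL = "http://sathub.example.local/hub/v1"  # assumed address of a SAT hub service

def http_post(endpoint, **payload):
    resp = requests.post("{}/{}".format(BASE_URL, endpoint), json=payload, timeout=30)
    resp.raise_for_status()
    return resp.json()

conteudo = http_post("consultarsat")
retorno = conteudo.get("retorno")  # raw SAT return string, to be parsed by a Resposta* class
print(retorno)
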
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('comunicarcertificadoicpbrasil',
certificado=certificado)
conteudo = resp.json()
return RespostaSAT.comunicar_certificado_icpbrasil(
conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def enviar_dados_venda(self, dados_venda):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: Uma resposta SAT especializada em ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
"""
resp = self._http_post('enviardadosvenda',
dados_venda=dados_venda.documento())
conteudo = resp.json()
return RespostaEnviarDadosVenda.analisar(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.enviar_dados_venda`.
:return: A SAT response specialized for ``EnviarDadosVenda``.
:rtype: satcfe.resposta.enviardadosvenda.RespostaEnviarDadosVenda
|
entailment
|
def cancelar_ultima_venda(self, chave_cfe, dados_cancelamento):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: Uma resposta SAT especializada em ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
"""
resp = self._http_post('cancelarultimavenda',
chave_cfe=chave_cfe,
dados_cancelamento=dados_cancelamento.documento())
conteudo = resp.json()
return RespostaCancelarUltimaVenda.analisar(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.cancelar_ultima_venda`.
:return: A SAT response specialized for ``CancelarUltimaVenda``.
:rtype: satcfe.resposta.cancelarultimavenda.RespostaCancelarUltimaVenda
|
entailment
|
def consultar_sat(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarsat')
conteudo = resp.json()
return RespostaSAT.consultar_sat(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_sat`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def consultar_status_operacional(self):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: Uma resposta SAT especializada em ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
"""
resp = self._http_post('consultarstatusoperacional')
conteudo = resp.json()
return RespostaConsultarStatusOperacional.analisar(
conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_status_operacional`.
:return: A SAT response specialized for ``ConsultarStatusOperacional``.
:rtype: satcfe.resposta.consultarstatusoperacional.RespostaConsultarStatusOperacional
|
entailment
|
def consultar_numero_sessao(self, numero_sessao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: Uma resposta SAT que irá depender da sessão consultada.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('consultarnumerosessao',
numero_sessao=numero_sessao)
conteudo = resp.json()
return RespostaConsultarNumeroSessao.analisar(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.consultar_numero_sessao`.
:return: A SAT response that will depend on the session queried.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('configurarinterfacederede',
configuracao=configuracao.documento())
conteudo = resp.json()
return RespostaSAT.configurar_interface_de_rede(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|
def associar_assinatura(self, sequencia_cnpj, assinatura_ac):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('associarassinatura',
sequencia_cnpj=sequencia_cnpj, assinatura_ac=assinatura_ac)
# (!) response based on the statutory wording in effect until 2016-12-31
conteudo = resp.json()
return RespostaSAT.associar_assinatura(conteudo.get('retorno'))
|
Overrides :meth:`~satcfe.base.FuncoesSAT.associar_assinatura`.
:return: A standard SAT response.
:rtype: satcfe.resposta.padrao.RespostaSAT
|
entailment
|