sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
|---|---|---|
def bootstrap_main(args):
    """
    Main function explicitly called from the C++ code.
    Return the main application object.

    Returns a (app, error_id) pair: error_id is None on success, or a short
    string ("python36", "main") identifying the failure for the C++ host.
    """
    # Require Python 3.6 or later.
    if sys.version_info.major != 3 or sys.version_info.minor < 6:
        return None, "python36"
    main_fn = load_module_as_package("nionui_app.nionswift")
    if not main_fn:
        return None, "main"
    return main_fn(["nionui_app.nionswift"] + args, {"pyqt": None}), None
|
Main function explicitly called from the C++ code.
Return the main application object.
|
entailment
|
def _migrate_library(workspace_dir: pathlib.Path, do_logging: bool=True) -> pathlib.Path:
""" Migrate library to latest version. """
library_path_11 = workspace_dir / "Nion Swift Workspace.nslib"
library_path_12 = workspace_dir / "Nion Swift Library 12.nslib"
library_path_13 = workspace_dir / "Nion Swift Library 13.nslib"
library_paths = (library_path_11, library_path_12)
library_path_latest = library_path_13
if not os.path.exists(library_path_latest):
for library_path in reversed(library_paths):
if os.path.exists(library_path):
if do_logging:
logging.info("Migrating library: %s -> %s", library_path, library_path_latest)
shutil.copyfile(library_path, library_path_latest)
break
return library_path_latest
|
Migrate library to latest version.
|
entailment
|
def status(self):
    """Friendly name for the ``ESTADO_OPERACAO`` field, according to the
    "Tabela de Informações do Status do SAT".
    """
    # Return the label paired with the first matching state value.
    matches = (rotulo for valor, rotulo in ESTADOS_OPERACAO
            if valor == self.ESTADO_OPERACAO)
    return next(matches, u'(desconhecido: {})'.format(self.ESTADO_OPERACAO))
|
Nome amigável do campo ``ESTADO_OPERACAO``, conforme a "Tabela de
Informações do Status do SAT".
|
entailment
|
def analisar(retorno):
    """Build a :class:`RespostaConsultarStatusOperacional` from the given
    return value.

    :param unicode retorno: Return value of the ``ConsultarStatusOperacional``
        SAT function.
    """
    # Parse the raw response, coercing each positional field with the paired
    # conversion function.
    resposta = analisar_retorno(forcar_unicode(retorno),
            funcao='ConsultarStatusOperacional',
            classe_resposta=RespostaConsultarStatusOperacional,
            campos=RespostaSAT.CAMPOS + (
                    ('NSERIE', as_clean_unicode),
                    ('TIPO_LAN', as_clean_unicode),
                    ('LAN_IP', normalizar_ip),
                    ('LAN_MAC', unicode),
                    ('LAN_MASK', normalizar_ip),
                    ('LAN_GW', normalizar_ip),
                    ('LAN_DNS_1', normalizar_ip),
                    ('LAN_DNS_2', normalizar_ip),
                    ('STATUS_LAN', as_clean_unicode),
                    ('NIVEL_BATERIA', as_clean_unicode),
                    ('MT_TOTAL', as_clean_unicode),
                    ('MT_USADA', as_clean_unicode),
                    ('DH_ATUAL', as_datetime),
                    ('VER_SB', as_clean_unicode),
                    ('VER_LAYOUT', as_clean_unicode),
                    ('ULTIMO_CF_E_SAT', as_clean_unicode),
                    ('LISTA_INICIAL', as_clean_unicode),
                    ('LISTA_FINAL', as_clean_unicode),
                    ('DH_CFE', as_datetime_or_none),
                    ('DH_ULTIMA', as_datetime),
                    ('CERT_EMISSAO', as_date),
                    ('CERT_VENCIMENTO', as_date),
                    ('ESTADO_OPERACAO', int),
                ),
            campos_alternativos=[
                    # if parsing fails, only the 5 standard fields will remain
                    RespostaSAT.CAMPOS,
                ]
        )
    # 'EEEEE' == '10000' means success; any other code raises.
    if resposta.EEEEE not in ('10000',):
        raise ExcecaoRespostaSAT(resposta)
    return resposta
|
Constrói uma :class:`RespostaConsultarStatusOperacional` a partir do
retorno informado.
:param unicode retorno: Retorno da função ``ConsultarStatusOperacional``.
|
entailment
|
def merge_input_csv_forecast_json(input_csv_file, forecast_json_path, condition_models, dist_models):
    """
    Reads forecasts from json files and merges them with the input data from the step csv files.

    Args:
        input_csv_file: Name of the input data csv file being processed. The base name
            encodes metadata as ``<a>_<b>_<ens_name>_<ens_member parts...>_<run_date>.csv``.
        forecast_json_path: Path to the forecast json files toplevel directory
            (must end with "/"; "<run_date>/<ens_member>/" is appended to it).
        condition_models: List of models used to forecast hail or no hail
        dist_models: List of models used to forecast the hail size distribution
    Returns:
        Tuple of (merged DataFrame indexed by Step_ID, ensemble name, ensemble member).
    """
    try:
        # Decompose the csv file name into run date, ensemble member, and ensemble name.
        run_date = input_csv_file[:-4].split("_")[-1]
        ens_member = "_".join(input_csv_file.split("/")[-1][:-4].split("_")[3:-1])
        ens_name = input_csv_file.split("/")[-1].split("_")[2]
        input_data = pd.read_csv(input_csv_file, index_col="Step_ID")
        full_json_path = forecast_json_path + "{0}/{1}/".format(run_date, ens_member)
        track_ids = sorted(input_data["Track_ID"].unique())
        # Build prediction column names: one "_Condition" column per condition model,
        # and one column per gamma parameter per distribution model.
        model_pred_cols = []
        condition_models_ns = []
        dist_models_ns = []
        gamma_params = ["Shape", "Location", "Scale"]
        for condition_model in condition_models:
            condition_models_ns.append(condition_model.replace(" ", "-"))
            model_pred_cols.append(condition_models_ns[-1] + "_Condition")
        for dist_model in dist_models:
            dist_models_ns.append(dist_model.replace(" ", "-"))
            for param in gamma_params:
                model_pred_cols.append(dist_models_ns[-1] + "_" + param)
        pred_data = pd.DataFrame(index=input_data.index, columns=model_pred_cols,
                                 dtype=float)
        for track_id in track_ids:
            track_id_num = track_id.split("_")[-1]
            json_filename = full_json_path + "{0}_{1}_{2}_model_track_{3}.json".format(ens_name,
                                                                                       run_date,
                                                                                       ens_member,
                                                                                       track_id_num)
            # Context manager ensures the file is closed even if json parsing fails
            # (the original leaked the handle on a parse error).
            with open(json_filename) as json_file:
                json_data = json.load(json_file)
            for s, step in enumerate(json_data["features"]):
                # Step_IDs are the track id plus a zero-padded step counter.
                step_id = track_id + "_{0:02d}".format(s)
                for cond_model in condition_models_ns:
                    pred_data.loc[step_id, cond_model + "_Condition"] = step["properties"]["condition_" + cond_model]
                for dist_model in dist_models_ns:
                    pred_data.loc[step_id, [dist_model + "_" + p
                                            for p in gamma_params]] = step["properties"]["dist_" + dist_model]
        out_data = input_data.merge(pred_data, left_index=True, right_index=True)
        return out_data, ens_name, ens_member
    except Exception:
        # Log the full traceback (useful in worker processes) and re-raise with
        # the original traceback intact (bare raise, not `raise e`).
        print(traceback.format_exc())
        raise
|
Reads forecasts from json files and merges them with the input data from the step csv files.
Args:
input_csv_file: Name of the input data csv file being processed
forecast_json_path: Path to the forecast json files toplevel directory
condition_models: List of models used to forecast hail or no hail
dist_models: List of models used to forecast the hail size distribution
Returns:
|
entailment
|
def mark_data_dirty(self):
    """ Called from item to indicate its data or metadata has changed."""
    # Record the dirty state in the persistent cache first, then make sure the
    # in-memory cache state has been loaded before flipping the in-memory flag.
    self.__cache.set_cached_value_dirty(self.__display_item, self.__cache_property_name)
    self.__initialize_cache()
    self.__cached_value_dirty = True
|
Called from item to indicate its data or metadata has changed.
|
entailment
|
def __initialize_cache(self):
    """Initialize the cache values (cache values are used for optimization)."""
    # __cached_value_dirty is None until the first initialization; after that
    # it is a bool and this method becomes a no-op.
    if self.__cached_value_dirty is None:
        self.__cached_value_dirty = self.__cache.is_cached_value_dirty(self.__display_item, self.__cache_property_name)
        self.__cached_value = self.__cache.get_cached_value(self.__display_item, self.__cache_property_name)
|
Initialize the cache values (cache values are used for optimization).
|
entailment
|
def recompute_if_necessary(self, ui):
    """Recompute the data on a thread, if necessary.
    If the data has recently been computed, this call will be rescheduled for the future.
    If the data is currently being computed, it does nothing."""
    self.__initialize_cache()
    if self.__cached_value_dirty:
        # atomically test-and-set the recomputing flag so at most one
        # recompute thread is launched at a time.
        with self.__is_recomputing_lock:
            is_recomputing = self.__is_recomputing
            self.__is_recomputing = True
        if is_recomputing:
            # another thread is already recomputing; nothing to do.
            pass
        else:
            # the only way to get here is if we're not currently computing
            # this has the side effect of limiting the number of threads that
            # are sleeping.
            def recompute():
                try:
                    if self.__recompute_thread_cancel.wait(0.01):  # helps tests run faster
                        return
                    # throttle: if the cached value was produced less than
                    # minimum_time ago, wait out the remainder (abort if cancelled).
                    minimum_time = 0.5
                    current_time = time.time()
                    if current_time < self.__cached_value_time + minimum_time:
                        if self.__recompute_thread_cancel.wait(self.__cached_value_time + minimum_time - current_time):
                            return
                    self.recompute_data(ui)
                finally:
                    # always clear the flags, even if recompute_data raised.
                    self.__is_recomputing = False
                    self.__recompute_thread = None
            with self.__is_recomputing_lock:
                self.__recompute_thread = threading.Thread(target=recompute)
                self.__recompute_thread.start()
|
Recompute the data on a thread, if necessary.
If the data has recently been computed, this call will be rescheduled for the future.
If the data is currently being computed, it does nothing.
|
entailment
|
def recompute_data(self, ui):
    """Compute the data associated with this processor.
    This method is thread safe and may take a long time to return. It should not be called from
    the UI thread. Upon return, the results will be calculated with the latest data available
    and the cache will not be marked dirty.
    """
    self.__initialize_cache()
    with self.__recompute_lock:
        if self.__cached_value_dirty:
            try:
                calculated_data = self.get_calculated_data(ui)
            except Exception as e:
                # print both the exception and the call stack for diagnosis,
                # then propagate to the caller.
                import traceback
                traceback.print_exc()
                traceback.print_stack()
                raise
            self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
            self.__cached_value = calculated_data
            self.__cached_value_dirty = False
            self.__cached_value_time = time.time()
        else:
            calculated_data = None
        if calculated_data is None:
            calculated_data = self.get_default_data()
            if calculated_data is not None:
                # if the default is not None, treat is as valid cached data
                self.__cache.set_cached_value(self.__display_item, self.__cache_property_name, calculated_data)
                self.__cached_value = calculated_data
                self.__cached_value_dirty = False
                self.__cached_value_time = time.time()
            else:
                # otherwise remove everything from the cache
                self.__cache.remove_cached_value(self.__display_item, self.__cache_property_name)
                self.__cached_value = None
                self.__cached_value_dirty = None
                self.__cached_value_time = 0
        # deliberately drop the lock around the callback so the callback does
        # not run while holding __recompute_lock, then re-take it so the
        # enclosing `with` can exit cleanly.
        # NOTE(review): if on_thumbnail_updated raises, the lock is not
        # re-acquired and the `with` exit will fail — confirm intended.
        self.__recompute_lock.release()
        if callable(self.on_thumbnail_updated):
            self.on_thumbnail_updated()
        self.__recompute_lock.acquire()
|
Compute the data associated with this processor.
This method is thread safe and may take a long time to return. It should not be called from
the UI thread. Upon return, the results will be calculated with the latest data available
and the cache will not be marked dirty.
|
entailment
|
def thumbnail_source_for_display_item(self, ui, display_item: DisplayItem.DisplayItem) -> ThumbnailSource:
    """Returned ThumbnailSource must be closed.

    Returns the shared ThumbnailSource for display_item, creating it on first
    request. The caller receives an extra reference (add_ref) and must release it.
    """
    with self.__lock:
        thumbnail_source = self.__thumbnail_sources.get(display_item)
        if not thumbnail_source:
            thumbnail_source = ThumbnailSource(ui, display_item)
            self.__thumbnail_sources[display_item] = thumbnail_source
            # when the source is finally deleted, drop it from the registry
            # so a later request creates a fresh one.
            def will_delete(thumbnail_source):
                del self.__thumbnail_sources[thumbnail_source._display_item]
            thumbnail_source._on_will_delete = will_delete
        else:
            # a cached source must have been created with the same ui.
            assert thumbnail_source._ui == ui
        return thumbnail_source.add_ref()
|
Returned ThumbnailSource must be closed.
|
entailment
|
def load_plug_ins(app, root_dir):
    """Load plug-ins.

    Searches PlugIns sub-directories of root_dir, the UI data location, and the
    user's Nion/Swift documents folder, plus the nionswift_plugin namespace
    package. Each candidate's manifest is validated; plug-ins whose 'requires'
    entries are not yet satisfied are deferred to a later round. Loading stops
    when a full round makes no progress; remaining plug-ins are reported as
    not loaded.
    """
    global extensions
    ui = app.ui
    # a list of directories in which sub-directories PlugIns will be searched.
    subdirectories = []
    # the default location is where the directory main packages are located.
    if root_dir:
        subdirectories.append(root_dir)
    # also search the default data location; create directory there if it doesn't exist to make it easier for user.
    # default data location will be application specific.
    data_location = ui.get_data_location()
    if data_location is not None:
        subdirectories.append(data_location)
        # create directories here if they don't exist
        plugins_dir = os.path.abspath(os.path.join(data_location, "PlugIns"))
        if not os.path.exists(plugins_dir):
            logging.info("Creating plug-ins directory %s", plugins_dir)
            os.makedirs(plugins_dir)
    # search the Nion/Swift subdirectory of the default document location too,
    # but don't create directories here - avoid polluting user visible directories.
    document_location = ui.get_document_location()
    if document_location is not None:
        subdirectories.append(os.path.join(document_location, "Nion", "Swift"))
        # do not create them in documents if they don't exist. this location is optional.
    # build a list of directories that will be loaded as plug-ins.
    PlugInDir = collections.namedtuple("PlugInDir", ["directory", "relative_path"])
    plugin_dirs = list()
    # track directories that have already been searched.
    seen_plugin_dirs = list()
    # for each subdirectory, look in PlugIns for sub-directories that represent the plug-ins.
    for subdirectory in subdirectories:
        plugins_dir = os.path.abspath(os.path.join(subdirectory, "PlugIns"))
        if os.path.exists(plugins_dir) and not plugins_dir in seen_plugin_dirs:
            logging.info("Loading plug-ins from %s", plugins_dir)
            # add the PlugIns directory to the system import path.
            sys.path.append(plugins_dir)
            # now build a list of sub-directories representing plug-ins within plugins_dir.
            sorted_relative_paths = sorted([d for d in os.listdir(plugins_dir) if os.path.isdir(os.path.join(plugins_dir, d))])
            plugin_dirs.extend([PlugInDir(plugins_dir, sorted_relative_path) for sorted_relative_path in sorted_relative_paths])
            # mark plugins_dir as 'seen' to avoid search it twice.
            seen_plugin_dirs.append(plugins_dir)
        else:
            logging.info("NOT Loading plug-ins from %s (missing)", plugins_dir)
    # version_map: identifier -> version for plug-ins already loaded, used to
    # check 'requires' entries. module_exists_map caches find_spec results.
    version_map = dict()
    module_exists_map = dict()
    plugin_adapters = list()
    # namespace-package plug-ins first, then directory-based plug-ins.
    import nionswift_plugin
    for module_info in pkgutil.iter_modules(nionswift_plugin.__path__):
        plugin_adapters.append(ModuleAdapter(nionswift_plugin.__name__, module_info))
    for directory, relative_path in plugin_dirs:
        plugin_adapters.append(PlugInAdapter(directory, relative_path))
    progress = True
    while progress:
        progress = False
        plugin_adapters_copy = copy.deepcopy(plugin_adapters)
        plugin_adapters = list()
        for plugin_adapter in plugin_adapters_copy:
            manifest_path = plugin_adapter.manifest_path
            manifest = plugin_adapter.manifest
            if manifest:
                # validate the required manifest fields.
                manifest_valid = True
                if not "name" in manifest:
                    logging.info("Invalid manifest (missing 'name'): %s", manifest_path)
                    manifest_valid = False
                if not "identifier" in manifest:
                    logging.info("Invalid manifest (missing 'identifier'): %s", manifest_path)
                    manifest_valid = False
                # NOTE(review): non-raw string regex relies on "\-" passing
                # through unchanged; consider an r"" literal to silence
                # invalid-escape warnings.
                if "identifier" in manifest and not re.match("[_\-a-zA-Z][_\-a-zA-Z0-9.]*$", manifest["identifier"]):
                    logging.info("Invalid manifest (invalid 'identifier': '%s'): %s", manifest["identifier"], manifest_path)
                    manifest_valid = False
                if not "version" in manifest:
                    logging.info("Invalid manifest (missing 'version'): %s", manifest_path)
                    manifest_valid = False
                if "requires" in manifest and not isinstance(manifest["requires"], list):
                    logging.info("Invalid manifest ('requires' not a list): %s", manifest_path)
                    manifest_valid = False
                if not manifest_valid:
                    continue
                # every module listed in 'modules' must be importable.
                for module in manifest.get("modules", list()):
                    if module in module_exists_map:
                        module_exists = module_exists_map.get(module)
                    else:
                        module_exists = importlib.util.find_spec(module) is not None
                        module_exists_map[module] = module_exists
                    if not module_exists:
                        logging.info("Plug-in '" + plugin_adapter.module_name + "' NOT loaded (" + plugin_adapter.module_path + ").")
                        logging.info("Cannot satisfy requirement (%s): %s", module, manifest_path)
                        manifest_valid = False
                        break
                # each 'requires' entry must be "<identifier> ~= <version>" and
                # compatible with the already-loaded plug-in's version.
                for requirement in manifest.get("requires", list()):
                    # TODO: see https://packaging.pypa.io/en/latest/
                    requirement_components = requirement.split()
                    if len(requirement_components) != 3 or requirement_components[1] != "~=":
                        logging.info("Invalid manifest (requirement '%s' invalid): %s", requirement, manifest_path)
                        manifest_valid = False
                        break
                    identifier, operator, version_specifier = requirement_components[0], requirement_components[1], requirement_components[2]
                    if identifier in version_map:
                        if Utility.compare_versions("~" + version_specifier, version_map[identifier]) != 0:
                            logging.info("Plug-in '" + plugin_adapter.module_name + "' NOT loaded (" + plugin_adapter.module_path + ").")
                            logging.info("Cannot satisfy requirement (%s): %s", requirement, manifest_path)
                            manifest_valid = False
                            break
                    else:
                        # requirements not loaded yet; add back to plugin_adapters, but don't mark progress since nothing was loaded.
                        logging.info("Plug-in '" + plugin_adapter.module_name + "' delayed (%s) (" + plugin_adapter.module_path + ").", requirement)
                        plugin_adapters.append(plugin_adapter)
                        manifest_valid = False
                        break
                if not manifest_valid:
                    continue
                version_map[manifest["identifier"]] = manifest["version"]
            # read the manifests, if any
            # repeat loop of plug-ins until no plug-ins left in the list
            # if all dependencies satisfied for a plug-in, load it
            # otherwise defer until next round
            # stop if no plug-ins loaded in the round
            # count on the user to have correct dependencies
            module = plugin_adapter.load()
            if module:
                __modules.append(module)
                progress = True
    # anything left over could not have its requirements satisfied.
    for plugin_adapter in plugin_adapters:
        logging.info("Plug-in '" + plugin_adapter.module_name + "' NOT loaded (requirements) (" + plugin_adapter.module_path + ").")
    notify_modules("run")
|
Load plug-ins.
|
entailment
|
def getMyID(self,gist_name):
    '''
    Getting gistID of a gist in order to make the workflow
    easy and uninterrupted.

    :param gist_name: file name to look for among the user's gists.
    :return: the id of the first gist containing a file with that name,
        or 0 when no gist matches.
    :raises Exception: when the gist listing request does not return 200.
    '''
    r = requests.get(
        '%s'%BASE_URL+'/users/%s/gists' % self.user,
        headers=self.gist.header
    )
    if (r.status_code == 200):
        # Parse the payload once instead of re-parsing r.text/r.json() per gist.
        gists = r.json()
        for gist in gists:
            # 'files' maps file names to file metadata dicts. Using
            # dict.values() instead of the Python-2-only iteritems() keeps
            # this working on Python 3.
            for file_info in gist['files'].values():
                if str(file_info['filename']) == str(gist_name):
                    return gist['id']
        return 0
    raise Exception('Username not found')
|
Getting gistID of a gist in order to make the workflow
easy and uninterrupted.
|
entailment
|
def close(self):
    """Close the document controller.
    This method must be called to shut down the document controller. There are several
    paths by which it can be called, though.
    * User quits application via menu item. The menu item will call back to Application.exit which will close each
    document controller by calling this method.
    * User quits application using dock menu item. The Qt application will call aboutToClose in the document windows
    * User closes document window via menu item.
    * User closes document window via close box.
    The main concept of closing is that it is always triggered by the document window closing. This can be initiated
    from within Python by calling request_close on the document window. When the window closes, either by explicit request
    or by the user clicking a close box, it will invoke the about_to_close method on the document window. At this point,
    the window would still be open, so the about_to_close message can be used to tell the document controller to save anything
    it needs to save and prepare for closing.
    """
    # guard against double-close.
    assert self.__closed == False
    self.__closed = True
    self.finish_periodic()  # required to finish periodic operations during tests
    # dialogs
    for weak_dialog in self.__dialogs:
        dialog = weak_dialog()
        if dialog:
            try:
                dialog.request_close()
            except Exception as e:
                # best effort: a failing dialog must not abort window teardown.
                pass
    # menus: drop all references so they can be released.
    self._file_menu = None
    self._edit_menu = None
    self._processing_menu = None
    self._view_menu = None
    self._window_menu = None
    self._help_menu = None
    self._library_menu = None
    self._processing_arithmetic_menu = None
    self._processing_reduce_menu = None
    self._processing_transform_menu = None
    self._processing_filter_menu = None
    self._processing_fourier_menu = None
    self._processing_graphics_menu = None
    self._processing_sequence_menu = None
    self._processing_redimension_menu = None
    self._display_type_menu = None
    # close owned controllers/models in reverse dependency order.
    if self.__workspace_controller:
        self.__workspace_controller.close()
        self.__workspace_controller = None
    self.__call_soon_event_listener.close()
    self.__call_soon_event_listener = None
    self.__filtered_display_items_model.close()
    self.__filtered_display_items_model = None
    self.filter_controller.close()
    self.filter_controller = None
    self.__display_items_model.close()
    self.__display_items_model = None
    # document_model may be shared between several DocumentControllers, so use reference counting
    # to determine when to close it.
    self.document_model.remove_ref()
    self.document_model = None
    self.did_close_event.fire(self)
    self.did_close_event = None
    super().close()
|
Close the document controller.
This method must be called to shut down the document controller. There are several
paths by which it can be called, though.
* User quits application via menu item. The menu item will call back to Application.exit which will close each
document controller by calling this method.
* User quits application using dock menu item. The Qt application will call aboutToClose in the document windows
* User closes document window via menu item.
* User closes document window via close box.
The main concept of closing is that it is always triggered by the document window closing. This can be initiated
from within Python by calling request_close on the document window. When the window closes, either by explicit request
or by the user clicking a close box, it will invoke the about_to_close method on the document window. At this point,
the window would still be open, so the about_to_close message can be used to tell the document controller to save anything
it needs to save and prepare for closing.
|
entailment
|
def add_periodic(self, interval: float, listener_fn):
    """Add a listener function and return listener token. Token can be closed or deleted to unlisten."""
    class PeriodicListener:
        def __init__(self, interval: float, listener_fn):
            self.interval = interval
            self.__listener_fn = listener_fn
            # the call function is very performance critical; make it fast by using a property
            # instead of a logic statement each time.
            if callable(listener_fn):
                self.call = self.__listener_fn
            else:
                def void(*args, **kwargs):
                    pass
                self.call = void
            self.next_scheduled_time = time.time() + interval
        def close(self):
            # replace call with a no-op so any pending invocation does nothing.
            self.__listener_fn = None
            def void(*args, **kwargs):
                pass
            self.call = void
    listener = PeriodicListener(interval, listener_fn)
    # only a weak reference is kept here; when the caller drops the token the
    # weakref callback removes it from the listener list automatically.
    def remove_listener(weak_listener):
        with self.__weak_periodic_listeners_mutex:
            self.__weak_periodic_listeners.remove(weak_listener)
    weak_listener = weakref.ref(listener, remove_listener)
    with self.__weak_periodic_listeners_mutex:
        self.__weak_periodic_listeners.append(weak_listener)
    return listener
|
Add a listener function and return listener token. Token can be closed or deleted to unlisten.
|
entailment
|
def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]) -> None:
    """Update the data item model with a new container, filter, and sorting.
    This is called when the data item model is created or when the user changes
    the data group or sorting settings.
    """
    with display_items_model.changes():  # change filter and sort together
        if data_group is not None:
            # a data group shows its own contents, unfiltered and unsorted.
            display_items_model.container = data_group
            display_items_model.filter = ListModel.Filter(True)
            display_items_model.sort_key = None
            display_items_model.filter_id = None
        elif filter_id == "latest-session":
            # only items from the current session, newest first.
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.EqFilter("session_id", self.document_model.session_id)
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        elif filter_id == "temporary":
            # everything that is not categorized as persistent, newest first.
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.NotEqFilter("category", "persistent")
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        elif filter_id == "none":  # not intended to be used directly
            # a filter that matches nothing.
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.Filter(False)
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = filter_id
        else:  # "all"
            # persistent items only, newest first; filter_id deliberately None.
            display_items_model.container = self.document_model
            display_items_model.filter = ListModel.EqFilter("category", "persistent")
            display_items_model.sort_key = DataItem.sort_by_date_key
            display_items_model.sort_reverse = True
            display_items_model.filter_id = None
|
Update the data item model with a new container, filter, and sorting.
This is called when the data item model is created or when the user changes
the data group or sorting settings.
|
entailment
|
def focused_data_item(self) -> typing.Optional[DataItem.DataItem]:
    """Return the data item backing the display item with keyboard focus, or None."""
    focused = self.__focused_display_item
    if not focused:
        return None
    return focused.data_item
|
Return the data item with keyboard focus.
|
entailment
|
def selected_display_item(self) -> typing.Optional[DisplayItem.DisplayItem]:
    """Return the selected display item.

    The selected display item is the display item that has keyboard focus in
    the data panel, or failing that, in the selected display panel.
    """
    # prefer the focused data browser item.
    display_item = self.focused_display_item
    if display_item:
        return display_item
    # fall back to the selected display panel, if there is one.
    panel = self.selected_display_panel
    return panel.display_item if panel else None
|
Return the selected display item.
The selected display item is the display item that has keyboard focus in the data panel or a display panel.
|
entailment
|
def _get_two_data_sources(self):
    """Get two sensible data sources, which may be the same.

    Returns a pair of (display_item, crop_graphic) tuples, or None when
    nothing is selected.
    """
    selected_display_items = self.selected_display_items
    if len(selected_display_items) < 2:
        # fewer than two selected: fall back to the single selected display item.
        selected_display_items = list()
        display_item = self.selected_display_item
        if display_item:
            selected_display_items.append(display_item)
    if len(selected_display_items) == 1:
        display_item = selected_display_items[0]
        data_item = display_item.data_item if display_item else None
        if display_item and len(display_item.graphic_selection.indexes) == 2:
            # exactly two graphics selected on one display: the anchor graphic
            # and the other one can serve as the two crops.
            index1 = display_item.graphic_selection.anchor_index
            index2 = list(display_item.graphic_selection.indexes.difference({index1}))[0]
            graphic1 = display_item.graphics[index1]
            graphic2 = display_item.graphics[index2]
            if data_item:
                # the graphics must match the data dimensionality: intervals
                # for 1d data, rectangles for 2d data.
                if data_item.is_datum_1d and isinstance(graphic1, Graphics.IntervalGraphic) and isinstance(graphic2, Graphics.IntervalGraphic):
                    crop_graphic1 = graphic1
                    crop_graphic2 = graphic2
                elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic):
                    crop_graphic1 = graphic1
                    crop_graphic2 = graphic2
                else:
                    crop_graphic1 = self.__get_crop_graphic(display_item)
                    crop_graphic2 = crop_graphic1
            else:
                crop_graphic1 = self.__get_crop_graphic(display_item)
                crop_graphic2 = crop_graphic1
        else:
            crop_graphic1 = self.__get_crop_graphic(display_item)
            crop_graphic2 = crop_graphic1
        return (display_item, crop_graphic1), (display_item, crop_graphic2)
    if len(selected_display_items) == 2:
        # two display items selected: pair each with its own crop graphic.
        display_item1 = selected_display_items[0]
        crop_graphic1 = self.__get_crop_graphic(display_item1)
        display_item2 = selected_display_items[1]
        crop_graphic2 = self.__get_crop_graphic(display_item2)
        return (display_item1, crop_graphic1), (display_item2, crop_graphic2)
    # nothing selected at all.
    return None
|
Get two sensible data sources, which may be the same.
|
entailment
|
def calculate_origin_and_size(canvas_size, data_shape, image_canvas_mode, image_zoom, image_position) -> typing.Tuple[typing.Any, typing.Any]:
    """Calculate origin and size for canvas size, data shape, and image display parameters.

    Returns (origin, size) in canvas coordinates (y, x order); both are None
    when data_shape is None.
    """
    if data_shape is None:
        return None, None

    def centered(size):
        # origin that centers `size` within the canvas.
        return (canvas_size[0] * 0.5 - size[0] * 0.5, canvas_size[1] * 0.5 - size[1] * 0.5)

    if image_canvas_mode == "fit":
        # the image exactly fills the canvas area.
        return (0, 0), canvas_size
    if image_canvas_mode == "fill":
        # scale so the smaller relative dimension fills the canvas, centered.
        if float(data_shape[0]) / canvas_size[0] < float(data_shape[1]) / canvas_size[1]:
            size = (canvas_size[0], canvas_size[0] * data_shape[1] / data_shape[0])
        else:
            size = (canvas_size[1] * data_shape[0] / data_shape[1], canvas_size[1])
        return centered(size), size
    if image_canvas_mode == "1:1":
        # one canvas pixel per data element, centered.
        return centered(data_shape), data_shape
    if image_canvas_mode == "2:1":
        # half-size image, centered.
        size = (data_shape[0] * 0.5, data_shape[1] * 0.5)
        return centered(size), size
    # custom zoom/position: fit the data into the zoomed canvas and offset by
    # the fractional image_position.
    size = (canvas_size[0] * image_zoom, canvas_size[1] * image_zoom)
    canvas_rect = Geometry.fit_to_size(((0, 0), size), data_shape)
    origin = ((canvas_size[0] * 0.5) - image_position[0] * canvas_rect[1][0] - canvas_rect[0][0],
              (canvas_size[1] * 0.5) - image_position[1] * canvas_rect[1][1] - canvas_rect[0][1])
    return origin, size
|
Calculate origin and size for canvas size, data shape, and image display parameters.
|
entailment
|
def read_library(persistent_storage_system, ignore_older_files) -> typing.Dict:
"""Read data items from the data reference handler and return as a list.
Data items will have persistent_object_context set upon return, but caller will need to call finish_reading
on each of the data items.
"""
data_item_uuids = set()
utilized_deletions = set() # the uuid's skipped due to being deleted
deletions = list()
reader_info_list, library_updates = auto_migrate_storage_system(persistent_storage_system=persistent_storage_system,
new_persistent_storage_system=persistent_storage_system,
data_item_uuids=data_item_uuids,
deletions=deletions,
utilized_deletions=utilized_deletions,
ignore_older_files=ignore_older_files)
# next, for each auto migration, create a temporary storage system and read items from that storage system
# using auto_migrate_storage_system. the data items returned will have been copied to the current storage
# system (persistent object context).
for auto_migration in reversed(persistent_storage_system.get_auto_migrations()):
old_persistent_storage_system = FileStorageSystem(auto_migration.library_path, auto_migration.paths) if auto_migration.paths else auto_migration.storage_system
new_reader_info_list, new_library_updates = auto_migrate_storage_system(persistent_storage_system=old_persistent_storage_system,
new_persistent_storage_system=persistent_storage_system,
data_item_uuids=data_item_uuids,
deletions=deletions,
utilized_deletions=utilized_deletions,
ignore_older_files=ignore_older_files)
reader_info_list.extend(new_reader_info_list)
library_updates.update(new_library_updates)
assert len(reader_info_list) == len(data_item_uuids)
library_storage_properties = persistent_storage_system.library_storage_properties
for reader_info in reader_info_list:
properties = reader_info.properties
properties = Utility.clean_dict(copy.deepcopy(properties) if properties else dict())
version = properties.get("version", 0)
if version == DataItem.DataItem.writer_version:
data_item_uuid = uuid.UUID(properties.get("uuid", uuid.uuid4()))
library_update = library_updates.get(data_item_uuid, dict())
library_storage_properties.setdefault("connections", list()).extend(library_update.get("connections", list()))
library_storage_properties.setdefault("computations", list()).extend(library_update.get("computations", list()))
library_storage_properties.setdefault("display_items", list()).extend(library_update.get("display_items", list()))
# mark deletions that need to be tracked because they've been deleted but are also present in older libraries
# and would be migrated during reading unless they explicitly are prevented from doing so (via data_item_deletions).
# utilized deletions are the ones that were attempted; if nothing was attempted, then no reason to track it anymore
# since there is nothing to migrate in the future.
library_storage_properties["data_item_deletions"] = [str(uuid_) for uuid_ in utilized_deletions]
connections_list = library_storage_properties.get("connections", list())
assert len(connections_list) == len({connection.get("uuid") for connection in connections_list})
computations_list = library_storage_properties.get("computations", list())
assert len(computations_list) == len({computation.get("uuid") for computation in computations_list})
# migrations
if library_storage_properties.get("version", 0) < 2:
for data_group_properties in library_storage_properties.get("data_groups", list()):
data_group_properties.pop("data_groups")
display_item_references = data_group_properties.setdefault("display_item_references", list())
data_item_uuid_strs = data_group_properties.pop("data_item_uuids", list())
for data_item_uuid_str in data_item_uuid_strs:
for display_item_properties in library_storage_properties.get("display_items", list()):
data_item_references = [d.get("data_item_reference", None) for d in display_item_properties.get("display_data_channels", list())]
if data_item_uuid_str in data_item_references:
display_item_references.append(display_item_properties["uuid"])
data_item_uuid_to_display_item_uuid_map = dict()
data_item_uuid_to_display_item_dict_map = dict()
display_to_display_item_map = dict()
display_to_display_data_channel_map = dict()
for display_item_properties in library_storage_properties.get("display_items", list()):
display_to_display_item_map[display_item_properties["display"]["uuid"]] = display_item_properties["uuid"]
display_to_display_data_channel_map[display_item_properties["display"]["uuid"]] = display_item_properties["display_data_channels"][0]["uuid"]
data_item_references = [d.get("data_item_reference", None) for d in display_item_properties.get("display_data_channels", list())]
for data_item_uuid_str in data_item_references:
data_item_uuid_to_display_item_uuid_map.setdefault(data_item_uuid_str, display_item_properties["uuid"])
data_item_uuid_to_display_item_dict_map.setdefault(data_item_uuid_str, display_item_properties)
display_item_properties.pop("display", None)
for workspace_properties in library_storage_properties.get("workspaces", list()):
def replace1(d):
if "children" in d:
for dd in d["children"]:
replace1(dd)
if "data_item_uuid" in d:
data_item_uuid_str = d.pop("data_item_uuid")
display_item_uuid_str = data_item_uuid_to_display_item_uuid_map.get(data_item_uuid_str)
if display_item_uuid_str:
d["display_item_uuid"] = display_item_uuid_str
replace1(workspace_properties["layout"])
for connection_dict in library_storage_properties.get("connections", list()):
source_uuid_str = connection_dict["source_uuid"]
if connection_dict["type"] == "interval-list-connection":
connection_dict["source_uuid"] = display_to_display_item_map.get(source_uuid_str, None)
if connection_dict["type"] == "property-connection" and connection_dict["source_property"] == "slice_interval":
connection_dict["source_uuid"] = display_to_display_data_channel_map.get(source_uuid_str, None)
def fix_specifier(specifier_dict):
if specifier_dict.get("type") in ("data_item", "display_xdata", "cropped_xdata", "cropped_display_xdata", "filter_xdata", "filtered_xdata"):
if specifier_dict.get("uuid") in data_item_uuid_to_display_item_dict_map:
specifier_dict["uuid"] = data_item_uuid_to_display_item_dict_map[specifier_dict["uuid"]]["display_data_channels"][0]["uuid"]
else:
specifier_dict.pop("uuid", None)
if specifier_dict.get("type") == "data_item":
specifier_dict["type"] = "data_source"
if specifier_dict.get("type") == "data_item_object":
specifier_dict["type"] = "data_item"
if specifier_dict.get("type") == "region":
specifier_dict["type"] = "graphic"
for computation_dict in library_storage_properties.get("computations", list()):
for variable_dict in computation_dict.get("variables", list()):
if "specifier" in variable_dict:
specifier_dict = variable_dict["specifier"]
if specifier_dict is not None:
fix_specifier(specifier_dict)
if "secondary_specifier" in variable_dict:
specifier_dict = variable_dict["secondary_specifier"]
if specifier_dict is not None:
fix_specifier(specifier_dict)
for result_dict in computation_dict.get("results", list()):
fix_specifier(result_dict["specifier"])
library_storage_properties["version"] = DocumentModel.DocumentModel.library_version
# TODO: add consistency checks: no duplicated items [by uuid] such as connections or computations or data items
assert library_storage_properties["version"] == DocumentModel.DocumentModel.library_version
persistent_storage_system.rewrite_properties(library_storage_properties)
properties = copy.deepcopy(library_storage_properties)
for reader_info in reader_info_list:
data_item_properties = Utility.clean_dict(reader_info.properties if reader_info.properties else dict())
if data_item_properties.get("version", 0) == DataItem.DataItem.writer_version:
data_item_properties["__large_format"] = reader_info.large_format
data_item_properties["__identifier"] = reader_info.identifier
properties.setdefault("data_items", list()).append(data_item_properties)
def data_item_created(data_item_properties: typing.Mapping) -> str:
return data_item_properties.get("created", "1900-01-01T00:00:00.000000")
properties["data_items"] = sorted(properties.get("data_items", list()), key=data_item_created)
return properties
|
Read data items from the data reference handler and return as a list.
Data items will have persistent_object_context set upon return, but caller will need to call finish_reading
on each of the data items.
|
entailment
|
def auto_migrate_storage_system(*, persistent_storage_system=None, new_persistent_storage_system=None, data_item_uuids=None, deletions: typing.List[uuid.UUID] = None, utilized_deletions: typing.Set[uuid.UUID] = None, ignore_older_files: bool = True):
    """Migrate items from the storage system to the object context.

    Files in data_item_uuids have already been loaded and are ignored (not migrated).

    Files in deletions have been deleted in object context and are ignored (not migrated) and then added
    to the utilized deletions list.

    Data items will have persistent_object_context set upon return, but caller will need to call finish_reading
    on each of the data items.

    :return: a tuple (good_reader_info_list, library_updates) where library_updates maps a
        data item uuid to the preliminary library update dict produced by the migration.

    NOTE(review): data_item_uuids, deletions and utilized_deletions are mutated in place, so the
    None defaults are not usable as-is -- callers are expected to supply them; confirm call sites.
    """
    # Read raw properties for every data item found by the storage system.
    # Unreadable items are logged and skipped rather than aborting the migration.
    storage_handlers = persistent_storage_system.find_data_items()
    ReaderInfo = collections.namedtuple("ReaderInfo", ["properties", "changed_ref", "large_format", "storage_handler", "identifier"])
    reader_info_list = list()
    for storage_handler in storage_handlers:
        try:
            # HDF5-backed items are flagged as "large format"
            large_format = isinstance(storage_handler, HDF5Handler.HDF5Handler)
            properties = Migration.transform_to_latest(storage_handler.read_properties())
            reader_info = ReaderInfo(properties, [False], large_format, storage_handler, storage_handler.reference)
            reader_info_list.append(reader_info)
        except Exception as e:
            logging.debug("Error reading %s", storage_handler.reference)
            import traceback
            traceback.print_exc()
            traceback.print_stack()
    # merge the source library's recorded deletions into the caller's deletions list
    library_storage_properties = persistent_storage_system.library_storage_properties
    for deletion in copy.deepcopy(library_storage_properties.get("data_item_deletions", list())):
        if not deletion in deletions:
            deletions.append(deletion)
    preliminary_library_updates = dict()
    library_updates = dict()
    if not ignore_older_files:
        # upgrade older-versioned items in place, collecting per-item library updates
        Migration.migrate_to_latest(reader_info_list, preliminary_library_updates)
    good_reader_info_list = list()
    count = len(reader_info_list)
    for index, reader_info in enumerate(reader_info_list):
        storage_handler = reader_info.storage_handler
        properties = reader_info.properties
        try:
            version = properties.get("version", 0)
            # only items at the current writer version are migrated
            if version == DataItem.DataItem.writer_version:
                data_item_uuid = uuid.UUID(properties["uuid"])
                if not data_item_uuid in data_item_uuids:
                    if str(data_item_uuid) in deletions:
                        # item was deleted in the destination context; record the deletion as utilized
                        utilized_deletions.add(data_item_uuid)
                    else:
                        auto_migrate_data_item(reader_info, persistent_storage_system, new_persistent_storage_system, index, count)
                        good_reader_info_list.append(reader_info)
                        data_item_uuids.add(data_item_uuid)
                        library_update = preliminary_library_updates.get(data_item_uuid)
                        if library_update:
                            library_updates[data_item_uuid] = library_update
        except Exception as e:
            logging.debug("Error reading %s", storage_handler.reference)
            import traceback
            traceback.print_exc()
            traceback.print_stack()
    return good_reader_info_list, library_updates
|
Migrate items from the storage system to the object context.
Files in data_item_uuids have already been loaded and are ignored (not migrated).
Files in deletes have been deleted in object context and are ignored (not migrated) and then added
to the utilized deletions list.
Data items will have persistent_object_context set upon return, but caller will need to call finish_reading
on each of the data items.
|
entailment
|
def rewrite_properties(self, properties):
    """Replace the in-memory properties wholesale and write them to disk.

    :param properties: the new properties dict; stored by reference (not copied).
    """
    with self.__properties_lock:
        self.__properties = properties
        # NOTE(review): __write_properties(None) presumably rewrites the whole
        # backing file rather than a single item -- confirm against its definition
        self.__write_properties(None)
|
Set the properties and write to disk.
|
entailment
|
def analisar(retorno):
    """Build a :class:`RespostaAtivarSAT` from the given return value.

    :param unicode retorno: Return value of the ``AtivarSAT`` function.
    """
    resposta = analisar_retorno(forcar_unicode(retorno),
            funcao='AtivarSAT',
            classe_resposta=RespostaAtivarSAT,
            campos=(
                    ('numeroSessao', int),
                    ('EEEEE', unicode),
                    ('mensagem', unicode),
                    ('cod', unicode),
                    ('mensagemSEFAZ', unicode),
                    ('CSR', unicode),
                ),
            campos_alternativos=[
                    # if activation fails, the standard set of fields is
                    # expected in the return...
                    RespostaSAT.CAMPOS,
                ]
        )
    if resposta.EEEEE not in (
            ATIVADO_CORRETAMENTE,
            CSR_ICPBRASIL_CRIADO_SUCESSO,):
        raise ExcecaoRespostaSAT(resposta)
    return resposta
|
Constrói uma :class:`RespostaAtivarSAT` a partir do retorno
informado.
:param unicode retorno: Retorno da função ``AtivarSAT``.
|
entailment
|
def create_view_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, buffer_size: int=1) -> ViewTask:
    """Create a view task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: The frame parameters for the view. Pass None for defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: The enabled channels for the view. Pass None for defaults.
    :type channels_enabled: List of booleans.
    :param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
    :type buffer_size: int
    :return: The :py:class:`ViewTask` object.
    :rtype: :py:class:`ViewTask`

    Callers should call close on the returned task when finished.

    See :py:class:`ViewTask` for examples of how to use.
    """
    # interface stub: the concrete facade provides the implementation
    ...
|
Create a view task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the view. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the view. Pass None for defaults.
:type channels_enabled: List of booleans.
:param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
:type buffer_size: int
:return: The :py:class:`ViewTask` object.
:rtype: :py:class:`ViewTask`
Callers should call close on the returned task when finished.
See :py:class:`ViewTask` for examples of how to use.
|
entailment
|
def from_yaml():
    """Load configuration from yaml source(s), cached to only run once.

    Starts from the packaged defaults and overlays each existing config file
    in CONFIG_LOCATIONS; unknown keys in an overlay raise ConfigError.
    """
    defaults_text = snippets.get_snippet_content('hatchery.yml')
    merged = yaml.load(defaults_text, Loader=yaml.RoundTripLoader)
    for location in CONFIG_LOCATIONS:
        path = os.path.expanduser(location)
        if not os.path.isfile(path):
            continue
        with open(path) as config_file:
            overrides = yaml.load(config_file, Loader=yaml.RoundTripLoader)
        if overrides is None:
            continue
        for key, value in overrides.items():
            # only keys present in the defaults are legal
            if key not in merged.keys():
                raise ConfigError(
                    'found garbage key "{}" in {}'.format(key, path)
                )
            merged[key] = value
    return merged
|
Load configuration from yaml source(s), cached to only run once
|
entailment
|
def from_pypirc(pypi_repository):
    """Load configuration from .pypirc file, cached to only run once.

    :param pypi_repository: name of the index-server entry to look up.
    :return: dict of option name -> value from the repository's section.
    :raises ConfigError: if the repository is not configured in any pypirc file.
    """
    ret = {}
    pypirc_locations = PYPIRC_LOCATIONS
    for pypirc_path in pypirc_locations:
        pypirc_path = os.path.expanduser(pypirc_path)
        if os.path.isfile(pypirc_path):
            # SafeConfigParser was a deprecated alias removed in Python 3.12;
            # ConfigParser is the equivalent replacement.
            parser = configparser.ConfigParser()
            parser.read(pypirc_path)
            if 'distutils' not in parser.sections():
                continue
            if 'index-servers' not in parser.options('distutils'):
                continue
            # index-servers is a whitespace-separated list; split() avoids false
            # substring matches (e.g. 'pypi' matching inside 'testpypi')
            if pypi_repository not in parser.get('distutils', 'index-servers').split():
                continue
            if pypi_repository in parser.sections():
                for option in parser.options(pypi_repository):
                    ret[option] = parser.get(pypi_repository, option)
    if not ret:
        raise ConfigError(
            'repository does not appear to be configured in pypirc ({})'.format(pypi_repository) +
            ', remember that it needs an entry in [distutils] and its own section'
        )
    return ret
|
Load configuration from .pypirc file, cached to only run once
|
entailment
|
def pypirc_temp(index_url):
    """Create a temporary pypirc file for interaction with twine.

    :param index_url: URL of the index server to embed in the file.
    :return: path to the temporary file; the caller is responsible for removing it.
    """
    # delete=False so the file outlives this function; close the handle right away
    # so the path can be reopened on all platforms (Windows forbids reopening an
    # open NamedTemporaryFile) and so the descriptor is not leaked.
    pypirc_file = tempfile.NamedTemporaryFile(suffix='.pypirc', delete=False)
    pypirc_file.close()
    with open(pypirc_file.name, 'w') as fh:
        fh.write(PYPIRC_TEMPLATE.format(index_name=PYPIRC_TEMP_INDEX_NAME, index_url=index_url))
    return pypirc_file.name
|
Create a temporary pypirc file for interaction with twine
|
entailment
|
def get_api(version: str, ui_version: str=None) -> API_1:
    """Get a versioned interface matching the given version and ui_version.

    version is a string in the form "1.0.2".
    """
    # fall back to the default ui version specifier when none is supplied
    effective_ui_version = ui_version or "~1.0"
    return _get_api_with_app(version, effective_ui_version, ApplicationModule.app)
|
Get a versioned interface matching the given version and ui_version.
version is a string in the form "1.0.2".
|
entailment
|
def mask_xdata_with_shape(self, shape: DataAndMetadata.ShapeType) -> DataAndMetadata.DataAndMetadata:
    """Return the mask created by this graphic, rendered at the given shape, as extended data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return DataAndMetadata.DataAndMetadata.from_data(self._graphic.get_mask(shape))
|
Return the mask created by this graphic as extended data.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def end(self, value: typing.Union[float, NormPointType]) -> None:
    """Set the end property in relative coordinates.

    End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
    """
    self.set_property("end", value)
|
Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
|
entailment
|
def start(self, value: typing.Union[float, NormPointType]) -> None:
    """Set the start property in relative coordinates.

    Start may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
    """
    self.set_property("start", value)
|
Set the end property in relative coordinates.
End may be a float when graphic is an Interval or a tuple (y, x) when graphic is a Line.
|
entailment
|
def data(self, data: numpy.ndarray) -> None:
    """Set the data.

    :param data: A numpy ndarray.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # copy so later caller-side mutation of the array cannot affect the stored data
    self.__data_item.set_data(numpy.copy(data))
|
Set the data.
:param data: A numpy ndarray.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def display_xdata(self) -> DataAndMetadata.DataAndMetadata:
    """Return the extended data of this data item display.

    Display data will always be 1d or 2d and either int, float, or RGB data type.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    display_data_channel = self.__display_item.display_data_channel
    # NOTE(review): the True argument appears to request an immediate (completed)
    # calculation of the display values -- confirm against get_calculated_display_values
    return display_data_channel.get_calculated_display_values(True).display_data_and_metadata
|
Return the extended data of this data item display.
Display data will always be 1d or 2d and either int, float, or RGB data type.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def set_dimensional_calibrations(self, dimensional_calibrations: typing.List[CalibrationModule.Calibration]) -> None:
    """Set the dimensional calibrations.

    :param dimensional_calibrations: A list of calibrations, must match the dimensions of the data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # delegate directly to the underlying data item
    self.__data_item.set_dimensional_calibrations(dimensional_calibrations)
|
Set the dimensional calibrations.
:param dimensional_calibrations: A list of calibrations, must match the dimensions of the data.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_metadata_value(self, key: str) -> typing.Any:
    """Get the metadata value for the given key.

    There are a set of predefined keys that, when used, will be type checked and be interoperable with other
    applications. Please consult reference documentation for valid keys.

    If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
    by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.

    Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
    using the ``metadata_value`` methods over directly accessing ``metadata``.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return self._data_item.get_metadata_value(key)
|
Get the metadata value for the given key.
There are a set of predefined keys that, when used, will be type checked and be interoperable with other
applications. Please consult reference documentation for valid keys.
If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.
Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
using the ``metadata_value`` methods over directly accessing ``metadata``.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def set_metadata_value(self, key: str, value: typing.Any) -> None:
    """Set the metadata value for the given key.

    There are a set of predefined keys that, when used, will be type checked and be interoperable with other
    applications. Please consult reference documentation for valid keys.

    If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
    by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.

    Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
    using the ``metadata_value`` methods over directly accessing ``metadata``.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    self._data_item.set_metadata_value(key, value)
|
Set the metadata value for the given key.
There are a set of predefined keys that, when used, will be type checked and be interoperable with other
applications. Please consult reference documentation for valid keys.
If using a custom key, we recommend structuring your keys in the '<group>.<attribute>' format followed
by the predefined keys. e.g. 'session.instrument' or 'camera.binning'.
Also note that some predefined keys map to the metadata ``dict`` but others do not. For this reason, prefer
using the ``metadata_value`` methods over directly accessing ``metadata``.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def graphics(self) -> typing.List[Graphic]:
    """Return the graphics attached to this display item, wrapped as facade objects.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return list(map(Graphic, self.__display_item.graphics))
|
Return the graphics attached to this data item.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def add_point_region(self, y: float, x: float) -> Graphic:
    """Add a point graphic to the data item.

    :param x: The x coordinate, in relative units [0.0, 1.0]
    :param y: The y coordinate, in relative units [0.0, 1.0]
    :return: The :py:class:`nion.swift.Facade.Graphic` object that was added.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    point_graphic = Graphics.PointGraphic()
    point_graphic.position = Geometry.FloatPoint(y, x)
    self.__display_item.add_graphic(point_graphic)
    return Graphic(point_graphic)
|
Add a point graphic to the data item.
:param x: The x coordinate, in relative units [0.0, 1.0]
:param y: The y coordinate, in relative units [0.0, 1.0]
:return: The :py:class:`nion.swift.Facade.Graphic` object that was added.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def mask_xdata(self) -> DataAndMetadata.DataAndMetadata:
    """Combine any mask graphics on this data item into a single mask, returned as extended data.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    display_data_channel = self.__display_item.display_data_channel
    shape = display_data_channel.display_data_shape
    mask_graphic_types = (Graphics.SpotGraphic, Graphics.WedgeGraphic, Graphics.RingGraphic, Graphics.LatticeGraphic)
    combined_mask = numpy.zeros(shape)
    # OR together the individual masks of every mask-producing graphic
    for graphic in self.__display_item.graphics:
        if isinstance(graphic, mask_graphic_types):
            combined_mask = numpy.logical_or(combined_mask, graphic.get_mask(shape))
    return DataAndMetadata.DataAndMetadata.from_data(combined_mask)
|
Return the mask by combining any mask graphics on this data item as extended data.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def data_item(self) -> DataItem:
    """Return the data item associated with this display panel, or None if there is none.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    display_panel = self.__display_panel
    if display_panel:
        panel_data_item = display_panel.data_item
        if panel_data_item:
            return DataItem(panel_data_item)
    return None
|
Return the data item associated with this display panel.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def set_data_item(self, data_item: DataItem) -> None:
    """Set the data item associated with this display panel.

    :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.

    This will replace whatever data item, browser, or controller is currently in the display panel with the single
    data item.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    display_panel = self.__display_panel
    if not display_panel:
        return
    container = data_item._data_item.container
    display_item = container.get_display_item_for_data_item(data_item._data_item) if container else None
    display_panel.set_display_panel_display_item(display_item)
|
Set the data item associated with this display panel.
:param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.
This will replace whatever data item, browser, or controller is currently in the display panel with the single
data item.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def add_data_item(self, data_item: DataItem) -> None:
    """Add a data item to the group.

    :param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    container = data_item._data_item.container
    display_item = container.get_display_item_for_data_item(data_item._data_item) if container else None
    if display_item:
        self.__data_group.append_display_item(display_item)
|
Add a data item to the group.
:param data_item: The :py:class:`nion.swift.Facade.DataItem` object to add.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def close(self) -> None:
    """Close the task.

    .. versionadded:: 1.0

    This method must be called when the task is no longer needed.
    """
    # stop buffering before closing, then drop the reference (order matters)
    self.__data_channel_buffer.stop()
    self.__data_channel_buffer.close()
    self.__data_channel_buffer = None
    # restore the hardware source play state captured when the task was created
    if not self.__was_playing:
        self.__hardware_source.stop_playing()
|
Close the task.
.. versionadded:: 1.0
This method must be called when the task is no longer needed.
|
entailment
|
def record(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    """Record data and return a list of data_and_metadata objects.

    .. versionadded:: 1.0

    :param frame_parameters: The frame parameters for the record. Pass None for defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: The enabled channels for the record. Pass None for defaults.
    :type channels_enabled: List of booleans.
    :param timeout: The timeout in seconds. Pass None to use default.
    :return: The list of data and metadata items that were read.
    :rtype: list of :py:class:`DataAndMetadata`
    """
    hardware_source = self.__hardware_source
    if frame_parameters:
        record_parameters = hardware_source.get_frame_parameters_from_dict(frame_parameters)
        hardware_source.set_record_frame_parameters(record_parameters)
    if channels_enabled is not None:
        for channel_index, enabled in enumerate(channels_enabled):
            hardware_source.set_channel_enabled(channel_index, enabled)
    hardware_source.start_recording()
    return hardware_source.get_next_xdatas_to_finish(timeout)
|
Record data and return a list of data_and_metadata objects.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the record. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the record. Pass None for defaults.
:type channels_enabled: List of booleans.
:param timeout: The timeout in seconds. Pass None to use default.
:return: The list of data and metadata items that were read.
:rtype: list of :py:class:`DataAndMetadata`
|
entailment
|
def create_record_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None) -> RecordTask:
    """Create a record task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: The frame parameters for the record. Pass None for defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: The enabled channels for the record. Pass None for defaults.
    :type channels_enabled: List of booleans.
    :return: The :py:class:`RecordTask` object.
    :rtype: :py:class:`RecordTask`

    Callers should call close on the returned task when finished.

    See :py:class:`RecordTask` for examples of how to use.
    """
    return RecordTask(self.__hardware_source, frame_parameters, channels_enabled)
|
Create a record task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the record. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the record. Pass None for defaults.
:type channels_enabled: List of booleans.
:return: The :py:class:`RecordTask` object.
:rtype: :py:class:`RecordTask`
Callers should call close on the returned task when finished.
See :py:class:`RecordTask` for examples of how to use.
|
entailment
|
def create_view_task(self, frame_parameters: dict=None, channels_enabled: typing.List[bool]=None, buffer_size: int=1) -> ViewTask:
    """Create a view task for this hardware source.

    .. versionadded:: 1.0

    :param frame_parameters: The frame parameters for the view. Pass None for defaults.
    :type frame_parameters: :py:class:`FrameParameters`
    :param channels_enabled: The enabled channels for the view. Pass None for defaults.
    :type channels_enabled: List of booleans.
    :param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
    :type buffer_size: int
    :return: The :py:class:`ViewTask` object.
    :rtype: :py:class:`ViewTask`

    Callers should call close on the returned task when finished.

    See :py:class:`ViewTask` for examples of how to use.
    """
    return ViewTask(self.__hardware_source, frame_parameters, channels_enabled, buffer_size)
|
Create a view task for this hardware source.
.. versionadded:: 1.0
:param frame_parameters: The frame parameters for the view. Pass None for defaults.
:type frame_parameters: :py:class:`FrameParameters`
:param channels_enabled: The enabled channels for the view. Pass None for defaults.
:type channels_enabled: List of booleans.
:param buffer_size: The buffer size if using the grab_earliest method. Default is 1.
:type buffer_size: int
:return: The :py:class:`ViewTask` object.
:rtype: :py:class:`ViewTask`
Callers should call close on the returned task when finished.
See :py:class:`ViewTask` for examples of how to use.
|
entailment
|
def grab_next_to_finish(self, timeout: float=None) -> typing.List[DataAndMetadata.DataAndMetadata]:
    """Grabs the next frame to finish and returns it as data and metadata.

    .. versionadded:: 1.0

    :param timeout: The timeout in seconds. Pass None to use default.
    :return: The list of data and metadata items that were read.
    :rtype: list of :py:class:`DataAndMetadata`

    If the view is not already started, it will be started automatically.

    Scriptable: Yes
    """
    # ensure acquisition is running before waiting for a frame
    self.start_playing()
    return self.__hardware_source.get_next_xdatas_to_finish(timeout)
|
Grabs the next frame to finish and returns it as data and metadata.
.. versionadded:: 1.0
:param timeout: The timeout in seconds. Pass None to use default.
:return: The list of data and metadata items that were read.
:rtype: list of :py:class:`DataAndMetadata`
If the view is not already started, it will be started automatically.
Scriptable: Yes
|
entailment
|
def set_control_output(self, name: str, value: float, *, options: dict=None) -> None:
    """Set the value of a control asynchronously.

    :param name: The name of the control (string).
    :param value: The control value (float).
    :param options: A dict of custom options to pass to the instrument for setting the value.

    Options are:
        value_type: local, delta, output. output is default.
        confirm, confirm_tolerance_factor, confirm_timeout: confirm value gets set.
        inform: True to keep dependent control outputs constant by adjusting their internal values. False is
        default.

    Default value of confirm is False.

    Default confirm_tolerance_factor is 1.0. A value of 1.0 is the nominal tolerance for that control. Passing a
    higher tolerance factor (for example 1.5) will increase the permitted error margin and passing lower tolerance
    factor (for example 0.5) will decrease the permitted error margin and consequently make a timeout more likely.
    The tolerance factor value 0.0 is a special value which removes all checking and only waits for any change at
    all and then returns.

    Default confirm_timeout is 16.0 (seconds).

    Raises exception if control with name doesn't exist.

    Raises TimeoutException if confirm is True and timeout occurs.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    self.__instrument.set_control_output(name, value, options)
|
Set the value of a control asynchronously.
:param name: The name of the control (string).
:param value: The control value (float).
:param options: A dict of custom options to pass to the instrument for setting the value.
Options are:
value_type: local, delta, output. output is default.
confirm, confirm_tolerance_factor, confirm_timeout: confirm value gets set.
inform: True to keep dependent control outputs constant by adjusting their internal values. False is
default.
Default value of confirm is False.
Default confirm_tolerance_factor is 1.0. A value of 1.0 is the nominal tolerance for that control. Passing a
higher tolerance factor (for example 1.5) will increase the permitted error margin and passing lower tolerance
factor (for example 0.5) will decrease the permitted error margin and consequently make a timeout more likely.
The tolerance factor value 0.0 is a special value which removes all checking and only waits for any change at
all and then returns.
Default confirm_timeout is 16.0 (seconds).
Raises exception if control with name doesn't exist.
Raises TimeoutException if confirm is True and timeout occurs.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_property_as_float(self, name: str) -> float:
    """Return the value of the named property, coerced to float.

    :return: The property value (float).

    Raises exception if property with name doesn't exist.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    raw_value = self.__instrument.get_property(name)
    return float(raw_value)
|
Return the value of a float property.
:return: The property value (float).
Raises exception if property with name doesn't exist.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def set_property_as_float(self, name: str, value: float) -> None:
    """Set the named property to the given value, coerced to float.

    :param name: The name of the property (string).
    :param value: The property value (float).

    Raises exception if property with name doesn't exist.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    coerced_value = float(value)
    self.__instrument.set_property(name, coerced_value)
|
Set the value of a float property.
:param name: The name of the property (string).
:param value: The property value (float).
Raises exception if property with name doesn't exist.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def data_items(self) -> typing.List[DataItem]:
    """Return the list of data items, wrapped as facade objects.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return list(map(DataItem, self.__document_model.data_items))
|
Return the list of data items.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def display_items(self) -> typing.List[Display]:
    """Return the list of display items, wrapped as facade objects.

    :return: The list of :py:class:`nion.swift.Facade.Display` objects.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    return list(map(Display, self.__document_model.display_items))
|
Return the list of display items.
:return: The list of :py:class:`nion.swift.Facade.Display` objects.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_source_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    """Return the list of data items that are data sources for the data item.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects, or None if data_item is falsy.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    if not data_item:
        return None
    source_items = self._document_model.get_source_data_items(data_item._data_item)
    return [DataItem(source_item) for source_item in source_items]
|
Return the list of data items that are data sources for the data item.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_dependent_data_items(self, data_item: DataItem) -> typing.List[DataItem]:
    """Return the data items dependent on the data item argument.

    :return: The list of :py:class:`nion.swift.Facade.DataItem` objects, or None if data_item is falsy.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    if not data_item:
        return None
    dependent_items = self._document_model.get_dependent_data_items(data_item._data_item)
    return [DataItem(dependent_item) for dependent_item in dependent_items]
|
Return the dependent data items the data item argument.
:return: The list of :py:class:`nion.swift.Facade.DataItem` objects.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_data_item(self, title: str=None) -> DataItem:
    """Create an empty data item in the library.

    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    new_data_item = DataItemModule.DataItem()
    new_data_item.ensure_data_source()
    if title is not None:
        new_data_item.title = title
    self.__document_model.append_data_item(new_data_item)
    return DataItem(new_data_item)
|
Create an empty data item in the library.
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem:
    """Create a data item in the library from an ndarray.

    The data for the data item will be written to disk immediately and unloaded
    from memory. If you wish to delay writing to disk and keep using the data,
    create an empty data item and use the data item methods to modify the data.

    :param data: The data (ndarray).
    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    # Wrap the raw array and delegate to the data-and-metadata variant.
    xdata = DataAndMetadata.DataAndMetadata.from_data(data)
    return self.create_data_item_from_data_and_metadata(xdata, title)
|
Create a data item in the library from an ndarray.
The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay
writing to disk and keep using the data, create an empty data item and use the data item methods to modify
the data.
:param data: The data (ndarray).
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata, title: str=None) -> DataItem:
    """Create a data item in the library from a data and metadata object.

    The data for the data item will be written to disk immediately and unloaded
    from memory. If you wish to delay writing to disk and keep using the data,
    create an empty data item and use the data item methods to modify the data.

    :param data_and_metadata: The data and metadata.
    :param title: The title of the data item (optional).
    :return: The new :py:class:`nion.swift.Facade.DataItem` object.
    :rtype: :py:class:`nion.swift.Facade.DataItem`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    new_data_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        new_data_item.title = title
    self.__document_model.append_data_item(new_data_item)
    return DataItem(new_data_item)
|
Create a data item in the library from a data and metadata object.
The data for the data item will be written to disk immediately and unloaded from memory. If you wish to delay
writing to disk and keep using the data, create an empty data item and use the data item methods to modify
the data.
:param data_and_metadata: The data and metadata.
:param title: The title of the data item (optional).
:return: The new :py:class:`nion.swift.Facade.DataItem` object.
:rtype: :py:class:`nion.swift.Facade.DataItem`
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def copy_data_item(self, data_item: DataItem) -> DataItem:
    """Copy a data item and add the copy to the library.

    .. versionadded:: 1.0

    Scriptable: No
    """
    # Deep-copy the underlying data item rather than rebinding the parameter.
    data_item_copy = copy.deepcopy(data_item._data_item)
    self.__document_model.append_data_item(data_item_copy)
    return DataItem(data_item_copy)
|
Copy a data item.
.. versionadded:: 1.0
Scriptable: No
|
entailment
|
def snapshot_data_item(self, data_item: DataItem) -> DataItem:
    """Snapshot a data item. Similar to copy but with a data snapshot.

    .. versionadded:: 1.0

    Scriptable: No
    """
    snapshot = data_item._data_item.snapshot()
    self.__document_model.append_data_item(snapshot)
    return DataItem(snapshot)
|
Snapshot a data item. Similar to copy but with a data snapshot.
.. versionadded:: 1.0
Scriptable: No
|
entailment
|
def get_or_create_data_group(self, title: str) -> DataGroup:
    """Get (or create) a data group.

    :param title: The title of the data group.
    :return: The new :py:class:`nion.swift.Facade.DataGroup` object.
    :rtype: :py:class:`nion.swift.Facade.DataGroup`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    raw_data_group = self.__document_model.get_or_create_data_group(title)
    return DataGroup(raw_data_group)
|
Get (or create) a data group.
:param title: The title of the data group.
:return: The new :py:class:`nion.swift.Facade.DataGroup` object.
:rtype: :py:class:`nion.swift.Facade.DataGroup`
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_data_item_for_hardware_source(self, hardware_source, channel_id: str=None, processor_id: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
    """Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.

    :param hardware_source: The hardware_source.
    :param channel_id: The (optional) channel id.
    :param processor_id: The (optional) processor id for the channel.
    :param create_if_needed: Whether to create a new data item if none is found.
    :param large_format: Passed through to data item creation when a new item is made.
    :return: The associated data item. May be None.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    assert hardware_source is not None
    hardware_source_id = hardware_source._hardware_source.hardware_source_id
    document_model = self._document_model
    # Build the canonical reference key for this source/channel/processor and
    # delegate the lookup (and optional creation) to the reference-key variant.
    data_item_reference_key = document_model.make_data_item_reference_key(hardware_source_id, channel_id, processor_id)
    return self.get_data_item_for_reference_key(data_item_reference_key, create_if_needed=create_if_needed, large_format=large_format)
|
Get the data item associated with hardware source and (optional) channel id and processor_id. Optionally create if missing.
:param hardware_source: The hardware_source.
:param channel_id: The (optional) channel id.
:param processor_id: The (optional) processor id for the channel.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def get_data_item_for_reference_key(self, data_item_reference_key: str=None, create_if_needed: bool=False, large_format: bool=False) -> DataItem:
    """Get the data item associated with data item reference key. Optionally create if missing.

    :param data_item_reference_key: The data item reference key.
    :param create_if_needed: Whether to create a new data item if none is found.
    :param large_format: Forwarded to the DataItem constructor when a new item
        is created — presumably selects large-format storage; confirm against
        the DataItem implementation.
    :return: The associated data item. May be None.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    document_model = self._document_model
    data_item_reference = document_model.get_data_item_reference(data_item_reference_key)
    data_item = data_item_reference.data_item
    if data_item is None and create_if_needed:
        # No item yet for this key: create one, register it with the document
        # model, and bind it to the reference key.
        data_item = DataItemModule.DataItem(large_format=large_format)
        data_item.ensure_data_source()
        document_model.append_data_item(data_item)
        document_model.setup_channel(data_item_reference_key, data_item)
        data_item.session_id = document_model.session_id
        # Re-read through the reference so the returned item is the one the
        # document model now tracks for this key.
        data_item = document_model.get_data_item_reference(data_item_reference_key).data_item
    return DataItem(data_item) if data_item else None
|
Get the data item associated with data item reference key. Optionally create if missing.
:param data_item_reference_key: The data item reference key.
:param create_if_needed: Whether to create a new data item if none is found.
:return: The associated data item. May be None.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def get_data_item_by_uuid(self, data_item_uuid: uuid_module.UUID) -> DataItem:
    """Get the data item with the given UUID, or None when not found.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    raw_data_item = self._document_model.get_data_item_by_uuid(data_item_uuid)
    if not raw_data_item:
        return None
    return DataItem(raw_data_item)
|
Get the data item with the given UUID.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def get_graphic_by_uuid(self, graphic_uuid: uuid_module.UUID) -> Graphic:
    """Get the graphic with the given UUID, or None when not found.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    # Scan every display item's graphics; the first UUID match wins.
    match = next((graphic
                  for display_item in self._document_model.display_items
                  for graphic in display_item.graphics
                  if graphic.uuid == graphic_uuid), None)
    return Graphic(match) if match is not None else None
|
Get the graphic with the given UUID.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def has_library_value(self, key: str) -> bool:
    """Return whether the library value for the given key exists.

    Please consult the developer documentation for a list of valid keys.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        return False
    # The last path component is the attribute name on the session model.
    # NOTE(review): a present-but-falsy value (e.g. empty string) reports
    # False here — confirm that is the intended contract.
    field_id = desc['path'][-1]
    return bool(getattr(ApplicationData.get_session_metadata_model(), field_id, None))
|
Return whether the library value for the given key exists.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_library_value(self, key: str) -> typing.Any:
    """Get the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    :raises KeyError: If the key is not a known session key.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Fix: include the offending key in the exception (the original raised
        # a bare KeyError(), which made failures hard to diagnose).
        raise KeyError(key)
    # The last path component is the attribute name on the session model.
    field_id = desc['path'][-1]
    return getattr(ApplicationData.get_session_metadata_model(), field_id)
|
Get the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def set_library_value(self, key: str, value: typing.Any) -> None:
    """Set the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    :raises KeyError: If the key is not a known session key.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Fix: include the offending key in the exception (the original raised
        # a bare KeyError(), which made failures hard to diagnose).
        raise KeyError(key)
    # The last path component is the attribute name on the session model.
    field_id = desc['path'][-1]
    setattr(ApplicationData.get_session_metadata_model(), field_id, value)
|
Set the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def delete_library_value(self, key: str) -> None:
    """Delete the library value for the given key.

    Please consult the developer documentation for a list of valid keys.

    :raises KeyError: If the key is not a known session key.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    desc = Metadata.session_key_map.get(key)
    if desc is None:
        # Fix: include the offending key in the exception (the original raised
        # a bare KeyError(), which made failures hard to diagnose).
        raise KeyError(key)
    # Deletion is implemented by resetting the session-model attribute to None.
    field_id = desc['path'][-1]
    setattr(ApplicationData.get_session_metadata_model(), field_id, None)
|
Delete the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def all_display_panels(self) -> typing.List[DisplayPanel]:
    """Return the list of display panels currently visible.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    panels = list()
    for raw_display_panel in self.__document_controller.workspace_controller.display_panels:
        panels.append(DisplayPanel(raw_display_panel))
    return panels
|
Return the list of display panels currently visible.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def get_display_panel_by_id(self, identifier: str) -> DisplayPanel:
    """Return the display panel with the identifier, or None when not found.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    # Identifier comparison is case-insensitive.
    target = identifier.lower()
    for display_panel in self.__document_controller.workspace_controller.display_panels:
        if display_panel.identifier.lower() == target:
            return DisplayPanel(display_panel)
    return None
|
Return display panel with the identifier.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None):
    """Display a new data item and give it keyboard focus. Uses existing display if it is already displayed.

    .. versionadded:: 1.0

    Status: Provisional
    Scriptable: Yes
    """
    # Reuse a display panel that already shows this data item, if any.
    for display_panel in self.__document_controller.workspace_controller.display_panels:
        if display_panel.data_item == data_item._data_item:
            display_panel.request_focus()
            return DisplayPanel(display_panel)
    # Otherwise display it in the next available result panel.
    result_display_panel = self.__document_controller.next_result_display_panel()
    if not result_display_panel:
        return None
    display_item = self.__document_controller.document_model.get_display_item_for_data_item(data_item._data_item)
    result_display_panel.set_display_panel_display_item(display_item)
    result_display_panel.request_focus()
    return DisplayPanel(result_display_panel)
|
Display a new data item and give it keyboard focus. Uses existing display if it is already displayed.
.. versionadded:: 1.0
Status: Provisional
Scriptable: Yes
|
entailment
|
def show_get_string_message_box(self, caption: str, text: str, accepted_fn, rejected_fn=None, accepted_text: str=None, rejected_text: str=None) -> None:
    """Show a dialog box and ask for a string.

    Caption describes the user prompt. Text is the initial/default string.

    The accepted function must take one argument, the resulting text; it is
    called only when the user accepts (clicks OK). The rejected function, if
    given, takes no arguments and is called when the user clicks Cancel.

    .. versionadded:: 1.0

    Scriptable: No
    """
    workspace_controller = self.__document_controller.workspace_controller
    workspace_controller.pose_get_string_message_box(caption, text, accepted_fn, rejected_fn, accepted_text, rejected_text)
|
Show a dialog box and ask for a string.
Caption describes the user prompt. Text is the initial/default string.
Accepted function must be a function taking one argument which is the resulting text if the user accepts the
message dialog. It will only be called if the user clicks OK.
Rejected function can be a function taking no arguments, called if the user clicks Cancel.
.. versionadded:: 1.0
Scriptable: No
|
entailment
|
def create_data_item_from_data(self, data: numpy.ndarray, title: str=None) -> DataItem:
    """Create a data item in the library from data.

    .. versionadded:: 1.0
    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.

    Scriptable: No
    """
    raw_data_item = self.__document_controller.add_data(data, title)
    return DataItem(raw_data_item)
|
Create a data item in the library from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data` instead.
Scriptable: No
|
entailment
|
def create_data_item_from_data_and_metadata(self, data_and_metadata: DataAndMetadata.DataAndMetadata, title: str=None) -> DataItem:
    """Create a data item in the library from the data and metadata.

    .. versionadded:: 1.0
    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data_and_metadata` instead.

    Scriptable: No
    """
    new_data_item = DataItemModule.new_data_item(data_and_metadata)
    if title is not None:
        new_data_item.title = title
    self.__document_controller.document_model.append_data_item(new_data_item)
    return DataItem(new_data_item)
|
Create a data item in the library from the data and metadata.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.create_data_item_from_data_and_metadata` instead.
Scriptable: No
|
entailment
|
def get_or_create_data_group(self, title: str) -> DataGroup:
    """Get (or create) a data group.

    .. versionadded:: 1.0
    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.Library.get_or_create_data_group` instead.

    Scriptable: No
    """
    # Fixed the deprecation note above: it previously pointed at
    # create_data_item_from_data, which looks like a copy/paste slip; the
    # Library class defines the matching get_or_create_data_group.
    return DataGroup(self.__document_controller.document_model.get_or_create_data_group(title))
|
Get (or create) a data group.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.Library.get_or_create_data_group` instead.
Scriptable: No
|
entailment
|
def document_windows(self) -> typing.List[DocumentWindow]:
    """Return the document windows.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    windows = list()
    for document_controller in self.__application.document_controllers:
        windows.append(DocumentWindow(document_controller))
    return windows
|
Return the document windows.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_calibration(self, offset: float=None, scale: float=None, units: str=None) -> CalibrationModule.Calibration:
    """Create a calibration object with offset, scale, and units.

    :param offset: The offset of the calibration.
    :param scale: The scale of the calibration.
    :param units: The units of the calibration as a string.
    :return: The calibration object.

    Calibrated units and uncalibrated units have the following relationship:
        :samp:`calibrated_value = offset + value * scale`

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    calibration = CalibrationModule.Calibration(offset, scale, units)
    return calibration
|
Create a calibration object with offset, scale, and units.
:param offset: The offset of the calibration.
:param scale: The scale of the calibration.
:param units: The units of the calibration as a string.
:return: The calibration object.
.. versionadded:: 1.0
Scriptable: Yes
Calibrated units and uncalibrated units have the following relationship:
:samp:`calibrated_value = offset + value * scale`
|
entailment
|
def create_data_descriptor(self, is_sequence: bool, collection_dimension_count: int, datum_dimension_count: int) -> DataAndMetadata.DataDescriptor:
    """Create a data descriptor.

    :param is_sequence: whether the descriptor describes a sequence of data.
    :param collection_dimension_count: the number of collection dimensions represented by the descriptor.
    :param datum_dimension_count: the number of datum dimensions represented by the descriptor.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    descriptor = DataAndMetadata.DataDescriptor(is_sequence, collection_dimension_count, datum_dimension_count)
    return descriptor
|
Create a data descriptor.
:param is_sequence: whether the descriptor describes a sequence of data.
:param collection_dimension_count: the number of collection dimensions represented by the descriptor.
:param datum_dimension_count: the number of datum dimensions represented by the descriptor.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_data_and_metadata(self, data: numpy.ndarray, intensity_calibration: CalibrationModule.Calibration = None,
                             dimensional_calibrations: typing.List[CalibrationModule.Calibration] = None, metadata: dict = None,
                             timestamp: str = None, data_descriptor: DataAndMetadata.DataDescriptor = None) -> DataAndMetadata.DataAndMetadata:
    """Create a data_and_metadata object from data.

    :param data: an ndarray of data.
    :param intensity_calibration: An optional calibration object.
    :param dimensional_calibrations: An optional list of calibration objects.
    :param metadata: A dict of metadata.
    :param timestamp: A datetime object.
    :param data_descriptor: A data descriptor describing the dimensions.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    xdata = DataAndMetadata.new_data_and_metadata(data, intensity_calibration, dimensional_calibrations, metadata, timestamp, data_descriptor)
    return xdata
|
Create a data_and_metadata object from data.
:param data: an ndarray of data.
:param intensity_calibration: An optional calibration object.
:param dimensional_calibrations: An optional list of calibration objects.
:param metadata: A dict of metadata.
:param timestamp: A datetime object.
:param data_descriptor: A data descriptor describing the dimensions.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def create_data_and_metadata_from_data(self, data: numpy.ndarray, intensity_calibration: CalibrationModule.Calibration=None, dimensional_calibrations: typing.List[CalibrationModule.Calibration]=None, metadata: dict=None, timestamp: str=None) -> DataAndMetadata.DataAndMetadata:
    """Create a data_and_metadata object from data.

    .. versionadded:: 1.0
    .. deprecated:: 1.1
       Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.

    Scriptable: No
    """
    # Copy the data first so the caller's array is not shared.
    data_copy = numpy.copy(data)
    return self.create_data_and_metadata(data_copy, intensity_calibration, dimensional_calibrations, metadata, timestamp)
|
Create a data_and_metadata object from data.
.. versionadded:: 1.0
.. deprecated:: 1.1
Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead.
Scriptable: No
|
entailment
|
def create_data_and_metadata_io_handler(self, io_handler_delegate):
    """Create an I/O handler that reads and writes a single data_and_metadata.

    :param io_handler_delegate: A delegate object :py:class:`DataAndMetadataIOHandlerInterface`
    :return: A reference object; call its ``close`` method (or allow it to be
        garbage collected) to unregister the handler.

    .. versionadded:: 1.0

    Scriptable: No
    """

    class DelegateIOHandler(ImportExportManager.ImportExportHandler):
        # Adapts the delegate object to the ImportExportHandler interface.

        def __init__(self):
            super().__init__(io_handler_delegate.io_handler_id, io_handler_delegate.io_handler_name, io_handler_delegate.io_handler_extensions)

        def read_data_elements(self, ui, extension, file_path):
            # The delegate produces data-and-metadata; wrap it as a one-element
            # list of data elements for the import/export machinery.
            data_and_metadata = io_handler_delegate.read_data_and_metadata(extension, file_path)
            data_element = ImportExportManager.create_data_element_from_extended_data(data_and_metadata)
            return [data_element]

        def can_write(self, data_and_metadata, extension):
            return io_handler_delegate.can_write_data_and_metadata(data_and_metadata, extension)

        def write_display_item(self, ui, display_item: DisplayItemModule.DisplayItem, file_path: str, extension: str) -> None:
            # Writing a display item writes its backing data item, if any.
            data_item = display_item.data_item
            if data_item:
                self.write_data_item(ui, data_item, file_path, extension)

        def write_data_item(self, ui, data_item, file_path, extension):
            data_and_metadata = data_item.xdata
            data = data_and_metadata.data
            if data is not None:
                # Prefer the delegate's write_data_item when it provides one;
                # otherwise fall back to write_data_and_metadata (required).
                if hasattr(io_handler_delegate, "write_data_item"):
                    io_handler_delegate.write_data_item(DataItem(data_item), file_path, extension)
                else:
                    assert hasattr(io_handler_delegate, "write_data_and_metadata")
                    io_handler_delegate.write_data_and_metadata(data_and_metadata, file_path, extension)

    class IOHandlerReference:
        # Owns the registration; closing unregisters the handler and closes
        # the delegate (if it supports close).

        def __init__(self):
            self.__io_handler_delegate = io_handler_delegate
            self.__io_handler = DelegateIOHandler()
            ImportExportManager.ImportExportManager().register_io_handler(self.__io_handler)

        def __del__(self):
            self.close()

        def close(self):
            # Idempotent: the delegate reference is cleared after the first close.
            if self.__io_handler_delegate:
                io_handler_delegate_close_fn = getattr(self.__io_handler_delegate, "close", None)
                if io_handler_delegate_close_fn:
                    io_handler_delegate_close_fn()
                ImportExportManager.ImportExportManager().unregister_io_handler(self.__io_handler)
                self.__io_handler_delegate = None

    return IOHandlerReference()
|
Create an I/O handler that reads and writes a single data_and_metadata.
:param io_handler_delegate: A delegate object :py:class:`DataAndMetadataIOHandlerInterface`
.. versionadded:: 1.0
Scriptable: No
|
entailment
|
def create_panel(self, panel_delegate):
    """Create a utility panel that can be attached to a window.

    .. versionadded:: 1.0

    Scriptable: No

    The panel_delegate should respond to the following:
        (property, read-only) panel_id
        (property, read-only) panel_name
        (property, read-only) panel_positions (a list from "top", "bottom", "left", "right", "all")
        (property, read-only) panel_position (from "top", "bottom", "left", "right", "none")
        (method, required) create_panel_widget(ui), returns a widget
        (method, optional) close()

    :return: A reference object; call its ``close`` method (or allow it to be
        garbage collected) to unregister the panel.
    """
    panel_id = panel_delegate.panel_id
    panel_name = panel_delegate.panel_name
    # Optional delegate attributes fall back to defaults.
    panel_positions = getattr(panel_delegate, "panel_positions", ["left", "right"])
    panel_position = getattr(panel_delegate, "panel_position", "none")
    properties = getattr(panel_delegate, "panel_properties", None)
    workspace_manager = Workspace.WorkspaceManager()

    def create_facade_panel(document_controller, panel_id, properties):
        # Factory invoked by the workspace manager for each window; wraps the
        # delegate-created widget in a Panel.
        panel = Panel(document_controller, panel_id, properties)
        ui = UserInterface(self.__ui_version, document_controller.ui)
        document_controller = DocumentWindow(document_controller)
        panel.widget = panel_delegate.create_panel_widget(ui, document_controller)._widget
        return panel

    class PanelReference:
        # Owns the registration; closing unregisters the panel and closes the
        # delegate (if it supports close).

        def __init__(self):
            self.__panel_delegate = panel_delegate
            workspace_manager.register_panel(create_facade_panel, panel_id, panel_name, panel_positions, panel_position, properties)

        def __del__(self):
            self.close()

        def close(self):
            # Idempotent: the delegate reference is cleared after the first close.
            if self.__panel_delegate:
                panel_delegate_close_fn = getattr(self.__panel_delegate, "close", None)
                if panel_delegate_close_fn:
                    panel_delegate_close_fn()
                workspace_manager.unregister_panel(panel_id)
                self.__panel_delegate = None

    return PanelReference()
|
Create a utility panel that can be attached to a window.
.. versionadded:: 1.0
Scriptable: No
The panel_delegate should respond to the following:
(property, read-only) panel_id
(property, read-only) panel_name
(property, read-only) panel_positions (a list from "top", "bottom", "left", "right", "all")
(property, read-only) panel_position (from "top", "bottom", "left", "right", "none")
(method, required) create_panel_widget(ui), returns a widget
(method, optional) close()
|
entailment
|
def get_hardware_source_by_id(self, hardware_source_id: str, version: str):
    """Return the hardware source API matching the hardware_source_id and version.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    actual_version = "1.0.0"
    # Reject requests for a newer API than this implementation provides.
    if Utility.compare_versions(version, actual_version) > 0:
        raise NotImplementedError("Hardware API requested version %s is greater than %s." % (version, actual_version))
    hardware_source = HardwareSourceModule.HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id)
    if not hardware_source:
        return None
    return HardwareSource(hardware_source)
|
Return the hardware source API matching the hardware_source_id and version.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def library(self) -> Library:
    """Return the library object.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    app = self.__app
    assert app.document_model
    return Library(app.document_model)
|
Return the library object.
.. versionadded:: 1.0
Scriptable: Yes
|
entailment
|
def make_cost_matrix(profit_matrix, inversion_function):
    """
    Create a cost matrix from a profit matrix by calling
    'inversion_function' to invert each value. The inversion
    function must take one numeric argument (of any type) and return
    another numeric argument which is presumed to be the cost inverse
    of the original profit.

    This is a static method. Call it like this:

    .. python::

        cost_matrix = Munkres.make_cost_matrix(matrix, inversion_func)

    For example:

    .. python::

        cost_matrix = Munkres.make_cost_matrix(matrix, lambda x : sys.maxsize - x)

    :Parameters:
        profit_matrix : list of lists
            The matrix to convert from a profit to a cost matrix
        inversion_function : function
            The function to use to invert each entry in the profit matrix

    :rtype: list of lists
    :return: The converted matrix
    """
    # Apply the inversion element-wise, producing a fresh matrix.
    return [[inversion_function(value) for value in row] for row in profit_matrix]
|
Create a cost matrix from a profit matrix by calling
'inversion_function' to invert each value. The inversion
function must take one numeric argument (of any type) and return
another numeric argument which is presumed to be the cost inverse
of the original profit.
This is a static method. Call it like this:
.. python::
cost_matrix = Munkres.make_cost_matrix(matrix, inversion_func)
For example:
.. python::
cost_matrix = Munkres.make_cost_matrix(matrix, lambda x : sys.maxsize - x)
:Parameters:
profit_matrix : list of lists
The matrix to convert from a profit to a cost matrix
inversion_function : function
The function to use to invert each entry in the profit matrix
:rtype: list of lists
:return: The converted matrix
|
entailment
|
def print_matrix(matrix, msg=None):
    """
    Convenience function: Displays the contents of a matrix of integers.

    :Parameters:
        matrix : list of lists
            Matrix to print
        msg : str
            Optional message to print before displaying the matrix
    """
    if msg is not None:
        print(msg)
    # Calculate the appropriate format width. Fix: the original used
    # int(math.log10(val)) + 1, which raises a math domain error for zero or
    # negative entries; len(str(val)) gives the same width for positive ints
    # and also accounts for the minus sign.
    width = 0
    for row in matrix:
        for val in row:
            width = max(width, len(str(val)))
    # Make the format string (renamed from 'format' to avoid shadowing the builtin).
    fmt = '%%%dd' % width
    # Print the matrix, one bracketed row per line.
    for row in matrix:
        sep = '['
        for val in row:
            sys.stdout.write(sep + fmt % val)
            sep = ', '
        sys.stdout.write(']\n')
|
Convenience function: Displays the contents of a matrix of integers.
:Parameters:
matrix : list of lists
Matrix to print
msg : str
Optional message to print before displaying the matrix
|
entailment
|
def pad_matrix(self, matrix, pad_value=0):
    """
    Pad a possibly non-square matrix to make it square.

    :Parameters:
        matrix : list of lists
            matrix to pad

        pad_value : int
            value to use to pad the matrix

    :rtype: list of lists
    :return: a new, possibly padded, matrix
    """
    # The target size is the larger of the row count and the widest row.
    widest = 0
    for row in matrix:
        widest = max(widest, len(row))
    size = max(widest, len(matrix))
    # Copy each row, extending it with pad_value out to the target size.
    padded = []
    for row in matrix:
        padded.append(row[:] + [pad_value] * (size - len(row)))
    # Add all-pad rows until the matrix is square.
    while len(padded) < size:
        padded.append([pad_value] * size)
    return padded
|
Pad a possibly non-square matrix to make it square.
:Parameters:
matrix : list of lists
matrix to pad
pad_value : int
value to use to pad the matrix
:rtype: list of lists
:return: a new, possibly padded, matrix
|
entailment
|
def compute(self, cost_matrix):
    """
    Compute the indexes for the lowest-cost pairings between rows and
    columns in the database. Returns a list of (row, column) tuples
    that can be used to traverse the matrix.

    :Parameters:
        cost_matrix : list of lists
            The cost matrix. If this cost matrix is not square, it
            will be padded with zeros, via a call to ``pad_matrix()``.
            (This method does *not* modify the caller's matrix. It
            operates on a copy of the matrix.)

            **WARNING**: This code handles square and rectangular
            matrices. It does *not* handle irregular matrices.

    :rtype: list
    :return: A list of ``(row, column)`` tuples that describe the lowest
        cost path through the matrix
    """
    # Work on a padded square copy; remember the original dimensions so that
    # only assignments inside the caller's matrix are reported at the end.
    self.C = self.pad_matrix(cost_matrix)
    self.n = len(self.C)
    self.original_length = len(cost_matrix)
    self.original_width = len(cost_matrix[0])
    # Reset the cover flags and bookkeeping state for a fresh run.
    self.row_covered = [False for i in range(self.n)]
    self.col_covered = [False for i in range(self.n)]
    self.Z0_r = 0
    self.Z0_c = 0
    self.path = self.__make_matrix(self.n * 2, 0)
    self.marked = self.__make_matrix(self.n, 0)
    done = False
    step = 1
    # State machine: each step function returns the number of the next step.
    # A step number with no entry in the table (e.g. 7 = DONE) ends the loop
    # via the KeyError below.
    steps = { 1 : self.__step1,
              2 : self.__step2,
              3 : self.__step3,
              4 : self.__step4,
              5 : self.__step5,
              6 : self.__step6 }
    while not done:
        try:
            func = steps[step]
            step = func()
        except KeyError:
            done = True
    # Look for the starred columns: marked[i][j] == 1 denotes an assignment.
    results = []
    for i in range(self.original_length):
        for j in range(self.original_width):
            if self.marked[i][j] == 1:
                results += [(i, j)]
    return results
|
Compute the indexes for the lowest-cost pairings between rows and
columns in the database. Returns a list of (row, column) tuples
that can be used to traverse the matrix.
:Parameters:
cost_matrix : list of lists
The cost matrix. If this cost matrix is not square, it
will be padded with zeros, via a call to ``pad_matrix()``.
(This method does *not* modify the caller's matrix. It
operates on a copy of the matrix.)
**WARNING**: This code handles square and rectangular
matrices. It does *not* handle irregular matrices.
:rtype: list
:return: A list of ``(row, column)`` tuples that describe the lowest
cost path through the matrix
|
entailment
|
def __make_matrix(self, n, val):
    """Create an *n*x*n* matrix, populating it with the specific value."""
    return [[val] * n for _ in range(n)]
|
Create an *n*x*n* matrix, populating it with the specific value.
|
entailment
|
def __step1(self):
    """
    For each row of the matrix, find the smallest element and
    subtract it from every element in its row. Go to Step 2.
    """
    # Normalize each row so that its minimum becomes zero.
    for row_index in range(self.n):
        row_min = min(self.C[row_index])
        for col_index in range(self.n):
            self.C[row_index][col_index] -= row_min
    return 2
|
For each row of the matrix, find the smallest element and
subtract it from every element in its row. Go to Step 2.
|
entailment
|
def __step2(self):
    """
    Find a zero (Z) in the resulting matrix. If there is no starred
    zero in its row or column, star Z. Repeat for each element in the
    matrix. Go to Step 3.
    """
    for row in range(self.n):
        for col in range(self.n):
            covered = self.row_covered[row] or self.col_covered[col]
            if self.C[row][col] == 0 and not covered:
                # Star this zero (1 == starred) and cover its row and
                # column so no other zero in the same line is starred.
                self.marked[row][col] = 1
                self.col_covered[col] = True
                self.row_covered[row] = True
    # The covers were only used to enforce uniqueness; reset them.
    self.__clear_covers()
    return 3
|
Find a zero (Z) in the resulting matrix. If there is no starred
zero in its row or column, star Z. Repeat for each element in the
matrix. Go to Step 3.
|
entailment
|
def __step3(self):
    """
    Cover each column containing a starred zero. If K columns are
    covered, the starred zeros describe a complete set of unique
    assignments. In this case, Go to DONE, otherwise, Go to Step 4.
    """
    covered = 0
    for row in range(self.n):
        for col in range(self.n):
            if self.marked[row][col] == 1:
                self.col_covered[col] = True
                covered += 1
    # All columns covered -> a complete assignment exists (step 7 = DONE).
    return 7 if covered >= self.n else 4
|
Cover each column containing a starred zero. If K columns are
covered, the starred zeros describe a complete set of unique
assignments. In this case, Go to DONE, otherwise, Go to Step 4.
|
entailment
|
def __step4(self):
    """
    Find a noncovered zero and prime it. If there is no starred zero
    in the row containing this primed zero, Go to Step 5. Otherwise,
    cover this row and uncover the column containing the starred
    zero. Continue in this manner until there are no uncovered zeros
    left. Save the smallest uncovered value and Go to Step 6.
    """
    step = 0
    done = False
    row = -1
    col = -1
    star_col = -1
    while not done:
        (row, col) = self.__find_a_zero()
        if row < 0:
            # No uncovered zero remains: go adjust the matrix (step 6).
            done = True
            step = 6
        else:
            # Prime the uncovered zero (marked value 2 == primed).
            self.marked[row][col] = 2
            star_col = self.__find_star_in_row(row)
            if star_col >= 0:
                # A star shares this row: cover the row, uncover the
                # star's column, and keep searching for another zero.
                col = star_col
                self.row_covered[row] = True
                self.col_covered[col] = False
            else:
                # No star in this row: remember this zero as Z0 and
                # build the alternating path in step 5.
                done = True
                self.Z0_r = row
                self.Z0_c = col
                step = 5
    return step
|
Find a noncovered zero and prime it. If there is no starred zero
in the row containing this primed zero, Go to Step 5. Otherwise,
cover this row and uncover the column containing the starred
zero. Continue in this manner until there are no uncovered zeros
left. Save the smallest uncovered value and Go to Step 6.
|
entailment
|
def __step5(self):
    """
    Construct a series of alternating primed and starred zeros as
    follows. Let Z0 represent the uncovered primed zero found in Step 4.
    Let Z1 denote the starred zero in the column of Z0 (if any).
    Let Z2 denote the primed zero in the row of Z1 (there will always
    be one). Continue until the series terminates at a primed zero
    that has no starred zero in its column. Unstar each starred zero
    of the series, star each primed zero of the series, erase all
    primes and uncover every line in the matrix. Return to Step 3
    """
    count = 0
    path = self.path
    # The series starts at Z0, the primed zero saved by step 4.
    path[count][0] = self.Z0_r
    path[count][1] = self.Z0_c
    done = False
    while not done:
        # Next element: the starred zero (if any) in the column of the
        # last primed zero on the path.
        row = self.__find_star_in_col(path[count][1])
        if row >= 0:
            count += 1
            path[count][0] = row
            path[count][1] = path[count-1][1]
        else:
            # No star in this column: the alternating series ends here.
            done = True
        if not done:
            # Then the primed zero in the row of that star (the
            # algorithm guarantees one exists).
            col = self.__find_prime_in_row(path[count][0])
            count += 1
            path[count][0] = path[count-1][0]
            path[count][1] = col
    # Flip stars/primes along the path, then reset covers and primes.
    self.__convert_path(path, count)
    self.__clear_covers()
    self.__erase_primes()
    return 3
|
Construct a series of alternating primed and starred zeros as
follows. Let Z0 represent the uncovered primed zero found in Step 4.
Let Z1 denote the starred zero in the column of Z0 (if any).
Let Z2 denote the primed zero in the row of Z1 (there will always
be one). Continue until the series terminates at a primed zero
that has no starred zero in its column. Unstar each starred zero
of the series, star each primed zero of the series, erase all
primes and uncover every line in the matrix. Return to Step 3
|
entailment
|
def __step6(self):
    """
    Add the value found in Step 4 to every element of each covered
    row, and subtract it from every element of each uncovered column.
    Return to Step 4 without altering any stars, primes, or covered
    lines.
    """
    delta = self.__find_smallest()
    for row in range(self.n):
        add_to_row = self.row_covered[row]
        for col in range(self.n):
            if add_to_row:
                self.C[row][col] += delta
            if not self.col_covered[col]:
                self.C[row][col] -= delta
    return 4
|
Add the value found in Step 4 to every element of each covered
row, and subtract it from every element of each uncovered column.
Return to Step 4 without altering any stars, primes, or covered
lines.
|
entailment
|
def __find_smallest(self):
    """Return the smallest uncovered value in the matrix (``sys.maxsize``
    when every cell is covered)."""
    uncovered = (
        self.C[i][j]
        for i in range(self.n)
        for j in range(self.n)
        if not self.row_covered[i] and not self.col_covered[j]
    )
    return min(uncovered, default=sys.maxsize)
|
Find the smallest uncovered value in the matrix.
|
entailment
|
def __find_a_zero(self):
    """
    Find the first uncovered element with value 0.

    Returns a ``(row, col)`` tuple, or ``(-1, -1)`` if no uncovered
    zero exists.

    Note: the original inner loop never broke after a match, so it
    returned the *last* uncovered zero of the first matching row and
    kept scanning needlessly. Step 4 only needs *an* uncovered zero,
    so returning the first (as the docstring states) is correct.
    """
    for i in range(self.n):
        if self.row_covered[i]:
            continue
        for j in range(self.n):
            if self.C[i][j] == 0 and not self.col_covered[j]:
                return (i, j)
    return (-1, -1)
|
Find the first uncovered element with value 0
|
entailment
|
def __find_star_in_row(self, row):
    """
    Return the column index of the first starred zero in *row*, or
    -1 if the row contains no starred element.
    """
    for j in range(self.n):
        if self.marked[row][j] == 1:
            return j
    return -1
|
Find the first starred element in the specified row. Returns
the column index, or -1 if no starred element was found.
|
entailment
|
def __find_star_in_col(self, col):
    """
    Return the row index of the first starred zero in column *col*,
    or -1 if the column contains no starred element.

    (The original docstring said "row"; this helper searches a column.)
    """
    for i in range(self.n):
        if self.marked[i][col] == 1:
            return i
    return -1
|
Find the first starred element in the specified column. Returns
the row index, or -1 if no starred element was found.
|
entailment
|
def __find_prime_in_row(self, row):
    """
    Return the column index of the first primed zero in *row*, or
    -1 if the row contains no primed element.

    (The original docstring said "starred"; this helper searches for
    primes, i.e. marked value 2.)
    """
    for j in range(self.n):
        if self.marked[row][j] == 2:
            return j
    return -1
|
Find the first prime element in the specified row. Returns
the column index, or -1 if no primed element was found.
|
entailment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.