Dataset columns:
repository_name: string, length 7 to 55
func_path_in_repository: string, length 4 to 223
func_name: string, length 1 to 134
whole_func_string: string, length 75 to 104k
language: string, 1 class
func_code_string: string, length 75 to 104k
func_code_tokens: list, length 19 to 28.4k
func_documentation_string: string, length 1 to 46.9k
func_documentation_tokens: list, length 1 to 1.97k
split_name: string, 1 class
func_code_url: string, length 87 to 315
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
get_hit_rate_correction
def get_hit_rate_correction(gdacs, calibration_gdacs, cluster_size_histogram):
    '''Calculates a correction factor for single hit clusters at the given GDACs from the cluster_size_histogram via cubic interpolation.

    Parameters
    ----------
    gdacs : array like
        The GDAC settings where the correction factor should be determined from the calibration
    calibration_gdacs : array like
        GDAC settings used during the source scan for the cluster size calibration.
    cluster_size_histogram : numpy.array, shape=(# of GDACs during calibration, # of cluster sizes)
        The calibration array

    Returns
    -------
    numpy.array, shape=(len(gdacs), )
        The correction factors for single hit clusters at gdacs.
    '''
    logging.info('Calculate the correction factor for the single hit cluster rate at %d given GDAC settings', len(gdacs))
    if len(calibration_gdacs) != cluster_size_histogram.shape[0]:
        raise ValueError('Length of the provided pixel GDACs does not match the dimension of the cluster size array')
    hist_sum = np.sum(cluster_size_histogram, axis=1)
    hist_rel = cluster_size_histogram / hist_sum[:, np.newaxis].astype('f4') * 100.
    maximum_rate = np.amax(hist_rel[:, 1])
    correction_factor = maximum_rate / hist_rel[:, 1]
    # sort arrays since interpolate does not work otherwise
    calibration_gdacs_sorted = np.array(calibration_gdacs)
    correction_factor_sorted = correction_factor[np.argsort(calibration_gdacs_sorted)]
    calibration_gdacs_sorted = np.sort(calibration_gdacs_sorted)
    interpolation = interp1d(calibration_gdacs_sorted.tolist(), correction_factor_sorted.tolist(), kind='cubic', bounds_error=True)
    return interpolation(gdacs)
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1356-L1385
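A minimal usage sketch, assuming pyBAR is importable; the calibration GDAC values and cluster size counts are invented for illustration (cubic interpolation needs at least four calibration points):

import numpy as np
from pybar.analysis.analysis_utils import get_hit_rate_correction

calibration_gdacs = [50, 70, 90, 110, 130]  # hypothetical GDAC settings of the source scan
# hypothetical counts per (GDAC setting, cluster size 0..9); nonzero so the single-hit bin never divides by zero
cluster_size_histogram = np.random.randint(1, 1000, size=(len(calibration_gdacs), 10))
correction = get_hit_rate_correction(gdacs=[60, 80, 100], calibration_gdacs=calibration_gdacs, cluster_size_histogram=cluster_size_histogram)
print(correction)  # one correction factor per requested GDAC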
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
get_mean_threshold_from_calibration
def get_mean_threshold_from_calibration(gdac, mean_threshold_calibration):
    '''Calculates the mean threshold from the threshold calibration at the given gdac settings. If the given gdac value was not used during calibration the value is determined by interpolation.

    Parameters
    ----------
    gdac : array like
        The GDAC settings where the threshold should be determined from the calibration
    mean_threshold_calibration : pytables table
        The table created during the calibration scan.

    Returns
    -------
    numpy.array, shape=(len(gdac), )
        The mean threshold values at each value in gdac.
    '''
    interpolation = interp1d(mean_threshold_calibration['parameter_value'], mean_threshold_calibration['mean_threshold'], kind='slinear', bounds_error=True)
    return interpolation(gdac)
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1388-L1405
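A quick sketch; a NumPy structured array stands in for the pytables calibration table, and all values are invented:

import numpy as np
from pybar.analysis.analysis_utils import get_mean_threshold_from_calibration

# fields mirror the columns the function reads from the calibration table
mean_threshold_calibration = np.array([(50, 3200.), (70, 3900.), (90, 4500.), (110, 5200.)],
                                      dtype=[('parameter_value', '<f8'), ('mean_threshold', '<f8')])
print(get_mean_threshold_from_calibration(gdac=[60, 100], mean_threshold_calibration=mean_threshold_calibration))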
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
get_pixel_thresholds_from_calibration_array
def get_pixel_thresholds_from_calibration_array(gdacs, calibration_gdacs, threshold_calibration_array, bounds_error=True):
    '''Calculates the threshold for all pixels in threshold_calibration_array at the given GDAC settings via linear interpolation. The GDAC settings used during calibration have to be given.

    Parameters
    ----------
    gdacs : array like
        The GDAC settings where the threshold should be determined from the calibration
    calibration_gdacs : array like
        GDAC settings used during calibration, needed to translate the index of the calibration array to a value.
    threshold_calibration_array : numpy.array, shape=(80, 336, # of GDACs during calibration)
        The calibration array

    Returns
    -------
    numpy.array, shape=(80, 336, # gdacs given)
        The threshold values for each pixel at gdacs.
    '''
    if len(calibration_gdacs) != threshold_calibration_array.shape[2]:
        raise ValueError('Length of the provided pixel GDACs does not match the third dimension of the calibration array')
    interpolation = interp1d(x=calibration_gdacs, y=threshold_calibration_array, kind='slinear', bounds_error=bounds_error)
    return interpolation(gdacs)
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1408-L1428
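A sketch with a random stand-in calibration array; the shapes follow the docstring (80 x 336 pixels, one slice per calibration GDAC), and interp1d interpolates along the last axis:

import numpy as np
from pybar.analysis.analysis_utils import get_pixel_thresholds_from_calibration_array

calibration_gdacs = [50, 70, 90, 110]
threshold_calibration_array = np.random.uniform(2000., 5000., size=(80, 336, len(calibration_gdacs)))
thresholds = get_pixel_thresholds_from_calibration_array(gdacs=[60, 100], calibration_gdacs=calibration_gdacs, threshold_calibration_array=threshold_calibration_array)
print(thresholds.shape)  # (80, 336, 2): one threshold map per requested GDAC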
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
get_n_cluster_per_event_hist
def get_n_cluster_per_event_hist(cluster_table):
    '''Calculates the number of clusters in every event.

    Parameters
    ----------
    cluster_table : pytables.table

    Returns
    -------
    numpy.histogram
    '''
    logging.info("Histogram number of cluster per event")
    cluster_in_events = analysis_utils.get_n_cluster_in_events(cluster_table)[:, 1]  # get the number of clusters for every event
    return np.histogram(cluster_in_events, bins=range(0, np.max(cluster_in_events) + 2))
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1470-L1483
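The function needs a pytables cluster table, but the counting idea behind it can be illustrated stand-alone (event numbers invented; this is a sketch of the concept, not pyBAR's implementation):

import numpy as np

event_numbers = np.array([0, 0, 1, 3, 3, 3])  # two clusters in event 0, one in event 1, three in event 3
n_cluster_in_events = np.unique(event_numbers, return_counts=True)[1]
hist, bin_edges = np.histogram(n_cluster_in_events, bins=range(0, np.max(n_cluster_in_events) + 2))
print(hist)  # [0 1 1 1]: no event with 0 clusters, one each with 1, 2 and 3 clusters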
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
get_data_statistics
def get_data_statistics(interpreted_files):
    '''Quick and dirty function to give a Redmine compatible overview table
    '''
    print '| *File Name* | *File Size* | *Times Stamp* | *Events* | *Bad Events* | *Measurement time* | *# SR* | *Hits* |'  # Mean Tot | Mean rel. BCID'
    for interpreted_file in interpreted_files:
        with tb.open_file(interpreted_file, mode="r") as in_file_h5:  # open the actual hit file
            n_hits = np.sum(in_file_h5.root.HistOcc[:])
            measurement_time = int(in_file_h5.root.meta_data[-1]['timestamp_stop'] - in_file_h5.root.meta_data[0]['timestamp_start'])
            # mean_tot = np.average(in_file_h5.root.HistTot[:], weights=range(0,16) * np.sum(range(0,16)))  # / in_file_h5.root.HistTot[:].shape[0]
            # mean_bcid = np.average(in_file_h5.root.HistRelBcid[:], weights=range(0,16))
            n_sr = np.sum(in_file_h5.root.HistServiceRecord[:])
            n_bad_events = int(np.sum(in_file_h5.root.HistErrorCounter[2:]))
            try:
                n_events = str(in_file_h5.root.Hits[-1]['event_number'] + 1)
            except tb.NoSuchNodeError:
                n_events = '~' + str(in_file_h5.root.meta_data[-1]['event_number'] + (in_file_h5.root.meta_data[-1]['event_number'] - in_file_h5.root.meta_data[-2]['event_number']))
            else:
                print '|', os.path.basename(interpreted_file), '|', int(os.path.getsize(interpreted_file) / (1024.0 * 1024.0)), 'Mb |', time.ctime(os.path.getctime(interpreted_file)), '|', n_events, '|', n_bad_events, '|', measurement_time, 's |', n_sr, '|', n_hits, '|'
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1486-L1503
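Usage is a single call under Python 2 (the print statements above imply it); the file names are hypothetical interpreted hit files:

from pybar.analysis.analysis_utils import get_data_statistics

get_data_statistics(['scan_1_interpreted.h5', 'scan_2_interpreted.h5'])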
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
contiguous_regions
def contiguous_regions(condition):
    """Finds contiguous True regions of the boolean array "condition". Returns
    a 2D array where the first column is the start index of the region and the
    second column is the end index.
    http://stackoverflow.com/questions/4494404/find-large-number-of-consecutive-values-fulfilling-condition-in-a-numpy-array
    """
    # Find the indices of changes in "condition"
    d = np.diff(condition, n=1)
    idx, = d.nonzero()

    # We need to start things after the change in "condition". Therefore,
    # we'll shift the index by 1 to the right.
    idx += 1

    if condition[0]:
        # If the start of condition is True prepend a 0
        idx = np.r_[0, idx]

    if condition[-1]:
        # If the end of condition is True, append the length of the array
        idx = np.r_[idx, condition.size]

    # Reshape the result into two columns
    idx.shape = (-1, 2)
    return idx
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1519-L1543
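A small demonstration with an invented boolean mask (note that pyBAR targets Python 2 era NumPy, where np.diff on boolean arrays is still allowed):

import numpy as np
from pybar.analysis.analysis_utils import contiguous_regions

condition = np.array([True, True, False, False, True, False, True, True, True])
print(contiguous_regions(condition))
# [[0 2]
#  [4 5]
#  [6 9]]  -> half-open [start, stop) index pairs of the True runs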
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
check_bad_data
def check_bad_data(raw_data, prepend_data_headers=None, trig_count=None):
    """Checking FEI4 raw data array for corrupted data.
    """
    consecutive_triggers = 16 if trig_count == 0 else trig_count
    is_fe_data_header = logical_and(is_fe_word, is_data_header)
    trigger_idx = np.where(is_trigger_word(raw_data) >= 1)[0]
    fe_dh_idx = np.where(is_fe_data_header(raw_data) >= 1)[0]
    n_triggers = trigger_idx.shape[0]
    n_dh = fe_dh_idx.shape[0]
    # get index of the last trigger
    if n_triggers:
        last_event_data_headers_cnt = np.where(fe_dh_idx > trigger_idx[-1])[0].shape[0]
        if consecutive_triggers and last_event_data_headers_cnt == consecutive_triggers:
            if not np.all(trigger_idx[-1] > fe_dh_idx):
                trigger_idx = np.r_[trigger_idx, raw_data.shape]
            last_event_data_headers_cnt = None
        elif last_event_data_headers_cnt != 0:
            fe_dh_idx = fe_dh_idx[:-last_event_data_headers_cnt]
        elif not np.all(trigger_idx[-1] > fe_dh_idx):
            trigger_idx = np.r_[trigger_idx, raw_data.shape]
    # if any data header, add trigger for histogramming, next readout has to have trigger word
    elif n_dh:
        trigger_idx = np.r_[trigger_idx, raw_data.shape]
        last_event_data_headers_cnt = None
    # no trigger, no data header
    # assuming correct data, return input values
    else:
        return False, prepend_data_headers, n_triggers, n_dh

#     # no triggers, check for the right amount of data headers
#     if consecutive_triggers and prepend_data_headers and prepend_data_headers + n_dh != consecutive_triggers:
#         return True, n_dh, n_triggers, n_dh

    n_triggers_cleaned = trigger_idx.shape[0]
    n_dh_cleaned = fe_dh_idx.shape[0]

    # check that trigger comes before data header
    if prepend_data_headers is None and n_triggers_cleaned and n_dh_cleaned and not trigger_idx[0] < fe_dh_idx[0]:
        return True, last_event_data_headers_cnt, n_triggers, n_dh  # FIXME: 0?
    # check that no trigger comes before the first data header
    elif consecutive_triggers and prepend_data_headers is not None and n_triggers_cleaned and n_dh_cleaned and trigger_idx[0] < fe_dh_idx[0]:
        return True, last_event_data_headers_cnt, n_triggers, n_dh  # FIXME: 0?
    # check for two consecutive triggers
    elif consecutive_triggers is None and prepend_data_headers == 0 and n_triggers_cleaned and n_dh_cleaned and trigger_idx[0] < fe_dh_idx[0]:
        return True, last_event_data_headers_cnt, n_triggers, n_dh  # FIXME: 0?
    elif prepend_data_headers is not None:
        trigger_idx += (prepend_data_headers + 1)
        fe_dh_idx += (prepend_data_headers + 1)
        # for histogramming add trigger at index 0
        trigger_idx = np.r_[0, trigger_idx]
        fe_dh_idx = np.r_[range(1, prepend_data_headers + 1), fe_dh_idx]

    event_hist, bins = np.histogram(fe_dh_idx, trigger_idx)
    if consecutive_triggers is None and np.any(event_hist == 0):
        return True, last_event_data_headers_cnt, n_triggers, n_dh
    elif consecutive_triggers and np.any(event_hist != consecutive_triggers):
        return True, last_event_data_headers_cnt, n_triggers, n_dh
    return False, last_event_data_headers_cnt, n_triggers, n_dh
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1546-L1605
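A usage sketch; the raw data file name is hypothetical, and trig_count=0 selects the FE-I4 default of 16 consecutive triggers per event (see the first line of the function):

import tables as tb
from pybar.analysis.analysis_utils import check_bad_data

with tb.open_file('source_scan.h5', mode='r') as in_file_h5:
    raw_data = in_file_h5.root.raw_data[:]
is_bad, last_dh_cnt, n_triggers, n_dh = check_bad_data(raw_data, prepend_data_headers=None, trig_count=0)
print(is_bad, n_triggers, n_dh)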
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
consecutive
def consecutive(data, stepsize=1):
    """Converts array into chunks with consecutive elements of given step size.
    http://stackoverflow.com/questions/7352684/how-to-find-the-groups-of-consecutive-elements-from-an-array-in-numpy
    """
    return np.split(data, np.where(np.diff(data) != stepsize)[0] + 1)
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1608-L1612
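The step-size grouping in one line (input data invented):

import numpy as np
from pybar.analysis.analysis_utils import consecutive

print(consecutive(np.array([0, 47, 48, 49, 50, 97, 98, 99])))
# [array([0]), array([47, 48, 49, 50]), array([97, 98, 99])]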
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
print_raw_data_file
def print_raw_data_file(input_file, start_index=0, limit=200, flavor='fei4b', select=None, tdc_trig_dist=False, trigger_data_mode=0, meta_data_v2=True):
    """Printing FEI4 data from raw data file for debugging.
    """
    with tb.open_file(input_file + '.h5', mode="r") as file_h5:
        if meta_data_v2:
            index_start = file_h5.root.meta_data.read(field='index_start')
            index_stop = file_h5.root.meta_data.read(field='index_stop')
        else:
            index_start = file_h5.root.meta_data.read(field='start_index')
            index_stop = file_h5.root.meta_data.read(field='stop_index')
        total_words = 0
        for read_out_index, (index_start, index_stop) in enumerate(np.column_stack((index_start, index_stop))):
            if start_index < index_stop:
                print "\nchunk %d with length %d (from index %d to %d)\n" % (read_out_index, (index_stop - index_start), index_start, index_stop)
                raw_data = file_h5.root.raw_data.read(index_start, index_stop)
                total_words += print_raw_data(raw_data=raw_data, start_index=max(start_index - index_start, 0), limit=limit - total_words, flavor=flavor, index_offset=index_start, select=select, tdc_trig_dist=tdc_trig_dist, trigger_data_mode=trigger_data_mode)
                if limit and total_words >= limit:
                    break
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1615-L1632
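A call sketch; the file name stem is hypothetical (the function appends '.h5' itself), and select limits the printout to trigger words and data headers:

from pybar.analysis.analysis_utils import print_raw_data_file

print_raw_data_file(input_file='source_scan', start_index=0, limit=100, select=['TW', 'DH'])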
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
print_raw_data
def print_raw_data(raw_data, start_index=0, limit=200, flavor='fei4b', index_offset=0, select=None, tdc_trig_dist=False, trigger_data_mode=0):
    """Printing FEI4 raw data array for debugging.
    """
    if not select:
        select = ['DH', 'TW', "AR", "VR", "SR", "DR", 'TDC', 'UNKNOWN FE WORD', 'UNKNOWN WORD']
    total_words = 0
    for index in range(start_index, raw_data.shape[0]):
        dw = FEI4Record(raw_data[index], chip_flavor=flavor, tdc_trig_dist=tdc_trig_dist, trigger_data_mode=trigger_data_mode)
        if dw in select:
            print index + index_offset, '{0:12d} {1:08b} {2:08b} {3:08b} {4:08b}'.format(raw_data[index], (raw_data[index] & 0xFF000000) >> 24, (raw_data[index] & 0x00FF0000) >> 16, (raw_data[index] & 0x0000FF00) >> 8, (raw_data[index] & 0x000000FF) >> 0), dw
            total_words += 1
            if limit and total_words >= limit:
                break
    return total_words
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1635-L1648
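The array variant can be fed any word chunk, for example one read back with pytables (file name hypothetical):

import tables as tb
from pybar.analysis.analysis_utils import print_raw_data

with tb.open_file('source_scan.h5', mode='r') as in_file_h5:
    raw_data = in_file_h5.root.raw_data.read(0, 1000)  # first 1000 raw data words
print_raw_data(raw_data, select=['TW', 'DH'])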
SiLab-Bonn/pyBAR
pybar/analysis/analysis_utils.py
ETA.update
def update(self, pbar):
    'Updates the widget to show the ETA or total time when finished.'
    self.n_refresh += 1
    if pbar.currval == 0:
        return 'ETA: --:--:--'
    elif pbar.finished:
        return 'Time: %s' % self.format_time(pbar.seconds_elapsed)
    else:
        elapsed = pbar.seconds_elapsed
        try:
            speed = pbar.currval / elapsed
            if self.speed_smooth is not None:
                self.speed_smooth = (self.speed_smooth * (1 - self.SMOOTHING)) + (speed * self.SMOOTHING)
            else:
                self.speed_smooth = speed
            eta = float(pbar.maxval) / self.speed_smooth - elapsed + 1 if float(pbar.maxval) / self.speed_smooth - elapsed + 1 > 0 else 0
            if float(pbar.currval) / pbar.maxval > 0.30 or self.n_refresh > 10:  # ETA only rather precise if > 30% is already finished or more than 10 times updated
                return 'ETA: %s' % self.format_time(eta)
            if self.old_eta is not None and self.old_eta < eta:  # do not show jumping ETA if non precise mode is active
                return 'ETA: ~%s' % self.format_time(self.old_eta)
            else:
                self.old_eta = eta
                return 'ETA: ~%s' % self.format_time(eta)
        except ZeroDivisionError:
            speed = 0
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analysis_utils.py#L1442-L1467
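A sketch of plugging the smoothed widget into a progress bar, assuming the ETA class this method belongs to is importable from pybar.analysis.analysis_utils and is a drop-in progressbar widget:

import time
import progressbar
from pybar.analysis.analysis_utils import ETA

pbar = progressbar.ProgressBar(widgets=[progressbar.Percentage(), ' ', ETA()], maxval=50).start()
for i in range(50):
    time.sleep(0.02)  # stand-in for real work
    pbar.update(i + 1)
pbar.finish()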
SiLab-Bonn/pyBAR
pybar/scans/analyze_source_scan_gdac_data.py
plot_result
def plot_result(x_p, y_p, y_p_e, smoothed_data, smoothed_data_diff, filename=None):
    '''Fit spline to the profile histogrammed data, differentiate, determine MPV and plot.

    Parameters
    ----------
    x_p, y_p : array like
        data points (x, y)
    y_p_e : array like
        error bars in y
    '''
    logging.info('Plot results')
    plt.close()
    p1 = plt.errorbar(x_p * analysis_configuration['vcal_calibration'], y_p, yerr=y_p_e, fmt='o')  # plot data with error bars
    p2, = plt.plot(x_p * analysis_configuration['vcal_calibration'], smoothed_data, '-r')  # plot smoothed data
    factor = np.amax(y_p) / np.amin(smoothed_data_diff) * 1.1
    p3, = plt.plot(x_p * analysis_configuration['vcal_calibration'], factor * smoothed_data_diff, '-', lw=2)  # plot differentiated data
    mpv_index = np.argmax(-analysis_utils.smooth_differentiation(x_p, y_p, weigths=1 / y_p_e, order=3, smoothness=analysis_configuration['smoothness'], derivation=1))
    p4, = plt.plot([x_p[mpv_index] * analysis_configuration['vcal_calibration'], x_p[mpv_index] * analysis_configuration['vcal_calibration']], [0, factor * smoothed_data_diff[mpv_index]], 'k-', lw=2)
    text = 'MPV ' + str(int(x_p[mpv_index] * analysis_configuration['vcal_calibration'])) + ' e'
    plt.text(1.01 * x_p[mpv_index] * analysis_configuration['vcal_calibration'], -10. * smoothed_data_diff[mpv_index], text, ha='left')
    plt.legend([p1, p2, p3, p4], ['data', 'smoothed spline', 'spline differentiation', text], prop={'size': 12}, loc=0)
    plt.title('\'Single hit cluster\'-occupancy for different pixel thresholds')
    plt.xlabel('Pixel threshold [e]')
    plt.ylabel('Single hit cluster occupancy [a.u.]')
    plt.ylim(0, np.amax(y_p) * 1.15)
    if filename is None:
        plt.show()
    else:
        filename.savefig(plt.gcf())
    return smoothed_data_diff
python
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/scans/analyze_source_scan_gdac_data.py#L77-L106
SiLab-Bonn/pyBAR
pybar/scans/calibrate_hit_or.py
create_hitor_calibration
def create_hitor_calibration(output_filename, plot_pixel_calibrations=False):
    '''Generating HitOr calibration file (_calibration.h5) from raw data file and plotting of calibration data.

    Parameters
    ----------
    output_filename : string
        Input raw data file name.
    plot_pixel_calibrations : bool, iterable
        If True, generating additional pixel calibration plots. If list of column and row tuples (from 1 to 80 / 336), plot selected pixels.

    Returns
    -------
    nothing
    '''
    logging.info('Analyze HitOR calibration data and plot results of %s', output_filename)

    with AnalyzeRawData(raw_data_file=output_filename, create_pdf=True) as analyze_raw_data:  # Interpret the raw data file
        analyze_raw_data.create_occupancy_hist = False  # too many scan parameters to do in ram histogramming
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.create_tdc_hist = True
        analyze_raw_data.align_at_tdc = True  # align events at TDC words, first word of event has to be a tdc word
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        n_injections = analyze_raw_data.n_injections  # use later
        meta_data = analyze_raw_data.out_file_h5.root.meta_data[:]
        scan_parameters_dict = get_scan_parameter(meta_data)
        inner_loop_parameter_values = scan_parameters_dict[next(reversed(scan_parameters_dict))]  # inner loop parameter name is unknown
        scan_parameter_names = scan_parameters_dict.keys()
        # col_row_combinations = get_unique_scan_parameter_combinations(analyze_raw_data.out_file_h5.root.meta_data[:], scan_parameters=('column', 'row'), scan_parameter_columns_only=True)
        meta_data_table_at_scan_parameter = get_unique_scan_parameter_combinations(meta_data, scan_parameters=scan_parameter_names)
        scan_parameter_values = get_scan_parameters_table_from_meta_data(meta_data_table_at_scan_parameter, scan_parameter_names)
        event_number_ranges = get_ranges_from_array(meta_data_table_at_scan_parameter['event_number'])
        event_ranges_per_parameter = np.column_stack((scan_parameter_values, event_number_ranges))
        if analyze_raw_data.out_file_h5.root.Hits.nrows == 0:
            raise AnalysisError("Found no hits.")
        hits = analyze_raw_data.out_file_h5.root.Hits[:]
        event_numbers = hits['event_number'].copy()  # create contiguous array, otherwise np.searchsorted too slow, http://stackoverflow.com/questions/15139299/performance-of-numpy-searchsorted-is-poor-on-structured-arrays

        output_filename = os.path.splitext(output_filename)[0]
        with tb.open_file(output_filename + "_calibration.h5", mode="w") as calibration_data_file:
            logging.info('Create calibration')
            calibration_data = np.full(shape=(80, 336, len(inner_loop_parameter_values), 4), fill_value=np.nan, dtype='f4')  # result of the calibration is a histogram with col_index, row_index, plsrDAC value, mean discrete tot, rms discrete tot, mean tot from TDC, rms tot from TDC

            progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=len(event_ranges_per_parameter), term_width=80)
            progress_bar.start()

            for index, (actual_scan_parameter_values, event_start, event_stop) in enumerate(event_ranges_per_parameter):
                if event_stop is None:  # happens for the last chunk
                    event_stop = hits[-1]['event_number'] + 1
                array_index = np.searchsorted(event_numbers, np.array([event_start, event_stop]))
                actual_hits = hits[array_index[0]:array_index[1]]
                for item_index, item in enumerate(scan_parameter_names):
                    if item == "column":
                        actual_col = actual_scan_parameter_values[item_index]
                    elif item == "row":
                        actual_row = actual_scan_parameter_values[item_index]
                    elif item == "PlsrDAC":
                        plser_dac = actual_scan_parameter_values[item_index]
                    else:
                        raise ValueError("Unknown scan parameter %s" % item)
                # Only pixel of actual column/row should be in the actual data chunk but since FIFO is not cleared for each scan step due to speed reasons and there might be noisy pixels this is not always the case
                n_wrong_pixel = np.count_nonzero(np.logical_or(actual_hits['column'] != actual_col, actual_hits['row'] != actual_row))
                if n_wrong_pixel != 0:
                    logging.warning('%d hit(s) from other pixels for scan parameters %s', n_wrong_pixel, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                actual_hits = actual_hits[np.logical_and(actual_hits['column'] == actual_col, actual_hits['row'] == actual_row)]  # Only take data from selected pixel
                actual_tdc_hits = actual_hits[(actual_hits['event_status'] & 0b0000111110011100) == 0b0000000100000000]  # only take hits from good events (one TDC word only, no error)
                actual_tot_hits = actual_hits[(actual_hits['event_status'] & 0b0000100010011100) == 0b0000000000000000]  # only take hits from good events for tot
                tot, tdc = actual_tot_hits['tot'], actual_tdc_hits['TDC']
                if tdc.shape[0] < n_injections:
                    logging.info('%d of %d expected TDC hits for scan parameters %s', tdc.shape[0], n_injections, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                if tot.shape[0] < n_injections:
                    logging.info('%d of %d expected hits for scan parameters %s', tot.shape[0], n_injections, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                inner_loop_scan_parameter_index = np.where(plser_dac == inner_loop_parameter_values)[0][0]  # translate the scan parameter value to an index for the result histogram
                # numpy mean and std return nan if array is empty
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 0] = np.mean(tot)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 1] = np.mean(tdc)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 2] = np.std(tot)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 3] = np.std(tdc)
                progress_bar.update(index)
            progress_bar.finish()

            calibration_data_out = calibration_data_file.create_carray(calibration_data_file.root, name='HitOrCalibration', title='Hit OR calibration data', atom=tb.Atom.from_dtype(calibration_data.dtype), shape=calibration_data.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
            calibration_data_out[:] = calibration_data
            calibration_data_out.attrs.dimensions = scan_parameter_names
            calibration_data_out.attrs.scan_parameter_values = inner_loop_parameter_values
            calibration_data_out.flush()
            # with PdfPages(output_filename + "_calibration.pdf") as output_pdf:
            plot_scurves(calibration_data[:, :, :, 0], inner_loop_parameter_values, "ToT calibration", "ToT", 15, "Charge [PlsrDAC]", filename=analyze_raw_data.output_pdf)
            plot_scurves(calibration_data[:, :, :, 1], inner_loop_parameter_values, "TDC calibration", "TDC [ns]", None, "Charge [PlsrDAC]", filename=analyze_raw_data.output_pdf)
            tot_mean_all_pix = np.nanmean(calibration_data[:, :, :, 0], axis=(0, 1))
            tot_error_all_pix = np.nanstd(calibration_data[:, :, :, 0], axis=(0, 1))
            tdc_mean_all_pix = np.nanmean(calibration_data[:, :, :, 1], axis=(0, 1))
            tdc_error_all_pix = np.nanstd(calibration_data[:, :, :, 1], axis=(0, 1))
            plot_tot_tdc_calibration(scan_parameters=inner_loop_parameter_values, tot_mean=tot_mean_all_pix, tot_error=tot_error_all_pix, tdc_mean=tdc_mean_all_pix, tdc_error=tdc_error_all_pix, filename=analyze_raw_data.output_pdf, title="Mean charge calibration of %d pixel(s)" % np.count_nonzero(~np.all(np.isnan(calibration_data[:, :, :, 0]), axis=2)))
            # plotting individual pixels
            if plot_pixel_calibrations is True:
                # selecting pixels with non-nan entries
                col_row_non_nan = np.nonzero(~np.all(np.isnan(calibration_data[:, :, :, 0]), axis=2))
                plot_pixel_calibrations = np.dstack(col_row_non_nan)[0]
            elif plot_pixel_calibrations is False:
                plot_pixel_calibrations = np.array([], dtype=np.int)
            else:  # assuming list of column / row tuples
                plot_pixel_calibrations = np.array(plot_pixel_calibrations) - 1
            # generate index array
            pixel_indices = np.arange(plot_pixel_calibrations.shape[0])
            plot_n_pixels = 10  # number of pixels at the beginning, center and end of the array
            np.random.seed(0)
            # select random pixels
            if pixel_indices.size - 2 * plot_n_pixels >= 0:
                random_pixel_indices = np.sort(np.random.choice(pixel_indices[plot_n_pixels:-plot_n_pixels], min(plot_n_pixels, pixel_indices.size - 2 * plot_n_pixels), replace=False))
            else:
                random_pixel_indices = np.array([], dtype=np.int)
            selected_pixel_indices = np.unique(np.hstack([pixel_indices[:plot_n_pixels], random_pixel_indices, pixel_indices[-plot_n_pixels:]]))
            # plotting individual pixels
            for (column, row) in plot_pixel_calibrations[selected_pixel_indices]:
                logging.info("Plotting charge calibration for pixel column " + str(column + 1) + " / row " + str(row + 1))
                tot_mean_single_pix = calibration_data[column, row, :, 0]
                tot_std_single_pix = calibration_data[column, row, :, 2]
                tdc_mean_single_pix = calibration_data[column, row, :, 1]
                tdc_std_single_pix = calibration_data[column, row, :, 3]
                plot_tot_tdc_calibration(scan_parameters=inner_loop_parameter_values, tot_mean=tot_mean_single_pix, tot_error=tot_std_single_pix, tdc_mean=tdc_mean_single_pix, tdc_error=tdc_std_single_pix, filename=analyze_raw_data.output_pdf, title="Charge calibration for pixel column " + str(column + 1) + " / row " + str(row + 1))
python
def create_hitor_calibration(output_filename, plot_pixel_calibrations=False):
    '''Generating HitOr calibration file (_calibration.h5) from raw data file and plotting of calibration data.

    Parameters
    ----------
    output_filename : string
        Input raw data file name.
    plot_pixel_calibrations : bool, iterable
        If True, generating additional pixel calibration plots. If list of column and row tuples (from 1 to 80 / 336), plot selected pixels.

    Returns
    -------
    nothing
    '''
    logging.info('Analyze HitOR calibration data and plot results of %s', output_filename)

    with AnalyzeRawData(raw_data_file=output_filename, create_pdf=True) as analyze_raw_data:  # Interpret the raw data file
        analyze_raw_data.create_occupancy_hist = False  # too many scan parameters to do in ram histogramming
        analyze_raw_data.create_hit_table = True
        analyze_raw_data.create_tdc_hist = True
        analyze_raw_data.align_at_tdc = True  # align events at TDC words, first word of event has to be a tdc word
        analyze_raw_data.interpret_word_table()
        analyze_raw_data.interpreter.print_summary()
        analyze_raw_data.plot_histograms()
        n_injections = analyze_raw_data.n_injections  # use later
        meta_data = analyze_raw_data.out_file_h5.root.meta_data[:]
        scan_parameters_dict = get_scan_parameter(meta_data)
        inner_loop_parameter_values = scan_parameters_dict[next(reversed(scan_parameters_dict))]  # inner loop parameter name is unknown
        scan_parameter_names = scan_parameters_dict.keys()
        # col_row_combinations = get_unique_scan_parameter_combinations(analyze_raw_data.out_file_h5.root.meta_data[:], scan_parameters=('column', 'row'), scan_parameter_columns_only=True)
        meta_data_table_at_scan_parameter = get_unique_scan_parameter_combinations(meta_data, scan_parameters=scan_parameter_names)
        scan_parameter_values = get_scan_parameters_table_from_meta_data(meta_data_table_at_scan_parameter, scan_parameter_names)
        event_number_ranges = get_ranges_from_array(meta_data_table_at_scan_parameter['event_number'])
        event_ranges_per_parameter = np.column_stack((scan_parameter_values, event_number_ranges))
        if analyze_raw_data.out_file_h5.root.Hits.nrows == 0:
            raise AnalysisError("Found no hits.")
        hits = analyze_raw_data.out_file_h5.root.Hits[:]
        event_numbers = hits['event_number'].copy()  # create contiguous array, otherwise np.searchsorted too slow, http://stackoverflow.com/questions/15139299/performance-of-numpy-searchsorted-is-poor-on-structured-arrays

        output_filename = os.path.splitext(output_filename)[0]
        with tb.open_file(output_filename + "_calibration.h5", mode="w") as calibration_data_file:
            logging.info('Create calibration')
            calibration_data = np.full(shape=(80, 336, len(inner_loop_parameter_values), 4), fill_value=np.nan, dtype='f4')  # result of the calibration is a histogram with col_index, row_index, plsrDAC value, mean discrete tot, rms discrete tot, mean tot from TDC, rms tot from TDC

            progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=len(event_ranges_per_parameter), term_width=80)
            progress_bar.start()

            for index, (actual_scan_parameter_values, event_start, event_stop) in enumerate(event_ranges_per_parameter):
                if event_stop is None:  # happens for the last chunk
                    event_stop = hits[-1]['event_number'] + 1
                array_index = np.searchsorted(event_numbers, np.array([event_start, event_stop]))
                actual_hits = hits[array_index[0]:array_index[1]]
                for item_index, item in enumerate(scan_parameter_names):
                    if item == "column":
                        actual_col = actual_scan_parameter_values[item_index]
                    elif item == "row":
                        actual_row = actual_scan_parameter_values[item_index]
                    elif item == "PlsrDAC":
                        plser_dac = actual_scan_parameter_values[item_index]
                    else:
                        raise ValueError("Unknown scan parameter %s" % item)
                # Only pixel of actual column/row should be in the actual data chunk but since FIFO is not cleared for each scan step due to speed reasons and there might be noisy pixels this is not always the case
                n_wrong_pixel = np.count_nonzero(np.logical_or(actual_hits['column'] != actual_col, actual_hits['row'] != actual_row))
                if n_wrong_pixel != 0:
                    logging.warning('%d hit(s) from other pixels for scan parameters %s', n_wrong_pixel, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                actual_hits = actual_hits[np.logical_and(actual_hits['column'] == actual_col, actual_hits['row'] == actual_row)]  # Only take data from selected pixel
                actual_tdc_hits = actual_hits[(actual_hits['event_status'] & 0b0000111110011100) == 0b0000000100000000]  # only take hits from good events (one TDC word only, no error)
                actual_tot_hits = actual_hits[(actual_hits['event_status'] & 0b0000100010011100) == 0b0000000000000000]  # only take hits from good events for tot
                tot, tdc = actual_tot_hits['tot'], actual_tdc_hits['TDC']
                if tdc.shape[0] < n_injections:
                    logging.info('%d of %d expected TDC hits for scan parameters %s', tdc.shape[0], n_injections, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                if tot.shape[0] < n_injections:
                    logging.info('%d of %d expected hits for scan parameters %s', tot.shape[0], n_injections, ', '.join(['%s=%s' % (name, value) for (name, value) in zip(scan_parameter_names, actual_scan_parameter_values)]))
                inner_loop_scan_parameter_index = np.where(plser_dac == inner_loop_parameter_values)[0][0]  # translate the scan parameter value to an index for the result histogram
                # numpy mean and std return nan if array is empty
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 0] = np.mean(tot)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 1] = np.mean(tdc)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 2] = np.std(tot)
                calibration_data[actual_col - 1, actual_row - 1, inner_loop_scan_parameter_index, 3] = np.std(tdc)
                progress_bar.update(index)
            progress_bar.finish()

            calibration_data_out = calibration_data_file.create_carray(calibration_data_file.root, name='HitOrCalibration', title='Hit OR calibration data', atom=tb.Atom.from_dtype(calibration_data.dtype), shape=calibration_data.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
            calibration_data_out[:] = calibration_data
            calibration_data_out.attrs.dimensions = scan_parameter_names
            calibration_data_out.attrs.scan_parameter_values = inner_loop_parameter_values
            calibration_data_out.flush()
            # with PdfPages(output_filename + "_calibration.pdf") as output_pdf:
            plot_scurves(calibration_data[:, :, :, 0], inner_loop_parameter_values, "ToT calibration", "ToT", 15, "Charge [PlsrDAC]", filename=analyze_raw_data.output_pdf)
            plot_scurves(calibration_data[:, :, :, 1], inner_loop_parameter_values, "TDC calibration", "TDC [ns]", None, "Charge [PlsrDAC]", filename=analyze_raw_data.output_pdf)
            tot_mean_all_pix = np.nanmean(calibration_data[:, :, :, 0], axis=(0, 1))
            tot_error_all_pix = np.nanstd(calibration_data[:, :, :, 0], axis=(0, 1))
            tdc_mean_all_pix = np.nanmean(calibration_data[:, :, :, 1], axis=(0, 1))
            tdc_error_all_pix = np.nanstd(calibration_data[:, :, :, 1], axis=(0, 1))
            plot_tot_tdc_calibration(scan_parameters=inner_loop_parameter_values, tot_mean=tot_mean_all_pix, tot_error=tot_error_all_pix, tdc_mean=tdc_mean_all_pix, tdc_error=tdc_error_all_pix, filename=analyze_raw_data.output_pdf, title="Mean charge calibration of %d pixel(s)" % np.count_nonzero(~np.all(np.isnan(calibration_data[:, :, :, 0]), axis=2)))
            # plotting individual pixels
            if plot_pixel_calibrations is True:
                # selecting pixels with non-nan entries
                col_row_non_nan = np.nonzero(~np.all(np.isnan(calibration_data[:, :, :, 0]), axis=2))
                plot_pixel_calibrations = np.dstack(col_row_non_nan)[0]
            elif plot_pixel_calibrations is False:
                plot_pixel_calibrations = np.array([], dtype=np.int)
            else:  # assuming list of column / row tuples
                plot_pixel_calibrations = np.array(plot_pixel_calibrations) - 1
            # generate index array
            pixel_indices = np.arange(plot_pixel_calibrations.shape[0])
            plot_n_pixels = 10  # number of pixels at the beginning, center and end of the array
            np.random.seed(0)
            # select random pixels
            if pixel_indices.size - 2 * plot_n_pixels >= 0:
                random_pixel_indices = np.sort(np.random.choice(pixel_indices[plot_n_pixels:-plot_n_pixels], min(plot_n_pixels, pixel_indices.size - 2 * plot_n_pixels), replace=False))
            else:
                random_pixel_indices = np.array([], dtype=np.int)
            selected_pixel_indices = np.unique(np.hstack([pixel_indices[:plot_n_pixels], random_pixel_indices, pixel_indices[-plot_n_pixels:]]))
            # plotting individual pixels
            for (column, row) in plot_pixel_calibrations[selected_pixel_indices]:
                logging.info("Plotting charge calibration for pixel column " + str(column + 1) + " / row " + str(row + 1))
                tot_mean_single_pix = calibration_data[column, row, :, 0]
                tot_std_single_pix = calibration_data[column, row, :, 2]
                tdc_mean_single_pix = calibration_data[column, row, :, 1]
                tdc_std_single_pix = calibration_data[column, row, :, 3]
                plot_tot_tdc_calibration(scan_parameters=inner_loop_parameter_values, tot_mean=tot_mean_single_pix, tot_error=tot_std_single_pix, tdc_mean=tdc_mean_single_pix, tdc_error=tdc_std_single_pix, filename=analyze_raw_data.output_pdf, title="Charge calibration for pixel column " + str(column + 1) + " / row " + str(row + 1))
[ "def", "create_hitor_calibration", "(", "output_filename", ",", "plot_pixel_calibrations", "=", "False", ")", ":", "logging", ".", "info", "(", "'Analyze HitOR calibration data and plot results of %s'", ",", "output_filename", ")", "with", "AnalyzeRawData", "(", "raw_data_file", "=", "output_filename", ",", "create_pdf", "=", "True", ")", "as", "analyze_raw_data", ":", "# Interpret the raw data file\r", "analyze_raw_data", ".", "create_occupancy_hist", "=", "False", "# too many scan parameters to do in ram histogramming\r", "analyze_raw_data", ".", "create_hit_table", "=", "True", "analyze_raw_data", ".", "create_tdc_hist", "=", "True", "analyze_raw_data", ".", "align_at_tdc", "=", "True", "# align events at TDC words, first word of event has to be a tdc word\r", "analyze_raw_data", ".", "interpret_word_table", "(", ")", "analyze_raw_data", ".", "interpreter", ".", "print_summary", "(", ")", "analyze_raw_data", ".", "plot_histograms", "(", ")", "n_injections", "=", "analyze_raw_data", ".", "n_injections", "# use later\r", "meta_data", "=", "analyze_raw_data", ".", "out_file_h5", ".", "root", ".", "meta_data", "[", ":", "]", "scan_parameters_dict", "=", "get_scan_parameter", "(", "meta_data", ")", "inner_loop_parameter_values", "=", "scan_parameters_dict", "[", "next", "(", "reversed", "(", "scan_parameters_dict", ")", ")", "]", "# inner loop parameter name is unknown\r", "scan_parameter_names", "=", "scan_parameters_dict", ".", "keys", "(", ")", "# col_row_combinations = get_unique_scan_parameter_combinations(analyze_raw_data.out_file_h5.root.meta_data[:], scan_parameters=('column', 'row'), scan_parameter_columns_only=True)\r", "meta_data_table_at_scan_parameter", "=", "get_unique_scan_parameter_combinations", "(", "meta_data", ",", "scan_parameters", "=", "scan_parameter_names", ")", "scan_parameter_values", "=", "get_scan_parameters_table_from_meta_data", "(", "meta_data_table_at_scan_parameter", ",", "scan_parameter_names", ")", "event_number_ranges", "=", "get_ranges_from_array", "(", "meta_data_table_at_scan_parameter", "[", "'event_number'", "]", ")", "event_ranges_per_parameter", "=", "np", ".", "column_stack", "(", "(", "scan_parameter_values", ",", "event_number_ranges", ")", ")", "if", "analyze_raw_data", ".", "out_file_h5", ".", "root", ".", "Hits", ".", "nrows", "==", "0", ":", "raise", "AnalysisError", "(", "\"Found no hits.\"", ")", "hits", "=", "analyze_raw_data", ".", "out_file_h5", ".", "root", ".", "Hits", "[", ":", "]", "event_numbers", "=", "hits", "[", "'event_number'", "]", ".", "copy", "(", ")", "# create contigous array, otherwise np.searchsorted too slow, http://stackoverflow.com/questions/15139299/performance-of-numpy-searchsorted-is-poor-on-structured-arrays\r", "output_filename", "=", "os", ".", "path", ".", "splitext", "(", "output_filename", ")", "[", "0", "]", "with", "tb", ".", "open_file", "(", "output_filename", "+", "\"_calibration.h5\"", ",", "mode", "=", "\"w\"", ")", "as", "calibration_data_file", ":", "logging", ".", "info", "(", "'Create calibration'", ")", "calibration_data", "=", "np", ".", "full", "(", "shape", "=", "(", "80", ",", "336", ",", "len", "(", "inner_loop_parameter_values", ")", ",", "4", ")", ",", "fill_value", "=", "np", ".", "nan", ",", "dtype", "=", "'f4'", ")", "# result of the calibration is a histogram with col_index, row_index, plsrDAC value, mean discrete tot, rms discrete tot, mean tot from TDC, rms tot from TDC\r", "progress_bar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "[", "''", ",", "progressbar", 
".", "Percentage", "(", ")", ",", "' '", ",", "progressbar", ".", "Bar", "(", "marker", "=", "'*'", ",", "left", "=", "'|'", ",", "right", "=", "'|'", ")", ",", "' '", ",", "progressbar", ".", "AdaptiveETA", "(", ")", "]", ",", "maxval", "=", "len", "(", "event_ranges_per_parameter", ")", ",", "term_width", "=", "80", ")", "progress_bar", ".", "start", "(", ")", "for", "index", ",", "(", "actual_scan_parameter_values", ",", "event_start", ",", "event_stop", ")", "in", "enumerate", "(", "event_ranges_per_parameter", ")", ":", "if", "event_stop", "is", "None", ":", "# happens for the last chunk\r", "event_stop", "=", "hits", "[", "-", "1", "]", "[", "'event_number'", "]", "+", "1", "array_index", "=", "np", ".", "searchsorted", "(", "event_numbers", ",", "np", ".", "array", "(", "[", "event_start", ",", "event_stop", "]", ")", ")", "actual_hits", "=", "hits", "[", "array_index", "[", "0", "]", ":", "array_index", "[", "1", "]", "]", "for", "item_index", ",", "item", "in", "enumerate", "(", "scan_parameter_names", ")", ":", "if", "item", "==", "\"column\"", ":", "actual_col", "=", "actual_scan_parameter_values", "[", "item_index", "]", "elif", "item", "==", "\"row\"", ":", "actual_row", "=", "actual_scan_parameter_values", "[", "item_index", "]", "elif", "item", "==", "\"PlsrDAC\"", ":", "plser_dac", "=", "actual_scan_parameter_values", "[", "item_index", "]", "else", ":", "raise", "ValueError", "(", "\"Unknown scan parameter %s\"", "%", "item", ")", "# Only pixel of actual column/row should be in the actual data chunk but since FIFO is not cleared for each scan step due to speed reasons and there might be noisy pixels this is not always the case\r", "n_wrong_pixel", "=", "np", ".", "count_nonzero", "(", "np", ".", "logical_or", "(", "actual_hits", "[", "'column'", "]", "!=", "actual_col", ",", "actual_hits", "[", "'row'", "]", "!=", "actual_row", ")", ")", "if", "n_wrong_pixel", "!=", "0", ":", "logging", ".", "warning", "(", "'%d hit(s) from other pixels for scan parameters %s'", ",", "n_wrong_pixel", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "name", ",", "value", ")", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "scan_parameter_names", ",", "actual_scan_parameter_values", ")", "]", ")", ")", "actual_hits", "=", "actual_hits", "[", "np", ".", "logical_and", "(", "actual_hits", "[", "'column'", "]", "==", "actual_col", ",", "actual_hits", "[", "'row'", "]", "==", "actual_row", ")", "]", "# Only take data from selected pixel\r", "actual_tdc_hits", "=", "actual_hits", "[", "(", "actual_hits", "[", "'event_status'", "]", "&", "0b0000111110011100", ")", "==", "0b0000000100000000", "]", "# only take hits from good events (one TDC word only, no error)\r", "actual_tot_hits", "=", "actual_hits", "[", "(", "actual_hits", "[", "'event_status'", "]", "&", "0b0000100010011100", ")", "==", "0b0000000000000000", "]", "# only take hits from good events for tot\r", "tot", ",", "tdc", "=", "actual_tot_hits", "[", "'tot'", "]", ",", "actual_tdc_hits", "[", "'TDC'", "]", "if", "tdc", ".", "shape", "[", "0", "]", "<", "n_injections", ":", "logging", ".", "info", "(", "'%d of %d expected TDC hits for scan parameters %s'", ",", "tdc", ".", "shape", "[", "0", "]", ",", "n_injections", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "name", ",", "value", ")", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "scan_parameter_names", ",", "actual_scan_parameter_values", ")", "]", ")", ")", "if", "tot", ".", "shape", "[", "0", "]", "<", "n_injections", ":", "logging", ".", 
"info", "(", "'%d of %d expected hits for scan parameters %s'", ",", "tot", ".", "shape", "[", "0", "]", ",", "n_injections", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "name", ",", "value", ")", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "scan_parameter_names", ",", "actual_scan_parameter_values", ")", "]", ")", ")", "inner_loop_scan_parameter_index", "=", "np", ".", "where", "(", "plser_dac", "==", "inner_loop_parameter_values", ")", "[", "0", "]", "[", "0", "]", "# translate the scan parameter value to an index for the result histogram\r", "# numpy mean and std return nan if array is empty\r", "calibration_data", "[", "actual_col", "-", "1", ",", "actual_row", "-", "1", ",", "inner_loop_scan_parameter_index", ",", "0", "]", "=", "np", ".", "mean", "(", "tot", ")", "calibration_data", "[", "actual_col", "-", "1", ",", "actual_row", "-", "1", ",", "inner_loop_scan_parameter_index", ",", "1", "]", "=", "np", ".", "mean", "(", "tdc", ")", "calibration_data", "[", "actual_col", "-", "1", ",", "actual_row", "-", "1", ",", "inner_loop_scan_parameter_index", ",", "2", "]", "=", "np", ".", "std", "(", "tot", ")", "calibration_data", "[", "actual_col", "-", "1", ",", "actual_row", "-", "1", ",", "inner_loop_scan_parameter_index", ",", "3", "]", "=", "np", ".", "std", "(", "tdc", ")", "progress_bar", ".", "update", "(", "index", ")", "progress_bar", ".", "finish", "(", ")", "calibration_data_out", "=", "calibration_data_file", ".", "create_carray", "(", "calibration_data_file", ".", "root", ",", "name", "=", "'HitOrCalibration'", ",", "title", "=", "'Hit OR calibration data'", ",", "atom", "=", "tb", ".", "Atom", ".", "from_dtype", "(", "calibration_data", ".", "dtype", ")", ",", "shape", "=", "calibration_data", ".", "shape", ",", "filters", "=", "tb", ".", "Filters", "(", "complib", "=", "'blosc'", ",", "complevel", "=", "5", ",", "fletcher32", "=", "False", ")", ")", "calibration_data_out", "[", ":", "]", "=", "calibration_data", "calibration_data_out", ".", "attrs", ".", "dimensions", "=", "scan_parameter_names", "calibration_data_out", ".", "attrs", ".", "scan_parameter_values", "=", "inner_loop_parameter_values", "calibration_data_out", ".", "flush", "(", ")", "# with PdfPages(output_filename + \"_calibration.pdf\") as output_pdf:\r", "plot_scurves", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "0", "]", ",", "inner_loop_parameter_values", ",", "\"ToT calibration\"", ",", "\"ToT\"", ",", "15", ",", "\"Charge [PlsrDAC]\"", ",", "filename", "=", "analyze_raw_data", ".", "output_pdf", ")", "plot_scurves", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "1", "]", ",", "inner_loop_parameter_values", ",", "\"TDC calibration\"", ",", "\"TDC [ns]\"", ",", "None", ",", "\"Charge [PlsrDAC]\"", ",", "filename", "=", "analyze_raw_data", ".", "output_pdf", ")", "tot_mean_all_pix", "=", "np", ".", "nanmean", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "0", "]", ",", "axis", "=", "(", "0", ",", "1", ")", ")", "tot_error_all_pix", "=", "np", ".", "nanstd", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "0", "]", ",", "axis", "=", "(", "0", ",", "1", ")", ")", "tdc_mean_all_pix", "=", "np", ".", "nanmean", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "1", "]", ",", "axis", "=", "(", "0", ",", "1", ")", ")", "tdc_error_all_pix", "=", "np", ".", "nanstd", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "1", "]", ",", "axis", "=", "(", "0", ",", "1", ")", ")", "plot_tot_tdc_calibration", "(", 
"scan_parameters", "=", "inner_loop_parameter_values", ",", "tot_mean", "=", "tot_mean_all_pix", ",", "tot_error", "=", "tot_error_all_pix", ",", "tdc_mean", "=", "tdc_mean_all_pix", ",", "tdc_error", "=", "tdc_error_all_pix", ",", "filename", "=", "analyze_raw_data", ".", "output_pdf", ",", "title", "=", "\"Mean charge calibration of %d pixel(s)\"", "%", "np", ".", "count_nonzero", "(", "~", "np", ".", "all", "(", "np", ".", "isnan", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "0", "]", ")", ",", "axis", "=", "2", ")", ")", ")", "# plotting individual pixels\r", "if", "plot_pixel_calibrations", "is", "True", ":", "# selecting pixels with non-nan entries\r", "col_row_non_nan", "=", "np", ".", "nonzero", "(", "~", "np", ".", "all", "(", "np", ".", "isnan", "(", "calibration_data", "[", ":", ",", ":", ",", ":", ",", "0", "]", ")", ",", "axis", "=", "2", ")", ")", "plot_pixel_calibrations", "=", "np", ".", "dstack", "(", "col_row_non_nan", ")", "[", "0", "]", "elif", "plot_pixel_calibrations", "is", "False", ":", "plot_pixel_calibrations", "=", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "int", ")", "else", ":", "# assuming list of column / row tuples\r", "plot_pixel_calibrations", "=", "np", ".", "array", "(", "plot_pixel_calibrations", ")", "-", "1", "# generate index array\r", "pixel_indices", "=", "np", ".", "arange", "(", "plot_pixel_calibrations", ".", "shape", "[", "0", "]", ")", "plot_n_pixels", "=", "10", "# number of pixels at the beginning, center and end of the array\r", "np", ".", "random", ".", "seed", "(", "0", ")", "# select random pixels\r", "if", "pixel_indices", ".", "size", "-", "2", "*", "plot_n_pixels", ">=", "0", ":", "random_pixel_indices", "=", "np", ".", "sort", "(", "np", ".", "random", ".", "choice", "(", "pixel_indices", "[", "plot_n_pixels", ":", "-", "plot_n_pixels", "]", ",", "min", "(", "plot_n_pixels", ",", "pixel_indices", ".", "size", "-", "2", "*", "plot_n_pixels", ")", ",", "replace", "=", "False", ")", ")", "else", ":", "random_pixel_indices", "=", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "int", ")", "selected_pixel_indices", "=", "np", ".", "unique", "(", "np", ".", "hstack", "(", "[", "pixel_indices", "[", ":", "plot_n_pixels", "]", ",", "random_pixel_indices", ",", "pixel_indices", "[", "-", "plot_n_pixels", ":", "]", "]", ")", ")", "# plotting individual pixels\r", "for", "(", "column", ",", "row", ")", "in", "plot_pixel_calibrations", "[", "selected_pixel_indices", "]", ":", "logging", ".", "info", "(", "\"Plotting charge calibration for pixel column \"", "+", "str", "(", "column", "+", "1", ")", "+", "\" / row \"", "+", "str", "(", "row", "+", "1", ")", ")", "tot_mean_single_pix", "=", "calibration_data", "[", "column", ",", "row", ",", ":", ",", "0", "]", "tot_std_single_pix", "=", "calibration_data", "[", "column", ",", "row", ",", ":", ",", "2", "]", "tdc_mean_single_pix", "=", "calibration_data", "[", "column", ",", "row", ",", ":", ",", "1", "]", "tdc_std_single_pix", "=", "calibration_data", "[", "column", ",", "row", ",", ":", ",", "3", "]", "plot_tot_tdc_calibration", "(", "scan_parameters", "=", "inner_loop_parameter_values", ",", "tot_mean", "=", "tot_mean_single_pix", ",", "tot_error", "=", "tot_std_single_pix", ",", "tdc_mean", "=", "tdc_mean_single_pix", ",", "tdc_error", "=", "tdc_std_single_pix", ",", "filename", "=", "analyze_raw_data", ".", "output_pdf", ",", "title", "=", "\"Charge calibration for pixel column \"", "+", "str", "(", "column", "+", "1", ")", "+", "\" / row 
\"", "+", "str", "(", "row", "+", "1", ")", ")" ]
Generating HitOr calibration file (_calibration.h5) from raw data file and plotting of calibration data.

    Parameters
    ----------
    output_filename : string
        Input raw data file name.
    plot_pixel_calibrations : bool, iterable
        If True, generating additional pixel calibration plots. If list of column and row tuples (from 1 to 80 / 336), plot selected pixels.

    Returns
    -------
    nothing
[ "Generating", "HitOr", "calibration", "file", "(", "_calibration", ".", "h5", ")", "from", "raw", "data", "file", "and", "plotting", "of", "calibration", "data", ".", "Parameters", "----------", "output_filename", ":", "string", "Input", "raw", "data", "file", "name", ".", "plot_pixel_calibrations", ":", "bool", "iterable", "If", "True", "genearating", "additional", "pixel", "calibration", "plots", ".", "If", "list", "of", "column", "and", "row", "tuples", "(", "from", "1", "to", "80", "/", "336", ")", "print", "selected", "pixels", ".", "Returns", "-------", "nothing" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/scans/calibrate_hit_or.py#L20-L148
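A brief usage sketch for the record above. The raw data file name and the selected pixel are made-up placeholders; the import path follows this record's func_code_url and assumes pyBAR is installed:

from pybar.scans.calibrate_hit_or import create_hitor_calibration

# Analyze a finished HitOr scan (hypothetical file name): writes
# hit_or_scan_calibration.h5 next to the raw data and appends ToT/TDC
# calibration plots to the analysis PDF. The tuple requests an extra
# single-pixel plot for column 40 / row 168 (1-based indices).
create_hitor_calibration('hit_or_scan.h5', plot_pixel_calibrations=[(40, 168)])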
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
interval_timed
def interval_timed(interval):
    '''Interval timer decorator.

    Taken from: http://stackoverflow.com/questions/12435211/python-threading-timer-repeat-function-every-n-seconds/12435256
    '''
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            stopped = Event()

            def loop():  # executed in another thread
                while not stopped.wait(interval):  # until stopped
                    f(*args, **kwargs)

            t = Thread(name='IntervalTimerThread', target=loop)
            t.daemon = True  # stop if the program exits
            t.start()
            return stopped.set
        return wrapper
    return decorator
python
def interval_timed(interval):
    '''Interval timer decorator.

    Taken from: http://stackoverflow.com/questions/12435211/python-threading-timer-repeat-function-every-n-seconds/12435256
    '''
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            stopped = Event()

            def loop():  # executed in another thread
                while not stopped.wait(interval):  # until stopped
                    f(*args, **kwargs)

            t = Thread(name='IntervalTimerThread', target=loop)
            t.daemon = True  # stop if the program exits
            t.start()
            return stopped.set
        return wrapper
    return decorator
[ "def", "interval_timed", "(", "interval", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "stopped", "=", "Event", "(", ")", "def", "loop", "(", ")", ":", "# executed in another thread", "while", "not", "stopped", ".", "wait", "(", "interval", ")", ":", "# until stopped", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "t", "=", "Thread", "(", "name", "=", "'IntervalTimerThread'", ",", "target", "=", "loop", ")", "t", ".", "daemon", "=", "True", "# stop if the program exits", "t", ".", "start", "(", ")", "return", "stopped", ".", "set", "return", "wrapper", "return", "decorator" ]
Interval timer decorator. Taken from: http://stackoverflow.com/questions/12435211/python-threading-timer-repeat-function-every-n-seconds/12435256
[ "Interval", "timer", "decorator", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1423-L1442
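A minimal usage sketch for the decorator above; heartbeat() and the 1 s interval are illustrative. Calling the decorated function starts the daemon thread and returns the Event.set method used to stop it:

import time
from pybar.fei4_run_base import interval_timed

@interval_timed(1.0)
def heartbeat():
    print('still alive')

stop = heartbeat()  # starts the background loop, returns stopped.set
time.sleep(3.5)     # heartbeat() fires roughly once per second meanwhile
stop()              # sets the Event, ending the loop before the next tick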
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
interval_timer
def interval_timer(interval, func, *args, **kwargs):
    '''Interval timer function.

    Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
    '''
    stopped = Event()

    def loop():
        while not stopped.wait(interval):  # the first call is after interval
            func(*args, **kwargs)

    Thread(name='IntervalTimerThread', target=loop).start()
    return stopped.set
python
def interval_timer(interval, func, *args, **kwargs):
    '''Interval timer function.

    Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
    '''
    stopped = Event()

    def loop():
        while not stopped.wait(interval):  # the first call is after interval
            func(*args, **kwargs)

    Thread(name='IntervalTimerThread', target=loop).start()
    return stopped.set
[ "def", "interval_timer", "(", "interval", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "stopped", "=", "Event", "(", ")", "def", "loop", "(", ")", ":", "while", "not", "stopped", ".", "wait", "(", "interval", ")", ":", "# the first call is after interval", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "Thread", "(", "name", "=", "'IntervalTimerThread'", ",", "target", "=", "loop", ")", ".", "start", "(", ")", "return", "stopped", ".", "set" ]
Interval timer function. Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
[ "Interval", "timer", "function", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1445-L1457
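A usage sketch for the function above; poll() and its arguments are illustrative. Note the design difference to interval_timed: the thread starts immediately, the first call happens only after one full interval, and the thread is not a daemon, so the returned stop callable must be invoked for a clean program exit:

import time
from pybar.fei4_run_base import interval_timer

def poll(channel):
    print('reading channel %d' % channel)

stop = interval_timer(2.0, poll, channel=1)  # first poll() call fires after 2 s
time.sleep(5)
stop()  # required, otherwise the non-daemon timer thread keeps the process alive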
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
send_mail
def send_mail(subject, body, smtp_server, user, password, from_addr, to_addrs):
    ''' Sends a run status mail with the traceback to a specified E-Mail address if a run crashes.
    '''
    logging.info('Send status E-Mail (' + subject + ')')
    content = string.join((
        "From: %s" % from_addr,
        "To: %s" % ','.join(to_addrs),  # comma separated according to RFC822
        "Subject: %s" % subject,
        "",
        body), "\r\n")
    server = smtplib.SMTP_SSL(smtp_server)
    server.login(user, password)
    server.sendmail(from_addr, to_addrs, content)
    server.quit()
python
def send_mail(subject, body, smtp_server, user, password, from_addr, to_addrs):
    ''' Sends a run status mail with the traceback to a specified E-Mail address if a run crashes.
    '''
    logging.info('Send status E-Mail (' + subject + ')')
    content = string.join((
        "From: %s" % from_addr,
        "To: %s" % ','.join(to_addrs),  # comma separated according to RFC822
        "Subject: %s" % subject,
        "",
        body), "\r\n")
    server = smtplib.SMTP_SSL(smtp_server)
    server.login(user, password)
    server.sendmail(from_addr, to_addrs, content)
    server.quit()
[ "def", "send_mail", "(", "subject", ",", "body", ",", "smtp_server", ",", "user", ",", "password", ",", "from_addr", ",", "to_addrs", ")", ":", "logging", ".", "info", "(", "'Send status E-Mail ('", "+", "subject", "+", "')'", ")", "content", "=", "string", ".", "join", "(", "(", "\"From: %s\"", "%", "from_addr", ",", "\"To: %s\"", "%", "','", ".", "join", "(", "to_addrs", ")", ",", "# comma separated according to RFC822", "\"Subject: %s\"", "%", "subject", ",", "\"\"", ",", "body", ")", ",", "\"\\r\\n\"", ")", "server", "=", "smtplib", ".", "SMTP_SSL", "(", "smtp_server", ")", "server", ".", "login", "(", "user", ",", "password", ")", "server", ".", "sendmail", "(", "from_addr", ",", "to_addrs", ",", "content", ")", "server", ".", "quit", "(", ")" ]
Sends a run status mail with the traceback to a specified E-Mail address if a run crashes.
[ "Sends", "a", "run", "status", "mail", "with", "the", "traceback", "to", "a", "specified", "E", "-", "Mail", "address", "if", "a", "run", "crashes", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1493-L1507
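A hedged call sketch; the SMTP server, credentials and addresses are placeholders. Since the function uses the Python 2 idiom string.join(), it runs under Python 2 only:

import traceback
from pybar.fei4_run_base import send_mail

try:
    raise RuntimeError('scan crashed')  # stand-in for a failing run
except RuntimeError:
    send_mail(subject='pyBAR run status: CRASH',
              body=traceback.format_exc(),
              smtp_server='smtp.example.com',
              user='pybar-status',
              password='secret',
              from_addr='pybar@example.com',
              to_addrs=['shifter@example.com'])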
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase._parse_module_cfgs
def _parse_module_cfgs(self):
    ''' Extracts the configuration of the modules.
    '''
    # Adding here default run config parameters.
    if "dut" not in self._conf or self._conf["dut"] is None:
        raise ValueError('Parameter "dut" not defined.')
    if "dut_configuration" not in self._conf or self._conf["dut_configuration"] is None:
        raise ValueError('Parameter "dut_configuration" not defined.')
    self._conf.setdefault('working_dir', None)  # string, if None, absolute path of configuration.yaml file will be used
    if 'modules' in self._conf and self._conf['modules']:
        for module_id, module_cfg in [(key, value) for key, value in self._conf['modules'].items() if ("activate" not in value or ("activate" in value and value["activate"] is True))]:
            # Check here for missing module config items.
            # Capital letter keys are Basil drivers, other keys are parameters.
            # FIFO, RX, TX, TLU and TDC are generic driver names which are used in the scan implementations.
            # The use of these reserved driver names allows for abstraction.
            # Accessing Basil drivers with real name is still possible.
            if "module_group" in module_id:
                raise ValueError('The module ID "%s" contains the reserved name "module_group".' % module_id)
            if "flavor" not in module_cfg or module_cfg["flavor"] is None:
                raise ValueError('No parameter "flavor" defined for module "%s".' % module_id)
            if module_cfg["flavor"] in fe_flavors:
                for driver_name in _reserved_driver_names:
                    # TDC is not mandatory
                    if driver_name == "TDC":
                        # TDC is allowed to have set None
                        module_cfg.setdefault('TDC', None)
                        continue
                    if driver_name not in module_cfg or module_cfg[driver_name] is None:
                        raise ValueError('No parameter "%s" defined for module "%s".' % (driver_name, module_id))
            if "rx_channel" not in module_cfg or module_cfg["rx_channel"] is None:
                raise ValueError('No parameter "rx_channel" defined for module "%s".' % module_id)
            if "tx_channel" not in module_cfg or module_cfg["tx_channel"] is None:
                raise ValueError('No parameter "tx_channel" defined for module "%s".' % module_id)
            if "chip_address" not in module_cfg:
                raise ValueError('No parameter "chip_address" defined for module "%s".' % module_id)
            module_cfg.setdefault("tdc_channel", None)
            module_cfg.setdefault("configuration", None)  # string or number, if None, using the last valid configuration
            module_cfg.setdefault("send_data", None)  # address string of PUB socket
            module_cfg.setdefault("activate", True)  # set module active by default
            # Save config to dict.
            self._module_cfgs[module_id] = module_cfg
            self._modules[module_id] = [module_id]
    else:
        raise ValueError("No module configuration specified")
python
def _parse_module_cfgs(self):
    ''' Extracts the configuration of the modules.
    '''
    # Adding here default run config parameters.
    if "dut" not in self._conf or self._conf["dut"] is None:
        raise ValueError('Parameter "dut" not defined.')
    if "dut_configuration" not in self._conf or self._conf["dut_configuration"] is None:
        raise ValueError('Parameter "dut_configuration" not defined.')
    self._conf.setdefault('working_dir', None)  # string, if None, absolute path of configuration.yaml file will be used
    if 'modules' in self._conf and self._conf['modules']:
        for module_id, module_cfg in [(key, value) for key, value in self._conf['modules'].items() if ("activate" not in value or ("activate" in value and value["activate"] is True))]:
            # Check here for missing module config items.
            # Capital letter keys are Basil drivers, other keys are parameters.
            # FIFO, RX, TX, TLU and TDC are generic driver names which are used in the scan implementations.
            # The use of these reserved driver names allows for abstraction.
            # Accessing Basil drivers with real name is still possible.
            if "module_group" in module_id:
                raise ValueError('The module ID "%s" contains the reserved name "module_group".' % module_id)
            if "flavor" not in module_cfg or module_cfg["flavor"] is None:
                raise ValueError('No parameter "flavor" defined for module "%s".' % module_id)
            if module_cfg["flavor"] in fe_flavors:
                for driver_name in _reserved_driver_names:
                    # TDC is not mandatory
                    if driver_name == "TDC":
                        # TDC is allowed to have set None
                        module_cfg.setdefault('TDC', None)
                        continue
                    if driver_name not in module_cfg or module_cfg[driver_name] is None:
                        raise ValueError('No parameter "%s" defined for module "%s".' % (driver_name, module_id))
            if "rx_channel" not in module_cfg or module_cfg["rx_channel"] is None:
                raise ValueError('No parameter "rx_channel" defined for module "%s".' % module_id)
            if "tx_channel" not in module_cfg or module_cfg["tx_channel"] is None:
                raise ValueError('No parameter "tx_channel" defined for module "%s".' % module_id)
            if "chip_address" not in module_cfg:
                raise ValueError('No parameter "chip_address" defined for module "%s".' % module_id)
            module_cfg.setdefault("tdc_channel", None)
            module_cfg.setdefault("configuration", None)  # string or number, if None, using the last valid configuration
            module_cfg.setdefault("send_data", None)  # address string of PUB socket
            module_cfg.setdefault("activate", True)  # set module active by default
            # Save config to dict.
            self._module_cfgs[module_id] = module_cfg
            self._modules[module_id] = [module_id]
    else:
        raise ValueError("No module configuration specified")
[ "def", "_parse_module_cfgs", "(", "self", ")", ":", "# Adding here default run config parameters.", "if", "\"dut\"", "not", "in", "self", ".", "_conf", "or", "self", ".", "_conf", "[", "\"dut\"", "]", "is", "None", ":", "raise", "ValueError", "(", "'Parameter \"dut\" not defined.'", ")", "if", "\"dut_configuration\"", "not", "in", "self", ".", "_conf", "or", "self", ".", "_conf", "[", "\"dut_configuration\"", "]", "is", "None", ":", "raise", "ValueError", "(", "'Parameter \"dut_configuration\" not defined.'", ")", "self", ".", "_conf", ".", "setdefault", "(", "'working_dir'", ",", "None", ")", "# string, if None, absolute path of configuration.yaml file will be used", "if", "'modules'", "in", "self", ".", "_conf", "and", "self", ".", "_conf", "[", "'modules'", "]", ":", "for", "module_id", ",", "module_cfg", "in", "[", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "self", ".", "_conf", "[", "'modules'", "]", ".", "items", "(", ")", "if", "(", "\"activate\"", "not", "in", "value", "or", "(", "\"activate\"", "in", "value", "and", "value", "[", "\"activate\"", "]", "is", "True", ")", ")", "]", ":", "# Check here for missing module config items.", "# Capital letter keys are Basil drivers, other keys are parameters.", "# FIFO, RX, TX, TLU and TDC are generic driver names which are used in the scan implementations.", "# The use of these reserved driver names allows for abstraction.", "# Accessing Basil drivers with real name is still possible.", "if", "\"module_group\"", "in", "module_id", ":", "raise", "ValueError", "(", "'The module ID \"%s\" contains the reserved name \"module_group\".'", "%", "module_id", ")", "if", "\"flavor\"", "not", "in", "module_cfg", "or", "module_cfg", "[", "\"flavor\"", "]", "is", "None", ":", "raise", "ValueError", "(", "'No parameter \"flavor\" defined for module \"%s\".'", "%", "module_id", ")", "if", "module_cfg", "[", "\"flavor\"", "]", "in", "fe_flavors", ":", "for", "driver_name", "in", "_reserved_driver_names", ":", "# TDC is not mandatory", "if", "driver_name", "==", "\"TDC\"", ":", "# TDC is allowed to have set None", "module_cfg", ".", "setdefault", "(", "'TDC'", ",", "None", ")", "continue", "if", "driver_name", "not", "in", "module_cfg", "or", "module_cfg", "[", "driver_name", "]", "is", "None", ":", "raise", "ValueError", "(", "'No parameter \"%s\" defined for module \"%s\".'", "%", "(", "driver_name", ",", "module_id", ")", ")", "if", "\"rx_channel\"", "not", "in", "module_cfg", "or", "module_cfg", "[", "\"rx_channel\"", "]", "is", "None", ":", "raise", "ValueError", "(", "'No parameter \"rx_channel\" defined for module \"%s\".'", "%", "module_id", ")", "if", "\"tx_channel\"", "not", "in", "module_cfg", "or", "module_cfg", "[", "\"tx_channel\"", "]", "is", "None", ":", "raise", "ValueError", "(", "'No parameter \"tx_channel\" defined for module \"%s\".'", "%", "module_id", ")", "if", "\"chip_address\"", "not", "in", "module_cfg", ":", "raise", "ValueError", "(", "'No parameter \"chip_address\" defined for module \"%s\".'", "%", "module_id", ")", "module_cfg", ".", "setdefault", "(", "\"tdc_channel\"", ",", "None", ")", "module_cfg", ".", "setdefault", "(", "\"configuration\"", ",", "None", ")", "# string or number, if None, using the last valid configuration", "module_cfg", ".", "setdefault", "(", "\"send_data\"", ",", "None", ")", "# address string of PUB socket", "module_cfg", ".", "setdefault", "(", "\"activate\"", ",", "True", ")", "# set module active by default", "# Save config to dict.", "self", ".", "_module_cfgs", "[", 
"module_id", "]", "=", "module_cfg", "self", ".", "_modules", "[", "module_id", "]", "=", "[", "module_id", "]", "else", ":", "raise", "ValueError", "(", "\"No module configuration specified\"", ")" ]
Extracts the configuration of the modules.
[ "Extracts", "the", "configuration", "of", "the", "modules", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L175-L219
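For illustration, a minimal configuration dict of the shape _parse_module_cfgs() validates; every driver name and channel number below is a made-up example, not a prescribed value:

conf = {
    'dut': 'dut_mio.yaml',                              # required, must not be None
    'dut_configuration': 'dut_configuration_mio.yaml',  # required, must not be None
    'modules': {
        'module_0': {
            'flavor': 'fei4b',     # required; FE flavors additionally require the reserved drivers
            'chip_address': 0,     # key must exist (value may be None for a single module per TX)
            'FIFO': 'SRAM_FIFO',   # reserved Basil driver names (illustrative values)
            'RX': 'DATA_CH4',
            'TX': 'CMD_CH1',
            'TLU': 'TRIGGER_CH0',
            'rx_channel': 4,
            'tx_channel': 0,
            # 'TDC' is the one reserved driver that may be omitted; it is defaulted to None
        }
    }
}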
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase._set_default_cfg
def _set_default_cfg(self):
    ''' Sets the default parameters if they are not specified.
    '''
    # adding special conf for accessing all DUT drivers
    self._module_cfgs[None] = {
        'flavor': None,
        'chip_address': None,
        'FIFO': list(set([self._module_cfgs[module_id]['FIFO'] for module_id in self._modules])),
        'RX': list(set([self._module_cfgs[module_id]['RX'] for module_id in self._modules])),
        'rx_channel': list(set([self._module_cfgs[module_id]['rx_channel'] for module_id in self._modules])),
        'TX': list(set([self._module_cfgs[module_id]['TX'] for module_id in self._modules])),
        'tx_channel': list(set([self._module_cfgs[module_id]['tx_channel'] for module_id in self._modules])),
        'TDC': list(set([self._module_cfgs[module_id]['TDC'] for module_id in self._modules])),
        'tdc_channel': list(set([self._module_cfgs[module_id]['tdc_channel'] for module_id in self._modules])),
        'TLU': list(set([self._module_cfgs[module_id]['TLU'] for module_id in self._modules])),
        'configuration': None,
        'send_data': None}
    tx_groups = groupby_dict({key: value for (key, value) in self._module_cfgs.items() if key in self._modules}, "TX")
    for tx, module_group in tx_groups.items():
        flavors = list(set([module_cfg['flavor'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group]))
        if len(flavors) != 1:
            raise ValueError("Parameter 'flavor' must be the same for module group TX=%s." % tx)
        chip_addresses = list(set([module_cfg['chip_address'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group]))
        if len(module_group) != len(chip_addresses) or (len(module_group) != 1 and None in chip_addresses):
            raise ValueError("Parameter 'chip_address' must be different for each module in module group TX=%s." % tx)
        # Adding broadcast config for parallel mode.
        self._module_cfgs["module_group_TX=" + tx] = {
            'flavor': flavors[0],
            'chip_address': None,  # broadcast
            'FIFO': list(set([module_cfg['FIFO'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'RX': list(set([module_cfg['RX'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'rx_channel': list(set([module_cfg['rx_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TX': tx,
            'tx_channel': list(set([module_cfg['tx_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TDC': list(set([module_cfg['TDC'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'tdc_channel': list(set([module_cfg['tdc_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TLU': list(set([module_cfg['TLU'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'configuration': None,
            'send_data': None}
        self._tx_module_groups["module_group_TX=" + tx] = module_group
    # Setting up per module attributes
    self._module_attr = {key: {} for key in self._module_cfgs}
    # Setting up per module run conf
    for module_id in self._module_cfgs:
        sc = namedtuple('run_configuration', field_names=self._default_run_conf.keys())
        run_conf = sc(**self._run_conf)
        if module_id in self._modules and self.__class__.__name__ in self._conf["modules"][module_id] and self._conf["modules"][module_id][self.__class__.__name__] is not None:
            self._module_run_conf[module_id] = run_conf._replace(**self._conf["modules"][module_id][self.__class__.__name__])._asdict()
        else:
            self._module_run_conf[module_id] = run_conf._asdict()
        # update module group with run specific configuration
        if module_id in self._tx_module_groups and self._tx_module_groups[module_id]:
            selected_module_id = self._tx_module_groups[module_id][0]
            if self.__class__.__name__ in self._conf["modules"][selected_module_id] and self._conf["modules"][selected_module_id][self.__class__.__name__] is not None:
                self._module_run_conf[module_id] = run_conf._replace(**self._conf["modules"][selected_module_id][self.__class__.__name__])._asdict()
python
def _set_default_cfg(self):
    ''' Sets the default parameters if they are not specified.
    '''
    # adding special conf for accessing all DUT drivers
    self._module_cfgs[None] = {
        'flavor': None,
        'chip_address': None,
        'FIFO': list(set([self._module_cfgs[module_id]['FIFO'] for module_id in self._modules])),
        'RX': list(set([self._module_cfgs[module_id]['RX'] for module_id in self._modules])),
        'rx_channel': list(set([self._module_cfgs[module_id]['rx_channel'] for module_id in self._modules])),
        'TX': list(set([self._module_cfgs[module_id]['TX'] for module_id in self._modules])),
        'tx_channel': list(set([self._module_cfgs[module_id]['tx_channel'] for module_id in self._modules])),
        'TDC': list(set([self._module_cfgs[module_id]['TDC'] for module_id in self._modules])),
        'tdc_channel': list(set([self._module_cfgs[module_id]['tdc_channel'] for module_id in self._modules])),
        'TLU': list(set([self._module_cfgs[module_id]['TLU'] for module_id in self._modules])),
        'configuration': None,
        'send_data': None}
    tx_groups = groupby_dict({key: value for (key, value) in self._module_cfgs.items() if key in self._modules}, "TX")
    for tx, module_group in tx_groups.items():
        flavors = list(set([module_cfg['flavor'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group]))
        if len(flavors) != 1:
            raise ValueError("Parameter 'flavor' must be the same for module group TX=%s." % tx)
        chip_addresses = list(set([module_cfg['chip_address'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group]))
        if len(module_group) != len(chip_addresses) or (len(module_group) != 1 and None in chip_addresses):
            raise ValueError("Parameter 'chip_address' must be different for each module in module group TX=%s." % tx)
        # Adding broadcast config for parallel mode.
        self._module_cfgs["module_group_TX=" + tx] = {
            'flavor': flavors[0],
            'chip_address': None,  # broadcast
            'FIFO': list(set([module_cfg['FIFO'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'RX': list(set([module_cfg['RX'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'rx_channel': list(set([module_cfg['rx_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TX': tx,
            'tx_channel': list(set([module_cfg['tx_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TDC': list(set([module_cfg['TDC'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'tdc_channel': list(set([module_cfg['tdc_channel'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'TLU': list(set([module_cfg['TLU'] for module_id, module_cfg in self._module_cfgs.items() if module_id in module_group])),
            'configuration': None,
            'send_data': None}
        self._tx_module_groups["module_group_TX=" + tx] = module_group
    # Setting up per module attributes
    self._module_attr = {key: {} for key in self._module_cfgs}
    # Setting up per module run conf
    for module_id in self._module_cfgs:
        sc = namedtuple('run_configuration', field_names=self._default_run_conf.keys())
        run_conf = sc(**self._run_conf)
        if module_id in self._modules and self.__class__.__name__ in self._conf["modules"][module_id] and self._conf["modules"][module_id][self.__class__.__name__] is not None:
            self._module_run_conf[module_id] = run_conf._replace(**self._conf["modules"][module_id][self.__class__.__name__])._asdict()
        else:
            self._module_run_conf[module_id] = run_conf._asdict()
        # update module group with run specific configuration
        if module_id in self._tx_module_groups and self._tx_module_groups[module_id]:
            selected_module_id = self._tx_module_groups[module_id][0]
            if self.__class__.__name__ in self._conf["modules"][selected_module_id] and self._conf["modules"][selected_module_id][self.__class__.__name__] is not None:
                self._module_run_conf[module_id] = run_conf._replace(**self._conf["modules"][selected_module_id][self.__class__.__name__])._asdict()
[ "def", "_set_default_cfg", "(", "self", ")", ":", "# adding special conf for accessing all DUT drivers", "self", ".", "_module_cfgs", "[", "None", "]", "=", "{", "'flavor'", ":", "None", ",", "'chip_address'", ":", "None", ",", "'FIFO'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'FIFO'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'RX'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'RX'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'rx_channel'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'rx_channel'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'TX'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'TX'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'tx_channel'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'tx_channel'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'TDC'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'TDC'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'tdc_channel'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'tdc_channel'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'TLU'", ":", "list", "(", "set", "(", "[", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'TLU'", "]", "for", "module_id", "in", "self", ".", "_modules", "]", ")", ")", ",", "'configuration'", ":", "None", ",", "'send_data'", ":", "None", "}", "tx_groups", "=", "groupby_dict", "(", "{", "key", ":", "value", "for", "(", "key", ",", "value", ")", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "key", "in", "self", ".", "_modules", "}", ",", "\"TX\"", ")", "for", "tx", ",", "module_group", "in", "tx_groups", ".", "items", "(", ")", ":", "flavors", "=", "list", "(", "set", "(", "[", "module_cfg", "[", "'flavor'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", "if", "len", "(", "flavors", ")", "!=", "1", ":", "raise", "ValueError", "(", "\"Parameter 'flavor' must be the same for module group TX=%s.\"", "%", "tx", ")", "chip_addresses", "=", "list", "(", "set", "(", "[", "module_cfg", "[", "'chip_address'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", "if", "len", "(", "module_group", ")", "!=", "len", "(", "chip_addresses", ")", "or", "(", "len", "(", "module_group", ")", "!=", "1", "and", "None", "in", "chip_addresses", ")", ":", "raise", "ValueError", "(", "\"Parameter 'chip_address' must be different for each module in module group TX=%s.\"", "%", "tx", ")", "# Adding broadcast config for parallel mode.", "self", ".", "_module_cfgs", "[", "\"module_group_TX=\"", "+", "tx", "]", "=", "{", "'flavor'", ":", "flavors", "[", "0", "]", ",", "'chip_address'", ":", "None", ",", "# broadcast", "'FIFO'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'FIFO'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", 
"]", ")", ")", ",", "'RX'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'RX'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'rx_channel'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'rx_channel'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'TX'", ":", "tx", ",", "'tx_channel'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'tx_channel'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'TDC'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'TDC'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'tdc_channel'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'tdc_channel'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'TLU'", ":", "list", "(", "set", "(", "[", "module_cfg", "[", "'TLU'", "]", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "module_id", "in", "module_group", "]", ")", ")", ",", "'configuration'", ":", "None", ",", "'send_data'", ":", "None", "}", "self", ".", "_tx_module_groups", "[", "\"module_group_TX=\"", "+", "tx", "]", "=", "module_group", "# Setting up per module attributes", "self", ".", "_module_attr", "=", "{", "key", ":", "{", "}", "for", "key", "in", "self", ".", "_module_cfgs", "}", "# Setting up per module run conf", "for", "module_id", "in", "self", ".", "_module_cfgs", ":", "sc", "=", "namedtuple", "(", "'run_configuration'", ",", "field_names", "=", "self", ".", "_default_run_conf", ".", "keys", "(", ")", ")", "run_conf", "=", "sc", "(", "*", "*", "self", ".", "_run_conf", ")", "if", "module_id", "in", "self", ".", "_modules", "and", "self", ".", "__class__", ".", "__name__", "in", "self", ".", "_conf", "[", "\"modules\"", "]", "[", "module_id", "]", "and", "self", ".", "_conf", "[", "\"modules\"", "]", "[", "module_id", "]", "[", "self", ".", "__class__", ".", "__name__", "]", "is", "not", "None", ":", "self", ".", "_module_run_conf", "[", "module_id", "]", "=", "run_conf", ".", "_replace", "(", "*", "*", "self", ".", "_conf", "[", "\"modules\"", "]", "[", "module_id", "]", "[", "self", ".", "__class__", ".", "__name__", "]", ")", ".", "_asdict", "(", ")", "else", ":", "self", ".", "_module_run_conf", "[", "module_id", "]", "=", "run_conf", ".", "_asdict", "(", ")", "# update module group with run specific configuration", "if", "module_id", "in", "self", ".", "_tx_module_groups", "and", "self", ".", "_tx_module_groups", "[", "module_id", "]", ":", "selected_module_id", "=", "self", ".", "_tx_module_groups", "[", "module_id", "]", "[", "0", "]", "if", "self", ".", "__class__", ".", "__name__", "in", "self", ".", "_conf", "[", "\"modules\"", "]", "[", "selected_module_id", "]", "and", "self", ".", "_conf", "[", "\"modules\"", "]", "[", "selected_module_id", "]", "[", "self", ".", "__class__", ".", "__name__", "]", "is", "not", "None", ":", "self", ".", "_module_run_conf", "[", "module_id", "]", "=", "run_conf", ".", "_replace", "(", "*", "*", "self", ".", "_conf", "[", 
"\"modules\"", "]", "[", "selected_module_id", "]", "[", "self", ".", "__class__", ".", "__name__", "]", ")", ".", "_asdict", "(", ")" ]
Sets the default parameters if they are not specified.
[ "Sets", "the", "default", "parameters", "if", "they", "are", "not", "specified", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L265-L323
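The broadcast entries built by `_set_default_cfg` hinge on grouping modules by their TX line and checking that each group shares exactly one flavor but has distinct chip addresses. A standalone sketch of that grouping and validation; the `groupby_dict` here is a stand-in reimplementation and the module dictionaries are hypothetical, not pyBAR's actual defaults:

import logging

def groupby_dict(cfgs, key):
    # group module IDs by the value of one config key, e.g. their TX line
    groups = {}
    for module_id, cfg in cfgs.items():
        groups.setdefault(cfg[key], []).append(module_id)
    return groups

# hypothetical configuration of two modules sharing the TX0 command line
module_cfgs = {
    'module_0': {'TX': 'TX0', 'flavor': 'fei4b', 'chip_address': 0},
    'module_1': {'TX': 'TX0', 'flavor': 'fei4b', 'chip_address': 1},
}

for tx, module_group in groupby_dict(module_cfgs, 'TX').items():
    flavors = list(set(module_cfgs[m]['flavor'] for m in module_group))
    if len(flavors) != 1:
        raise ValueError("Parameter 'flavor' must be the same for module group TX=%s." % tx)
    chip_addresses = list(set(module_cfgs[m]['chip_address'] for m in module_group))
    if len(module_group) != len(chip_addresses) or (len(module_group) != 1 and None in chip_addresses):
        raise ValueError("Parameter 'chip_address' must be different for each module in module group TX=%s." % tx)
    logging.info("module_group_TX=%s contains %s", tx, ", ".join(module_group))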
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.init_modules
def init_modules(self):
    ''' Initialize all modules consecutively'''
    for module_id, module_cfg in self._module_cfgs.items():
        if module_id in self._modules or module_id in self._tx_module_groups:
            if module_id in self._modules:
                module_id_str = "module " + module_id
            else:
                module_id_str = module_id.split('=', 1)
                module_id_str[0] = module_id_str[0].replace("_", " ")
                module_id_str = "=".join(module_id_str)
            logging.info("Initializing configuration for %s..." % module_id_str)
            # adding scan parameters to dict
            if 'scan_parameters' in self._module_run_conf[module_id] and self._module_run_conf[module_id]['scan_parameters'] is not None:
                # evaluating string for support of nested lists and other complex data structures
                if isinstance(self._module_run_conf[module_id]['scan_parameters'], basestring):
                    self._module_run_conf[module_id]['scan_parameters'] = ast.literal_eval(self._module_run_conf[module_id]['scan_parameters'])
                sp = namedtuple('scan_parameters', field_names=zip(*self._module_run_conf[module_id]['scan_parameters'])[0])
                self._scan_parameters[module_id] = sp(*zip(*self._module_run_conf[module_id]['scan_parameters'])[1])
            else:
                sp = namedtuple_with_defaults('scan_parameters', field_names=[])
                self._scan_parameters[module_id] = sp()
            # init FE config
            if module_id in self._modules:
                # only real modules can have an existing configuration
                last_configuration = self.get_configuration(module_id=module_id)
            else:
                last_configuration = None
            if (('configuration' not in module_cfg or module_cfg['configuration'] is None) and last_configuration is None) or (isinstance(module_cfg['configuration'], (int, long)) and module_cfg['configuration'] <= 0):
                if 'chip_address' in module_cfg:
                    if module_cfg['chip_address'] is None:
                        chip_address = 0
                        broadcast = True
                    else:
                        chip_address = module_cfg['chip_address']
                        broadcast = False
                else:
                    raise ValueError('Parameter "chip_address" not specified for module "%s".' % module_id)
                if 'flavor' in module_cfg and module_cfg['flavor']:
                    module_cfg['configuration'] = FEI4Register(fe_type=module_cfg['flavor'], chip_address=chip_address, broadcast=broadcast)
                else:
                    raise ValueError('Parameter "flavor" not specified for module "%s".' % module_id)
            # use existing config
            elif not module_cfg['configuration'] and last_configuration:
                module_cfg['configuration'] = FEI4Register(configuration_file=last_configuration)
            # path string
            elif isinstance(module_cfg['configuration'], basestring):
                if os.path.isabs(module_cfg['configuration']):  # absolute path
                    module_cfg['configuration'] = FEI4Register(configuration_file=module_cfg['configuration'])
                else:  # relative path
                    module_cfg['configuration'] = FEI4Register(configuration_file=os.path.join(module_cfg['working_dir'], module_cfg['configuration']))
            # run number
            elif isinstance(module_cfg['configuration'], (int, long)) and module_cfg['configuration'] > 0:
                module_cfg['configuration'] = FEI4Register(configuration_file=self.get_configuration(module_id=module_id, run_number=module_cfg['configuration']))
            # assume configuration already initialized
            elif not isinstance(module_cfg['configuration'], FEI4Register):
                raise ValueError('Found no valid value for parameter "configuration" for module "%s".' % module_id)
            # init register utils
            self._registers[module_id] = self._module_cfgs[module_id]['configuration']
            self._register_utils[module_id] = FEI4RegisterUtils(self._module_dut[module_id], self._module_cfgs[module_id]['configuration'])
            if module_id in self._modules:
                # Create module data path for real modules
                module_path = self.get_module_path(module_id)
                if not os.path.exists(module_path):
                    os.makedirs(module_path)
    # Set all modules to conf mode to prevent them from receiving the BCR and ECR broadcast
    for module_id in self._tx_module_groups:
        with self.access_module(module_id=module_id):
            self.register_utils.set_conf_mode()
    # Initial configuration (reset and configuration) of all modules.
    # This is done by iterating over each module individually
    for module_id in self._modules:
        logging.info("Configuring %s..." % module_id)
        with self.access_module(module_id=module_id):
            if self._run_conf['configure_fe']:
                self.register_utils.global_reset()
                self.register_utils.configure_all()
            else:
                self.register_utils.set_conf_mode()
            if is_fe_ready(self):
                fe_not_ready = False
            else:
                fe_not_ready = True
            # BCR and ECR might result in RX errors
            # a reset of the RX and FIFO will happen just before scan()
            if self._run_conf['reset_fe']:
                self.register_utils.reset_bunch_counter()
                self.register_utils.reset_event_counter()
            if fe_not_ready:
                # resetting service records must be done once after power up
                self.register_utils.reset_service_records()
                if not is_fe_ready(self):
                    logging.warning('Module "%s" is not sending any data.' % module_id)
            # set all modules to conf mode afterwards to be immune to ECR and BCR
            self.register_utils.set_conf_mode()
python
def init_modules(self):
    ''' Initialize all modules consecutively'''
    for module_id, module_cfg in self._module_cfgs.items():
        if module_id in self._modules or module_id in self._tx_module_groups:
            if module_id in self._modules:
                module_id_str = "module " + module_id
            else:
                module_id_str = module_id.split('=', 1)
                module_id_str[0] = module_id_str[0].replace("_", " ")
                module_id_str = "=".join(module_id_str)
            logging.info("Initializing configuration for %s..." % module_id_str)
            # adding scan parameters to dict
            if 'scan_parameters' in self._module_run_conf[module_id] and self._module_run_conf[module_id]['scan_parameters'] is not None:
                # evaluating string for support of nested lists and other complex data structures
                if isinstance(self._module_run_conf[module_id]['scan_parameters'], basestring):
                    self._module_run_conf[module_id]['scan_parameters'] = ast.literal_eval(self._module_run_conf[module_id]['scan_parameters'])
                sp = namedtuple('scan_parameters', field_names=zip(*self._module_run_conf[module_id]['scan_parameters'])[0])
                self._scan_parameters[module_id] = sp(*zip(*self._module_run_conf[module_id]['scan_parameters'])[1])
            else:
                sp = namedtuple_with_defaults('scan_parameters', field_names=[])
                self._scan_parameters[module_id] = sp()
            # init FE config
            if module_id in self._modules:
                # only real modules can have an existing configuration
                last_configuration = self.get_configuration(module_id=module_id)
            else:
                last_configuration = None
            if (('configuration' not in module_cfg or module_cfg['configuration'] is None) and last_configuration is None) or (isinstance(module_cfg['configuration'], (int, long)) and module_cfg['configuration'] <= 0):
                if 'chip_address' in module_cfg:
                    if module_cfg['chip_address'] is None:
                        chip_address = 0
                        broadcast = True
                    else:
                        chip_address = module_cfg['chip_address']
                        broadcast = False
                else:
                    raise ValueError('Parameter "chip_address" not specified for module "%s".' % module_id)
                if 'flavor' in module_cfg and module_cfg['flavor']:
                    module_cfg['configuration'] = FEI4Register(fe_type=module_cfg['flavor'], chip_address=chip_address, broadcast=broadcast)
                else:
                    raise ValueError('Parameter "flavor" not specified for module "%s".' % module_id)
            # use existing config
            elif not module_cfg['configuration'] and last_configuration:
                module_cfg['configuration'] = FEI4Register(configuration_file=last_configuration)
            # path string
            elif isinstance(module_cfg['configuration'], basestring):
                if os.path.isabs(module_cfg['configuration']):  # absolute path
                    module_cfg['configuration'] = FEI4Register(configuration_file=module_cfg['configuration'])
                else:  # relative path
                    module_cfg['configuration'] = FEI4Register(configuration_file=os.path.join(module_cfg['working_dir'], module_cfg['configuration']))
            # run number
            elif isinstance(module_cfg['configuration'], (int, long)) and module_cfg['configuration'] > 0:
                module_cfg['configuration'] = FEI4Register(configuration_file=self.get_configuration(module_id=module_id, run_number=module_cfg['configuration']))
            # assume configuration already initialized
            elif not isinstance(module_cfg['configuration'], FEI4Register):
                raise ValueError('Found no valid value for parameter "configuration" for module "%s".' % module_id)
            # init register utils
            self._registers[module_id] = self._module_cfgs[module_id]['configuration']
            self._register_utils[module_id] = FEI4RegisterUtils(self._module_dut[module_id], self._module_cfgs[module_id]['configuration'])
            if module_id in self._modules:
                # Create module data path for real modules
                module_path = self.get_module_path(module_id)
                if not os.path.exists(module_path):
                    os.makedirs(module_path)
    # Set all modules to conf mode to prevent them from receiving the BCR and ECR broadcast
    for module_id in self._tx_module_groups:
        with self.access_module(module_id=module_id):
            self.register_utils.set_conf_mode()
    # Initial configuration (reset and configuration) of all modules.
    # This is done by iterating over each module individually
    for module_id in self._modules:
        logging.info("Configuring %s..." % module_id)
        with self.access_module(module_id=module_id):
            if self._run_conf['configure_fe']:
                self.register_utils.global_reset()
                self.register_utils.configure_all()
            else:
                self.register_utils.set_conf_mode()
            if is_fe_ready(self):
                fe_not_ready = False
            else:
                fe_not_ready = True
            # BCR and ECR might result in RX errors
            # a reset of the RX and FIFO will happen just before scan()
            if self._run_conf['reset_fe']:
                self.register_utils.reset_bunch_counter()
                self.register_utils.reset_event_counter()
            if fe_not_ready:
                # resetting service records must be done once after power up
                self.register_utils.reset_service_records()
                if not is_fe_ready(self):
                    logging.warning('Module "%s" is not sending any data.' % module_id)
            # set all modules to conf mode afterwards to be immune to ECR and BCR
            self.register_utils.set_conf_mode()
[ "def", "init_modules", "(", "self", ")", ":", "for", "module_id", ",", "module_cfg", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", ":", "if", "module_id", "in", "self", ".", "_modules", "or", "module_id", "in", "self", ".", "_tx_module_groups", ":", "if", "module_id", "in", "self", ".", "_modules", ":", "module_id_str", "=", "\"module \"", "+", "module_id", "else", ":", "module_id_str", "=", "module_id", ".", "split", "(", "'='", ",", "1", ")", "module_id_str", "[", "0", "]", "=", "module_id_str", "[", "0", "]", ".", "replace", "(", "\"_\"", ",", "\" \"", ")", "module_id_str", "=", "\"=\"", ".", "join", "(", "module_id_str", ")", "logging", ".", "info", "(", "\"Initializing configuration for %s...\"", "%", "module_id_str", ")", "# adding scan parameters to dict", "if", "'scan_parameters'", "in", "self", ".", "_module_run_conf", "[", "module_id", "]", "and", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", "is", "not", "None", ":", "# evaluating string for support of nested lists and other complex data structures", "if", "isinstance", "(", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", ",", "basestring", ")", ":", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", "=", "ast", ".", "literal_eval", "(", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", ")", "sp", "=", "namedtuple", "(", "'scan_parameters'", ",", "field_names", "=", "zip", "(", "*", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", ")", "[", "0", "]", ")", "self", ".", "_scan_parameters", "[", "module_id", "]", "=", "sp", "(", "*", "zip", "(", "*", "self", ".", "_module_run_conf", "[", "module_id", "]", "[", "'scan_parameters'", "]", ")", "[", "1", "]", ")", "else", ":", "sp", "=", "namedtuple_with_defaults", "(", "'scan_parameters'", ",", "field_names", "=", "[", "]", ")", "self", ".", "_scan_parameters", "[", "module_id", "]", "=", "sp", "(", ")", "# init FE config", "if", "module_id", "in", "self", ".", "_modules", ":", "# only real modules can have an existing configuration", "last_configuration", "=", "self", ".", "get_configuration", "(", "module_id", "=", "module_id", ")", "else", ":", "last_configuration", "=", "None", "if", "(", "(", "'configuration'", "not", "in", "module_cfg", "or", "module_cfg", "[", "'configuration'", "]", "is", "None", ")", "and", "last_configuration", "is", "None", ")", "or", "(", "isinstance", "(", "module_cfg", "[", "'configuration'", "]", ",", "(", "int", ",", "long", ")", ")", "and", "module_cfg", "[", "'configuration'", "]", "<=", "0", ")", ":", "if", "'chip_address'", "in", "module_cfg", ":", "if", "module_cfg", "[", "'chip_address'", "]", "is", "None", ":", "chip_address", "=", "0", "broadcast", "=", "True", "else", ":", "chip_address", "=", "module_cfg", "[", "'chip_address'", "]", "broadcast", "=", "False", "else", ":", "raise", "ValueError", "(", "'Parameter \"chip_address\" not specified for module \"%s\".'", "%", "module_id", ")", "if", "'flavor'", "in", "module_cfg", "and", "module_cfg", "[", "'flavor'", "]", ":", "module_cfg", "[", "'configuration'", "]", "=", "FEI4Register", "(", "fe_type", "=", "module_cfg", "[", "'flavor'", "]", ",", "chip_address", "=", "chip_address", ",", "broadcast", "=", "broadcast", ")", "else", ":", "raise", "ValueError", "(", "'Parameter \"flavor\" not specified for module \"%s\".'", "%", "module_id", ")", "# use existing config", "elif", "not", 
"module_cfg", "[", "'configuration'", "]", "and", "last_configuration", ":", "module_cfg", "[", "'configuration'", "]", "=", "FEI4Register", "(", "configuration_file", "=", "last_configuration", ")", "# path string", "elif", "isinstance", "(", "module_cfg", "[", "'configuration'", "]", ",", "basestring", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "module_cfg", "[", "'configuration'", "]", ")", ":", "# absolute path", "module_cfg", "[", "'configuration'", "]", "=", "FEI4Register", "(", "configuration_file", "=", "module_cfg", "[", "'configuration'", "]", ")", "else", ":", "# relative path", "module_cfg", "[", "'configuration'", "]", "=", "FEI4Register", "(", "configuration_file", "=", "os", ".", "path", ".", "join", "(", "module_cfg", "[", "'working_dir'", "]", ",", "module_cfg", "[", "'configuration'", "]", ")", ")", "# run number", "elif", "isinstance", "(", "module_cfg", "[", "'configuration'", "]", ",", "(", "int", ",", "long", ")", ")", "and", "module_cfg", "[", "'configuration'", "]", ">", "0", ":", "module_cfg", "[", "'configuration'", "]", "=", "FEI4Register", "(", "configuration_file", "=", "self", ".", "get_configuration", "(", "module_id", "=", "module_id", ",", "run_number", "=", "module_cfg", "[", "'configuration'", "]", ")", ")", "# assume configuration already initialized", "elif", "not", "isinstance", "(", "module_cfg", "[", "'configuration'", "]", ",", "FEI4Register", ")", ":", "raise", "ValueError", "(", "'Found no valid value for parameter \"configuration\" for module \"%s\".'", "%", "module_id", ")", "# init register utils", "self", ".", "_registers", "[", "module_id", "]", "=", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'configuration'", "]", "self", ".", "_register_utils", "[", "module_id", "]", "=", "FEI4RegisterUtils", "(", "self", ".", "_module_dut", "[", "module_id", "]", ",", "self", ".", "_module_cfgs", "[", "module_id", "]", "[", "'configuration'", "]", ")", "if", "module_id", "in", "self", ".", "_modules", ":", "# Create module data path for real modules", "module_path", "=", "self", ".", "get_module_path", "(", "module_id", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "module_path", ")", ":", "os", ".", "makedirs", "(", "module_path", ")", "# Set all modules to conf mode to prevent from receiving BCR and ECR broadcast", "for", "module_id", "in", "self", ".", "_tx_module_groups", ":", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "# Initial configuration (reset and configuration) of all modules.", "# This is done by iterating over each module individually", "for", "module_id", "in", "self", ".", "_modules", ":", "logging", ".", "info", "(", "\"Configuring %s...\"", "%", "module_id", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "if", "self", ".", "_run_conf", "[", "'configure_fe'", "]", ":", "self", ".", "register_utils", ".", "global_reset", "(", ")", "self", ".", "register_utils", ".", "configure_all", "(", ")", "else", ":", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "if", "is_fe_ready", "(", "self", ")", ":", "fe_not_ready", "=", "False", "else", ":", "fe_not_ready", "=", "True", "# BCR and ECR might result in RX errors", "# a reset of the RX and FIFO will happen just before scan()", "if", "self", ".", "_run_conf", "[", "'reset_fe'", "]", ":", "self", ".", "register_utils", ".", "reset_bunch_counter", "(", ")", "self", ".", "register_utils", ".", 
"reset_event_counter", "(", ")", "if", "fe_not_ready", ":", "# resetting service records must be done once after power up", "self", ".", "register_utils", ".", "reset_service_records", "(", ")", "if", "not", "is_fe_ready", "(", "self", ")", ":", "logging", ".", "warning", "(", "'Module \"%s\" is not sending any data.'", "%", "module_id", ")", "# set all modules to conf mode afterwards to be immune to ECR and BCR", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")" ]
Initialize all modules consecutively
[ "Initialize", "all", "modules", "consecutively" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L466-L563
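The `configuration` parameter handled by `init_modules` is dispatched by type: unset (with no previous run) or a non-positive integer yields a fresh default configuration, a falsy value with a previous run reuses that run's file, a string is an absolute or `working_dir`-relative path, and a positive integer is a run number. A minimal standalone sketch of that dispatch, returning tags instead of constructing `FEI4Register` objects (names and the int-only check are simplifications, not pyBAR's API):

import os

def resolve_configuration(value, working_dir, last_configuration=None):
    # mirrors the dispatch in init_modules(); returns a (kind, payload) pair
    if (value is None and last_configuration is None) or (isinstance(value, int) and value <= 0):
        return ('default', None)             # fresh config built from 'flavor'/'chip_address'
    elif not value and last_configuration:
        return ('file', last_configuration)  # reuse config of the last finished run
    elif isinstance(value, str):             # explicit configuration file path
        path = value if os.path.isabs(value) else os.path.join(working_dir, value)
        return ('file', path)
    elif isinstance(value, int) and value > 0:
        return ('run_number', value)         # look up the config file of that run
    return ('object', value)                 # assume an already initialized configuration

print(resolve_configuration(42, '/tmp'))                   # ('run_number', 42)
print(resolve_configuration('module_0.cfg', '/data/run'))  # ('file', '/data/run/module_0.cfg')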
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.do_run
def do_run(self):
    ''' Start runs on all modules sequentially.

    Sets properties to access current module properties.
    '''
    if self.broadcast_commands:  # Broadcast FE commands
        if self.threaded_scan:
            with ExitStack() as restore_config_stack:
                # Configure each FE individually
                # Sort module config keys, configure broadcast modules first
                for module_id in itertools.chain(self._tx_module_groups, self._modules):
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        if module_id in self._modules:
                            module_id_str = "module " + module_id
                        else:
                            module_id_str = module_id.split('=', 1)
                            module_id_str[0] = module_id_str[0].replace("_", " ")
                            module_id_str = "=".join(module_id_str)
                        logging.info('Scan parameter(s) for %s: %s', module_id_str, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                        # storing register values until scan has finished and then restore configuration
                        restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                        self.configure()
                for module_id in self._tx_module_groups:
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        # set all modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                with self.access_module(module_id=None):
                    self.fifo_readout.reset_rx()
                    self.fifo_readout.reset_fifo(self._selected_fifos)
                    self.fifo_readout.print_fei4_rx_status()
                    with self.access_files():
                        self._scan_threads = []
                        for module_id in self._tx_module_groups:
                            if self.abort_run.is_set():
                                break
                            t = ExcThread(target=self.scan, name=module_id)
                            t.daemon = True  # exiting program even when thread is alive
                            self._scan_threads.append(t)
                        for t in self._scan_threads:
                            t.start()
                        while any([t.is_alive() for t in self._scan_threads]):
                            # if self.abort_run.is_set():
                            #     break
                            for t in self._scan_threads:
                                try:
                                    t.join(0.01)
                                except Exception:
                                    self._scan_threads.remove(t)
                                    self.handle_err(sys.exc_info())
                        # alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]
                        # if alive_threads:
                        #     raise RuntimeError("Scan thread(s) not finished: %s" % ", ".join(alive_threads))
                        self._scan_threads = []
            for module_id in self._tx_module_groups:
                if self.abort_run.is_set():
                    break
                with self.access_module(module_id=module_id):
                    # set modules to conf mode after finishing scan()
                    self.register_utils.set_conf_mode()
        else:
            for tx_module_id, tx_group in self._tx_module_groups.items():
                if self.abort_run.is_set():
                    break
                with ExitStack() as restore_config_stack:
                    for module_id in itertools.chain([tx_module_id], tx_group):
                        if self.abort_run.is_set():
                            break
                        with self.access_module(module_id=module_id):
                            logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                            # storing register values until scan has finished and then restore configuration
                            restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                            self.configure()
                    with self.access_module(module_id=tx_module_id):
                        # set all modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        # some scans use this event to stop scan loop, clear event here to make another scan possible
                        self.stop_run.clear()
                        with self.access_files():
                            self.scan()
                with self.access_module(module_id=tx_module_id):
                    # set modules to conf mode after finishing scan()
                    self.register_utils.set_conf_mode()
    else:  # Scan each FE individually
        if self.threaded_scan:
            self._scan_threads = []
            # loop over groups of modules with different TX
            for tx_module_ids in zip_nofill(*self._tx_module_groups.values()):
                if self.abort_run.is_set():
                    break
                with ExitStack() as restore_config_stack:
                    for module_id in tx_module_ids:
                        if self.abort_run.is_set():
                            break
                        with self.access_module(module_id=module_id):
                            logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                            # storing register values until scan has finished and then restore configuration
                            restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                            self.configure()
                            # set modules to run mode before entering scan()
                            self.register_utils.set_run_mode()
                            t = ExcThread(target=self.scan, name=module_id)
                            t.daemon = True  # exiting program even when thread is alive
                            self._scan_threads.append(t)
                    with self.access_module(module_id=tx_module_ids):
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        with self.access_files():
                            # some scans use this event to stop scan loop, clear event here to make another scan possible
                            self.stop_run.clear()
                            for t in self._scan_threads:
                                t.start()
                            while any([t.is_alive() for t in self._scan_threads]):
                                # if self.abort_run.is_set():
                                #     break
                                for t in self._scan_threads:
                                    try:
                                        t.join(0.01)
                                    except Exception:
                                        self._scan_threads.remove(t)
                                        self.handle_err(sys.exc_info())
                            # alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]
                            # if alive_threads:
                            #     raise RuntimeError("Scan thread(s) not finished: %s" % ", ".join(alive_threads))
                            self._scan_threads = []
                for module_id in tx_module_ids:
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        # set modules to conf mode after finishing scan()
                        self.register_utils.set_conf_mode()
        else:
            for module_id in self._modules:
                if self.abort_run.is_set():
                    break
                # some scans use this event to stop scan loop, clear event here to make another scan possible
                self.stop_run.clear()
                with self.access_module(module_id=module_id):
                    logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                    with self.register.restored(name=self.run_number):
                        self.configure()
                        # set modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        # some scans use this event to stop scan loop, clear event here to make another scan possible
                        self.stop_run.clear()
                        with self.access_files():
                            self.scan()
                        # set modules to conf mode after finishing scan()
                        self.register_utils.set_conf_mode()
    if self._modules:
        self.fifo_readout.print_readout_status()
python
def do_run(self):
    ''' Start runs on all modules sequentially.

    Sets properties to access current module properties.
    '''
    if self.broadcast_commands:  # Broadcast FE commands
        if self.threaded_scan:
            with ExitStack() as restore_config_stack:
                # Configure each FE individually
                # Sort module config keys, configure broadcast modules first
                for module_id in itertools.chain(self._tx_module_groups, self._modules):
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        if module_id in self._modules:
                            module_id_str = "module " + module_id
                        else:
                            module_id_str = module_id.split('=', 1)
                            module_id_str[0] = module_id_str[0].replace("_", " ")
                            module_id_str = "=".join(module_id_str)
                        logging.info('Scan parameter(s) for %s: %s', module_id_str, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                        # storing register values until scan has finished and then restore configuration
                        restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                        self.configure()
                for module_id in self._tx_module_groups:
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        # set all modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                with self.access_module(module_id=None):
                    self.fifo_readout.reset_rx()
                    self.fifo_readout.reset_fifo(self._selected_fifos)
                    self.fifo_readout.print_fei4_rx_status()
                    with self.access_files():
                        self._scan_threads = []
                        for module_id in self._tx_module_groups:
                            if self.abort_run.is_set():
                                break
                            t = ExcThread(target=self.scan, name=module_id)
                            t.daemon = True  # exiting program even when thread is alive
                            self._scan_threads.append(t)
                        for t in self._scan_threads:
                            t.start()
                        while any([t.is_alive() for t in self._scan_threads]):
                            # if self.abort_run.is_set():
                            #     break
                            for t in self._scan_threads:
                                try:
                                    t.join(0.01)
                                except Exception:
                                    self._scan_threads.remove(t)
                                    self.handle_err(sys.exc_info())
                        # alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]
                        # if alive_threads:
                        #     raise RuntimeError("Scan thread(s) not finished: %s" % ", ".join(alive_threads))
                        self._scan_threads = []
            for module_id in self._tx_module_groups:
                if self.abort_run.is_set():
                    break
                with self.access_module(module_id=module_id):
                    # set modules to conf mode after finishing scan()
                    self.register_utils.set_conf_mode()
        else:
            for tx_module_id, tx_group in self._tx_module_groups.items():
                if self.abort_run.is_set():
                    break
                with ExitStack() as restore_config_stack:
                    for module_id in itertools.chain([tx_module_id], tx_group):
                        if self.abort_run.is_set():
                            break
                        with self.access_module(module_id=module_id):
                            logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                            # storing register values until scan has finished and then restore configuration
                            restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                            self.configure()
                    with self.access_module(module_id=tx_module_id):
                        # set all modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        # some scans use this event to stop scan loop, clear event here to make another scan possible
                        self.stop_run.clear()
                        with self.access_files():
                            self.scan()
                with self.access_module(module_id=tx_module_id):
                    # set modules to conf mode after finishing scan()
                    self.register_utils.set_conf_mode()
    else:  # Scan each FE individually
        if self.threaded_scan:
            self._scan_threads = []
            # loop over groups of modules with different TX
            for tx_module_ids in zip_nofill(*self._tx_module_groups.values()):
                if self.abort_run.is_set():
                    break
                with ExitStack() as restore_config_stack:
                    for module_id in tx_module_ids:
                        if self.abort_run.is_set():
                            break
                        with self.access_module(module_id=module_id):
                            logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                            # storing register values until scan has finished and then restore configuration
                            restore_config_stack.enter_context(self.register.restored(name=self.run_number))
                            self.configure()
                            # set modules to run mode before entering scan()
                            self.register_utils.set_run_mode()
                            t = ExcThread(target=self.scan, name=module_id)
                            t.daemon = True  # exiting program even when thread is alive
                            self._scan_threads.append(t)
                    with self.access_module(module_id=tx_module_ids):
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        with self.access_files():
                            # some scans use this event to stop scan loop, clear event here to make another scan possible
                            self.stop_run.clear()
                            for t in self._scan_threads:
                                t.start()
                            while any([t.is_alive() for t in self._scan_threads]):
                                # if self.abort_run.is_set():
                                #     break
                                for t in self._scan_threads:
                                    try:
                                        t.join(0.01)
                                    except Exception:
                                        self._scan_threads.remove(t)
                                        self.handle_err(sys.exc_info())
                            # alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]
                            # if alive_threads:
                            #     raise RuntimeError("Scan thread(s) not finished: %s" % ", ".join(alive_threads))
                            self._scan_threads = []
                for module_id in tx_module_ids:
                    if self.abort_run.is_set():
                        break
                    with self.access_module(module_id=module_id):
                        # set modules to conf mode after finishing scan()
                        self.register_utils.set_conf_mode()
        else:
            for module_id in self._modules:
                if self.abort_run.is_set():
                    break
                # some scans use this event to stop scan loop, clear event here to make another scan possible
                self.stop_run.clear()
                with self.access_module(module_id=module_id):
                    logging.info('Scan parameter(s) for module %s: %s', module_id, ', '.join(['%s=%s' % (key, value) for (key, value) in self.scan_parameters._asdict().items()]) if self.scan_parameters else 'None')
                    with self.register.restored(name=self.run_number):
                        self.configure()
                        # set modules to run mode before entering scan()
                        self.register_utils.set_run_mode()
                        self.fifo_readout.reset_rx()
                        self.fifo_readout.reset_fifo(self._selected_fifos)
                        self.fifo_readout.print_fei4_rx_status()
                        # some scans use this event to stop scan loop, clear event here to make another scan possible
                        self.stop_run.clear()
                        with self.access_files():
                            self.scan()
                        # set modules to conf mode after finishing scan()
                        self.register_utils.set_conf_mode()
    if self._modules:
        self.fifo_readout.print_readout_status()
[ "def", "do_run", "(", "self", ")", ":", "if", "self", ".", "broadcast_commands", ":", "# Broadcast FE commands", "if", "self", ".", "threaded_scan", ":", "with", "ExitStack", "(", ")", "as", "restore_config_stack", ":", "# Configure each FE individually", "# Sort module config keys, configure broadcast modules first", "for", "module_id", "in", "itertools", ".", "chain", "(", "self", ".", "_tx_module_groups", ",", "self", ".", "_modules", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "if", "module_id", "in", "self", ".", "_modules", ":", "module_id_str", "=", "\"module \"", "+", "module_id", "else", ":", "module_id_str", "=", "module_id", ".", "split", "(", "'='", ",", "1", ")", "module_id_str", "[", "0", "]", "=", "module_id_str", "[", "0", "]", ".", "replace", "(", "\"_\"", ",", "\" \"", ")", "module_id_str", "=", "\"=\"", ".", "join", "(", "module_id_str", ")", "logging", ".", "info", "(", "'Scan parameter(s) for %s: %s'", ",", "module_id_str", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "self", ".", "scan_parameters", ".", "_asdict", "(", ")", ".", "items", "(", ")", "]", ")", "if", "self", ".", "scan_parameters", "else", "'None'", ")", "# storing register values until scan has finished and then restore configuration", "restore_config_stack", ".", "enter_context", "(", "self", ".", "register", ".", "restored", "(", "name", "=", "self", ".", "run_number", ")", ")", "self", ".", "configure", "(", ")", "for", "module_id", "in", "self", ".", "_tx_module_groups", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "# set all modules to run mode by before entering scan()", "self", ".", "register_utils", ".", "set_run_mode", "(", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "None", ")", ":", "self", ".", "fifo_readout", ".", "reset_rx", "(", ")", "self", ".", "fifo_readout", ".", "reset_fifo", "(", "self", ".", "_selected_fifos", ")", "self", ".", "fifo_readout", ".", "print_fei4_rx_status", "(", ")", "with", "self", ".", "access_files", "(", ")", ":", "self", ".", "_scan_threads", "=", "[", "]", "for", "module_id", "in", "self", ".", "_tx_module_groups", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "t", "=", "ExcThread", "(", "target", "=", "self", ".", "scan", ",", "name", "=", "module_id", ")", "t", ".", "daemon", "=", "True", "# exiting program even when thread is alive", "self", ".", "_scan_threads", ".", "append", "(", "t", ")", "for", "t", "in", "self", ".", "_scan_threads", ":", "t", ".", "start", "(", ")", "while", "any", "(", "[", "t", ".", "is_alive", "(", ")", "for", "t", "in", "self", ".", "_scan_threads", "]", ")", ":", "# if self.abort_run.is_set():", "# break", "for", "t", "in", "self", ".", "_scan_threads", ":", "try", ":", "t", ".", "join", "(", "0.01", ")", "except", "Exception", ":", "self", ".", "_scan_threads", ".", "remove", "(", "t", ")", "self", ".", "handle_err", "(", "sys", ".", "exc_info", "(", ")", ")", "# alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]", "# if alive_threads:", "# raise RuntimeError(\"Scan thread(s) not finished: %s\" % \", \".join(alive_threads))", "self", ".", "_scan_threads", "=", "[", "]", "for", "module_id", "in", "self", ".", 
"_tx_module_groups", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "# set modules to conf mode by after finishing scan()", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "else", ":", "for", "tx_module_id", ",", "tx_group", "in", "self", ".", "_tx_module_groups", ".", "items", "(", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "ExitStack", "(", ")", "as", "restore_config_stack", ":", "for", "module_id", "in", "itertools", ".", "chain", "(", "[", "tx_module_id", "]", ",", "tx_group", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "logging", ".", "info", "(", "'Scan parameter(s) for module %s: %s'", ",", "module_id", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "self", ".", "scan_parameters", ".", "_asdict", "(", ")", ".", "items", "(", ")", "]", ")", "if", "self", ".", "scan_parameters", "else", "'None'", ")", "# storing register values until scan has finished and then restore configuration", "restore_config_stack", ".", "enter_context", "(", "self", ".", "register", ".", "restored", "(", "name", "=", "self", ".", "run_number", ")", ")", "self", ".", "configure", "(", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "tx_module_id", ")", ":", "# set all modules to run mode by before entering scan()", "self", ".", "register_utils", ".", "set_run_mode", "(", ")", "self", ".", "fifo_readout", ".", "reset_rx", "(", ")", "self", ".", "fifo_readout", ".", "reset_fifo", "(", "self", ".", "_selected_fifos", ")", "self", ".", "fifo_readout", ".", "print_fei4_rx_status", "(", ")", "# some scans use this event to stop scan loop, clear event here to make another scan possible", "self", ".", "stop_run", ".", "clear", "(", ")", "with", "self", ".", "access_files", "(", ")", ":", "self", ".", "scan", "(", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "tx_module_id", ")", ":", "# set modules to conf mode by after finishing scan()", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "else", ":", "# Scan each FE individually", "if", "self", ".", "threaded_scan", ":", "self", ".", "_scan_threads", "=", "[", "]", "# loop over grpups of modules with different TX", "for", "tx_module_ids", "in", "zip_nofill", "(", "*", "self", ".", "_tx_module_groups", ".", "values", "(", ")", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "ExitStack", "(", ")", "as", "restore_config_stack", ":", "for", "module_id", "in", "tx_module_ids", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "logging", ".", "info", "(", "'Scan parameter(s) for module %s: %s'", ",", "module_id", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "self", ".", "scan_parameters", ".", "_asdict", "(", ")", ".", "items", "(", ")", "]", ")", "if", "self", ".", "scan_parameters", "else", "'None'", ")", "# storing register values until scan has finished and then restore configuration", "restore_config_stack", ".", "enter_context", "(", "self", ".", "register", ".", "restored", "(", "name", "=", "self", ".", "run_number", 
")", ")", "self", ".", "configure", "(", ")", "# set modules to run mode by before entering scan()", "self", ".", "register_utils", ".", "set_run_mode", "(", ")", "t", "=", "ExcThread", "(", "target", "=", "self", ".", "scan", ",", "name", "=", "module_id", ")", "t", ".", "daemon", "=", "True", "# exiting program even when thread is alive", "self", ".", "_scan_threads", ".", "append", "(", "t", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "tx_module_ids", ")", ":", "self", ".", "fifo_readout", ".", "reset_rx", "(", ")", "self", ".", "fifo_readout", ".", "reset_fifo", "(", "self", ".", "_selected_fifos", ")", "self", ".", "fifo_readout", ".", "print_fei4_rx_status", "(", ")", "with", "self", ".", "access_files", "(", ")", ":", "# some scans use this event to stop scan loop, clear event here to make another scan possible", "self", ".", "stop_run", ".", "clear", "(", ")", "for", "t", "in", "self", ".", "_scan_threads", ":", "t", ".", "start", "(", ")", "while", "any", "(", "[", "t", ".", "is_alive", "(", ")", "for", "t", "in", "self", ".", "_scan_threads", "]", ")", ":", "# if self.abort_run.is_set():", "# break", "for", "t", "in", "self", ".", "_scan_threads", ":", "try", ":", "t", ".", "join", "(", "0.01", ")", "except", "Exception", ":", "self", ".", "_scan_threads", ".", "remove", "(", "t", ")", "self", ".", "handle_err", "(", "sys", ".", "exc_info", "(", ")", ")", "# alive_threads = [t.name for t in self._scan_threads if (not t.join(10.0) and t.is_alive())]", "# if alive_threads:", "# raise RuntimeError(\"Scan thread(s) not finished: %s\" % \", \".join(alive_threads))", "self", ".", "_scan_threads", "=", "[", "]", "for", "module_id", "in", "tx_module_ids", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "# set modules to conf mode by after finishing scan()", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "else", ":", "for", "module_id", "in", "self", ".", "_modules", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "# some scans use this event to stop scan loop, clear event here to make another scan possible", "self", ".", "stop_run", ".", "clear", "(", ")", "with", "self", ".", "access_module", "(", "module_id", "=", "module_id", ")", ":", "logging", ".", "info", "(", "'Scan parameter(s) for module %s: %s'", ",", "module_id", ",", "', '", ".", "join", "(", "[", "'%s=%s'", "%", "(", "key", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "self", ".", "scan_parameters", ".", "_asdict", "(", ")", ".", "items", "(", ")", "]", ")", "if", "self", ".", "scan_parameters", "else", "'None'", ")", "with", "self", ".", "register", ".", "restored", "(", "name", "=", "self", ".", "run_number", ")", ":", "self", ".", "configure", "(", ")", "# set modules to run mode by before entering scan()", "self", ".", "register_utils", ".", "set_run_mode", "(", ")", "self", ".", "fifo_readout", ".", "reset_rx", "(", ")", "self", ".", "fifo_readout", ".", "reset_fifo", "(", "self", ".", "_selected_fifos", ")", "self", ".", "fifo_readout", ".", "print_fei4_rx_status", "(", ")", "# some scans use this event to stop scan loop, clear event here to make another scan possible", "self", ".", "stop_run", ".", "clear", "(", ")", "with", "self", ".", "access_files", "(", ")", ":", "self", ".", "scan", "(", ")", "# set modules to conf mode by after finishing scan()", "self", ".", "register_utils", ".", "set_conf_mode", "(", ")", "if", 
"self", ".", "_modules", ":", "self", ".", "fifo_readout", ".", "print_readout_status", "(", ")" ]
Start runs on all modules sequentially. Sets properties to access current module properties.
[ "Start", "runs", "on", "all", "modules", "sequentially", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L660-L830
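The threaded branches of `do_run` start one scan thread per TX group and poll them with short `join` timeouts, so an exception raised in one thread can be handled while the others keep running. A self-contained sketch of the pattern, with a minimal stand-in for pyBAR's `ExcThread` (assumed here to re-raise a worker's exception on `join`; the module group names are made up):

import sys
import threading
import time

class ExcThread(threading.Thread):
    # minimal stand-in: remember an exception and re-raise it on join()
    def run(self):
        self.exc_info = None
        try:
            threading.Thread.run(self)
        except Exception:
            self.exc_info = sys.exc_info()

    def join(self, timeout=None):
        threading.Thread.join(self, timeout)
        if getattr(self, 'exc_info', None) is not None:
            exc_info, self.exc_info = self.exc_info, None
            raise exc_info[1]

def scan(name):
    if name == 'module_group_TX=TX1':
        raise RuntimeError('scan failed on %s' % name)
    time.sleep(0.2)  # pretend to take data for a while

threads = [ExcThread(target=scan, args=(name,), name=name)
           for name in ('module_group_TX=TX0', 'module_group_TX=TX1')]
for t in threads:
    t.daemon = True  # exiting program even when thread is alive
    t.start()
while any(t.is_alive() for t in threads):
    for t in list(threads):  # iterate over a copy: the list may shrink while polling
        try:
            t.join(0.01)  # short timeout keeps the loop responsive
        except Exception as e:
            threads.remove(t)  # drop the failed thread, keep polling the rest
            print('handled: %s' % e)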
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.close
def close(self):
    '''Releasing hardware resources.
    '''
    try:
        self.dut.close()
    except Exception:
        logging.warning('Closing DUT was not successful')
    else:
        logging.debug('Closed DUT')
python
def close(self):
    '''Releasing hardware resources.
    '''
    try:
        self.dut.close()
    except Exception:
        logging.warning('Closing DUT was not successful')
    else:
        logging.debug('Closed DUT')
[ "def", "close", "(", "self", ")", ":", "try", ":", "self", ".", "dut", ".", "close", "(", ")", "except", "Exception", ":", "logging", ".", "warning", "(", "'Closing DUT was not successful'", ")", "else", ":", "logging", ".", "debug", "(", "'Closed DUT'", ")" ]
Releasing hardware resources.
[ "Releasing", "hardware", "resources", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L858-L866
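Because `close()` catches any `Exception` from the DUT and only logs a warning, it is safe to call unconditionally during teardown. A small self-contained illustration; the DUT and run classes are hypothetical stand-ins, not pyBAR classes:

import logging

class BrokenDUT(object):  # hypothetical DUT whose close() fails
    def close(self):
        raise IOError('USB device already gone')

class DummyRun(object):  # hypothetical holder with the same close() logic
    def __init__(self, dut):
        self.dut = dut

    def close(self):
        try:
            self.dut.close()
        except Exception:
            logging.warning('Closing DUT was not successful')
        else:
            logging.debug('Closed DUT')

DummyRun(BrokenDUT()).close()  # logs a warning, the IOError does not propagate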
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.handle_data
def handle_data(self, data, new_file=False, flush=True):
    '''Handling of the data.

    Parameters
    ----------
    data : list, tuple
        Data tuple of the format (data (np.array), last_time (float), curr_time (float), status (int))
    '''
    for i, module_id in enumerate(self._selected_modules):
        if data[i] is None:
            continue
        self._raw_data_files[module_id].append(data_iterable=data[i], scan_parameters=self._scan_parameters[module_id]._asdict(), new_file=new_file, flush=flush)
python
def handle_data(self, data, new_file=False, flush=True):
    '''Handling of the data.

    Parameters
    ----------
    data : list, tuple
        Data tuple of the format (data (np.array), last_time (float), curr_time (float), status (int))
    '''
    for i, module_id in enumerate(self._selected_modules):
        if data[i] is None:
            continue
        self._raw_data_files[module_id].append(data_iterable=data[i], scan_parameters=self._scan_parameters[module_id]._asdict(), new_file=new_file, flush=flush)
[ "def", "handle_data", "(", "self", ",", "data", ",", "new_file", "=", "False", ",", "flush", "=", "True", ")", ":", "for", "i", ",", "module_id", "in", "enumerate", "(", "self", ".", "_selected_modules", ")", ":", "if", "data", "[", "i", "]", "is", "None", ":", "continue", "self", ".", "_raw_data_files", "[", "module_id", "]", ".", "append", "(", "data_iterable", "=", "data", "[", "i", "]", ",", "scan_parameters", "=", "self", ".", "_scan_parameters", "[", "module_id", "]", ".", "_asdict", "(", ")", ",", "new_file", "=", "new_file", ",", "flush", "=", "flush", ")" ]
Handling of the data.

Parameters
----------
data : list, tuple
    Data tuple of the format (data (np.array), last_time (float), curr_time (float), status (int))
[ "Handling", "of", "the", "data", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L868-L879
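A sketch of the per-module data layout `handle_data` expects: one entry per selected module, either `None` or a tuple of raw data words plus readout timestamps and status. The array contents and module names below are made up for illustration:

import time
import numpy as np

selected_modules = ['module_0', 'module_1']

# one entry per selected module: (raw data words, last_time, curr_time, status), or None
now = time.time()
data = [
    (np.array([0x00800001, 0x00800002], dtype=np.uint32), now - 1.0, now, 0),
    None,  # module_1 delivered nothing in this readout cycle
]

for i, module_id in enumerate(selected_modules):
    if data[i] is None:
        continue  # skip modules without new data, like handle_data() does
    raw_data, last_time, curr_time, status = data[i]
    print('%s: %d words between %.3f and %.3f (status %d)' % (module_id, len(raw_data), last_time, curr_time, status))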
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.handle_err
def handle_err(self, exc):
    '''Handling of Exceptions.

    Parameters
    ----------
    exc : list, tuple
        Information of the exception of the format (type, value, traceback).
        Uses the return value of sys.exc_info().
    '''
    if self.reset_rx_on_error and isinstance(exc[1], (RxSyncError, EightbTenbError)):
        self.fifo_readout.print_readout_status()
        self.fifo_readout.reset_rx()
    else:
        # print just the first error message
        if not self.abort_run.is_set():
            self.abort(msg=exc[1].__class__.__name__ + ": " + str(exc[1]))
        self.err_queue.put(exc)
python
def handle_err(self, exc):
    '''Handling of Exceptions.

    Parameters
    ----------
    exc : list, tuple
        Information of the exception of the format (type, value, traceback).
        Uses the return value of sys.exc_info().
    '''
    if self.reset_rx_on_error and isinstance(exc[1], (RxSyncError, EightbTenbError)):
        self.fifo_readout.print_readout_status()
        self.fifo_readout.reset_rx()
    else:
        # print just the first error message
        if not self.abort_run.is_set():
            self.abort(msg=exc[1].__class__.__name__ + ": " + str(exc[1]))
        self.err_queue.put(exc)
[ "def", "handle_err", "(", "self", ",", "exc", ")", ":", "if", "self", ".", "reset_rx_on_error", "and", "isinstance", "(", "exc", "[", "1", "]", ",", "(", "RxSyncError", ",", "EightbTenbError", ")", ")", ":", "self", ".", "fifo_readout", ".", "print_readout_status", "(", ")", "self", ".", "fifo_readout", ".", "reset_rx", "(", ")", "else", ":", "# print just the first error massage", "if", "not", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "self", ".", "abort", "(", "msg", "=", "exc", "[", "1", "]", ".", "__class__", ".", "__name__", "+", "\": \"", "+", "str", "(", "exc", "[", "1", "]", ")", ")", "self", ".", "err_queue", ".", "put", "(", "exc", ")" ]
Handling of Exceptions.

Parameters
----------
exc : list, tuple
    Information of the exception of the format (type, value, traceback).
    Uses the return value of sys.exc_info().
[ "Handling", "of", "Exceptions", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L881-L897
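`handle_err` takes the full `sys.exc_info()` triple rather than a bare exception, so the traceback survives the hand-off from a reader or scan thread to the main thread. A self-contained sketch of that pattern; a plain list stands in for the thread-safe error queue used by the run base class:

import sys
import traceback

err_queue = []  # stand-in for the thread-safe Queue used in Fei4RunBase

def handle_err(exc):
    # exc = (type, value, traceback), i.e. the return value of sys.exc_info()
    err_queue.append(exc)

try:
    {}['missing']  # provoke an error inside the "scan"
except Exception:
    handle_err(sys.exc_info())

# later, in the main thread: report the first queued error with full traceback
exc_type, exc_value, exc_tb = err_queue[0]
print(exc_value.__class__.__name__ + ": " + str(exc_value))  # the abort() message format
traceback.print_exception(exc_type, exc_value, exc_tb)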
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.get_configuration
def get_configuration(self, module_id, run_number=None):
    ''' Returns the configuration for a given module ID.

    The working directory is searched for a file matching the module_id with the given run number.
    If no run number is defined the last successful run defines the run number.
    '''
    def find_file(run_number):
        module_path = self.get_module_path(module_id)
        for root, _, files in os.walk(module_path):
            for cfgfile in files:
                cfg_root, cfg_ext = os.path.splitext(cfgfile)
                if cfg_root.startswith(''.join([str(run_number), '_', module_id])) and cfg_ext.endswith(".cfg"):
                    return os.path.join(root, cfgfile)

    if not run_number:
        run_numbers = sorted(self._get_run_numbers(status='FINISHED').keys(), reverse=True)
        found_fin_run_cfg = True
        if not run_numbers:
            return None
        last_fin_run = run_numbers[0]
        for run_number in run_numbers:
            cfg_file = find_file(run_number)
            if cfg_file:
                if not found_fin_run_cfg:
                    logging.warning("Module '%s' has no configuration for run %d, use config of run %d", module_id, last_fin_run, run_number)
                return cfg_file
            else:
                found_fin_run_cfg = False
    else:
        cfg_file = find_file(run_number)
        if cfg_file:
            return cfg_file
        else:
            raise ValueError('Found no configuration with run number %s' % run_number)
python
def get_configuration(self, module_id, run_number=None):
    ''' Returns the configuration for a given module ID.

    The working directory is searched for a file matching the module_id with the given run number.
    If no run number is defined the last successful run defines the run number.
    '''
    def find_file(run_number):
        module_path = self.get_module_path(module_id)
        for root, _, files in os.walk(module_path):
            for cfgfile in files:
                cfg_root, cfg_ext = os.path.splitext(cfgfile)
                if cfg_root.startswith(''.join([str(run_number), '_', module_id])) and cfg_ext.endswith(".cfg"):
                    return os.path.join(root, cfgfile)

    if not run_number:
        run_numbers = sorted(self._get_run_numbers(status='FINISHED').keys(), reverse=True)
        found_fin_run_cfg = True
        if not run_numbers:
            return None
        last_fin_run = run_numbers[0]
        for run_number in run_numbers:
            cfg_file = find_file(run_number)
            if cfg_file:
                if not found_fin_run_cfg:
                    logging.warning("Module '%s' has no configuration for run %d, use config of run %d", module_id, last_fin_run, run_number)
                return cfg_file
            else:
                found_fin_run_cfg = False
    else:
        cfg_file = find_file(run_number)
        if cfg_file:
            return cfg_file
        else:
            raise ValueError('Found no configuration with run number %s' % run_number)
[ "def", "get_configuration", "(", "self", ",", "module_id", ",", "run_number", "=", "None", ")", ":", "def", "find_file", "(", "run_number", ")", ":", "module_path", "=", "self", ".", "get_module_path", "(", "module_id", ")", "for", "root", ",", "_", ",", "files", "in", "os", ".", "walk", "(", "module_path", ")", ":", "for", "cfgfile", "in", "files", ":", "cfg_root", ",", "cfg_ext", "=", "os", ".", "path", ".", "splitext", "(", "cfgfile", ")", "if", "cfg_root", ".", "startswith", "(", "''", ".", "join", "(", "[", "str", "(", "run_number", ")", ",", "'_'", ",", "module_id", "]", ")", ")", "and", "cfg_ext", ".", "endswith", "(", "\".cfg\"", ")", ":", "return", "os", ".", "path", ".", "join", "(", "root", ",", "cfgfile", ")", "if", "not", "run_number", ":", "run_numbers", "=", "sorted", "(", "self", ".", "_get_run_numbers", "(", "status", "=", "'FINISHED'", ")", ".", "keys", "(", ")", ",", "reverse", "=", "True", ")", "found_fin_run_cfg", "=", "True", "if", "not", "run_numbers", ":", "return", "None", "last_fin_run", "=", "run_numbers", "[", "0", "]", "for", "run_number", "in", "run_numbers", ":", "cfg_file", "=", "find_file", "(", "run_number", ")", "if", "cfg_file", ":", "if", "not", "found_fin_run_cfg", ":", "logging", ".", "warning", "(", "\"Module '%s' has no configuration for run %d, use config of run %d\"", ",", "module_id", ",", "last_fin_run", ",", "run_number", ")", "return", "cfg_file", "else", ":", "found_fin_run_cfg", "=", "False", "else", ":", "cfg_file", "=", "find_file", "(", "run_number", ")", "if", "cfg_file", ":", "return", "cfg_file", "else", ":", "raise", "ValueError", "(", "'Found no configuration with run number %s'", "%", "run_number", ")" ]
Returns the configuration for a given module ID.

The working directory is searched for a file matching the module_id with the given run number.
If no run number is defined the last successful run defines the run number.
[ "Returns", "the", "configuration", "for", "a", "given", "module", "ID", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L904-L938
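Configuration files are matched purely by file name, `<run_number>_<module_id>*.cfg`, anywhere below the module directory. A standalone sketch of the lookup helper; the directory layout in the comment is hypothetical:

import os

def find_file(module_path, run_number, module_id):
    # mirrors find_file() above: first *.cfg whose basename starts with '<run_number>_<module_id>'
    prefix = '%s_%s' % (run_number, module_id)
    for root, _, files in os.walk(module_path):
        for cfgfile in files:
            cfg_root, cfg_ext = os.path.splitext(cfgfile)
            if cfg_root.startswith(prefix) and cfg_ext.endswith('.cfg'):
                return os.path.join(root, cfgfile)

# e.g. a file module_0/configs/105_module_0.cfg would be found by:
# find_file('module_0', 105, 'module_0')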
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.select_module
def select_module(self, module_id):
    ''' Select module and give access to the module.
    '''
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable) and set(module_id) - set(self._modules):
        raise ValueError('Module IDs invalid: %s' % ", ".join(set(module_id) - set(self._modules)))
    if isinstance(module_id, basestring) and module_id not in self._module_cfgs:
        raise ValueError('Module ID "%s" is not valid' % module_id)
    if self._current_module_handle is not None:
        raise RuntimeError('Module handle "%s" cannot be set because another module is active' % module_id)
    if module_id is None:
        self._selected_modules = self._modules.keys()
    elif not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._selected_modules = module_id
    elif module_id in self._modules:
        self._selected_modules = [module_id]
    elif module_id in self._tx_module_groups:
        self._selected_modules = self._tx_module_groups[module_id]
    else:
        raise RuntimeError('Cannot open files. Module handle "%s" is not valid.' % self.current_module_handle)
    # FIFO readout
    self._selected_fifos = list(set([module_cfg['FIFO'] for (name, module_cfg) in self._module_cfgs.items() if name in self._selected_modules]))
    # Module filter functions dict for quick lookup
    self._readout_fifos = []
    self._filter = []
    self._converter = []
    for selected_module_id in self._selected_modules:
        module_cfg = self._module_cfgs[selected_module_id]
        self._readout_fifos.append(module_cfg['FIFO'])
        if 'tdc_channel' not in module_cfg:
            tdc_filter = false  # filter function that never matches
            self._converter.append(None)
        elif module_cfg['tdc_channel'] is None:
            tdc_filter = is_tdc_word
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        else:
            tdc_filter = logical_and(is_tdc_word, is_tdc_from_channel(module_cfg['tdc_channel']))
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        if 'rx_channel' not in module_cfg:
            self._filter.append(logical_or(is_trigger_word, tdc_filter))
        elif module_cfg['rx_channel'] is None:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, is_fe_word)))
        else:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, logical_and(is_fe_word, is_data_from_channel(module_cfg['rx_channel'])))))
    # select readout channels and report sync status only from actively selected modules
    self._enabled_fe_channels = list(set([config['RX'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    # enabling specific TX channels
    tx_channels = list(set([1 << config['tx_channel'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    if tx_channels:
        self.dut['TX']['OUTPUT_ENABLE'] = reduce(lambda x, y: x | y, tx_channels)
    else:
        self.dut['TX']['OUTPUT_ENABLE'] = 0
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._current_module_handle = None
    else:
        self._current_module_handle = module_id
    if module_id is not None and isinstance(module_id, basestring):
        current_thread().name = module_id
python
def select_module(self, module_id):
    ''' Select module and give access to the module.
    '''
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable) and set(module_id) - set(self._modules):
        raise ValueError('Module IDs invalid: %s' % ", ".join(set(module_id) - set(self._modules)))
    if isinstance(module_id, basestring) and module_id not in self._module_cfgs:
        raise ValueError('Module ID "%s" is not valid' % module_id)
    if self._current_module_handle is not None:
        raise RuntimeError('Module handle "%s" cannot be set because another module is active' % module_id)
    if module_id is None:
        self._selected_modules = self._modules.keys()
    elif not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._selected_modules = module_id
    elif module_id in self._modules:
        self._selected_modules = [module_id]
    elif module_id in self._tx_module_groups:
        self._selected_modules = self._tx_module_groups[module_id]
    else:
        raise RuntimeError('Cannot open files. Module handle "%s" is not valid.' % self.current_module_handle)
    # FIFO readout
    self._selected_fifos = list(set([module_cfg['FIFO'] for (name, module_cfg) in self._module_cfgs.items() if name in self._selected_modules]))
    # Module filter functions dict for quick lookup
    self._readout_fifos = []
    self._filter = []
    self._converter = []
    for selected_module_id in self._selected_modules:
        module_cfg = self._module_cfgs[selected_module_id]
        self._readout_fifos.append(module_cfg['FIFO'])
        if 'tdc_channel' not in module_cfg:
            tdc_filter = false  # always-false filter function (no TDC words selected)
            self._converter.append(None)
        elif module_cfg['tdc_channel'] is None:
            tdc_filter = is_tdc_word
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        else:
            tdc_filter = logical_and(is_tdc_word, is_tdc_from_channel(module_cfg['tdc_channel']))
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        if 'rx_channel' not in module_cfg:
            self._filter.append(logical_or(is_trigger_word, tdc_filter))
        elif module_cfg['rx_channel'] is None:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, is_fe_word)))
        else:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, logical_and(is_fe_word, is_data_from_channel(module_cfg['rx_channel'])))))
    # select readout channels and report sync status only from actively selected modules
    self._enabled_fe_channels = list(set([config['RX'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    # enabling specific TX channels
    tx_channels = list(set([1 << config['tx_channel'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    if tx_channels:
        self.dut['TX']['OUTPUT_ENABLE'] = reduce(lambda x, y: x | y, tx_channels)
    else:
        self.dut['TX']['OUTPUT_ENABLE'] = 0
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._current_module_handle = None
    else:
        self._current_module_handle = module_id
    if module_id is not None and isinstance(module_id, basestring):
        current_thread().name = module_id
[ "def", "select_module", "(", "self", ",", "module_id", ")", ":", "if", "not", "isinstance", "(", "module_id", ",", "basestring", ")", "and", "isinstance", "(", "module_id", ",", "Iterable", ")", "and", "set", "(", "module_id", ")", "-", "set", "(", "self", ".", "_modules", ")", ":", "raise", "ValueError", "(", "'Module IDs invalid:'", "%", "\", \"", ".", "join", "(", "set", "(", "module_id", ")", "-", "set", "(", "self", ".", "_modules", ")", ")", ")", "if", "isinstance", "(", "module_id", ",", "basestring", ")", "and", "module_id", "not", "in", "self", ".", "_module_cfgs", ":", "raise", "ValueError", "(", "'Module ID \"%s\" is not valid'", "%", "module_id", ")", "if", "self", ".", "_current_module_handle", "is", "not", "None", ":", "raise", "RuntimeError", "(", "'Module handle \"%s\" cannot be set because another module is active'", "%", "module_id", ")", "if", "module_id", "is", "None", ":", "self", ".", "_selected_modules", "=", "self", ".", "_modules", ".", "keys", "(", ")", "elif", "not", "isinstance", "(", "module_id", ",", "basestring", ")", "and", "isinstance", "(", "module_id", ",", "Iterable", ")", ":", "self", ".", "_selected_modules", "=", "module_id", "elif", "module_id", "in", "self", ".", "_modules", ":", "self", ".", "_selected_modules", "=", "[", "module_id", "]", "elif", "module_id", "in", "self", ".", "_tx_module_groups", ":", "self", ".", "_selected_modules", "=", "self", ".", "_tx_module_groups", "[", "module_id", "]", "else", ":", "RuntimeError", "(", "'Cannot open files. Module handle \"%s\" is not valid.'", "%", "self", ".", "current_module_handle", ")", "# FIFO readout", "self", ".", "_selected_fifos", "=", "list", "(", "set", "(", "[", "module_cfg", "[", "'FIFO'", "]", "for", "(", "name", ",", "module_cfg", ")", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "name", "in", "self", ".", "_selected_modules", "]", ")", ")", "# Module filter functions dict for quick lookup", "self", ".", "_readout_fifos", "=", "[", "]", "self", ".", "_filter", "=", "[", "]", "self", ".", "_converter", "=", "[", "]", "for", "selected_module_id", "in", "self", ".", "_selected_modules", ":", "module_cfg", "=", "self", ".", "_module_cfgs", "[", "selected_module_id", "]", "self", ".", "_readout_fifos", ".", "append", "(", "module_cfg", "[", "'FIFO'", "]", ")", "if", "'tdc_channel'", "not", "in", "module_cfg", ":", "tdc_filter", "=", "false", "self", ".", "_converter", ".", "append", "(", "None", ")", "elif", "module_cfg", "[", "'tdc_channel'", "]", "is", "None", ":", "tdc_filter", "=", "is_tdc_word", "self", ".", "_converter", ".", "append", "(", "convert_tdc_to_channel", "(", "channel", "=", "module_cfg", "[", "'tdc_channel'", "]", ")", ")", "# for the raw data analyzer", "else", ":", "tdc_filter", "=", "logical_and", "(", "is_tdc_word", ",", "is_tdc_from_channel", "(", "module_cfg", "[", "'tdc_channel'", "]", ")", ")", "self", ".", "_converter", ".", "append", "(", "convert_tdc_to_channel", "(", "channel", "=", "module_cfg", "[", "'tdc_channel'", "]", ")", ")", "# for the raw data analyzer", "if", "'rx_channel'", "not", "in", "module_cfg", ":", "self", ".", "_filter", ".", "append", "(", "logical_or", "(", "is_trigger_word", ",", "tdc_filter", ")", ")", "elif", "module_cfg", "[", "'rx_channel'", "]", "is", "None", ":", "self", ".", "_filter", ".", "append", "(", "logical_or", "(", "is_trigger_word", ",", "logical_or", "(", "tdc_filter", ",", "is_fe_word", ")", ")", ")", "else", ":", "self", ".", "_filter", ".", "append", "(", "logical_or", "(", "is_trigger_word", ",", 
"logical_or", "(", "tdc_filter", ",", "logical_and", "(", "is_fe_word", ",", "is_data_from_channel", "(", "module_cfg", "[", "'rx_channel'", "]", ")", ")", ")", ")", ")", "# select readout channels and report sync status only from actively selected modules", "self", ".", "_enabled_fe_channels", "=", "list", "(", "set", "(", "[", "config", "[", "'RX'", "]", "for", "(", "name", ",", "config", ")", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "name", "in", "self", ".", "_selected_modules", "]", ")", ")", "# enabling specific TX channels", "tx_channels", "=", "list", "(", "set", "(", "[", "1", "<<", "config", "[", "'tx_channel'", "]", "for", "(", "name", ",", "config", ")", "in", "self", ".", "_module_cfgs", ".", "items", "(", ")", "if", "name", "in", "self", ".", "_selected_modules", "]", ")", ")", "if", "tx_channels", ":", "self", ".", "dut", "[", "'TX'", "]", "[", "'OUTPUT_ENABLE'", "]", "=", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "|", "y", ",", "tx_channels", ")", "else", ":", "self", ".", "dut", "[", "'TX'", "]", "[", "'OUTPUT_ENABLE'", "]", "=", "0", "if", "not", "isinstance", "(", "module_id", ",", "basestring", ")", "and", "isinstance", "(", "module_id", ",", "Iterable", ")", ":", "self", ".", "_current_module_handle", "=", "None", "else", ":", "self", ".", "_current_module_handle", "=", "module_id", "if", "module_id", "is", "not", "None", "and", "isinstance", "(", "module_id", ",", "basestring", ")", ":", "current_thread", "(", ")", ".", "name", "=", "module_id" ]
Select module and give access to the module.
[ "Select", "module", "and", "give", "access", "to", "the", "module", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1014-L1077
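select_module builds each module's data filter by composing word-level predicates with logical_or and logical_and. These combinators come from pyBAR's readout utilities and, in the real code, operate elementwise on numpy arrays of raw-data words; the scalar sketch below only illustrates the composition idea, with toy predicates that are not the real FE-I4 word tests:

def logical_or(f, g):
    # Combine two predicates into one that passes if either passes
    def combined(value):
        return f(value) or g(value)
    return combined

def logical_and(f, g):
    # Combine two predicates into one that passes only if both pass
    def combined(value):
        return f(value) and g(value)
    return combined

# Toy predicates standing in for is_trigger_word, is_fe_word, etc.
is_even = lambda w: w % 2 == 0
is_small = lambda w: w < 16

keep = logical_or(is_even, logical_and(is_small, lambda w: w > 4))
print([w for w in range(20) if keep(w)])

In pyBAR the composed filter is later applied to whole readout blocks, so the boolean operations are vectorized (np.logical_or of boolean masks) rather than the per-word version shown here.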
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.deselect_module
def deselect_module(self):
    ''' Deselect module and cleanup.
    '''
    self._enabled_fe_channels = []  # ignore any RX sync errors
    self._readout_fifos = []
    self._filter = []
    self._converter = []
    self.dut['TX']['OUTPUT_ENABLE'] = 0
    self._current_module_handle = None
    if isinstance(current_thread(), _MainThread):
        current_thread().name = "MainThread"
python
def deselect_module(self):
    ''' Deselect module and cleanup.
    '''
    self._enabled_fe_channels = []  # ignore any RX sync errors
    self._readout_fifos = []
    self._filter = []
    self._converter = []
    self.dut['TX']['OUTPUT_ENABLE'] = 0
    self._current_module_handle = None
    if isinstance(current_thread(), _MainThread):
        current_thread().name = "MainThread"
[ "def", "deselect_module", "(", "self", ")", ":", "self", ".", "_enabled_fe_channels", "=", "[", "]", "# ignore any RX sync errors", "self", ".", "_readout_fifos", "=", "[", "]", "self", ".", "_filter", "=", "[", "]", "self", ".", "_converter", "=", "[", "]", "self", ".", "dut", "[", "'TX'", "]", "[", "'OUTPUT_ENABLE'", "]", "=", "0", "self", ".", "_current_module_handle", "=", "None", "if", "isinstance", "(", "current_thread", "(", ")", ",", "_MainThread", ")", ":", "current_thread", "(", ")", ".", "name", "=", "\"MainThread\"" ]
Deselect module and cleanup.
[ "Deselect", "module", "and", "cleanup", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1079-L1089
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.enter_sync
def enter_sync(self):
    ''' Waiting for all threads to appear, then continue.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle in self._curr_sync_threads:
        raise RuntimeError('Thread "%s" is already actively reading FIFO.' % self.current_module_handle)
    with self._sync_lock:
        self._curr_sync_threads.append(self.current_module_handle)
        self._enter_sync_event.clear()
    while not self._enter_sync_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._sync_lock:
            if len(set(self._curr_sync_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == len(set([t.name for t in self._scan_threads if t.is_alive()])) or not self._scan_threads:
                self._enter_sync_event.set()
python
def enter_sync(self):
    ''' Waiting for all threads to appear, then continue.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle in self._curr_sync_threads:
        raise RuntimeError('Thread "%s" is already actively reading FIFO.' % self.current_module_handle)
    with self._sync_lock:
        self._curr_sync_threads.append(self.current_module_handle)
        self._enter_sync_event.clear()
    while not self._enter_sync_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._sync_lock:
            if len(set(self._curr_sync_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == len(set([t.name for t in self._scan_threads if t.is_alive()])) or not self._scan_threads:
                self._enter_sync_event.set()
[ "def", "enter_sync", "(", "self", ")", ":", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "]", ":", "raise", "RuntimeError", "(", "'Thread name \"%s\" is not valid.'", ")", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "in", "self", ".", "_curr_sync_threads", ":", "raise", "RuntimeError", "(", "'Thread \"%s\" is already actively reading FIFO.'", ")", "with", "self", ".", "_sync_lock", ":", "self", ".", "_curr_sync_threads", ".", "append", "(", "self", ".", "current_module_handle", ")", "self", ".", "_enter_sync_event", ".", "clear", "(", ")", "while", "not", "self", ".", "_enter_sync_event", ".", "wait", "(", "0.01", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "_sync_lock", ":", "if", "len", "(", "set", "(", "self", ".", "_curr_sync_threads", ")", "&", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "==", "len", "(", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "or", "not", "self", ".", "_scan_threads", ":", "self", ".", "_enter_sync_event", ".", "set", "(", ")" ]
Waiting for all threads to appear, then continue.
[ "Waiting", "for", "all", "threads", "to", "appear", "then", "continue", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1169-L1184
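enter_sync implements a rendezvous: each scan thread registers itself under a lock, clears a shared Event, and then spins until every live scan thread has registered, at which point one thread releases all of them. A self-contained sketch of that pattern; the names and the three toy workers are illustrative, not pyBAR's:

import threading

arrived = []
lock = threading.Lock()
go = threading.Event()
workers = []

def enter_barrier(name):
    with lock:
        arrived.append(name)
        go.clear()
    while not go.wait(0.01):
        with lock:
            alive = set(t.name for t in workers if t.is_alive())
            if alive <= set(arrived):  # every live worker has arrived
                go.set()

def work(name):
    enter_barrier(name)
    print(name + ' passed the barrier')

workers = [threading.Thread(target=work, args=('w%d' % i,), name='w%d' % i) for i in range(3)]
for t in workers:
    t.start()
for t in workers:
    t.join()

Checking against the set of still-alive threads, as enter_sync does, keeps the barrier from deadlocking when one scan thread dies before reaching it.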
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.exit_sync
def exit_sync(self):
    ''' Waiting for all threads to appear, then continue.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle not in self._curr_sync_threads:
        raise RuntimeError('Thread "%s" is not reading FIFO.' % self.current_module_handle)
    with self._sync_lock:
        self._curr_sync_threads.remove(self.current_module_handle)
        self._exit_sync_event.clear()
    while not self._exit_sync_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._sync_lock:
            if len(set(self._curr_sync_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == 0 or not self._scan_threads:
                self._exit_sync_event.set()
python
def exit_sync(self):
    ''' Waiting for all threads to appear, then continue.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle not in self._curr_sync_threads:
        raise RuntimeError('Thread "%s" is not reading FIFO.' % self.current_module_handle)
    with self._sync_lock:
        self._curr_sync_threads.remove(self.current_module_handle)
        self._exit_sync_event.clear()
    while not self._exit_sync_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._sync_lock:
            if len(set(self._curr_sync_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == 0 or not self._scan_threads:
                self._exit_sync_event.set()
[ "def", "exit_sync", "(", "self", ")", ":", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "]", ":", "raise", "RuntimeError", "(", "'Thread name \"%s\" is not valid.'", ")", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "self", ".", "_curr_sync_threads", ":", "raise", "RuntimeError", "(", "'Thread \"%s\" is not reading FIFO.'", ")", "with", "self", ".", "_sync_lock", ":", "self", ".", "_curr_sync_threads", ".", "remove", "(", "self", ".", "current_module_handle", ")", "self", ".", "_exit_sync_event", ".", "clear", "(", ")", "while", "not", "self", ".", "_exit_sync_event", ".", "wait", "(", "0.01", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "_sync_lock", ":", "if", "len", "(", "set", "(", "self", ".", "_curr_sync_threads", ")", "&", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "==", "0", "or", "not", "self", ".", "_scan_threads", ":", "self", ".", "_exit_sync_event", ".", "set", "(", ")" ]
Waiting for all threads to appear, then continue.
[ "Waiting", "for", "all", "threads", "to", "appear", "then", "continue", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1186-L1201
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.readout
def readout(self, *args, **kwargs):
    ''' Running the FIFO readout while executing other statements.
    Starting and stopping of the FIFO readout is synchronized between the threads.
    '''
    timeout = kwargs.pop('timeout', 10.0)
    self.start_readout(*args, **kwargs)
    try:
        yield
    finally:
        try:
            self.stop_readout(timeout=timeout)
        except Exception:
            # in case something fails, call this on last resort
            # if run was aborted, immediately stop readout
            if self.abort_run.is_set():
                with self._readout_lock:
                    if self.fifo_readout.is_running:
                        self.fifo_readout.stop(timeout=0.0)
python
def readout(self, *args, **kwargs):
    ''' Running the FIFO readout while executing other statements.
    Starting and stopping of the FIFO readout is synchronized between the threads.
    '''
    timeout = kwargs.pop('timeout', 10.0)
    self.start_readout(*args, **kwargs)
    try:
        yield
    finally:
        try:
            self.stop_readout(timeout=timeout)
        except Exception:
            # in case something fails, call this on last resort
            # if run was aborted, immediately stop readout
            if self.abort_run.is_set():
                with self._readout_lock:
                    if self.fifo_readout.is_running:
                        self.fifo_readout.stop(timeout=0.0)
[ "def", "readout", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ",", "10.0", ")", "self", ".", "start_readout", "(", "*", "args", ",", "*", "*", "kwargs", ")", "try", ":", "yield", "finally", ":", "try", ":", "self", ".", "stop_readout", "(", "timeout", "=", "timeout", ")", "except", "Exception", ":", "# in case something fails, call this on last resort", "# if run was aborted, immediately stop readout", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "with", "self", ".", "_readout_lock", ":", "if", "self", ".", "fifo_readout", ".", "is_running", ":", "self", ".", "fifo_readout", ".", "stop", "(", "timeout", "=", "0.0", ")" ]
Running the FIFO readout while executing other statements. Starting and stopping of the FIFO readout is synchronized between the threads.
[ "Running", "the", "FIFO", "readout", "while", "executing", "other", "statements", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1204-L1222
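readout is a generator used as a context manager: start_readout runs before the yield, the scan body runs at the yield, and the finally clause guarantees stop_readout even when the body raises. The same shape in miniature, with stand-in start/stop functions rather than pyBAR's real ones:

from contextlib import contextmanager

def start_readout():
    print('readout started')

def stop_readout(timeout):
    print('readout stopped (timeout=%.1f s)' % timeout)

@contextmanager
def readout(timeout=10.0):
    start_readout()                    # runs on 'with' entry
    try:
        yield                          # the scan body executes here
    finally:
        stop_readout(timeout=timeout)  # guaranteed, even if the body raises

with readout(timeout=2.0):
    print('taking data')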
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.start_readout
def start_readout(self, *args, **kwargs):
    ''' Starting the FIFO readout. Starting of the FIFO readout is executed only once by a random thread.
    Starting of the FIFO readout is synchronized between all threads reading out the FIFO.
    '''
    # Pop parameters for fifo_readout.start
    callback = kwargs.pop('callback', self.handle_data)
    errback = kwargs.pop('errback', self.handle_err)
    reset_rx = kwargs.pop('reset_rx', True)
    reset_fifo = kwargs.pop('reset_fifo', True)
    fill_buffer = kwargs.pop('fill_buffer', False)
    no_data_timeout = kwargs.pop('no_data_timeout', None)
    enabled_fe_channels = kwargs.pop('enabled_fe_channels', self._enabled_fe_channels)
    if args or kwargs:
        self.set_scan_parameters(*args, **kwargs)
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle in self._curr_readout_threads:
        raise RuntimeError('Thread "%s" is already actively reading FIFO.' % self.current_module_handle)
    with self._readout_lock:
        self._curr_readout_threads.append(self.current_module_handle)
        self._starting_readout_event.clear()
    while not self._starting_readout_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._readout_lock:
            if len(set(self._curr_readout_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == len(set([t.name for t in self._scan_threads if t.is_alive()])) or not self._scan_threads:
                if not self.fifo_readout.is_running:
                    self.fifo_readout.start(fifos=self._selected_fifos, callback=callback, errback=errback, reset_rx=reset_rx, reset_fifo=reset_fifo, fill_buffer=fill_buffer, no_data_timeout=no_data_timeout, filter_func=self._filter, converter_func=self._converter, fifo_select=self._readout_fifos, enabled_fe_channels=enabled_fe_channels)
                self._starting_readout_event.set()
python
def start_readout(self, *args, **kwargs):
    ''' Starting the FIFO readout. Starting of the FIFO readout is executed only once by a random thread.
    Starting of the FIFO readout is synchronized between all threads reading out the FIFO.
    '''
    # Pop parameters for fifo_readout.start
    callback = kwargs.pop('callback', self.handle_data)
    errback = kwargs.pop('errback', self.handle_err)
    reset_rx = kwargs.pop('reset_rx', True)
    reset_fifo = kwargs.pop('reset_fifo', True)
    fill_buffer = kwargs.pop('fill_buffer', False)
    no_data_timeout = kwargs.pop('no_data_timeout', None)
    enabled_fe_channels = kwargs.pop('enabled_fe_channels', self._enabled_fe_channels)
    if args or kwargs:
        self.set_scan_parameters(*args, **kwargs)
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle in self._curr_readout_threads:
        raise RuntimeError('Thread "%s" is already actively reading FIFO.' % self.current_module_handle)
    with self._readout_lock:
        self._curr_readout_threads.append(self.current_module_handle)
        self._starting_readout_event.clear()
    while not self._starting_readout_event.wait(0.01):
        if self.abort_run.is_set():
            break
        with self._readout_lock:
            if len(set(self._curr_readout_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == len(set([t.name for t in self._scan_threads if t.is_alive()])) or not self._scan_threads:
                if not self.fifo_readout.is_running:
                    self.fifo_readout.start(fifos=self._selected_fifos, callback=callback, errback=errback, reset_rx=reset_rx, reset_fifo=reset_fifo, fill_buffer=fill_buffer, no_data_timeout=no_data_timeout, filter_func=self._filter, converter_func=self._converter, fifo_select=self._readout_fifos, enabled_fe_channels=enabled_fe_channels)
                self._starting_readout_event.set()
[ "def", "start_readout", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Pop parameters for fifo_readout.start", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "handle_data", ")", "errback", "=", "kwargs", ".", "pop", "(", "'errback'", ",", "self", ".", "handle_err", ")", "reset_rx", "=", "kwargs", ".", "pop", "(", "'reset_rx'", ",", "True", ")", "reset_fifo", "=", "kwargs", ".", "pop", "(", "'reset_fifo'", ",", "True", ")", "fill_buffer", "=", "kwargs", ".", "pop", "(", "'fill_buffer'", ",", "False", ")", "no_data_timeout", "=", "kwargs", ".", "pop", "(", "'no_data_timeout'", ",", "None", ")", "enabled_fe_channels", "=", "kwargs", ".", "pop", "(", "'enabled_fe_channels'", ",", "self", ".", "_enabled_fe_channels", ")", "if", "args", "or", "kwargs", ":", "self", ".", "set_scan_parameters", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "]", ":", "raise", "RuntimeError", "(", "'Thread name \"%s\" is not valid.'", "%", "t", ".", "name", ")", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "in", "self", ".", "_curr_readout_threads", ":", "raise", "RuntimeError", "(", "'Thread \"%s\" is already actively reading FIFO.'", ")", "with", "self", ".", "_readout_lock", ":", "self", ".", "_curr_readout_threads", ".", "append", "(", "self", ".", "current_module_handle", ")", "self", ".", "_starting_readout_event", ".", "clear", "(", ")", "while", "not", "self", ".", "_starting_readout_event", ".", "wait", "(", "0.01", ")", ":", "if", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "break", "with", "self", ".", "_readout_lock", ":", "if", "len", "(", "set", "(", "self", ".", "_curr_readout_threads", ")", "&", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "==", "len", "(", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "or", "not", "self", ".", "_scan_threads", ":", "if", "not", "self", ".", "fifo_readout", ".", "is_running", ":", "self", ".", "fifo_readout", ".", "start", "(", "fifos", "=", "self", ".", "_selected_fifos", ",", "callback", "=", "callback", ",", "errback", "=", "errback", ",", "reset_rx", "=", "reset_rx", ",", "reset_fifo", "=", "reset_fifo", ",", "fill_buffer", "=", "fill_buffer", ",", "no_data_timeout", "=", "no_data_timeout", ",", "filter_func", "=", "self", ".", "_filter", ",", "converter_func", "=", "self", ".", "_converter", ",", "fifo_select", "=", "self", ".", "_readout_fifos", ",", "enabled_fe_channels", "=", "enabled_fe_channels", ")", "self", ".", "_starting_readout_event", ".", "set", "(", ")" ]
Starting the FIFO readout. Starting of the FIFO readout is executed only once by a random thread. Starting of the FIFO readout is synchronized between all threads reading out the FIFO.
[ "Starting", "the", "FIFO", "readout", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1224-L1254
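start_readout peels its own options off with kwargs.pop(key, default) so that whatever remains can be forwarded as scan parameters. A minimal sketch of this argument-splitting idiom; the function and parameter names here are invented for illustration:

def set_scan_parameters(**params):
    print('scan parameters: %r' % (params,))

def start(*args, **kwargs):
    # Consume the options this layer understands...
    callback = kwargs.pop('callback', None)
    reset_rx = kwargs.pop('reset_rx', True)
    # ...and forward everything that is left to the next layer.
    if args or kwargs:
        set_scan_parameters(*args, **kwargs)
    print('callback=%r, reset_rx=%r' % (callback, reset_rx))

start(reset_rx=False, PlsrDAC=280)  # PlsrDAC is forwarded, reset_rx is consumed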
SiLab-Bonn/pyBAR
pybar/fei4_run_base.py
Fei4RunBase.stop_readout
def stop_readout(self, timeout=10.0):
    ''' Stopping the FIFO readout. Stopping of the FIFO readout is executed only once by a random thread.
    Stopping of the FIFO readout is synchronized between all threads reading out the FIFO.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle not in self._curr_readout_threads:
        raise RuntimeError('Thread "%s" is not reading FIFO.' % self.current_module_handle)
    with self._readout_lock:
        self._curr_readout_threads.remove(self.current_module_handle)
        self._stopping_readout_event.clear()
    while not self._stopping_readout_event.wait(0.01):
        with self._readout_lock:
            if len(set(self._curr_readout_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == 0 or not self._scan_threads or self.abort_run.is_set():
                if self.fifo_readout.is_running:
                    self.fifo_readout.stop(timeout=timeout)
                self._stopping_readout_event.set()
python
def stop_readout(self, timeout=10.0):
    ''' Stopping the FIFO readout. Stopping of the FIFO readout is executed only once by a random thread.
    Stopping of the FIFO readout is synchronized between all threads reading out the FIFO.
    '''
    if self._scan_threads and self.current_module_handle not in [t.name for t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' % self.current_module_handle)
    if self._scan_threads and self.current_module_handle not in self._curr_readout_threads:
        raise RuntimeError('Thread "%s" is not reading FIFO.' % self.current_module_handle)
    with self._readout_lock:
        self._curr_readout_threads.remove(self.current_module_handle)
        self._stopping_readout_event.clear()
    while not self._stopping_readout_event.wait(0.01):
        with self._readout_lock:
            if len(set(self._curr_readout_threads) & set([t.name for t in self._scan_threads if t.is_alive()])) == 0 or not self._scan_threads or self.abort_run.is_set():
                if self.fifo_readout.is_running:
                    self.fifo_readout.stop(timeout=timeout)
                self._stopping_readout_event.set()
[ "def", "stop_readout", "(", "self", ",", "timeout", "=", "10.0", ")", ":", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "]", ":", "raise", "RuntimeError", "(", "'Thread name \"%s\" is not valid.'", ")", "if", "self", ".", "_scan_threads", "and", "self", ".", "current_module_handle", "not", "in", "self", ".", "_curr_readout_threads", ":", "raise", "RuntimeError", "(", "'Thread \"%s\" is not reading FIFO.'", ")", "with", "self", ".", "_readout_lock", ":", "self", ".", "_curr_readout_threads", ".", "remove", "(", "self", ".", "current_module_handle", ")", "self", ".", "_stopping_readout_event", ".", "clear", "(", ")", "while", "not", "self", ".", "_stopping_readout_event", ".", "wait", "(", "0.01", ")", ":", "with", "self", ".", "_readout_lock", ":", "if", "len", "(", "set", "(", "self", ".", "_curr_readout_threads", ")", "&", "set", "(", "[", "t", ".", "name", "for", "t", "in", "self", ".", "_scan_threads", "if", "t", ".", "is_alive", "(", ")", "]", ")", ")", "==", "0", "or", "not", "self", ".", "_scan_threads", "or", "self", ".", "abort_run", ".", "is_set", "(", ")", ":", "if", "self", ".", "fifo_readout", ".", "is_running", ":", "self", ".", "fifo_readout", ".", "stop", "(", "timeout", "=", "timeout", ")", "self", ".", "_stopping_readout_event", ".", "set", "(", ")" ]
Stopping the FIFO readout. Stopping of the FIFO readout is executed only once by a random thread. Stopping of the FIFO readout is synchronized between all threads reading out the FIFO.
[ "Stopping", "the", "FIFO", "readout", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4_run_base.py#L1256-L1274
SiLab-Bonn/pyBAR
pybar/scans/analyze_timewalk.py
get_charge
def get_charge(max_tdc, tdc_calibration_values, tdc_pixel_calibration):  # Return the charge from calibration
    ''' Interpolate the TDC calibration for each pixel from 0 to max_tdc'''
    charge_calibration = np.zeros(shape=(80, 336, max_tdc))
    for column in range(80):
        for row in range(336):
            actual_pixel_calibration = tdc_pixel_calibration[column, row, :]
            if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite(actual_pixel_calibration)):
                selected_measurements = np.isfinite(actual_pixel_calibration)  # Select valid calibration steps
                selected_actual_pixel_calibration = actual_pixel_calibration[selected_measurements]
                selected_tdc_calibration_values = tdc_calibration_values[selected_measurements]
                interpolation = interp1d(x=selected_actual_pixel_calibration, y=selected_tdc_calibration_values, kind='slinear', bounds_error=False, fill_value=0)
                charge_calibration[column, row, :] = interpolation(np.arange(max_tdc))
    return charge_calibration
python
def get_charge(max_tdc, tdc_calibration_values, tdc_pixel_calibration):  # Return the charge from calibration
    ''' Interpolate the TDC calibration for each pixel from 0 to max_tdc'''
    charge_calibration = np.zeros(shape=(80, 336, max_tdc))
    for column in range(80):
        for row in range(336):
            actual_pixel_calibration = tdc_pixel_calibration[column, row, :]
            if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite(actual_pixel_calibration)):
                selected_measurements = np.isfinite(actual_pixel_calibration)  # Select valid calibration steps
                selected_actual_pixel_calibration = actual_pixel_calibration[selected_measurements]
                selected_tdc_calibration_values = tdc_calibration_values[selected_measurements]
                interpolation = interp1d(x=selected_actual_pixel_calibration, y=selected_tdc_calibration_values, kind='slinear', bounds_error=False, fill_value=0)
                charge_calibration[column, row, :] = interpolation(np.arange(max_tdc))
    return charge_calibration
[ "def", "get_charge", "(", "max_tdc", ",", "tdc_calibration_values", ",", "tdc_pixel_calibration", ")", ":", "# Return the charge from calibration", "charge_calibration", "=", "np", ".", "zeros", "(", "shape", "=", "(", "80", ",", "336", ",", "max_tdc", ")", ")", "for", "column", "in", "range", "(", "80", ")", ":", "for", "row", "in", "range", "(", "336", ")", ":", "actual_pixel_calibration", "=", "tdc_pixel_calibration", "[", "column", ",", "row", ",", ":", "]", "if", "np", ".", "any", "(", "actual_pixel_calibration", "!=", "0", ")", "and", "np", ".", "any", "(", "np", ".", "isfinite", "(", "actual_pixel_calibration", ")", ")", ":", "selected_measurements", "=", "np", ".", "isfinite", "(", "actual_pixel_calibration", ")", "# Select valid calibration steps", "selected_actual_pixel_calibration", "=", "actual_pixel_calibration", "[", "selected_measurements", "]", "selected_tdc_calibration_values", "=", "tdc_calibration_values", "[", "selected_measurements", "]", "interpolation", "=", "interp1d", "(", "x", "=", "selected_actual_pixel_calibration", ",", "y", "=", "selected_tdc_calibration_values", ",", "kind", "=", "'slinear'", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "0", ")", "charge_calibration", "[", "column", ",", "row", ",", ":", "]", "=", "interpolation", "(", "np", ".", "arange", "(", "max_tdc", ")", ")", "return", "charge_calibration" ]
Interpolate the TDC calibration for each pixel from 0 to max_tdc
[ "Interpolatet", "the", "TDC", "calibration", "for", "each", "pixel", "from", "0", "to", "max_tdc" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/scans/analyze_timewalk.py#L21-L33
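get_charge inverts the per-pixel TDC-vs-charge calibration by interpolating charge as a function of TDC value, with bounds_error=False so out-of-range TDC codes fall back to fill_value. The same interp1d call on a single toy calibration curve; the numbers are invented for illustration:

import numpy as np
from scipy.interpolate import interp1d

charge_steps = np.array([0., 50., 100., 200., 400.])  # injected charge (PlsrDAC)
tdc_response = np.array([0., 90., 170., 310., 560.])  # measured mean TDC value

# Invert the curve: charge as a function of TDC code
interpolation = interp1d(x=tdc_response, y=charge_steps, kind='slinear',
                         bounds_error=False, fill_value=0)
max_tdc = 600
charge_of_tdc = interpolation(np.arange(max_tdc))
print(charge_of_tdc[[0, 90, 170, 599]])  # 599 lies beyond the curve, so it maps to 0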
SiLab-Bonn/pyBAR
pybar/scans/analyze_timewalk.py
get_charge_calibration
def get_charge_calibration(calibation_file, max_tdc):
    ''' Open the HitOr calibration file and return the calibration per pixel'''
    with tb.open_file(calibation_file, mode="r") as in_file_calibration_h5:
        tdc_calibration = in_file_calibration_h5.root.HitOrCalibration[:, :, :, 1]
        tdc_calibration_values = in_file_calibration_h5.root.HitOrCalibration.attrs.scan_parameter_values[:]
    return get_charge(max_tdc, tdc_calibration_values, tdc_calibration)
python
def get_charge_calibration(calibation_file, max_tdc):
    ''' Open the HitOr calibration file and return the calibration per pixel'''
    with tb.open_file(calibation_file, mode="r") as in_file_calibration_h5:
        tdc_calibration = in_file_calibration_h5.root.HitOrCalibration[:, :, :, 1]
        tdc_calibration_values = in_file_calibration_h5.root.HitOrCalibration.attrs.scan_parameter_values[:]
    return get_charge(max_tdc, tdc_calibration_values, tdc_calibration)
[ "def", "get_charge_calibration", "(", "calibation_file", ",", "max_tdc", ")", ":", "with", "tb", ".", "open_file", "(", "calibation_file", ",", "mode", "=", "\"r\"", ")", "as", "in_file_calibration_h5", ":", "tdc_calibration", "=", "in_file_calibration_h5", ".", "root", ".", "HitOrCalibration", "[", ":", ",", ":", ",", ":", ",", "1", "]", "tdc_calibration_values", "=", "in_file_calibration_h5", ".", "root", ".", "HitOrCalibration", ".", "attrs", ".", "scan_parameter_values", "[", ":", "]", "return", "get_charge", "(", "max_tdc", ",", "tdc_calibration_values", ",", "tdc_calibration", ")" ]
Open the HitOr calibration file and return the calibration per pixel
[ "Open", "the", "hit", "or", "calibration", "file", "and", "return", "the", "calibration", "per", "pixel" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/scans/analyze_timewalk.py#L36-L41
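get_charge_calibration combines two PyTables access patterns: slicing an array node directly through the file handle and reading a scan-parameter list from the node's attributes. A minimal sketch against a hypothetical file; the file name is invented, while the node and attribute names follow the snippet above:

import tables as tb

with tb.open_file('calibration.h5', mode='r') as in_file:  # hypothetical file name
    node = in_file.root.HitOrCalibration
    tdc_calibration = node[:, :, :, 1]                      # slice only the TDC plane
    parameter_values = node.attrs.scan_parameter_values[:]  # stored scan parameters
print(tdc_calibration.shape, len(parameter_values))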
SiLab-Bonn/pyBAR
pybar/ViTablesPlugin/pybar_plugin.py
pyBarPlugin.addEntry
def addEntry(self):
    """Add the `Plot pyBAR data` entry to the `Dataset` menu.
    """
    export_icon = QtGui.QIcon()
    pixmap = QtGui.QPixmap(os.path.join(PLUGINSDIR, 'csv/icons/document-export.png'))
    export_icon.addPixmap(pixmap, QtGui.QIcon.Normal, QtGui.QIcon.On)
    self.plot_action = QtGui.QAction(
        translate('PlotpyBARdata', "Plot data with pyBAR plugin", "Plot data with pyBAR plugin"),
        self,
        shortcut=QtGui.QKeySequence.UnknownKey,
        triggered=self.plot,
        icon=export_icon,
        statusTip=translate('PlotpyBARdata', "Plotting of selected data with pyBAR", "Status bar text for the Dataset -> Plot pyBAR data... action"))
    # Add the action to the Dataset menu
    menu = self.vtgui.dataset_menu
    menu.addSeparator()
    menu.addAction(self.plot_action)
    # Add the action to the leaf context menu
    cmenu = self.vtgui.leaf_node_cm
    cmenu.addSeparator()
    cmenu.addAction(self.plot_action)
python
def addEntry(self):
    """Add the `Plot pyBAR data` entry to the `Dataset` menu.
    """
    export_icon = QtGui.QIcon()
    pixmap = QtGui.QPixmap(os.path.join(PLUGINSDIR, 'csv/icons/document-export.png'))
    export_icon.addPixmap(pixmap, QtGui.QIcon.Normal, QtGui.QIcon.On)
    self.plot_action = QtGui.QAction(
        translate('PlotpyBARdata', "Plot data with pyBAR plugin", "Plot data with pyBAR plugin"),
        self,
        shortcut=QtGui.QKeySequence.UnknownKey,
        triggered=self.plot,
        icon=export_icon,
        statusTip=translate('PlotpyBARdata', "Plotting of selected data with pyBAR", "Status bar text for the Dataset -> Plot pyBAR data... action"))
    # Add the action to the Dataset menu
    menu = self.vtgui.dataset_menu
    menu.addSeparator()
    menu.addAction(self.plot_action)
    # Add the action to the leaf context menu
    cmenu = self.vtgui.leaf_node_cm
    cmenu.addSeparator()
    cmenu.addAction(self.plot_action)
[ "def", "addEntry", "(", "self", ")", ":", "export_icon", "=", "QtGui", ".", "QIcon", "(", ")", "pixmap", "=", "QtGui", ".", "QPixmap", "(", "os", ".", "path", ".", "join", "(", "PLUGINSDIR", ",", "'csv/icons/document-export.png'", ")", ")", "export_icon", ".", "addPixmap", "(", "pixmap", ",", "QtGui", ".", "QIcon", ".", "Normal", ",", "QtGui", ".", "QIcon", ".", "On", ")", "self", ".", "plot_action", "=", "QtGui", ".", "QAction", "(", "translate", "(", "'PlotpyBARdata'", ",", "\"Plot data with pyBAR plugin\"", ",", "\"Plot data with pyBAR plugin\"", ")", ",", "self", ",", "shortcut", "=", "QtGui", ".", "QKeySequence", ".", "UnknownKey", ",", "triggered", "=", "self", ".", "plot", ",", "icon", "=", "export_icon", ",", "statusTip", "=", "translate", "(", "'PlotpyBARdata'", ",", "\"Plotting of selected data with pyBAR\"", ",", "\"Status bar text for the Dataset -> Plot pyBAR data... action\"", ")", ")", "# Add the action to the Dataset menu", "menu", "=", "self", ".", "vtgui", ".", "dataset_menu", "menu", ".", "addSeparator", "(", ")", "menu", ".", "addAction", "(", "self", ".", "plot_action", ")", "# Add the action to the leaf context menu", "cmenu", "=", "self", ".", "vtgui", ".", "leaf_node_cm", "cmenu", ".", "addSeparator", "(", ")", "cmenu", ".", "addAction", "(", "self", ".", "plot_action", ")" ]
Add the `Plot pyBAR data` entry to the `Dataset` menu.
[ "Add", "the", "Plot", "pyBAR", "data", ".", "entry", "to", "Dataset", "menu", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/ViTablesPlugin/pybar_plugin.py#L145-L172
SiLab-Bonn/pyBAR
pybar/ViTablesPlugin/pybar_plugin.py
pyBarPlugin.updateDatasetMenu
def updateDatasetMenu(self):
    """Update the `plot` QAction when the Dataset menu is pulled down.

    This method is a slot. See class ctor for details.
    """
    enabled = True
    current = self.vtgui.dbs_tree_view.currentIndex()
    if current:
        leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current)
        if leaf.node_kind in (u'group', u'root group'):
            enabled = False
    self.plot_action.setEnabled(enabled)
python
def updateDatasetMenu(self):
    """Update the `plot` QAction when the Dataset menu is pulled down.

    This method is a slot. See class ctor for details.
    """
    enabled = True
    current = self.vtgui.dbs_tree_view.currentIndex()
    if current:
        leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current)
        if leaf.node_kind in (u'group', u'root group'):
            enabled = False
    self.plot_action.setEnabled(enabled)
[ "def", "updateDatasetMenu", "(", "self", ")", ":", "enabled", "=", "True", "current", "=", "self", ".", "vtgui", ".", "dbs_tree_view", ".", "currentIndex", "(", ")", "if", "current", ":", "leaf", "=", "self", ".", "vtgui", ".", "dbs_tree_model", ".", "nodeFromIndex", "(", "current", ")", "if", "leaf", ".", "node_kind", "in", "(", "u'group'", ",", "u'root group'", ")", ":", "enabled", "=", "False", "self", ".", "plot_action", ".", "setEnabled", "(", "enabled", ")" ]
Update the `plot` QAction when the Dataset menu is pulled down. This method is a slot. See class ctor for details.
[ "Update", "the", "export", "QAction", "when", "the", "Dataset", "menu", "is", "pulled", "down", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/ViTablesPlugin/pybar_plugin.py#L174-L186
SiLab-Bonn/pyBAR
pybar/ViTablesPlugin/pybar_plugin.py
pyBarPlugin.plot
def plot(self):
    """Plot a given dataset.

    This method is a slot connected to the `plot` QAction. See the
    :meth:`addEntry` method for details.
    """
    # The PyTables node tied to the current leaf of the databases tree
    current = self.vtgui.dbs_tree_view.currentIndex()
    leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current).node
    data_name = leaf.name
    hists_1d = ['HistRelBcid', 'HistErrorCounter', 'HistTriggerErrorCounter', 'HistServiceRecord', 'HistTot', 'HistTdc', 'HistClusterTot', 'HistClusterSize']
    hists_2d = ['HistOcc', 'Enable', 'Imon', 'C_High', 'EnableDigInj', 'C_Low', 'FDAC', 'TDAC', 'HistTdcPixel', 'HistTotPixel', 'HistThreshold', 'HistNoise', 'HistThresholdFitted', 'HistNoiseFitted', 'HistThresholdFittedCalib', 'HistNoiseFittedCalib']
    if data_name in hists_1d:
        plot_1d_hist(hist=leaf[:], title=data_name)
    elif data_name in hists_2d:
        if data_name == 'HistOcc':
            leaf = np.sum(leaf[:], axis=2)
        plot_2d_hist(hist=leaf[:], title=data_name)
    elif 'Table' in str(type(leaf)) and len(leaf[:].dtype.names) <= 3:  # detect tables with less than 4 columns
        plot_table(leaf[:], title=data_name)
    elif data_name == 'HitOrCalibration':
        print 'Coming soon'
    else:
        print 'Plotting', data_name, '(%s) is not supported!' % type(leaf)
python
def plot(self):
    """Plot a given dataset.

    This method is a slot connected to the `plot` QAction. See the
    :meth:`addEntry` method for details.
    """
    # The PyTables node tied to the current leaf of the databases tree
    current = self.vtgui.dbs_tree_view.currentIndex()
    leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current).node
    data_name = leaf.name
    hists_1d = ['HistRelBcid', 'HistErrorCounter', 'HistTriggerErrorCounter', 'HistServiceRecord', 'HistTot', 'HistTdc', 'HistClusterTot', 'HistClusterSize']
    hists_2d = ['HistOcc', 'Enable', 'Imon', 'C_High', 'EnableDigInj', 'C_Low', 'FDAC', 'TDAC', 'HistTdcPixel', 'HistTotPixel', 'HistThreshold', 'HistNoise', 'HistThresholdFitted', 'HistNoiseFitted', 'HistThresholdFittedCalib', 'HistNoiseFittedCalib']
    if data_name in hists_1d:
        plot_1d_hist(hist=leaf[:], title=data_name)
    elif data_name in hists_2d:
        if data_name == 'HistOcc':
            leaf = np.sum(leaf[:], axis=2)
        plot_2d_hist(hist=leaf[:], title=data_name)
    elif 'Table' in str(type(leaf)) and len(leaf[:].dtype.names) <= 3:  # detect tables with less than 4 columns
        plot_table(leaf[:], title=data_name)
    elif data_name == 'HitOrCalibration':
        print 'Coming soon'
    else:
        print 'Plotting', data_name, '(%s) is not supported!' % type(leaf)
[ "def", "plot", "(", "self", ")", ":", "# The PyTables node tied to the current leaf of the databases tree", "current", "=", "self", ".", "vtgui", ".", "dbs_tree_view", ".", "currentIndex", "(", ")", "leaf", "=", "self", ".", "vtgui", ".", "dbs_tree_model", ".", "nodeFromIndex", "(", "current", ")", ".", "node", "data_name", "=", "leaf", ".", "name", "hists_1d", "=", "[", "'HistRelBcid'", ",", "'HistErrorCounter'", ",", "'HistTriggerErrorCounter'", ",", "'HistServiceRecord'", ",", "'HistTot'", ",", "'HistTdc'", ",", "'HistClusterTot'", ",", "'HistClusterSize'", "]", "hists_2d", "=", "[", "'HistOcc'", ",", "'Enable'", ",", "'Imon'", ",", "'C_High'", ",", "'EnableDigInj'", ",", "'C_Low'", ",", "'FDAC'", ",", "'TDAC'", ",", "'HistTdcPixel'", ",", "'HistTotPixel'", ",", "'HistThreshold'", ",", "'HistNoise'", ",", "'HistThresholdFitted'", ",", "'HistNoiseFitted'", ",", "'HistThresholdFittedCalib'", ",", "'HistNoiseFittedCalib'", "]", "if", "data_name", "in", "hists_1d", ":", "plot_1d_hist", "(", "hist", "=", "leaf", "[", ":", "]", ",", "title", "=", "data_name", ")", "elif", "data_name", "in", "hists_2d", ":", "if", "data_name", "==", "'HistOcc'", ":", "leaf", "=", "np", ".", "sum", "(", "leaf", "[", ":", "]", ",", "axis", "=", "2", ")", "plot_2d_hist", "(", "hist", "=", "leaf", "[", ":", "]", ",", "title", "=", "data_name", ")", "elif", "'Table'", "in", "str", "(", "type", "(", "leaf", ")", ")", "and", "len", "(", "leaf", "[", ":", "]", ".", "dtype", ".", "names", ")", "<=", "3", ":", "# detect tables with less than 4 columns", "plot_table", "(", "leaf", "[", ":", "]", ",", "title", "=", "data_name", ")", "elif", "data_name", "==", "'HitOrCalibration'", ":", "print", "'Comming soon'", "else", ":", "print", "'Plotting'", ",", "data_name", ",", "'(%s) is not supported!'", "%", "type", "(", "leaf", ")" ]
Plot a given dataset. This method is a slot connected to the `plot` QAction. See the :meth:`addEntry` method for details.
[ "Export", "a", "given", "dataset", "to", "a", "CSV", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/ViTablesPlugin/pybar_plugin.py#L188-L214
SiLab-Bonn/pyBAR
pybar/ViTablesPlugin/pybar_plugin.py
pyBarPlugin.helpAbout
def helpAbout(self):
    """Brief description of the plugin.
    """
    # Text to be displayed
    about_text = translate('pyBarPlugin',
                           """<qt>
                           <p>Data plotting plug-in for pyBAR.
                           </qt>""",
                           'About')
    descr = dict(module_name='pyBarPlugin',
                 folder=PLUGINSDIR,
                 version=__version__,
                 plugin_name='pyBarPlugin',
                 author='David-Leon Pohl <david-leon.pohl@rub.de>, Jens Janssen <janssen@physik.uni-bonn.de>',
                 descr=about_text)
    return descr
python
def helpAbout(self):
    """Brief description of the plugin.
    """
    # Text to be displayed
    about_text = translate('pyBarPlugin',
                           """<qt>
                           <p>Data plotting plug-in for pyBAR.
                           </qt>""",
                           'About')
    descr = dict(module_name='pyBarPlugin',
                 folder=PLUGINSDIR,
                 version=__version__,
                 plugin_name='pyBarPlugin',
                 author='David-Leon Pohl <david-leon.pohl@rub.de>, Jens Janssen <janssen@physik.uni-bonn.de>',
                 descr=about_text)
    return descr
[ "def", "helpAbout", "(", "self", ")", ":", "# Text to be displayed", "about_text", "=", "translate", "(", "'pyBarPlugin'", ",", "\"\"\"<qt>\n <p>Data plotting plug-in for pyBAR.\n </qt>\"\"\"", ",", "'About'", ")", "descr", "=", "dict", "(", "module_name", "=", "'pyBarPlugin'", ",", "folder", "=", "PLUGINSDIR", ",", "version", "=", "__version__", ",", "plugin_name", "=", "'pyBarPlugin'", ",", "author", "=", "'David-Leon Pohl <david-leon.pohl@rub.de>, Jens Janssen <janssen@physik.uni-bonn.de>'", ",", "descr", "=", "about_text", ")", "return", "descr" ]
Brief description of the plugin.
[ "Brief", "description", "of", "the", "plugin", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/ViTablesPlugin/pybar_plugin.py#L216-L234
SiLab-Bonn/pyBAR
pybar/daq/fei4_raw_data.py
send_meta_data
def send_meta_data(socket, conf, name):
    '''Sends the config via ZeroMQ to a specified socket. It is called at the beginning of a run and when the config changes. Conf can be any config dictionary.
    '''
    meta_data = dict(
        name=name,
        conf=conf
    )
    try:
        socket.send_json(meta_data, flags=zmq.NOBLOCK)
    except zmq.Again:
        pass
python
def send_meta_data(socket, conf, name):
    '''Sends the config via ZeroMQ to a specified socket. It is called at the beginning of a run and when the config changes. Conf can be any config dictionary.
    '''
    meta_data = dict(
        name=name,
        conf=conf
    )
    try:
        socket.send_json(meta_data, flags=zmq.NOBLOCK)
    except zmq.Again:
        pass
[ "def", "send_meta_data", "(", "socket", ",", "conf", ",", "name", ")", ":", "meta_data", "=", "dict", "(", "name", "=", "name", ",", "conf", "=", "conf", ")", "try", ":", "socket", ".", "send_json", "(", "meta_data", ",", "flags", "=", "zmq", ".", "NOBLOCK", ")", "except", "zmq", ".", "Again", ":", "pass" ]
Sends the config via ZeroMQ to a specified socket. It is called at the beginning of a run and when the config changes. Conf can be any config dictionary.
[ "Sends", "the", "config", "via", "ZeroMQ", "to", "a", "specified", "socket", ".", "Is", "called", "at", "the", "beginning", "of", "a", "run", "and", "when", "the", "config", "changes", ".", "Conf", "can", "be", "any", "config", "dictionary", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fei4_raw_data.py#L13-L23
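send_meta_data uses zmq.NOBLOCK so that a slow or absent consumer can never stall the DAQ: if the message cannot be queued, pyzmq raises zmq.Again and the message is simply dropped. A standalone sketch of that fire-and-forget setup; the socket type and address are example choices, not prescribed by the function:

import zmq

context = zmq.Context()
socket = context.socket(zmq.PUB)     # a publisher is a common choice for online monitoring
socket.bind('tcp://127.0.0.1:5678')  # example address

meta_data = dict(name='RunConf', conf={'scan_id': 'example_scan'})
try:
    socket.send_json(meta_data, flags=zmq.NOBLOCK)
except zmq.Again:
    pass  # send queue full: drop the message rather than stall the DAQ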
SiLab-Bonn/pyBAR
pybar/daq/fei4_raw_data.py
send_data
def send_data(socket, data, scan_parameters={}, name='ReadoutData'):
    '''Sends the data of each readout (raw data and meta data) via ZeroMQ to a specified socket
    '''
    if not scan_parameters:
        scan_parameters = {}
    data_meta_data = dict(
        name=name,
        dtype=str(data[0].dtype),
        shape=data[0].shape,
        timestamp_start=data[1],  # float
        timestamp_stop=data[2],  # float
        readout_error=data[3],  # int
        scan_parameters=scan_parameters  # dict
    )
    try:
        socket.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK)
        socket.send(data[0], flags=zmq.NOBLOCK)  # PyZMQ supports sending numpy arrays without copying any data
    except zmq.Again:
        pass
python
def send_data(socket, data, scan_parameters={}, name='ReadoutData'):
    '''Sends the data of each readout (raw data and meta data) via ZeroMQ to a specified socket
    '''
    if not scan_parameters:
        scan_parameters = {}
    data_meta_data = dict(
        name=name,
        dtype=str(data[0].dtype),
        shape=data[0].shape,
        timestamp_start=data[1],  # float
        timestamp_stop=data[2],  # float
        readout_error=data[3],  # int
        scan_parameters=scan_parameters  # dict
    )
    try:
        socket.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK)
        socket.send(data[0], flags=zmq.NOBLOCK)  # PyZMQ supports sending numpy arrays without copying any data
    except zmq.Again:
        pass
[ "def", "send_data", "(", "socket", ",", "data", ",", "scan_parameters", "=", "{", "}", ",", "name", "=", "'ReadoutData'", ")", ":", "if", "not", "scan_parameters", ":", "scan_parameters", "=", "{", "}", "data_meta_data", "=", "dict", "(", "name", "=", "name", ",", "dtype", "=", "str", "(", "data", "[", "0", "]", ".", "dtype", ")", ",", "shape", "=", "data", "[", "0", "]", ".", "shape", ",", "timestamp_start", "=", "data", "[", "1", "]", ",", "# float", "timestamp_stop", "=", "data", "[", "2", "]", ",", "# float", "readout_error", "=", "data", "[", "3", "]", ",", "# int", "scan_parameters", "=", "scan_parameters", "# dict", ")", "try", ":", "socket", ".", "send_json", "(", "data_meta_data", ",", "flags", "=", "zmq", ".", "SNDMORE", "|", "zmq", ".", "NOBLOCK", ")", "socket", ".", "send", "(", "data", "[", "0", "]", ",", "flags", "=", "zmq", ".", "NOBLOCK", ")", "# PyZMQ supports sending numpy arrays without copying any data", "except", "zmq", ".", "Again", ":", "pass" ]
Sends the data of each readout (raw data and meta data) via ZeroMQ to a specified socket
[ "Sends", "the", "data", "of", "every", "read", "out", "(", "raw", "data", "and", "meta", "data", ")", "via", "ZeroMQ", "to", "a", "specified", "socket" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fei4_raw_data.py#L26-L44
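send_data uses zmq.SNDMORE to couple a JSON header (dtype, shape, timestamps, error flags) with the raw numpy buffer in a single multipart message, so a receiver can rebuild the array without extra copies. A receiving-side sketch under the same assumptions as the example above; the address is illustrative:

import numpy as np
import zmq

context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect('tcp://127.0.0.1:5678')  # must match the sender's address
socket.setsockopt(zmq.SUBSCRIBE, b'')   # subscribe to everything

meta_data = socket.recv_json()          # part 1: the JSON header
buf = socket.recv()                     # part 2: the raw array bytes
raw_data = np.frombuffer(buf, dtype=meta_data['dtype']).reshape(meta_data['shape'])
print(raw_data.shape, meta_data['readout_error'])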
SiLab-Bonn/pyBAR
pybar/daq/fei4_raw_data.py
open_raw_data_file
def open_raw_data_file(filename, mode="w", title="", scan_parameters=None, socket_address=None):
    '''Mimics pytables.open_file() and stores the configuration and run configuration

    Returns:
    RawDataFile Object

    Examples:
    with open_raw_data_file(filename=self.scan_data_filename, title=self.scan_id, scan_parameters=[scan_parameter]) as raw_data_file:
        # do something here
        raw_data_file.append(self.readout.data, scan_parameters={scan_parameter: scan_parameter_value})
    '''
    return RawDataFile(filename=filename, mode=mode, title=title, scan_parameters=scan_parameters, socket_address=socket_address)
python
def open_raw_data_file(filename, mode="w", title="", scan_parameters=None, socket_address=None):
    '''Mimics pytables.open_file() and stores the configuration and run configuration

    Returns:
    RawDataFile Object

    Examples:
    with open_raw_data_file(filename=self.scan_data_filename, title=self.scan_id, scan_parameters=[scan_parameter]) as raw_data_file:
        # do something here
        raw_data_file.append(self.readout.data, scan_parameters={scan_parameter: scan_parameter_value})
    '''
    return RawDataFile(filename=filename, mode=mode, title=title, scan_parameters=scan_parameters, socket_address=socket_address)
[ "def", "open_raw_data_file", "(", "filename", ",", "mode", "=", "\"w\"", ",", "title", "=", "\"\"", ",", "scan_parameters", "=", "None", ",", "socket_address", "=", "None", ")", ":", "return", "RawDataFile", "(", "filename", "=", "filename", ",", "mode", "=", "mode", ",", "title", "=", "title", ",", "scan_parameters", "=", "scan_parameters", ",", "socket_address", "=", "socket_address", ")" ]
Mimics pytables.open_file() and stores the configuration and run configuration

Returns:
RawDataFile Object

Examples:
with open_raw_data_file(filename=self.scan_data_filename, title=self.scan_id, scan_parameters=[scan_parameter]) as raw_data_file:
    # do something here
    raw_data_file.append(self.readout.data, scan_parameters={scan_parameter: scan_parameter_value})
[ "Mimics", "pytables", ".", "open_file", "()", "and", "stores", "the", "configuration", "and", "run", "configuration" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fei4_raw_data.py#L47-L58
SiLab-Bonn/pyBAR
pybar/daq/fei4_raw_data.py
save_raw_data_from_data_queue
def save_raw_data_from_data_queue(data_queue, filename, mode='a', title='', scan_parameters=None):  # mode="r+" to append data, raw_data_file_h5 must exist, "w" to overwrite raw_data_file_h5, "a" to append data, if raw_data_file_h5 does not exist it is created
    '''Writing raw data file from data queue

    If you need to write raw data once in a while this function may make it easy for you.
    '''
    if not scan_parameters:
        scan_parameters = {}
    with open_raw_data_file(filename, mode=mode, title=title, scan_parameters=list(dict.iterkeys(scan_parameters))) as raw_data_file:
        raw_data_file.append(data_queue, scan_parameters=scan_parameters)
python
def save_raw_data_from_data_queue(data_queue, filename, mode='a', title='', scan_parameters=None):  # mode="r+" to append data, raw_data_file_h5 must exist, "w" to overwrite raw_data_file_h5, "a" to append data, if raw_data_file_h5 does not exist it is created
    '''Writing raw data file from data queue

    If you need to write raw data once in a while this function may make it easy for you.
    '''
    if not scan_parameters:
        scan_parameters = {}
    with open_raw_data_file(filename, mode=mode, title=title, scan_parameters=list(dict.iterkeys(scan_parameters))) as raw_data_file:
        raw_data_file.append(data_queue, scan_parameters=scan_parameters)
[ "def", "save_raw_data_from_data_queue", "(", "data_queue", ",", "filename", ",", "mode", "=", "'a'", ",", "title", "=", "''", ",", "scan_parameters", "=", "None", ")", ":", "# mode=\"r+\" to append data, raw_data_file_h5 must exist, \"w\" to overwrite raw_data_file_h5, \"a\" to append data, if raw_data_file_h5 does not exist it is created", "if", "not", "scan_parameters", ":", "scan_parameters", "=", "{", "}", "with", "open_raw_data_file", "(", "filename", ",", "mode", "=", "'a'", ",", "title", "=", "''", ",", "scan_parameters", "=", "list", "(", "dict", ".", "iterkeys", "(", "scan_parameters", ")", ")", ")", "as", "raw_data_file", ":", "raw_data_file", ".", "append", "(", "data_queue", ",", "scan_parameters", "=", "scan_parameters", ")" ]
Writing raw data file from data queue

If you need to write raw data once in a while this function may make it easy for you.
[ "Writing", "raw", "data", "file", "from", "data", "queue" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fei4_raw_data.py#L240-L248
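save_raw_data_from_data_queue is the convenience path when readout tuples are collected in a list or queue first and written in one go. A hypothetical call; the tuple layout (raw data words, t_start, t_stop, readout error) is assumed from send_data above, and whether RawDataFile.append accepts exactly this shape depends on RawDataFile, which is not shown here:

import numpy as np

# Hypothetical readout tuples: (raw data words, t_start, t_stop, readout error)
data_queue = [
    (np.array([0x00e80001, 0x00e80002], dtype=np.uint32), 0.0, 0.1, 0),
    (np.array([0x00e80003], dtype=np.uint32), 0.1, 0.2, 0),
]
save_raw_data_from_data_queue(data_queue, filename='example_raw_data.h5',
                              scan_parameters={'PlsrDAC': 280})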
SiLab-Bonn/pyBAR
pybar/scans/tune_fei4.py
Fei4Tuning.scan
def scan(self):
    '''Metascript that calls other scripts to tune the FE.

    Parameters
    ----------
    cfg_name : string
        Name of the config to be created. This config holds the tuning results.
    target_threshold : int
        The target threshold value in PlsrDAC.
    target_charge : int
        The target charge in PlsrDAC value to tune to.
    target_tot : float
        The target tot value to tune to.
    global_iterations : int
        Defines how often global threshold (GDAC) / global feedback (PrmpVbpf) current tuning is repeated.
        -1 or None: Global tuning is disabled
        0: Only global threshold tuning
        1: GDAC -> PrmpVbpf -> GDAC
        2: GDAC -> PrmpVbpf -> GDAC -> PrmpVbpf -> GDAC
        ...
    local_iterations : int
        Defines how often local threshold (TDAC) / feedback current (FDAC) tuning is repeated.
        -1 or None: Local tuning is disabled
        0: Only local threshold tuning
        1: TDAC -> FDAC -> TDAC
        2: TDAC -> FDAC -> TDAC -> FDAC -> TDAC
        ...
    '''
    for iteration in range(0, self.global_iterations):  # tune iteratively with decreasing range to save time
        if self.stop_run.is_set():
            break
        logging.info("Global tuning step %d / %d", iteration + 1, self.global_iterations)
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        GdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltCoarse", "Vthin_AltFine"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
        if self.stop_run.is_set():
            break
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        FeedbackTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["PrmpVbpf"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
    if self.global_iterations >= 0 and not self.stop_run.is_set():
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        GdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltCoarse", "Vthin_AltFine"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
        Vthin_AC = self.register.get_global_register_value("Vthin_AltCoarse")
        Vthin_AF = self.register.get_global_register_value("Vthin_AltFine")
        PrmpVbpf = self.register.get_global_register_value("PrmpVbpf")
        logging.info("Results of global threshold tuning: Vthin_AltCoarse / Vthin_AltFine = %d / %d", Vthin_AC, Vthin_AF)
        logging.info("Results of global feedback tuning: PrmpVbpf = %d", PrmpVbpf)
    for iteration in range(0, self.local_iterations):
        if self.stop_run.is_set():
            break
        logging.info("Local tuning step %d / %d", iteration + 1, self.local_iterations)
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        TdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
        if self.stop_run.is_set():
            break
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        FdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="FDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
    if self.local_iterations >= 0 and not self.stop_run.is_set():
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        TdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
python
def scan(self):
    '''Metascript that calls other scripts to tune the FE.

    Parameters
    ----------
    cfg_name : string
        Name of the config to be created. This config holds the tuning results.
    target_threshold : int
        The target threshold value in PlsrDAC.
    target_charge : int
        The target charge in PlsrDAC value to tune to.
    target_tot : float
        The target tot value to tune to.
    global_iterations : int
        Defines how often global threshold (GDAC) / global feedback (PrmpVbpf) current tuning is repeated.
        -1 or None: Global tuning is disabled
        0: Only global threshold tuning
        1: GDAC -> PrmpVbpf -> GDAC
        2: GDAC -> PrmpVbpf -> GDAC -> PrmpVbpf -> GDAC
        ...
    local_iterations : int
        Defines how often local threshold (TDAC) / feedback current (FDAC) tuning is repeated.
        -1 or None: Local tuning is disabled
        0: Only local threshold tuning
        1: TDAC -> FDAC -> TDAC
        2: TDAC -> FDAC -> TDAC -> FDAC -> TDAC
        ...
    '''
    for iteration in range(0, self.global_iterations):  # tune iteratively with decreasing range to save time
        if self.stop_run.is_set():
            break
        logging.info("Global tuning step %d / %d", iteration + 1, self.global_iterations)
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        GdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltCoarse", "Vthin_AltFine"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

        if self.stop_run.is_set():
            break
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        FeedbackTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["PrmpVbpf"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

    if self.global_iterations >= 0 and not self.stop_run.is_set():
        self.set_scan_parameters(global_step=self.scan_parameters.global_step + 1)
        GdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltCoarse", "Vthin_AltFine"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

        Vthin_AC = self.register.get_global_register_value("Vthin_AltCoarse")
        Vthin_AF = self.register.get_global_register_value("Vthin_AltFine")
        PrmpVbpf = self.register.get_global_register_value("PrmpVbpf")
        logging.info("Results of global threshold tuning: Vthin_AltCoarse / Vthin_AltFine = %d / %d", Vthin_AC, Vthin_AF)
        logging.info("Results of global feedback tuning: PrmpVbpf = %d", PrmpVbpf)

    for iteration in range(0, self.local_iterations):
        if self.stop_run.is_set():
            break
        logging.info("Local tuning step %d / %d", iteration + 1, self.local_iterations)
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        TdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

        if self.stop_run.is_set():
            break
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        FdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="FDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

    if self.local_iterations >= 0 and not self.stop_run.is_set():
        self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1)
        TdacTuning.scan(self)
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)
[ "def", "scan", "(", "self", ")", ":", "for", "iteration", "in", "range", "(", "0", ",", "self", ".", "global_iterations", ")", ":", "# tune iteratively with decreasing range to save time\r", "if", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "break", "logging", ".", "info", "(", "\"Global tuning step %d / %d\"", ",", "iteration", "+", "1", ",", "self", ".", "global_iterations", ")", "self", ".", "set_scan_parameters", "(", "global_step", "=", "self", ".", "scan_parameters", ".", "global_step", "+", "1", ")", "GdacTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Vthin_AltCoarse\"", ",", "\"Vthin_AltFine\"", "]", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")", "if", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "break", "self", ".", "set_scan_parameters", "(", "global_step", "=", "self", ".", "scan_parameters", ".", "global_step", "+", "1", ")", "FeedbackTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"PrmpVbpf\"", "]", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")", "if", "self", ".", "global_iterations", ">=", "0", "and", "not", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "self", ".", "set_scan_parameters", "(", "global_step", "=", "self", ".", "scan_parameters", ".", "global_step", "+", "1", ")", "GdacTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Vthin_AltCoarse\"", ",", "\"Vthin_AltFine\"", "]", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")", "Vthin_AC", "=", "self", ".", "register", ".", "get_global_register_value", "(", "\"Vthin_AltCoarse\"", ")", "Vthin_AF", "=", "self", ".", "register", ".", "get_global_register_value", "(", "\"Vthin_AltFine\"", ")", "PrmpVbpf", "=", "self", ".", "register", ".", "get_global_register_value", "(", "\"PrmpVbpf\"", ")", "logging", ".", "info", "(", "\"Results of global threshold tuning: Vthin_AltCoarse / Vthin_AltFine = %d / %d\"", ",", "Vthin_AC", ",", "Vthin_AF", ")", "logging", ".", "info", "(", "\"Results of global feedback tuning: PrmpVbpf = %d\"", ",", "PrmpVbpf", ")", "for", "iteration", "in", "range", "(", "0", ",", "self", ".", "local_iterations", ")", ":", "if", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "break", "logging", ".", "info", "(", "\"Local tuning step %d / %d\"", ",", "iteration", "+", "1", ",", "self", ".", "local_iterations", ")", "self", ".", "set_scan_parameters", "(", "local_step", "=", "self", ".", 
"scan_parameters", ".", "local_step", "+", "1", ")", "TdacTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrFrontEnd\"", ",", "same_mask_for_all_dc", "=", "False", ",", "name", "=", "\"TDAC\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")", "if", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "break", "self", ".", "set_scan_parameters", "(", "local_step", "=", "self", ".", "scan_parameters", ".", "local_step", "+", "1", ")", "FdacTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrFrontEnd\"", ",", "same_mask_for_all_dc", "=", "False", ",", "name", "=", "\"FDAC\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")", "if", "self", ".", "local_iterations", ">=", "0", "and", "not", "self", ".", "stop_run", ".", "is_set", "(", ")", ":", "self", ".", "set_scan_parameters", "(", "local_step", "=", "self", ".", "scan_parameters", ".", "local_step", "+", "1", ")", "TdacTuning", ".", "scan", "(", "self", ")", "commands", "=", "[", "]", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"ConfMode\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"WrFrontEnd\"", ",", "same_mask_for_all_dc", "=", "False", ",", "name", "=", "\"TDAC\"", ")", ")", "commands", ".", "extend", "(", "self", ".", "register", ".", "get_commands", "(", "\"RunMode\"", ")", ")", "self", ".", "register_utils", ".", "send_commands", "(", "commands", ")" ]
Metascript that calls other scripts to tune the FE.

Parameters
----------
cfg_name : string
    Name of the config to be created. This config holds the tuning results.
target_threshold : int
    The target threshold value in PlsrDAC.
target_charge : int
    The target charge in PlsrDAC value to tune to.
target_tot : float
    The target tot value to tune to.
global_iterations : int
    Defines how often global threshold (GDAC) / global feedback (PrmpVbpf) current tuning is repeated.
    -1 or None: Global tuning is disabled
    0: Only global threshold tuning
    1: GDAC -> PrmpVbpf -> GDAC
    2: GDAC -> PrmpVbpf -> GDAC -> PrmpVbpf -> GDAC
    ...
local_iterations : int
    Defines how often local threshold (TDAC) / feedback current (FDAC) tuning is repeated.
    -1 or None: Local tuning is disabled
    0: Only local threshold tuning
    1: TDAC -> FDAC -> TDAC
    2: TDAC -> FDAC -> TDAC -> FDAC -> TDAC
    ...
[ "Metascript", "that", "calls", "other", "scripts", "to", "tune", "the", "FE", ".", "Parameters", "----------", "cfg_name", ":", "string", "Name", "of", "the", "config", "to", "be", "created", ".", "This", "config", "holds", "the", "tuning", "results", ".", "target_threshold", ":", "int", "The", "target", "threshold", "value", "in", "PlsrDAC", ".", "target_charge", ":", "int", "The", "target", "charge", "in", "PlsrDAC", "value", "to", "tune", "to", ".", "target_tot", ":", "float", "The", "target", "tot", "value", "to", "tune", "to", ".", "global_iterations", ":", "int", "Defines", "how", "often", "global", "threshold", "(", "GDAC", ")", "/", "global", "feedback", "(", "PrmpVbpf", ")", "current", "tuning", "is", "repeated", ".", "-", "1", "or", "None", ":", "Global", "tuning", "is", "disabled", "0", ":", "Only", "global", "threshold", "tuning", "1", ":", "GDAC", "-", ">", "PrmpVbpf", "-", ">", "GDAC", "2", ":", "GDAC", "-", ">", "PrmpVbpf", "-", ">", "GDAC", "-", ">", "PrmpVbpf", "-", ">", "GDAC", "...", "local_iterations", ":", "int", "Defines", "how", "often", "local", "threshold", "(", "TDAC", ")", "/", "feedback", "current", "(", "FDAC", ")", "tuning", "is", "repeated", ".", "-", "1", "or", "None", ":", "Local", "tuning", "is", "disabled", "0", ":", "Only", "local", "threshold", "tuning", "1", ":", "TDAC", "-", ">", "FDAC", "-", ">", "TDAC", "2", ":", "TDAC", "-", ">", "FDAC", "-", ">", "TDAC", "-", ">", "FDAC", "-", ">", "TDAC", "..." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/scans/tune_fei4.py#L92-L184
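The docstring above describes the iteration semantics only by example. The following standalone sketch (a hypothetical helper, not part of pyBAR) spells out how a `global_iterations` value maps to the executed sequence of tuning steps; the same scheme applies to `local_iterations` with TDAC/FDAC.

def global_tuning_sequence(global_iterations):
    '''Return the tuning steps executed for a given global_iterations value.'''
    if global_iterations is None or global_iterations < 0:
        return []  # global tuning is disabled
    steps = []
    for _ in range(global_iterations):
        steps.extend(["GDAC", "PrmpVbpf"])  # one threshold/feedback round per iteration
    steps.append("GDAC")  # a final global threshold tuning always closes the sequence
    return steps

assert global_tuning_sequence(0) == ["GDAC"]
assert global_tuning_sequence(1) == ["GDAC", "PrmpVbpf", "GDAC"]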
SiLab-Bonn/pyBAR
pybar/analysis/plotting/plotting.py
plot_linear_relation
def plot_linear_relation(x, y, x_err=None, y_err=None, title=None, point_label=None, legend=None, plot_range=None, plot_range_y=None, x_label=None, y_label=None, y_2_label=None, log_x=False, log_y=False, size=None, filename=None):
    '''Takes point data (x,y) with errors (x,y) and fits a straight line. The deviation to this line is also plotted, showing the offset.

    Parameters
    ----------
    x, y, x_err, y_err : iterable
    filename : string, PdfPages object or None
        PdfPages file object: plot is appended to the pdf
        string: new plot file with the given filename is created
        None: the plot is printed to screen
    '''
    fig = Figure()
    FigureCanvas(fig)
    ax = fig.add_subplot(111)
    if x_err is not None:
        x_err = [x_err, x_err]
    if y_err is not None:
        y_err = [y_err, y_err]
    ax.set_title(title)
    if y_label is not None:
        ax.set_ylabel(y_label)
    if log_x:
        ax.set_xscale('log')
    if log_y:
        ax.set_yscale('log')
    if plot_range:
        ax.set_xlim((min(plot_range), max(plot_range)))
    if plot_range_y:
        ax.set_ylim((min(plot_range_y), max(plot_range_y)))
    if legend:
        fig.legend(legend, 0)
    ax.grid(True)
    ax.errorbar(x, y, xerr=x_err, yerr=y_err, fmt='o', color='black')  # plot points
    # label points if needed
    if point_label is not None:
        for X, Y, Z in zip(x, y, point_label):
            ax.annotate('{}'.format(Z), xy=(X, Y), xytext=(-5, 5), ha='right', textcoords='offset points')
    line_fit, _ = np.polyfit(x, y, 1, full=False, cov=True)
    fit_fn = np.poly1d(line_fit)
    ax.plot(x, fit_fn(x), '-', lw=2, color='gray')
    setp(ax.get_xticklabels(), visible=False)  # remove ticks at common border of both plots
    divider = make_axes_locatable(ax)
    ax_bottom_plot = divider.append_axes("bottom", 2.0, pad=0.0, sharex=ax)
    ax_bottom_plot.bar(x, y - fit_fn(x), align='center', width=np.amin(np.diff(x)) / 2, color='gray')
    # plot(x, y - fit_fn(x))
    ax_bottom_plot.grid(True)
    if x_label is not None:
        ax.set_xlabel(x_label)
    if y_2_label is not None:
        ax.set_ylabel(y_2_label)
    ax.set_ylim((-np.amax(np.abs(y - fit_fn(x)))), (np.amax(np.abs(y - fit_fn(x)))))
    ax.plot(ax.set_xlim(), [0, 0], '-', color='black')
    setp(ax_bottom_plot.get_yticklabels()[-2:-1], visible=False)
    if size is not None:
        fig.set_size_inches(size)
    if not filename:
        fig.show()
    elif isinstance(filename, PdfPages):
        filename.savefig(fig)
    elif filename:
        fig.savefig(filename, bbox_inches='tight')
    return fig
python
def plot_linear_relation(x, y, x_err=None, y_err=None, title=None, point_label=None, legend=None, plot_range=None, plot_range_y=None, x_label=None, y_label=None, y_2_label=None, log_x=False, log_y=False, size=None, filename=None):
    '''Takes point data (x,y) with errors (x,y) and fits a straight line. The deviation to this line is also plotted, showing the offset.

    Parameters
    ----------
    x, y, x_err, y_err : iterable
    filename : string, PdfPages object or None
        PdfPages file object: plot is appended to the pdf
        string: new plot file with the given filename is created
        None: the plot is printed to screen
    '''
    fig = Figure()
    FigureCanvas(fig)
    ax = fig.add_subplot(111)
    if x_err is not None:
        x_err = [x_err, x_err]
    if y_err is not None:
        y_err = [y_err, y_err]
    ax.set_title(title)
    if y_label is not None:
        ax.set_ylabel(y_label)
    if log_x:
        ax.set_xscale('log')
    if log_y:
        ax.set_yscale('log')
    if plot_range:
        ax.set_xlim((min(plot_range), max(plot_range)))
    if plot_range_y:
        ax.set_ylim((min(plot_range_y), max(plot_range_y)))
    if legend:
        fig.legend(legend, 0)
    ax.grid(True)
    ax.errorbar(x, y, xerr=x_err, yerr=y_err, fmt='o', color='black')  # plot points
    # label points if needed
    if point_label is not None:
        for X, Y, Z in zip(x, y, point_label):
            ax.annotate('{}'.format(Z), xy=(X, Y), xytext=(-5, 5), ha='right', textcoords='offset points')
    line_fit, _ = np.polyfit(x, y, 1, full=False, cov=True)
    fit_fn = np.poly1d(line_fit)
    ax.plot(x, fit_fn(x), '-', lw=2, color='gray')
    setp(ax.get_xticklabels(), visible=False)  # remove ticks at common border of both plots
    divider = make_axes_locatable(ax)
    ax_bottom_plot = divider.append_axes("bottom", 2.0, pad=0.0, sharex=ax)
    ax_bottom_plot.bar(x, y - fit_fn(x), align='center', width=np.amin(np.diff(x)) / 2, color='gray')
    # plot(x, y - fit_fn(x))
    ax_bottom_plot.grid(True)
    if x_label is not None:
        ax.set_xlabel(x_label)
    if y_2_label is not None:
        ax.set_ylabel(y_2_label)
    ax.set_ylim((-np.amax(np.abs(y - fit_fn(x)))), (np.amax(np.abs(y - fit_fn(x)))))
    ax.plot(ax.set_xlim(), [0, 0], '-', color='black')
    setp(ax_bottom_plot.get_yticklabels()[-2:-1], visible=False)
    if size is not None:
        fig.set_size_inches(size)
    if not filename:
        fig.show()
    elif isinstance(filename, PdfPages):
        filename.savefig(fig)
    elif filename:
        fig.savefig(filename, bbox_inches='tight')
    return fig
[ "def", "plot_linear_relation", "(", "x", ",", "y", ",", "x_err", "=", "None", ",", "y_err", "=", "None", ",", "title", "=", "None", ",", "point_label", "=", "None", ",", "legend", "=", "None", ",", "plot_range", "=", "None", ",", "plot_range_y", "=", "None", ",", "x_label", "=", "None", ",", "y_label", "=", "None", ",", "y_2_label", "=", "None", ",", "log_x", "=", "False", ",", "log_y", "=", "False", ",", "size", "=", "None", ",", "filename", "=", "None", ")", ":", "fig", "=", "Figure", "(", ")", "FigureCanvas", "(", "fig", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "if", "x_err", "is", "not", "None", ":", "x_err", "=", "[", "x_err", ",", "x_err", "]", "if", "y_err", "is", "not", "None", ":", "y_err", "=", "[", "y_err", ",", "y_err", "]", "ax", ".", "set_title", "(", "title", ")", "if", "y_label", "is", "not", "None", ":", "ax", ".", "set_ylabel", "(", "y_label", ")", "if", "log_x", ":", "ax", ".", "set_xscale", "(", "'log'", ")", "if", "log_y", ":", "ax", ".", "set_yscale", "(", "'log'", ")", "if", "plot_range", ":", "ax", ".", "set_xlim", "(", "(", "min", "(", "plot_range", ")", ",", "max", "(", "plot_range", ")", ")", ")", "if", "plot_range_y", ":", "ax", ".", "set_ylim", "(", "(", "min", "(", "plot_range_y", ")", ",", "max", "(", "plot_range_y", ")", ")", ")", "if", "legend", ":", "fig", ".", "legend", "(", "legend", ",", "0", ")", "ax", ".", "grid", "(", "True", ")", "ax", ".", "errorbar", "(", "x", ",", "y", ",", "xerr", "=", "x_err", ",", "yerr", "=", "y_err", ",", "fmt", "=", "'o'", ",", "color", "=", "'black'", ")", "# plot points\r", "# label points if needed\r", "if", "point_label", "is", "not", "None", ":", "for", "X", ",", "Y", ",", "Z", "in", "zip", "(", "x", ",", "y", ",", "point_label", ")", ":", "ax", ".", "annotate", "(", "'{}'", ".", "format", "(", "Z", ")", ",", "xy", "=", "(", "X", ",", "Y", ")", ",", "xytext", "=", "(", "-", "5", ",", "5", ")", ",", "ha", "=", "'right'", ",", "textcoords", "=", "'offset points'", ")", "line_fit", ",", "_", "=", "np", ".", "polyfit", "(", "x", ",", "y", ",", "1", ",", "full", "=", "False", ",", "cov", "=", "True", ")", "fit_fn", "=", "np", ".", "poly1d", "(", "line_fit", ")", "ax", ".", "plot", "(", "x", ",", "fit_fn", "(", "x", ")", ",", "'-'", ",", "lw", "=", "2", ",", "color", "=", "'gray'", ")", "setp", "(", "ax", ".", "get_xticklabels", "(", ")", ",", "visible", "=", "False", ")", "# remove ticks at common border of both plots\r", "divider", "=", "make_axes_locatable", "(", "ax", ")", "ax_bottom_plot", "=", "divider", ".", "append_axes", "(", "\"bottom\"", ",", "2.0", ",", "pad", "=", "0.0", ",", "sharex", "=", "ax", ")", "ax_bottom_plot", ".", "bar", "(", "x", ",", "y", "-", "fit_fn", "(", "x", ")", ",", "align", "=", "'center'", ",", "width", "=", "np", ".", "amin", "(", "np", ".", "diff", "(", "x", ")", ")", "/", "2", ",", "color", "=", "'gray'", ")", "# plot(x, y - fit_fn(x))\r", "ax_bottom_plot", ".", "grid", "(", "True", ")", "if", "x_label", "is", "not", "None", ":", "ax", ".", "set_xlabel", "(", "x_label", ")", "if", "y_2_label", "is", "not", "None", ":", "ax", ".", "set_ylabel", "(", "y_2_label", ")", "ax", ".", "set_ylim", "(", "(", "-", "np", ".", "amax", "(", "np", ".", "abs", "(", "y", "-", "fit_fn", "(", "x", ")", ")", ")", ")", ",", "(", "np", ".", "amax", "(", "np", ".", "abs", "(", "y", "-", "fit_fn", "(", "x", ")", ")", ")", ")", ")", "ax", ".", "plot", "(", "ax", ".", "set_xlim", "(", ")", ",", "[", "0", ",", "0", "]", ",", "'-'", ",", "color", "=", "'black'", ")", "setp", "(", 
"ax_bottom_plot", ".", "get_yticklabels", "(", ")", "[", "-", "2", ":", "-", "1", "]", ",", "visible", "=", "False", ")", "if", "size", "is", "not", "None", ":", "fig", ".", "set_size_inches", "(", "size", ")", "if", "not", "filename", ":", "fig", ".", "show", "(", ")", "elif", "isinstance", "(", "filename", ",", "PdfPages", ")", ":", "filename", ".", "savefig", "(", "fig", ")", "elif", "filename", ":", "fig", ".", "savefig", "(", "filename", ",", "bbox_inches", "=", "'tight'", ")", "return", "fig" ]
Takes point data (x,y) with errors (x,y) and fits a straight line. The deviation to this line is also plotted, showing the offset.

Parameters
----------
x, y, x_err, y_err : iterable
filename : string, PdfPages object or None
    PdfPages file object: plot is appended to the pdf
    string: new plot file with the given filename is created
    None: the plot is printed to screen
[ "Takes", "point", "data", "(", "x", "y", ")", "with", "errors", "(", "x", "y", ")", "and", "fits", "a", "straight", "line", ".", "The", "deviation", "to", "this", "line", "is", "also", "plotted", "showing", "the", "offset", ".", "Parameters", "----------", "x", "y", "x_err", "y_err", ":", "iterable", "filename", ":", "string", "PdfPages", "object", "or", "None", "PdfPages", "file", "object", ":", "plot", "is", "appended", "to", "the", "pdf", "string", ":", "new", "plot", "file", "with", "the", "given", "filename", "is", "created", "None", ":", "the", "plot", "is", "printed", "to", "screen" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/plotting/plotting.py#L52-L121
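A hedged usage sketch for plot_linear_relation, assuming the pyBAR plotting module is importable as shown; the file name and synthetic data are illustrative only.

import numpy as np
from matplotlib.backends.backend_pdf import PdfPages
from pybar.analysis.plotting.plotting import plot_linear_relation

x = np.arange(1.0, 11.0)
y = 2.0 * x + np.random.normal(0.0, 0.5, size=x.shape)  # roughly linear test data
y_err = np.full_like(x, 0.5)  # one symmetric error per point
with PdfPages('linear_relation.pdf') as pdf:  # plot is appended to the pdf
    plot_linear_relation(x, y, y_err=y_err, title='Linear fit with residuals', x_label='x', y_label='y', filename=pdf)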
SiLab-Bonn/pyBAR
pybar/analysis/plotting/plotting.py
plot_profile_histogram
def plot_profile_histogram(x, y, n_bins=100, title=None, x_label=None, y_label=None, log_y=False, filename=None):
    '''Takes 2D point data (x,y) and creates a profile histogram similar to the TProfile in ROOT.
    It calculates the y mean for every bin at the bin center and gives the y mean error as error bars.

    Parameters
    ----------
    x : array like
        data x positions
    y : array like
        data y positions
    n_bins : int
        the number of bins used to create the histogram
    '''
    if len(x) != len(y):
        raise ValueError('x and y dimensions have to be the same')
    n, bin_edges = np.histogram(x, bins=n_bins)  # needed to calculate the number of points per bin
    sy = np.histogram(x, bins=n_bins, weights=y)[0]  # the sum of the bin values
    sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]  # the quadratic sum of the bin values
    bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2  # calculate the bin center for all bins
    mean = sy / n  # calculate the mean of all bins
    std = np.sqrt((sy2 / n - mean * mean))  # TODO: not understood, needs checking whether this is really the standard deviation
    # std_mean = np.sqrt((sy2 - 2 * mean * sy + mean * mean) / (1 * (n - 1)))  # this should be the formula?!
    std_mean = std / np.sqrt((n - 1))
    mean[np.isnan(mean)] = 0.0
    std_mean[np.isnan(std_mean)] = 0.0
    fig = Figure()
    FigureCanvas(fig)
    ax = fig.add_subplot(111)
    ax.errorbar(bin_centers, mean, yerr=std_mean, fmt='o')
    ax.set_title(title)
    if x_label is not None:
        ax.set_xlabel(x_label)
    if y_label is not None:
        ax.set_ylabel(y_label)
    if log_y:
        ax.set_yscale('log')  # Axes provides set_yscale(), not yscale()
    ax.grid(True)
    if not filename:
        fig.show()
    elif isinstance(filename, PdfPages):
        filename.savefig(fig)
    else:
        fig.savefig(filename)
python
def plot_profile_histogram(x, y, n_bins=100, title=None, x_label=None, y_label=None, log_y=False, filename=None):
    '''Takes 2D point data (x,y) and creates a profile histogram similar to the TProfile in ROOT.
    It calculates the y mean for every bin at the bin center and gives the y mean error as error bars.

    Parameters
    ----------
    x : array like
        data x positions
    y : array like
        data y positions
    n_bins : int
        the number of bins used to create the histogram
    '''
    if len(x) != len(y):
        raise ValueError('x and y dimensions have to be the same')
    n, bin_edges = np.histogram(x, bins=n_bins)  # needed to calculate the number of points per bin
    sy = np.histogram(x, bins=n_bins, weights=y)[0]  # the sum of the bin values
    sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]  # the quadratic sum of the bin values
    bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2  # calculate the bin center for all bins
    mean = sy / n  # calculate the mean of all bins
    std = np.sqrt((sy2 / n - mean * mean))  # TODO: not understood, needs checking whether this is really the standard deviation
    # std_mean = np.sqrt((sy2 - 2 * mean * sy + mean * mean) / (1 * (n - 1)))  # this should be the formula?!
    std_mean = std / np.sqrt((n - 1))
    mean[np.isnan(mean)] = 0.0
    std_mean[np.isnan(std_mean)] = 0.0
    fig = Figure()
    FigureCanvas(fig)
    ax = fig.add_subplot(111)
    ax.errorbar(bin_centers, mean, yerr=std_mean, fmt='o')
    ax.set_title(title)
    if x_label is not None:
        ax.set_xlabel(x_label)
    if y_label is not None:
        ax.set_ylabel(y_label)
    if log_y:
        ax.set_yscale('log')  # Axes provides set_yscale(), not yscale()
    ax.grid(True)
    if not filename:
        fig.show()
    elif isinstance(filename, PdfPages):
        filename.savefig(fig)
    else:
        fig.savefig(filename)
[ "def", "plot_profile_histogram", "(", "x", ",", "y", ",", "n_bins", "=", "100", ",", "title", "=", "None", ",", "x_label", "=", "None", ",", "y_label", "=", "None", ",", "log_y", "=", "False", ",", "filename", "=", "None", ")", ":", "if", "len", "(", "x", ")", "!=", "len", "(", "y", ")", ":", "raise", "ValueError", "(", "'x and y dimensions have to be the same'", ")", "n", ",", "bin_edges", "=", "np", ".", "histogram", "(", "x", ",", "bins", "=", "n_bins", ")", "# needed to calculate the number of points per bin\r", "sy", "=", "np", ".", "histogram", "(", "x", ",", "bins", "=", "n_bins", ",", "weights", "=", "y", ")", "[", "0", "]", "# the sum of the bin values\r", "sy2", "=", "np", ".", "histogram", "(", "x", ",", "bins", "=", "n_bins", ",", "weights", "=", "y", "*", "y", ")", "[", "0", "]", "# the quadratic sum of the bin values\r", "bin_centers", "=", "(", "bin_edges", "[", "1", ":", "]", "+", "bin_edges", "[", ":", "-", "1", "]", ")", "/", "2", "# calculate the bin center for all bins\r", "mean", "=", "sy", "/", "n", "# calculate the mean of all bins\r", "std", "=", "np", ".", "sqrt", "(", "(", "sy2", "/", "n", "-", "mean", "*", "mean", ")", ")", "# TODO: not understood, need check if this is really the standard deviation\r", "# std_mean = np.sqrt((sy2 - 2 * mean * sy + mean * mean) / (1*(n - 1))) # this should be the formular ?!\r", "std_mean", "=", "std", "/", "np", ".", "sqrt", "(", "(", "n", "-", "1", ")", ")", "mean", "[", "np", ".", "isnan", "(", "mean", ")", "]", "=", "0.0", "std_mean", "[", "np", ".", "isnan", "(", "std_mean", ")", "]", "=", "0.0", "fig", "=", "Figure", "(", ")", "FigureCanvas", "(", "fig", ")", "ax", "=", "fig", ".", "add_subplot", "(", "111", ")", "ax", ".", "errorbar", "(", "bin_centers", ",", "mean", ",", "yerr", "=", "std_mean", ",", "fmt", "=", "'o'", ")", "ax", ".", "set_title", "(", "title", ")", "if", "x_label", "is", "not", "None", ":", "ax", ".", "set_xlabel", "(", "x_label", ")", "if", "y_label", "is", "not", "None", ":", "ax", ".", "set_ylabel", "(", "y_label", ")", "if", "log_y", ":", "ax", ".", "yscale", "(", "'log'", ")", "ax", ".", "grid", "(", "True", ")", "if", "not", "filename", ":", "fig", ".", "show", "(", ")", "elif", "isinstance", "(", "filename", ",", "PdfPages", ")", ":", "filename", ".", "savefig", "(", "fig", ")", "else", ":", "fig", ".", "savefig", "(", "filename", ")" ]
Takes 2D point data (x,y) and creates a profile histogram similar to the TProfile in ROOT.
It calculates the y mean for every bin at the bin center and gives the y mean error as error bars.

Parameters
----------
x : array like
    data x positions
y : array like
    data y positions
n_bins : int
    the number of bins used to create the histogram
[ "Takes", "2D", "point", "data", "(", "x", "y", ")", "and", "creates", "a", "profile", "histogram", "similar", "to", "the", "TProfile", "in", "ROOT", ".", "It", "calculates", "the", "y", "mean", "for", "every", "bin", "at", "the", "bin", "center", "and", "gives", "the", "y", "mean", "error", "as", "error", "bars", ".", "Parameters", "----------", "x", ":", "array", "like", "data", "x", "positions", "y", ":", "array", "like", "data", "y", "positions", "n_bins", ":", "int", "the", "number", "of", "bins", "used", "to", "create", "the", "histogram" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/plotting/plotting.py#L241-L284
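A hedged usage sketch for plot_profile_histogram with synthetic data; the import path follows the file path of this record and the output file name is illustrative.

import numpy as np
from pybar.analysis.plotting.plotting import plot_profile_histogram

x = np.random.uniform(0.0, 10.0, 10000)
y = x ** 2 + np.random.normal(0.0, 5.0, size=x.shape)  # noisy quadratic relation
plot_profile_histogram(x, y, n_bins=50, title='Profile of y vs. x', x_label='x', y_label='mean y per bin', filename='profile.png')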
SiLab-Bonn/pyBAR
pybar/analysis/plotting/plotting.py
round_to_multiple
def round_to_multiple(number, multiple):
    '''Rounding up to the nearest multiple of any positive integer.

    Parameters
    ----------
    number : int, float
        Input number.
    multiple : int
        Round up to a multiple of this value. Will be converted to int. Must not be zero.

    Returns
    -------
    ceil_mod_number : int
        Rounded up number.

    Example
    -------
    round_to_multiple(maximum, math.floor(math.log10(maximum)))
    '''
    multiple = int(multiple)
    if multiple == 0:
        multiple = 1
    ceil_mod_number = number - number % (-multiple)
    return int(ceil_mod_number)
python
def round_to_multiple(number, multiple):
    '''Rounding up to the nearest multiple of any positive integer.

    Parameters
    ----------
    number : int, float
        Input number.
    multiple : int
        Round up to a multiple of this value. Will be converted to int. Must not be zero.

    Returns
    -------
    ceil_mod_number : int
        Rounded up number.

    Example
    -------
    round_to_multiple(maximum, math.floor(math.log10(maximum)))
    '''
    multiple = int(multiple)
    if multiple == 0:
        multiple = 1
    ceil_mod_number = number - number % (-multiple)
    return int(ceil_mod_number)
[ "def", "round_to_multiple", "(", "number", ",", "multiple", ")", ":", "multiple", "=", "int", "(", "multiple", ")", "if", "multiple", "==", "0", ":", "multiple", "=", "1", "ceil_mod_number", "=", "number", "-", "number", "%", "(", "-", "multiple", ")", "return", "int", "(", "ceil_mod_number", ")" ]
Rounding up to the nearest multiple of any positive integer.

Parameters
----------
number : int, float
    Input number.
multiple : int
    Round up to a multiple of this value. Will be converted to int. Must not be zero.

Returns
-------
ceil_mod_number : int
    Rounded up number.

Example
-------
round_to_multiple(maximum, math.floor(math.log10(maximum)))
[ "Rounding", "up", "to", "the", "nearest", "multiple", "of", "any", "positive", "integer", "Parameters", "----------", "number", ":", "int", "float", "Input", "number", ".", "multiple", ":", "int", "Round", "up", "to", "multiple", "of", "multiple", ".", "Will", "be", "converted", "to", "int", ".", "Must", "not", "be", "equal", "zero", ".", "Returns", "-------", "ceil_mod_number", ":", "int", "Rounded", "up", "number", ".", "Example", "-------", "round_to_multiple", "(", "maximum", "math", ".", "floor", "(", "math", ".", "log10", "(", "maximum", ")))" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/plotting/plotting.py#L352-L374
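The implementation relies on Python's modulo sign convention: for a positive multiple, number % (-multiple) is non-positive, so subtracting it rounds up. A standalone restatement with a few worked cases (assumed behavior, easy to verify in a REPL):

def round_up_to_multiple(number, multiple):  # same logic as round_to_multiple above
    multiple = int(multiple)
    if multiple == 0:
        multiple = 1
    return int(number - number % (-multiple))

assert round_up_to_multiple(17, 5) == 20   # 17 % -5 == -3, and 17 - (-3) == 20
assert round_up_to_multiple(20, 5) == 20   # exact multiples are unchanged
assert round_up_to_multiple(0.1, 1) == 1   # floats are rounded up, then cast to int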
SiLab-Bonn/pyBAR
pybar/analysis/plotting/plotting.py
hist_quantiles
def hist_quantiles(hist, prob=(0.05, 0.95), return_indices=False, copy=True):
    '''Calculate quantiles from histograms, cuts off hist below and above given quantile.
    This function will not cut off more than the given values.

    Parameters
    ----------
    hist : array_like, iterable
        Input histogram with dimension at most 1.
    prob : float, list, tuple
        List of quantiles to compute. Upper and lower limit. From 0 to 1. Default is 0.05 and 0.95.
    return_indices : bool, optional
        If true, return the indices of the hist.
    copy : bool, optional
        Whether to copy the input data (True), or to use a reference instead. Default is True.

    Returns
    -------
    masked_hist : masked_array
        Hist with masked elements.
    masked_hist : masked_array, tuple
        Hist with masked elements and indices.
    '''
    # make np array
    hist_t = np.array(hist)
    # calculate cumulative distribution
    cdf = np.cumsum(hist_t)
    # copy, convert and normalize
    if cdf[-1] == 0:
        normcdf = cdf.astype('float')
    else:
        normcdf = cdf.astype('float') / cdf[-1]
    # calculate unique values from cumulative distribution and their indices
    unormcdf, indices = np.unique(normcdf, return_index=True)
    # calculate limits
    try:
        hp = np.where(unormcdf > prob[1])[0][0]
        lp = np.where(unormcdf >= prob[0])[0][0]
    except IndexError:
        hp_index = hist_t.shape[0]
        lp_index = 0
    else:
        hp_index = indices[hp]
        lp_index = indices[lp]
    # copy and create masked array
    masked_hist = np.ma.array(hist, copy=copy, mask=True)
    masked_hist.mask[lp_index:hp_index + 1] = False
    if return_indices:
        return masked_hist, (lp_index, hp_index)
    else:
        return masked_hist
python
def hist_quantiles(hist, prob=(0.05, 0.95), return_indices=False, copy=True):
    '''Calculate quantiles from histograms, cuts off hist below and above given quantile.
    This function will not cut off more than the given values.

    Parameters
    ----------
    hist : array_like, iterable
        Input histogram with dimension at most 1.
    prob : float, list, tuple
        List of quantiles to compute. Upper and lower limit. From 0 to 1. Default is 0.05 and 0.95.
    return_indices : bool, optional
        If true, return the indices of the hist.
    copy : bool, optional
        Whether to copy the input data (True), or to use a reference instead. Default is True.

    Returns
    -------
    masked_hist : masked_array
        Hist with masked elements.
    masked_hist : masked_array, tuple
        Hist with masked elements and indices.
    '''
    # make np array
    hist_t = np.array(hist)
    # calculate cumulative distribution
    cdf = np.cumsum(hist_t)
    # copy, convert and normalize
    if cdf[-1] == 0:
        normcdf = cdf.astype('float')
    else:
        normcdf = cdf.astype('float') / cdf[-1]
    # calculate unique values from cumulative distribution and their indices
    unormcdf, indices = np.unique(normcdf, return_index=True)
    # calculate limits
    try:
        hp = np.where(unormcdf > prob[1])[0][0]
        lp = np.where(unormcdf >= prob[0])[0][0]
    except IndexError:
        hp_index = hist_t.shape[0]
        lp_index = 0
    else:
        hp_index = indices[hp]
        lp_index = indices[lp]
    # copy and create masked array
    masked_hist = np.ma.array(hist, copy=copy, mask=True)
    masked_hist.mask[lp_index:hp_index + 1] = False
    if return_indices:
        return masked_hist, (lp_index, hp_index)
    else:
        return masked_hist
[ "def", "hist_quantiles", "(", "hist", ",", "prob", "=", "(", "0.05", ",", "0.95", ")", ",", "return_indices", "=", "False", ",", "copy", "=", "True", ")", ":", "# make np array\r", "hist_t", "=", "np", ".", "array", "(", "hist", ")", "# calculate cumulative distribution\r", "cdf", "=", "np", ".", "cumsum", "(", "hist_t", ")", "# copy, convert and normalize\r", "if", "cdf", "[", "-", "1", "]", "==", "0", ":", "normcdf", "=", "cdf", ".", "astype", "(", "'float'", ")", "else", ":", "normcdf", "=", "cdf", ".", "astype", "(", "'float'", ")", "/", "cdf", "[", "-", "1", "]", "# calculate unique values from cumulative distribution and their indices\r", "unormcdf", ",", "indices", "=", "np", ".", "unique", "(", "normcdf", ",", "return_index", "=", "True", ")", "# calculate limits\r", "try", ":", "hp", "=", "np", ".", "where", "(", "unormcdf", ">", "prob", "[", "1", "]", ")", "[", "0", "]", "[", "0", "]", "lp", "=", "np", ".", "where", "(", "unormcdf", ">=", "prob", "[", "0", "]", ")", "[", "0", "]", "[", "0", "]", "except", "IndexError", ":", "hp_index", "=", "hist_t", ".", "shape", "[", "0", "]", "lp_index", "=", "0", "else", ":", "hp_index", "=", "indices", "[", "hp", "]", "lp_index", "=", "indices", "[", "lp", "]", "# copy and create ma\r", "masked_hist", "=", "np", ".", "ma", ".", "array", "(", "hist", ",", "copy", "=", "copy", ",", "mask", "=", "True", ")", "masked_hist", ".", "mask", "[", "lp_index", ":", "hp_index", "+", "1", "]", "=", "False", "if", "return_indices", ":", "return", "masked_hist", ",", "(", "lp_index", ",", "hp_index", ")", "else", ":", "return", "masked_hist" ]
Calculate quantiles from histograms, cuts off hist below and above given quantile.
This function will not cut off more than the given values.

Parameters
----------
hist : array_like, iterable
    Input histogram with dimension at most 1.
prob : float, list, tuple
    List of quantiles to compute. Upper and lower limit. From 0 to 1. Default is 0.05 and 0.95.
return_indices : bool, optional
    If true, return the indices of the hist.
copy : bool, optional
    Whether to copy the input data (True), or to use a reference instead. Default is True.

Returns
-------
masked_hist : masked_array
    Hist with masked elements.
masked_hist : masked_array, tuple
    Hist with masked elements and indices.
[ "Calculate", "quantiles", "from", "histograms", "cuts", "off", "hist", "below", "and", "above", "given", "quantile", ".", "This", "function", "will", "not", "cut", "off", "more", "than", "the", "given", "values", ".", "Parameters", "----------", "hist", ":", "array_like", "iterable", "Input", "histogram", "with", "dimension", "at", "most", "1", ".", "prob", ":", "float", "list", "tuple", "List", "of", "quantiles", "to", "compute", ".", "Upper", "and", "lower", "limit", ".", "From", "0", "to", "1", ".", "Default", "is", "0", ".", "05", "and", "0", ".", "95", ".", "return_indices", ":", "bool", "optional", "If", "true", "return", "the", "indices", "of", "the", "hist", ".", "copy", ":", "bool", "optional", "Whether", "to", "copy", "the", "input", "data", "(", "True", ")", "or", "to", "use", "a", "reference", "instead", ".", "Default", "is", "False", ".", "Returns", "-------", "masked_hist", ":", "masked_array", "Hist", "with", "masked", "elements", ".", "masked_hist", ":", "masked_array", "tuple", "Hist", "with", "masked", "elements", "and", "indices", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/plotting/plotting.py#L831-L879
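A hedged usage sketch for hist_quantiles on a small 1D histogram; the expected values in the comments follow from the cumulative-distribution logic above.

import numpy as np
from pybar.analysis.plotting.plotting import hist_quantiles

hist = np.array([0, 1, 5, 20, 40, 20, 5, 1, 0])
masked_hist, (low, high) = hist_quantiles(hist, prob=(0.05, 0.95), return_indices=True)
print(low, high)                 # 2 6: the kept bin range
print(masked_hist.compressed())  # [ 5 20 40 20  5]: only the unmasked bins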
SiLab-Bonn/pyBAR
pybar/analysis/plotting/plotting.py
hist_last_nonzero
def hist_last_nonzero(hist, return_index=False, copy=True):
    '''Find the last nonzero index and mask the remaining entries.

    Parameters
    ----------
    hist : array_like, iterable
        Input histogram with dimension at most 1.
    return_index : bool, optional
        If true, return the index.
    copy : bool, optional
        Whether to copy the input data (True), or to use a reference instead. Default is True.

    Returns
    -------
    masked_hist : masked_array
        Hist with masked elements.
    masked_hist : masked_array, tuple
        Hist with masked elements and index of the element after the last nonzero value.
    '''
    # make np array
    hist_t = np.array(hist)
    index = (np.where(hist_t)[-1][-1] + 1) if np.sum(hist_t) > 1 else hist_t.shape[0]
    # copy and create masked array; unmask the bins up to and including the last
    # nonzero entry so that only the remaining (trailing) entries stay masked
    masked_hist = np.ma.array(hist, copy=copy, mask=True)
    masked_hist.mask[:index] = False
    if return_index:
        return masked_hist, index
    else:
        return masked_hist
python
def hist_last_nonzero(hist, return_index=False, copy=True):
    '''Find the last nonzero index and mask the remaining entries.

    Parameters
    ----------
    hist : array_like, iterable
        Input histogram with dimension at most 1.
    return_index : bool, optional
        If true, return the index.
    copy : bool, optional
        Whether to copy the input data (True), or to use a reference instead. Default is True.

    Returns
    -------
    masked_hist : masked_array
        Hist with masked elements.
    masked_hist : masked_array, tuple
        Hist with masked elements and index of the element after the last nonzero value.
    '''
    # make np array
    hist_t = np.array(hist)
    index = (np.where(hist_t)[-1][-1] + 1) if np.sum(hist_t) > 1 else hist_t.shape[0]
    # copy and create masked array; unmask the bins up to and including the last
    # nonzero entry so that only the remaining (trailing) entries stay masked
    masked_hist = np.ma.array(hist, copy=copy, mask=True)
    masked_hist.mask[:index] = False
    if return_index:
        return masked_hist, index
    else:
        return masked_hist
[ "def", "hist_last_nonzero", "(", "hist", ",", "return_index", "=", "False", ",", "copy", "=", "True", ")", ":", "# make np array\r", "hist_t", "=", "np", ".", "array", "(", "hist", ")", "index", "=", "(", "np", ".", "where", "(", "hist_t", ")", "[", "-", "1", "]", "[", "-", "1", "]", "+", "1", ")", "if", "np", ".", "sum", "(", "hist_t", ")", ">", "1", "else", "hist_t", ".", "shape", "[", "0", "]", "# copy and create ma\r", "masked_hist", "=", "np", ".", "ma", ".", "array", "(", "hist", ",", "copy", "=", "copy", ",", "mask", "=", "True", ")", "masked_hist", ".", "mask", "[", "index", ":", "]", "=", "False", "if", "return_index", ":", "return", "masked_hist", ",", "index", "else", ":", "return", "masked_hist" ]
Find the last nonzero index and mask the remaining entries.

Parameters
----------
hist : array_like, iterable
    Input histogram with dimension at most 1.
return_index : bool, optional
    If true, return the index.
copy : bool, optional
    Whether to copy the input data (True), or to use a reference instead. Default is True.

Returns
-------
masked_hist : masked_array
    Hist with masked elements.
masked_hist : masked_array, tuple
    Hist with masked elements and index of the element after the last nonzero value.
[ "Find", "the", "last", "nonzero", "index", "and", "mask", "the", "remaining", "entries", ".", "Parameters", "----------", "hist", ":", "array_like", "iterable", "Input", "histogram", "with", "dimension", "at", "most", "1", ".", "return_index", ":", "bool", "optional", "If", "true", "return", "the", "index", ".", "copy", ":", "bool", "optional", "Whether", "to", "copy", "the", "input", "data", "(", "True", ")", "or", "to", "use", "a", "reference", "instead", ".", "Default", "is", "False", ".", "Returns", "-------", "masked_hist", ":", "masked_array", "Hist", "with", "masked", "elements", ".", "masked_hist", ":", "masked_array", "tuple", "Hist", "with", "masked", "elements", "and", "index", "of", "the", "element", "after", "the", "last", "nonzero", "value", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/plotting/plotting.py#L882-L910
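A hedged usage sketch for hist_last_nonzero; with the mask oriented as the docstring describes, the bins after the last nonzero entry are cut away.

import numpy as np
from pybar.analysis.plotting.plotting import hist_last_nonzero

hist = np.array([3, 7, 2, 0, 1, 0, 0, 0])
masked_hist, index = hist_last_nonzero(hist, return_index=True)
print(index)                     # 5: one past the last nonzero bin
print(masked_hist.compressed())  # [3 7 2 0 1]: trailing empty bins are masked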
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.readout
def readout(self, fifo, no_data_timeout=None):
    '''Readout thread continuously reading FIFO.

    Readout thread, which uses read_raw_data_from_fifo() and appends data to self._fifo_data_deque (collections.deque).
    '''
    logging.info('Starting readout thread for %s', fifo)
    time_last_data = time()
    time_wait = 0.0
    empty_reads = 0
    while not self.force_stop[fifo].wait(time_wait if time_wait >= 0.0 else 0.0):
        time_read = time()
        try:
            if no_data_timeout and time_last_data + no_data_timeout < get_float_time():
                raise NoDataTimeout('Received no data for %0.1f second(s) from %s' % (no_data_timeout, fifo))
            raw_data = self.read_raw_data_from_fifo(fifo)
        except NoDataTimeout:
            no_data_timeout = None  # raise exception only once
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
        except Exception:
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
            if self.stop_readout.is_set():  # in case of an exception, break immediately
                break
        else:
            n_data_words = raw_data.shape[0]
            if n_data_words > 0:
                time_last_data = time()
                empty_reads = 0
                time_start_read, time_stop_read = self.update_timestamp(fifo)
                status = 0
                self._fifo_data_deque[fifo].append((raw_data, time_start_read, time_stop_read, status))
                with self._fifo_conditions[fifo]:
                    self._fifo_conditions[fifo].notify_all()
            elif self.stop_readout.is_set():
                if empty_reads == self._n_empty_reads:
                    break
                else:
                    empty_reads += 1
        finally:
            # ensure that the readout interval does not depend on the processing time of the data
            # and stays more or less constant over time
            time_wait = self.readout_interval - (time() - time_read)
    self._fifo_data_deque[fifo].append(None)  # last item, None will stop worker
    with self._fifo_conditions[fifo]:
        self._fifo_conditions[fifo].notify_all()
    logging.info('Stopping readout thread for %s', fifo)
python
def readout(self, fifo, no_data_timeout=None):
    '''Readout thread continuously reading FIFO.

    Readout thread, which uses read_raw_data_from_fifo() and appends data to self._fifo_data_deque (collections.deque).
    '''
    logging.info('Starting readout thread for %s', fifo)
    time_last_data = time()
    time_wait = 0.0
    empty_reads = 0
    while not self.force_stop[fifo].wait(time_wait if time_wait >= 0.0 else 0.0):
        time_read = time()
        try:
            if no_data_timeout and time_last_data + no_data_timeout < get_float_time():
                raise NoDataTimeout('Received no data for %0.1f second(s) from %s' % (no_data_timeout, fifo))
            raw_data = self.read_raw_data_from_fifo(fifo)
        except NoDataTimeout:
            no_data_timeout = None  # raise exception only once
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
        except Exception:
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
            if self.stop_readout.is_set():  # in case of an exception, break immediately
                break
        else:
            n_data_words = raw_data.shape[0]
            if n_data_words > 0:
                time_last_data = time()
                empty_reads = 0
                time_start_read, time_stop_read = self.update_timestamp(fifo)
                status = 0
                self._fifo_data_deque[fifo].append((raw_data, time_start_read, time_stop_read, status))
                with self._fifo_conditions[fifo]:
                    self._fifo_conditions[fifo].notify_all()
            elif self.stop_readout.is_set():
                if empty_reads == self._n_empty_reads:
                    break
                else:
                    empty_reads += 1
        finally:
            # ensure that the readout interval does not depend on the processing time of the data
            # and stays more or less constant over time
            time_wait = self.readout_interval - (time() - time_read)
    self._fifo_data_deque[fifo].append(None)  # last item, None will stop worker
    with self._fifo_conditions[fifo]:
        self._fifo_conditions[fifo].notify_all()
    logging.info('Stopping readout thread for %s', fifo)
[ "def", "readout", "(", "self", ",", "fifo", ",", "no_data_timeout", "=", "None", ")", ":", "logging", ".", "info", "(", "'Starting readout thread for %s'", ",", "fifo", ")", "time_last_data", "=", "time", "(", ")", "time_wait", "=", "0.0", "empty_reads", "=", "0", "while", "not", "self", ".", "force_stop", "[", "fifo", "]", ".", "wait", "(", "time_wait", "if", "time_wait", ">=", "0.0", "else", "0.0", ")", ":", "time_read", "=", "time", "(", ")", "try", ":", "if", "no_data_timeout", "and", "time_last_data", "+", "no_data_timeout", "<", "get_float_time", "(", ")", ":", "raise", "NoDataTimeout", "(", "'Received no data for %0.1f second(s) from %s'", "%", "(", "no_data_timeout", ",", "fifo", ")", ")", "raw_data", "=", "self", ".", "read_raw_data_from_fifo", "(", "fifo", ")", "except", "NoDataTimeout", ":", "no_data_timeout", "=", "None", "# raise exception only once\r", "if", "self", ".", "errback", ":", "self", ".", "errback", "(", "sys", ".", "exc_info", "(", ")", ")", "else", ":", "raise", "except", "Exception", ":", "if", "self", ".", "errback", ":", "self", ".", "errback", "(", "sys", ".", "exc_info", "(", ")", ")", "else", ":", "raise", "if", "self", ".", "stop_readout", ".", "is_set", "(", ")", ":", "# in case of a exception, break immediately\r", "break", "else", ":", "n_data_words", "=", "raw_data", ".", "shape", "[", "0", "]", "if", "n_data_words", ">", "0", ":", "time_last_data", "=", "time", "(", ")", "empty_reads", "=", "0", "time_start_read", ",", "time_stop_read", "=", "self", ".", "update_timestamp", "(", "fifo", ")", "status", "=", "0", "self", ".", "_fifo_data_deque", "[", "fifo", "]", ".", "append", "(", "(", "raw_data", ",", "time_start_read", ",", "time_stop_read", ",", "status", ")", ")", "with", "self", ".", "_fifo_conditions", "[", "fifo", "]", ":", "self", ".", "_fifo_conditions", "[", "fifo", "]", ".", "notify_all", "(", ")", "elif", "self", ".", "stop_readout", ".", "is_set", "(", ")", ":", "if", "empty_reads", "==", "self", ".", "_n_empty_reads", ":", "break", "else", ":", "empty_reads", "+=", "1", "finally", ":", "# ensure that the readout interval does not depend on the processing time of the data\r", "# and stays more or less constant over time\r", "time_wait", "=", "self", ".", "readout_interval", "-", "(", "time", "(", ")", "-", "time_read", ")", "self", ".", "_fifo_data_deque", "[", "fifo", "]", ".", "append", "(", "None", ")", "# last item, None will stop worker\r", "with", "self", ".", "_fifo_conditions", "[", "fifo", "]", ":", "self", ".", "_fifo_conditions", "[", "fifo", "]", ".", "notify_all", "(", ")", "logging", ".", "info", "(", "'Stopping readout thread for %s'", ",", "fifo", ")" ]
Readout thread continuously reading FIFO.

Readout thread, which uses read_raw_data_from_fifo() and appends data to self._fifo_data_deque (collections.deque).
[ "Readout", "thread", "continuously", "reading", "FIFO", ".", "Readout", "thread", "which", "uses", "read_raw_data_from_fifo", "()", "and", "appends", "data", "to", "self", ".", "_fifo_data_deque", "(", "collection", ".", "deque", ")", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L270-L320
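A standalone sketch (plain threading, no pyBAR objects) of the timing scheme used in the readout loop above: the wait before the next poll is shortened by however long the last read took, so the effective readout interval stays roughly constant regardless of processing time.

import threading
import time

def poll_loop(read_func, stop_event, readout_interval=0.05):
    time_wait = 0.0
    while not stop_event.wait(time_wait if time_wait >= 0.0 else 0.0):
        time_read = time.time()
        try:
            data = read_func()  # stands in for read_raw_data_from_fifo()
            if data:
                pass  # hand the data to a consumer, e.g. append it to a deque
        finally:
            # compensate for the processing time of this iteration
            time_wait = readout_interval - (time.time() - time_read)

stop = threading.Event()
thread = threading.Thread(target=poll_loop, args=(lambda: [], stop))
thread.start()
time.sleep(0.2)  # let the loop run a few polls
stop.set()
thread.join()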
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.worker
def worker(self, fifo):
    '''Worker thread continuously filtering and converting data when data becomes available.
    '''
    logging.debug('Starting worker thread for %s', fifo)
    self._fifo_conditions[fifo].acquire()
    while True:
        try:
            data_tuple = self._fifo_data_deque[fifo].popleft()
        except IndexError:
            self._fifo_conditions[fifo].wait(self.readout_interval)  # sleep a little bit, reducing CPU usage
        else:
            if data_tuple is None:  # if None then exit
                break
            else:
                for index, (filter_func, converter_func, fifo_select) in enumerate(izip(self.filter_func, self.converter_func, self.fifo_select)):
                    if fifo_select is None or fifo_select == fifo:
                        # filter and do the conversion
                        converted_data_tuple = convert_data_iterable((data_tuple,), filter_func=filter_func, converter_func=converter_func)[0]
                        n_data_words = converted_data_tuple[0].shape[0]
                        with self.data_words_per_second_lock:
                            self._words_per_read[index].append((n_data_words, converted_data_tuple[1], converted_data_tuple[2]))
                        self._data_deque[index].append(converted_data_tuple)
                        with self._data_conditions[index]:
                            self._data_conditions[index].notify_all()
    for index, fifo_select in enumerate(self.fifo_select):
        if fifo_select is None or fifo_select == fifo:
            self._data_deque[index].append(None)
            with self._data_conditions[index]:
                self._data_conditions[index].notify_all()
    self._fifo_conditions[fifo].release()
    logging.debug('Stopping worker thread for %s', fifo)
python
def worker(self, fifo):
    '''Worker thread continuously filtering and converting data when data becomes available.
    '''
    logging.debug('Starting worker thread for %s', fifo)
    self._fifo_conditions[fifo].acquire()
    while True:
        try:
            data_tuple = self._fifo_data_deque[fifo].popleft()
        except IndexError:
            self._fifo_conditions[fifo].wait(self.readout_interval)  # sleep a little bit, reducing CPU usage
        else:
            if data_tuple is None:  # if None then exit
                break
            else:
                for index, (filter_func, converter_func, fifo_select) in enumerate(izip(self.filter_func, self.converter_func, self.fifo_select)):
                    if fifo_select is None or fifo_select == fifo:
                        # filter and do the conversion
                        converted_data_tuple = convert_data_iterable((data_tuple,), filter_func=filter_func, converter_func=converter_func)[0]
                        n_data_words = converted_data_tuple[0].shape[0]
                        with self.data_words_per_second_lock:
                            self._words_per_read[index].append((n_data_words, converted_data_tuple[1], converted_data_tuple[2]))
                        self._data_deque[index].append(converted_data_tuple)
                        with self._data_conditions[index]:
                            self._data_conditions[index].notify_all()
    for index, fifo_select in enumerate(self.fifo_select):
        if fifo_select is None or fifo_select == fifo:
            self._data_deque[index].append(None)
            with self._data_conditions[index]:
                self._data_conditions[index].notify_all()
    self._fifo_conditions[fifo].release()
    logging.debug('Stopping worker thread for %s', fifo)
[ "def", "worker", "(", "self", ",", "fifo", ")", ":", "logging", ".", "debug", "(", "'Starting worker thread for %s'", ",", "fifo", ")", "self", ".", "_fifo_conditions", "[", "fifo", "]", ".", "acquire", "(", ")", "while", "True", ":", "try", ":", "data_tuple", "=", "self", ".", "_fifo_data_deque", "[", "fifo", "]", ".", "popleft", "(", ")", "except", "IndexError", ":", "self", ".", "_fifo_conditions", "[", "fifo", "]", ".", "wait", "(", "self", ".", "readout_interval", ")", "# sleep a little bit, reducing CPU usage\r", "else", ":", "if", "data_tuple", "is", "None", ":", "# if None then exit\r", "break", "else", ":", "for", "index", ",", "(", "filter_func", ",", "converter_func", ",", "fifo_select", ")", "in", "enumerate", "(", "izip", "(", "self", ".", "filter_func", ",", "self", ".", "converter_func", ",", "self", ".", "fifo_select", ")", ")", ":", "if", "fifo_select", "is", "None", "or", "fifo_select", "==", "fifo", ":", "# filter and do the conversion\r", "converted_data_tuple", "=", "convert_data_iterable", "(", "(", "data_tuple", ",", ")", ",", "filter_func", "=", "filter_func", ",", "converter_func", "=", "converter_func", ")", "[", "0", "]", "n_data_words", "=", "converted_data_tuple", "[", "0", "]", ".", "shape", "[", "0", "]", "with", "self", ".", "data_words_per_second_lock", ":", "self", ".", "_words_per_read", "[", "index", "]", ".", "append", "(", "(", "n_data_words", ",", "converted_data_tuple", "[", "1", "]", ",", "converted_data_tuple", "[", "2", "]", ")", ")", "self", ".", "_data_deque", "[", "index", "]", ".", "append", "(", "converted_data_tuple", ")", "with", "self", ".", "_data_conditions", "[", "index", "]", ":", "self", ".", "_data_conditions", "[", "index", "]", ".", "notify_all", "(", ")", "for", "index", ",", "fifo_select", "in", "enumerate", "(", "self", ".", "fifo_select", ")", ":", "if", "fifo_select", "is", "None", "or", "fifo_select", "==", "fifo", ":", "self", ".", "_data_deque", "[", "index", "]", ".", "append", "(", "None", ")", "with", "self", ".", "_data_conditions", "[", "index", "]", ":", "self", ".", "_data_conditions", "[", "index", "]", ".", "notify_all", "(", ")", "self", ".", "_fifo_conditions", "[", "fifo", "]", ".", "release", "(", ")", "logging", ".", "debug", "(", "'Stopping worker thread for %s'", ",", "fifo", ")" ]
Worker thread continuously filtering and converting data when data becomes available.
[ "Worker", "thread", "continuously", "filtering", "and", "converting", "data", "when", "data", "becomes", "available", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L322-L352
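A standalone sketch of the consumer pattern above: a deque guarded by a Condition, a timed wait while the deque is empty, and None as the end-of-data sentinel.

import collections
import threading

data_deque = collections.deque()
condition = threading.Condition()

def consumer():
    with condition:
        while True:
            try:
                item = data_deque.popleft()
            except IndexError:
                condition.wait(0.05)  # sleep a little bit, reducing CPU usage
            else:
                if item is None:  # sentinel appended by the producer
                    break
                print('processed', item)

consumer_thread = threading.Thread(target=consumer)
consumer_thread.start()
with condition:
    for i in range(3):
        data_deque.append(i)
    data_deque.append(None)  # tell the consumer to stop
    condition.notify_all()
consumer_thread.join()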
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.writer
def writer(self, index, no_data_timeout=None):
    '''Writer thread continuously calling the callback function for writing data when data becomes available.
    '''
    is_fe_data_header = logical_and(is_fe_word, is_data_header)
    logging.debug('Starting writer thread with index %d', index)
    self._data_conditions[index].acquire()
    time_last_data = time()
    time_write = time()
    converted_data_tuple_list = [None] * len(self.filter_func)  # callback function gets a list of lists of tuples
    while True:
        try:
            if no_data_timeout and time_last_data + no_data_timeout < time():
                raise NoDataTimeout('Received no data for %0.1f second(s) for writer thread with index %d' % (no_data_timeout, index))
            converted_data_tuple = self._data_deque[index].popleft()
        except NoDataTimeout:  # no data timeout
            no_data_timeout = None  # raise exception only once
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
        except IndexError:  # no data in queue
            self._data_conditions[index].wait(self.readout_interval)  # sleep a little bit, reducing CPU usage
        else:
            if converted_data_tuple is None:  # if None then write and exit
                if self.callback and any(converted_data_tuple_list):
                    try:
                        self.callback(converted_data_tuple_list)
                    except Exception:
                        self.errback(sys.exc_info())
                break
            else:
                if no_data_timeout and np.any(is_fe_data_header(converted_data_tuple[0])):  # check for FEI4 data words
                    time_last_data = time()
                if converted_data_tuple_list[index]:
                    converted_data_tuple_list[index].append(converted_data_tuple)
                else:
                    converted_data_tuple_list[index] = [converted_data_tuple]  # adding iterable
                if self.fill_buffer:
                    self._data_buffer[index].append(converted_data_tuple)
                # check whether it is time to call the callback function
                if self.callback and any(converted_data_tuple_list) and ((self.write_interval and time() - time_write >= self.write_interval) or not self.write_interval):
                    try:
                        self.callback(converted_data_tuple_list)  # callback function gets a list of lists of tuples
                    except Exception:
                        self.errback(sys.exc_info())
                    else:
                        converted_data_tuple_list = [None] * len(self.filter_func)
                        time_write = time()  # update last write timestamp
    self._data_conditions[index].release()
    logging.debug('Stopping writer thread with index %d', index)
python
def writer(self, index, no_data_timeout=None):
    '''Writer thread continuously calling the callback function for writing data when data becomes available.
    '''
    is_fe_data_header = logical_and(is_fe_word, is_data_header)
    logging.debug('Starting writer thread with index %d', index)
    self._data_conditions[index].acquire()
    time_last_data = time()
    time_write = time()
    converted_data_tuple_list = [None] * len(self.filter_func)  # callback function gets a list of lists of tuples
    while True:
        try:
            if no_data_timeout and time_last_data + no_data_timeout < time():
                raise NoDataTimeout('Received no data for %0.1f second(s) for writer thread with index %d' % (no_data_timeout, index))
            converted_data_tuple = self._data_deque[index].popleft()
        except NoDataTimeout:  # no data timeout
            no_data_timeout = None  # raise exception only once
            if self.errback:
                self.errback(sys.exc_info())
            else:
                raise
        except IndexError:  # no data in queue
            self._data_conditions[index].wait(self.readout_interval)  # sleep a little bit, reducing CPU usage
        else:
            if converted_data_tuple is None:  # if None then write and exit
                if self.callback and any(converted_data_tuple_list):
                    try:
                        self.callback(converted_data_tuple_list)
                    except Exception:
                        self.errback(sys.exc_info())
                break
            else:
                if no_data_timeout and np.any(is_fe_data_header(converted_data_tuple[0])):  # check for FEI4 data words
                    time_last_data = time()
                if converted_data_tuple_list[index]:
                    converted_data_tuple_list[index].append(converted_data_tuple)
                else:
                    converted_data_tuple_list[index] = [converted_data_tuple]  # adding iterable
                if self.fill_buffer:
                    self._data_buffer[index].append(converted_data_tuple)
                # check whether it is time to call the callback function
                if self.callback and any(converted_data_tuple_list) and ((self.write_interval and time() - time_write >= self.write_interval) or not self.write_interval):
                    try:
                        self.callback(converted_data_tuple_list)  # callback function gets a list of lists of tuples
                    except Exception:
                        self.errback(sys.exc_info())
                    else:
                        converted_data_tuple_list = [None] * len(self.filter_func)
                        time_write = time()  # update last write timestamp
    self._data_conditions[index].release()
    logging.debug('Stopping writer thread with index %d', index)
[ "def", "writer", "(", "self", ",", "index", ",", "no_data_timeout", "=", "None", ")", ":", "is_fe_data_header", "=", "logical_and", "(", "is_fe_word", ",", "is_data_header", ")", "logging", ".", "debug", "(", "'Starting writer thread with index %d'", ",", "index", ")", "self", ".", "_data_conditions", "[", "index", "]", ".", "acquire", "(", ")", "time_last_data", "=", "time", "(", ")", "time_write", "=", "time", "(", ")", "converted_data_tuple_list", "=", "[", "None", "]", "*", "len", "(", "self", ".", "filter_func", ")", "# callback function gets a list of lists of tuples\r", "while", "True", ":", "try", ":", "if", "no_data_timeout", "and", "time_last_data", "+", "no_data_timeout", "<", "time", "(", ")", ":", "raise", "NoDataTimeout", "(", "'Received no data for %0.1f second(s) for writer thread with index %d'", "%", "(", "no_data_timeout", ",", "index", ")", ")", "converted_data_tuple", "=", "self", ".", "_data_deque", "[", "index", "]", ".", "popleft", "(", ")", "except", "NoDataTimeout", ":", "# no data timeout\r", "no_data_timeout", "=", "None", "# raise exception only once\r", "if", "self", ".", "errback", ":", "self", ".", "errback", "(", "sys", ".", "exc_info", "(", ")", ")", "else", ":", "raise", "except", "IndexError", ":", "# no data in queue\r", "self", ".", "_data_conditions", "[", "index", "]", ".", "wait", "(", "self", ".", "readout_interval", ")", "# sleep a little bit, reducing CPU usage\r", "else", ":", "if", "converted_data_tuple", "is", "None", ":", "# if None then write and exit\r", "if", "self", ".", "callback", "and", "any", "(", "converted_data_tuple_list", ")", ":", "try", ":", "self", ".", "callback", "(", "converted_data_tuple_list", ")", "except", "Exception", ":", "self", ".", "errback", "(", "sys", ".", "exc_info", "(", ")", ")", "break", "else", ":", "if", "no_data_timeout", "and", "np", ".", "any", "(", "is_fe_data_header", "(", "converted_data_tuple", "[", "0", "]", ")", ")", ":", "# check for FEI4 data words\r", "time_last_data", "=", "time", "(", ")", "if", "converted_data_tuple_list", "[", "index", "]", ":", "converted_data_tuple_list", "[", "index", "]", ".", "append", "(", "converted_data_tuple", ")", "else", ":", "converted_data_tuple_list", "[", "index", "]", "=", "[", "converted_data_tuple", "]", "# adding iterable\r", "if", "self", ".", "fill_buffer", ":", "self", ".", "_data_buffer", "[", "index", "]", ".", "append", "(", "converted_data_tuple", ")", "# check if calling the callback function is about time\r", "if", "self", ".", "callback", "and", "any", "(", "converted_data_tuple_list", ")", "and", "(", "(", "self", ".", "write_interval", "and", "time", "(", ")", "-", "time_write", ">=", "self", ".", "write_interval", ")", "or", "not", "self", ".", "write_interval", ")", ":", "try", ":", "self", ".", "callback", "(", "converted_data_tuple_list", ")", "# callback function gets a list of lists of tuples\r", "except", "Exception", ":", "self", ".", "errback", "(", "sys", ".", "exc_info", "(", ")", ")", "else", ":", "converted_data_tuple_list", "=", "[", "None", "]", "*", "len", "(", "self", ".", "filter_func", ")", "time_write", "=", "time", "(", ")", "# update last write timestamp\r", "self", ".", "_data_conditions", "[", "index", "]", ".", "release", "(", ")", "logging", ".", "debug", "(", "'Stopping writer thread with index %d'", ",", "index", ")" ]
Writer thread continuously calling callback function for writing data when data becomes available.
[ "Writer", "thread", "continuously", "calling", "callback", "function", "for", "writing", "data", "when", "data", "becomes", "available", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L354-L403
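To make the writer's callback contract concrete, a minimal self-contained sketch follows; all names are illustrative and not part of pyBAR. The callback receives one list slot per configured filter function, each slot being either None or a list of converted-data tuples.

# Sketch of the callback contract used by the writer thread above (names illustrative).
def example_callback(converted_data_tuple_list):
    for index, tuple_list in enumerate(converted_data_tuple_list):
        if not tuple_list:  # slot is None or empty: no data arrived for this filter yet
            continue
        for converted_data_tuple in tuple_list:
            print("filter %d: tuple with %d element(s)" % (index, len(converted_data_tuple)))

# Dummy invocation: two filters configured, only the first one received data.
example_callback([[("raw_data", "meta_data")], None])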
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.get_data_from_buffer
def get_data_from_buffer(self, filter_func=None, converter_func=None): '''Reads local data buffer and returns data and meta data list. Returns ------- data : list List of data and meta data dicts. ''' if self._is_running: raise RuntimeError('Readout thread running') if not self.fill_buffer: logging.warning('Data buffer is not activated') return [convert_data_iterable(data_iterable, filter_func=filter_func, converter_func=converter_func) for data_iterable in self._data_buffer]
python
def get_data_from_buffer(self, filter_func=None, converter_func=None): '''Reads local data buffer and returns data and meta data list. Returns ------- data : list List of data and meta data dicts. ''' if self._is_running: raise RuntimeError('Readout thread running') if not self.fill_buffer: logging.warning('Data buffer is not activated') return [convert_data_iterable(data_iterable, filter_func=filter_func, converter_func=converter_func) for data_iterable in self._data_buffer]
[ "def", "get_data_from_buffer", "(", "self", ",", "filter_func", "=", "None", ",", "converter_func", "=", "None", ")", ":", "if", "self", ".", "_is_running", ":", "raise", "RuntimeError", "(", "'Readout thread running'", ")", "if", "not", "self", ".", "fill_buffer", ":", "logging", ".", "warning", "(", "'Data buffer is not activated'", ")", "return", "[", "convert_data_iterable", "(", "data_iterable", ",", "filter_func", "=", "filter_func", ",", "converter_func", "=", "converter_func", ")", "for", "data_iterable", "in", "self", ".", "_data_buffer", "]" ]
Reads local data buffer and returns data and meta data list. Returns ------- data : list List of data and meta data dicts.
[ "Reads", "local", "data", "buffer", "and", "returns", "data", "and", "meta", "data", "list", ".", "Returns", "-------", "data", ":", "list", "List", "of", "data", "and", "meta", "data", "dicts", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L425-L437
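A hedged usage sketch for get_data_from_buffer; `readout` stands in for an already configured FifoReadout instance and is an assumption, since constructing one requires a DUT:

# `readout` is assumed to be a FifoReadout run with fill_buffer=True and already
# stopped; calling this while the readout thread runs raises RuntimeError.
buffered_data = readout.get_data_from_buffer()
for data_iterable in buffered_data:  # one entry per filter/converter pair
    print("%d buffered readouts" % len(data_iterable))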
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.get_raw_data_from_buffer
def get_raw_data_from_buffer(self, filter_func=None, converter_func=None): '''Reads local data buffer and returns raw data array. Returns ------- data : np.array An array containing data words from the local data buffer. ''' if self._is_running: raise RuntimeError('Readout thread running') if not self.fill_buffer: logging.warning('Data buffer is not activated') return [convert_data_array(data_array_from_data_iterable(data_iterable), filter_func=filter_func, converter_func=converter_func) for data_iterable in self._data_buffer]
python
def get_raw_data_from_buffer(self, filter_func=None, converter_func=None): '''Reads local data buffer and returns raw data array. Returns ------- data : np.array An array containing data words from the local data buffer. ''' if self._is_running: raise RuntimeError('Readout thread running') if not self.fill_buffer: logging.warning('Data buffer is not activated') return [convert_data_array(data_array_from_data_iterable(data_iterable), filter_func=filter_func, converter_func=converter_func) for data_iterable in self._data_buffer]
[ "def", "get_raw_data_from_buffer", "(", "self", ",", "filter_func", "=", "None", ",", "converter_func", "=", "None", ")", ":", "if", "self", ".", "_is_running", ":", "raise", "RuntimeError", "(", "'Readout thread running'", ")", "if", "not", "self", ".", "fill_buffer", ":", "logging", ".", "warning", "(", "'Data buffer is not activated'", ")", "return", "[", "convert_data_array", "(", "data_array_from_data_iterable", "(", "data_iterable", ")", ",", "filter_func", "=", "filter_func", ",", "converter_func", "=", "converter_func", ")", "for", "data_iterable", "in", "self", ".", "_data_buffer", "]" ]
Reads local data buffer and returns raw data array. Returns ------- data : np.array An array containing data words from the local data buffer.
[ "Reads", "local", "data", "buffer", "and", "returns", "raw", "data", "array", ".", "Returns", "-------", "data", ":", "np", ".", "array", "An", "array", "containing", "data", "words", "from", "the", "local", "data", "buffer", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L439-L451
SiLab-Bonn/pyBAR
pybar/daq/fifo_readout.py
FifoReadout.read_raw_data_from_fifo
def read_raw_data_from_fifo(self, fifo, filter_func=None, converter_func=None): '''Reads FIFO data and returns raw data array. Returns ------- data : np.array An array containing FIFO data words. ''' return convert_data_array(self.dut[fifo].get_data(), filter_func=filter_func, converter_func=converter_func)
python
def read_raw_data_from_fifo(self, fifo, filter_func=None, converter_func=None): '''Reads FIFO data and returns raw data array. Returns ------- data : np.array An array containing FIFO data words. ''' return convert_data_array(self.dut[fifo].get_data(), filter_func=filter_func, converter_func=converter_func)
[ "def", "read_raw_data_from_fifo", "(", "self", ",", "fifo", ",", "filter_func", "=", "None", ",", "converter_func", "=", "None", ")", ":", "return", "convert_data_array", "(", "self", ".", "dut", "[", "fifo", "]", ".", "get_data", "(", ")", ",", "filter_func", "=", "filter_func", ",", "converter_func", "=", "converter_func", ")" ]
Reads FIFO data and returns raw data array. Returns ------- data : np.array An array containing FIFO data words.
[ "Reads", "FIFO", "data", "and", "returns", "raw", "data", "array", ".", "Returns", "-------", "data", ":", "np", ".", "array", "An", "array", "containing", "FIFO", "data", "words", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/fifo_readout.py#L453-L461
SiLab-Bonn/pyBAR
pybar/utils/utils.py
get_item_from_queue
def get_item_from_queue(Q, timeout=0.01): """ Attempts to retrieve an item from the queue Q. If Q is empty, None is returned. Blocks for 'timeout' seconds in case the queue is empty, so don't use this method for speedy retrieval of multiple items (use get_all_from_queue for that). """ try: item = Q.get(True, timeout) except Queue.Empty: return None return item
python
def get_item_from_queue(Q, timeout=0.01): """ Attempts to retrieve an item from the queue Q. If Q is empty, None is returned. Blocks for 'timeout' seconds in case the queue is empty, so don't use this method for speedy retrieval of multiple items (use get_all_from_queue for that). """ try: item = Q.get(True, timeout) except Queue.Empty: return None return item
[ "def", "get_item_from_queue", "(", "Q", ",", "timeout", "=", "0.01", ")", ":", "try", ":", "item", "=", "Q", ".", "get", "(", "True", ",", "0.01", ")", "except", "Queue", ".", "Empty", ":", "return", "None", "return", "item" ]
Attempts to retrieve an item from the queue Q. If Q is empty, None is returned. Blocks for 'timeout' seconds in case the queue is empty, so don't use this method for speedy retrieval of multiple items (use get_all_from_queue for that).
[ "Attempts", "to", "retrieve", "an", "item", "from", "the", "queue", "Q", ".", "If", "Q", "is", "empty", "None", "is", "returned", ".", "Blocks", "for", "timeout", "seconds", "in", "case", "the", "queue", "is", "empty", "so", "don", "t", "use", "this", "method", "for", "speedy", "retrieval", "of", "multiple", "items", "(", "use", "get_all_from_queue", "for", "that", ")", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L38-L51
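A small runnable example, assuming pyBAR is importable; the standard-library queue module is named Queue on Python 2, which this codebase targets. (The hardcoded 0.01 in the original body ignored the timeout parameter; the version above passes it through as the docstring promises.)

import Queue  # Python 2 stdlib module name, as used by the function above
from pybar.utils.utils import get_item_from_queue

q = Queue.Queue()
q.put(42)
print(get_item_from_queue(q))                # 42
print(get_item_from_queue(q, timeout=0.05))  # None, returned after blocking for the timeout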
SiLab-Bonn/pyBAR
pybar/utils/utils.py
argmin_list
def argmin_list(seq, func): """ Return a list of elements of seq[i] with the lowest func(seq[i]) scores. >>> argmin_list(['one', 'to', 'three', 'or'], len) ['to', 'or'] """ best_score, best = func(seq[0]), [] for x in seq: x_score = func(x) if x_score < best_score: best, best_score = [x], x_score elif x_score == best_score: best.append(x) return best
python
def argmin_list(seq, func): """ Return a list of elements of seq[i] with the lowest func(seq[i]) scores. >>> argmin_list(['one', 'to', 'three', 'or'], len) ['to', 'or'] """ best_score, best = func(seq[0]), [] for x in seq: x_score = func(x) if x_score < best_score: best, best_score = [x], x_score elif x_score == best_score: best.append(x) return best
[ "def", "argmin_list", "(", "seq", ",", "func", ")", ":", "best_score", ",", "best", "=", "func", "(", "seq", "[", "0", "]", ")", ",", "[", "]", "for", "x", "in", "seq", ":", "x_score", "=", "func", "(", "x", ")", "if", "x_score", "<", "best_score", ":", "best", ",", "best_score", "=", "[", "x", "]", ",", "x_score", "elif", "x_score", "==", "best_score", ":", "best", ".", "append", "(", "x", ")", "return", "best" ]
Return a list of elements of seq[i] with the lowest func(seq[i]) scores. >>> argmin_list(['one', 'to', 'three', 'or'], len) ['to', 'or']
[ "Return", "a", "list", "of", "elements", "of", "seq", "[", "i", "]", "with", "the", "lowest", "func", "(", "seq", "[", "i", "]", ")", "scores", ".", ">>>", "argmin_list", "(", "[", "one", "to", "three", "or", "]", "len", ")", "[", "to", "or", "]" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L62-L75
SiLab-Bonn/pyBAR
pybar/utils/utils.py
flatten_iterable
def flatten_iterable(iterable): """flatten iterable, but leaves out strings [[[1, 2, 3], [4, 5]], 6] -> [1, 2, 3, 4, 5, 6] """ for item in iterable: if isinstance(item, collections.Iterable) and not isinstance(item, basestring): for sub in flatten_iterable(item): yield sub else: yield item
python
def flatten_iterable(iterable): """flatten iterable, but leaves out strings [[[1, 2, 3], [4, 5]], 6] -> [1, 2, 3, 4, 5, 6] """ for item in iterable: if isinstance(item, collections.Iterable) and not isinstance(item, basestring): for sub in flatten_iterable(item): yield sub else: yield item
[ "def", "flatten_iterable", "(", "iterable", ")", ":", "for", "item", "in", "iterable", ":", "if", "isinstance", "(", "item", ",", "collections", ".", "Iterable", ")", "and", "not", "isinstance", "(", "item", ",", "basestring", ")", ":", "for", "sub", "in", "flatten_iterable", "(", "item", ")", ":", "yield", "sub", "else", ":", "yield", "item" ]
flatten iterable, but leaves out strings [[[1, 2, 3], [4, 5]], 6] -> [1, 2, 3, 4, 5, 6]
[ "flatten", "iterable", "but", "leaves", "out", "strings", "[[[", "1", "2", "3", "]", "[", "4", "5", "]]", "6", "]", "-", ">", "[", "1", "2", "3", "4", "5", "6", "]" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L198-L209
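flatten_iterable is a generator, so wrap it in list() to materialize the result; a runnable example (assuming pyBAR is importable, under Python 2 because the implementation relies on basestring):

from pybar.utils.utils import flatten_iterable

print(list(flatten_iterable([[[1, 2, 3], [4, 5]], 6])))  # [1, 2, 3, 4, 5, 6]
print(list(flatten_iterable([['ab', 'cd'], 'ef'])))      # strings stay whole: ['ab', 'cd', 'ef']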
SiLab-Bonn/pyBAR
pybar/utils/utils.py
iterable
def iterable(item): """generate iterable from item, but leaves out strings """ if isinstance(item, collections.Iterable) and not isinstance(item, basestring): return item else: return [item]
python
def iterable(item): """generate iterable from item, but leaves out strings """ if isinstance(item, collections.Iterable) and not isinstance(item, basestring): return item else: return [item]
[ "def", "iterable", "(", "item", ")", ":", "if", "isinstance", "(", "item", ",", "collections", ".", "Iterable", ")", "and", "not", "isinstance", "(", "item", ",", "basestring", ")", ":", "return", "item", "else", ":", "return", "[", "item", "]" ]
generate iterable from item, but leaves out strings
[ "generate", "iterable", "from", "item", "but", "leaves", "out", "strings" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L212-L219
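A quick runnable illustration of the three cases (same Python 2 assumption):

from pybar.utils.utils import iterable

print(iterable(5))       # [5]      scalars are wrapped in a list
print(iterable('abc'))   # ['abc']  strings are wrapped, not treated as iterables
print(iterable([1, 2]))  # [1, 2]   real iterables pass through unchanged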
SiLab-Bonn/pyBAR
pybar/utils/utils.py
natsorted
def natsorted(seq, cmp=natcmp): "Returns a copy of seq, sorted by natural string sort." import copy temp = copy.copy(seq) natsort(temp, cmp) return temp
python
def natsorted(seq, cmp=natcmp): "Returns a copy of seq, sorted by natural string sort." import copy temp = copy.copy(seq) natsort(temp, cmp) return temp
[ "def", "natsorted", "(", "seq", ",", "cmp", "=", "natcmp", ")", ":", "import", "copy", "temp", "=", "copy", ".", "copy", "(", "seq", ")", "natsort", "(", "temp", ",", "cmp", ")", "return", "temp" ]
Returns a copy of seq, sorted by natural string sort.
[ "Returns", "a", "copy", "of", "seq", "sorted", "by", "natural", "string", "sort", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L258-L263
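A sketch of the expected behaviour, assuming natcmp implements the usual digit-aware comparison (its definition is not part of this excerpt):

from pybar.utils.utils import natsorted

print(natsorted(['scan_10', 'scan_2', 'scan_1']))
# expected: ['scan_1', 'scan_2', 'scan_10'] (plain sorted() would give 'scan_1', 'scan_10', 'scan_2')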
SiLab-Bonn/pyBAR
pybar/utils/utils.py
get_iso_time
def get_iso_time(): '''returns current time as datetime object with microsecond precision, mapping to and from an ISO string in an ugly way; convert to ISO string with str() ''' t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) t4 = t2.__str__() try: t4a, t4b = t4.split(".", 1) except ValueError: t4a = t4 t4b = '000000' t5 = datetime.datetime.strptime(t4a, "%Y-%m-%d %H:%M:%S") ms = int(t4b.ljust(6, '0')[:6]) return t5.replace(microsecond=ms)
python
def get_iso_time(): '''returns current time as datetime object with microsecond precision, mapping to and from an ISO string in an ugly way; convert to ISO string with str() ''' t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) t4 = t2.__str__() try: t4a, t4b = t4.split(".", 1) except ValueError: t4a = t4 t4b = '000000' t5 = datetime.datetime.strptime(t4a, "%Y-%m-%d %H:%M:%S") ms = int(t4b.ljust(6, '0')[:6]) return t5.replace(microsecond=ms)
[ "def", "get_iso_time", "(", ")", ":", "t1", "=", "time", ".", "time", "(", ")", "t2", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "t1", ")", "t4", "=", "t2", ".", "__str__", "(", ")", "try", ":", "t4a", ",", "t4b", "=", "t4", ".", "split", "(", "\".\"", ",", "1", ")", "except", "ValueError", ":", "t4a", "=", "t4", "t4b", "=", "'000000'", "t5", "=", "datetime", ".", "datetime", ".", "strptime", "(", "t4a", ",", "\"%Y-%m-%d %H:%M:%S\"", ")", "ms", "=", "int", "(", "t4b", ".", "ljust", "(", "6", ",", "'0'", ")", "[", ":", "6", "]", ")", "return", "t5", ".", "replace", "(", "microsecond", "=", "ms", ")" ]
returns current time as datetime object with microsecond precision, mapping to and from an ISO string in an ugly way; convert to ISO string with str()
[ "returns", "time", "as", "ISO", "string", "mapping", "to", "and", "from", "datetime", "in", "ugly", "way", "convert", "to", "string", "with", "str", "()" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L267-L282
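Example usage; the printed value is illustrative:

from pybar.utils.utils import get_iso_time

t = get_iso_time()  # datetime object with microsecond precision
print(str(t))       # e.g. '2019-03-01 12:34:56.789012'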
SiLab-Bonn/pyBAR
pybar/utils/utils.py
get_float_time
def get_float_time(): '''returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's ''' t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) return time.mktime(t2.timetuple()) + 1e-6 * t2.microsecond
python
def get_float_time(): '''returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's ''' t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) return time.mktime(t2.timetuple()) + 1e-6 * t2.microsecond
[ "def", "get_float_time", "(", ")", ":", "t1", "=", "time", ".", "time", "(", ")", "t2", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "t1", ")", "return", "time", ".", "mktime", "(", "t2", ".", "timetuple", "(", ")", ")", "+", "1e-6", "*", "t2", ".", "microsecond" ]
returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's
[ "returns", "time", "as", "double", "precision", "floats", "-", "Time64", "in", "pytables", "-", "mapping", "to", "and", "from", "python", "datetime", "s" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L285-L291
SiLab-Bonn/pyBAR
pybar/utils/utils.py
groupby_dict
def groupby_dict(dictionary, key): ''' Group dict of dicts by key. ''' return dict((k, list(g)) for k, g in itertools.groupby(sorted(dictionary.keys(), key=lambda name: dictionary[name][key]), key=lambda name: dictionary[name][key]))
python
def groupby_dict(dictionary, key): ''' Group dict of dicts by key. ''' return dict((k, list(g)) for k, g in itertools.groupby(sorted(dictionary.keys(), key=lambda name: dictionary[name][key]), key=lambda name: dictionary[name][key]))
[ "def", "groupby_dict", "(", "dictionary", ",", "key", ")", ":", "return", "dict", "(", "(", "k", ",", "list", "(", "g", ")", ")", "for", "k", ",", "g", "in", "itertools", ".", "groupby", "(", "sorted", "(", "dictionary", ".", "keys", "(", ")", ",", "key", "=", "lambda", "name", ":", "dictionary", "[", "name", "]", "[", "key", "]", ")", ",", "key", "=", "lambda", "name", ":", "dictionary", "[", "name", "]", "[", "key", "]", ")", ")" ]
Group dict of dicts by key.
[ "Group", "dict", "of", "dicts", "by", "key", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L313-L316
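A runnable example that makes the grouping direction explicit: the outer keys are grouped by the value stored under the given inner key (names illustrative):

from pybar.utils.utils import groupby_dict

scans = {'scan_1': {'module': 'm1'}, 'scan_2': {'module': 'm2'}, 'scan_3': {'module': 'm1'}}
groups = groupby_dict(scans, 'module')
print(groups['m1'])  # ['scan_1', 'scan_3']
print(groups['m2'])  # ['scan_2']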
SiLab-Bonn/pyBAR
pybar/utils/utils.py
zip_nofill
def zip_nofill(*iterables): '''Zipping iterables without fillvalue. Note: https://stackoverflow.com/questions/38054593/zip-longest-without-fillvalue ''' return (tuple([entry for entry in iterable if entry is not None]) for iterable in itertools.izip_longest(*iterables, fillvalue=None))
python
def zip_nofill(*iterables): '''Zipping iterables without fillvalue. Note: https://stackoverflow.com/questions/38054593/zip-longest-without-fillvalue ''' return (tuple([entry for entry in iterable if entry is not None]) for iterable in itertools.izip_longest(*iterables, fillvalue=None))
[ "def", "zip_nofill", "(", "*", "iterables", ")", ":", "return", "(", "tuple", "(", "[", "entry", "for", "entry", "in", "iterable", "if", "entry", "is", "not", "None", "]", ")", "for", "iterable", "in", "itertools", ".", "izip_longest", "(", "*", "iterables", ",", "fillvalue", "=", "None", ")", ")" ]
Zipping iterables without fillvalue. Note: https://stackoverflow.com/questions/38054593/zip-longest-without-fillvalue
[ "Zipping", "iterables", "without", "fillvalue", ".", "Note", ":", "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "38054593", "/", "zip", "-", "longest", "-", "without", "-", "fillvalue" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L334-L339
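A runnable example; the tuples shrink once the shorter input is exhausted (Python 2, since the implementation uses itertools.izip_longest). Note that genuine None entries in the inputs would be dropped as well, a side effect of using None as the fill value:

from pybar.utils.utils import zip_nofill

print(list(zip_nofill([1, 2, 3], ['a'])))  # [(1, 'a'), (2,), (3,)]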
SiLab-Bonn/pyBAR
pybar/utils/utils.py
find_file_dir_up
def find_file_dir_up(filename, path=None, n=None): '''Finding file in directory upwards. ''' if path is None: path = os.getcwd() i = 0 while True: current_path = path for _ in range(i): current_path = os.path.split(current_path)[0] if os.path.isfile(os.path.join(current_path, filename)): # found file and return return os.path.join(current_path, filename) elif os.path.dirname(current_path) == current_path: # root of filesystem return elif n is not None and i == n: return else: # file not found i += 1 continue
python
def find_file_dir_up(filename, path=None, n=None): '''Finding file in directory upwards. ''' if path is None: path = os.getcwd() i = 0 while True: current_path = path for _ in range(i): current_path = os.path.split(current_path)[0] if os.path.isfile(os.path.join(current_path, filename)): # found file and return return os.path.join(current_path, filename) elif os.path.dirname(current_path) == current_path: # root of filesystem return elif n is not None and i == n: return else: # file not found i += 1 continue
[ "def", "find_file_dir_up", "(", "filename", ",", "path", "=", "None", ",", "n", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "os", ".", "getcwd", "(", ")", "i", "=", "0", "while", "True", ":", "current_path", "=", "path", "for", "_", "in", "range", "(", "i", ")", ":", "current_path", "=", "os", ".", "path", ".", "split", "(", "current_path", ")", "[", "0", "]", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "current_path", ",", "filename", ")", ")", ":", "# found file and return\r", "return", "os", ".", "path", ".", "join", "(", "current_path", ",", "filename", ")", "elif", "os", ".", "path", ".", "dirname", "(", "current_path", ")", "==", "current_path", ":", "# root of filesystem\r", "return", "elif", "n", "is", "not", "None", "and", "i", "==", "n", ":", "return", "else", ":", "# file not found\r", "i", "+=", "1", "continue" ]
Finding file in directory upwards.
[ "Finding", "file", "in", "directory", "upwards", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/utils/utils.py#L342-L360
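A hedged usage sketch; the file name is purely illustrative:

from pybar.utils.utils import find_file_dir_up

# Search for 'configuration.yaml' in the current directory and then upwards,
# giving up after n=2 parent levels; returns None when nothing is found.
path = find_file_dir_up('configuration.yaml', n=2)
print(path)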
SiLab-Bonn/pyBAR
pybar/fei4/register.py
load_configuration_from_text_file
def load_configuration_from_text_file(register, configuration_file): '''Loading configuration from text files to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Full path (directory and filename) of the configuration file. If name is not given, reload configuration from file. ''' logging.info("Loading configuration: %s" % configuration_file) register.configuration_file = configuration_file config_dict = parse_global_config(register.configuration_file) if 'Flavor' in config_dict: flavor = config_dict.pop('Flavor').lower() if register.flavor: pass else: register.init_fe_type(flavor) else: if register.flavor: pass else: raise ValueError('Flavor not specified') if 'Chip_ID' in config_dict: chip_id = config_dict.pop('Chip_ID') if register.chip_address: pass else: register.set_chip_address(chip_address=chip_id & 0x7, broadcast=True if chip_id & 0x8 else False) elif 'Chip_Address' in config_dict: chip_address = config_dict.pop('Chip_Address') if register.chip_address: pass else: register.set_chip_address(chip_address) else: if register.chip_id_initialized: pass else: raise ValueError('Chip address not specified') global_registers_configured = [] pixel_registers_configured = [] for key in config_dict.keys(): value = config_dict.pop(key) if key in register.global_registers: register.set_global_register_value(key, value) global_registers_configured.append(key) elif key in register.pixel_registers: register.set_pixel_register_value(key, value) pixel_registers_configured.append(key) elif key in register.calibration_parameters: register.calibration_parameters[key] = value else: register.miscellaneous[key] = value global_registers = register.get_global_register_attributes('name', readonly=False) pixel_registers = register.pixel_registers.keys() global_registers_not_configured = set(global_registers).difference(global_registers_configured) pixel_registers_not_configured = set(pixel_registers).difference(pixel_registers_configured) if global_registers_not_configured: logging.warning("Following global register(s) not configured: {}".format(', '.join('\'' + reg + '\'' for reg in global_registers_not_configured))) if pixel_registers_not_configured: logging.warning("Following pixel register(s) not configured: {}".format(', '.join('\'' + reg + '\'' for reg in pixel_registers_not_configured))) if register.miscellaneous: logging.warning("Found following unknown parameter(s): {}".format(', '.join('\'' + parameter + '\'' for parameter in register.miscellaneous.iterkeys())))
python
def load_configuration_from_text_file(register, configuration_file): '''Loading configuration from text files to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Full path (directory and filename) of the configuration file. If name is not given, reload configuration from file. ''' logging.info("Loading configuration: %s" % configuration_file) register.configuration_file = configuration_file config_dict = parse_global_config(register.configuration_file) if 'Flavor' in config_dict: flavor = config_dict.pop('Flavor').lower() if register.flavor: pass else: register.init_fe_type(flavor) else: if register.flavor: pass else: raise ValueError('Flavor not specified') if 'Chip_ID' in config_dict: chip_id = config_dict.pop('Chip_ID') if register.chip_address: pass else: register.set_chip_address(chip_address=chip_id & 0x7, broadcast=True if chip_id & 0x8 else False) elif 'Chip_Address' in config_dict: chip_address = config_dict.pop('Chip_Address') if register.chip_address: pass else: register.set_chip_address(chip_address) else: if register.chip_id_initialized: pass else: raise ValueError('Chip address not specified') global_registers_configured = [] pixel_registers_configured = [] for key in config_dict.keys(): value = config_dict.pop(key) if key in register.global_registers: register.set_global_register_value(key, value) global_registers_configured.append(key) elif key in register.pixel_registers: register.set_pixel_register_value(key, value) pixel_registers_configured.append(key) elif key in register.calibration_parameters: register.calibration_parameters[key] = value else: register.miscellaneous[key] = value global_registers = register.get_global_register_attributes('name', readonly=False) pixel_registers = register.pixel_registers.keys() global_registers_not_configured = set(global_registers).difference(global_registers_configured) pixel_registers_not_configured = set(pixel_registers).difference(pixel_registers_configured) if global_registers_not_configured: logging.warning("Following global register(s) not configured: {}".format(', '.join('\'' + reg + '\'' for reg in global_registers_not_configured))) if pixel_registers_not_configured: logging.warning("Following pixel register(s) not configured: {}".format(', '.join('\'' + reg + '\'' for reg in pixel_registers_not_configured))) if register.miscellaneous: logging.warning("Found following unknown parameter(s): {}".format(', '.join('\'' + parameter + '\'' for parameter in register.miscellaneous.iterkeys())))
[ "def", "load_configuration_from_text_file", "(", "register", ",", "configuration_file", ")", ":", "logging", ".", "info", "(", "\"Loading configuration: %s\"", "%", "configuration_file", ")", "register", ".", "configuration_file", "=", "configuration_file", "config_dict", "=", "parse_global_config", "(", "register", ".", "configuration_file", ")", "if", "'Flavor'", "in", "config_dict", ":", "flavor", "=", "config_dict", ".", "pop", "(", "'Flavor'", ")", ".", "lower", "(", ")", "if", "register", ".", "flavor", ":", "pass", "else", ":", "register", ".", "init_fe_type", "(", "flavor", ")", "else", ":", "if", "register", ".", "flavor", ":", "pass", "else", ":", "raise", "ValueError", "(", "'Flavor not specified'", ")", "if", "'Chip_ID'", "in", "config_dict", ":", "chip_id", "=", "config_dict", ".", "pop", "(", "'Chip_ID'", ")", "if", "register", ".", "chip_address", ":", "pass", "else", ":", "register", ".", "set_chip_address", "(", "chip_address", "=", "chip_id", "&", "0x7", ",", "broadcast", "=", "True", "if", "chip_id", "&", "0x8", "else", "False", ")", "elif", "'Chip_Address'", "in", "config_dict", ":", "chip_address", "=", "config_dict", ".", "pop", "(", "'Chip_Address'", ")", "if", "register", ".", "chip_address", ":", "pass", "else", ":", "register", ".", "set_chip_address", "(", "chip_address", ")", "else", ":", "if", "register", ".", "chip_id_initialized", ":", "pass", "else", ":", "raise", "ValueError", "(", "'Chip address not specified'", ")", "global_registers_configured", "=", "[", "]", "pixel_registers_configured", "=", "[", "]", "for", "key", "in", "config_dict", ".", "keys", "(", ")", ":", "value", "=", "config_dict", ".", "pop", "(", "key", ")", "if", "key", "in", "register", ".", "global_registers", ":", "register", ".", "set_global_register_value", "(", "key", ",", "value", ")", "global_registers_configured", ".", "append", "(", "key", ")", "elif", "key", "in", "register", ".", "pixel_registers", ":", "register", ".", "set_pixel_register_value", "(", "key", ",", "value", ")", "pixel_registers_configured", ".", "append", "(", "key", ")", "elif", "key", "in", "register", ".", "calibration_parameters", ":", "register", ".", "calibration_parameters", "[", "key", "]", "=", "value", "else", ":", "register", ".", "miscellaneous", "[", "key", "]", "=", "value", "global_registers", "=", "register", ".", "get_global_register_attributes", "(", "'name'", ",", "readonly", "=", "False", ")", "pixel_registers", "=", "register", ".", "pixel_registers", ".", "keys", "(", ")", "global_registers_not_configured", "=", "set", "(", "global_registers", ")", ".", "difference", "(", "global_registers_configured", ")", "pixel_registers_not_configured", "=", "set", "(", "pixel_registers", ")", ".", "difference", "(", "pixel_registers_configured", ")", "if", "global_registers_not_configured", ":", "logging", ".", "warning", "(", "\"Following global register(s) not configured: {}\"", ".", "format", "(", "', '", ".", "join", "(", "'\\''", "+", "reg", "+", "'\\''", "for", "reg", "in", "global_registers_not_configured", ")", ")", ")", "if", "pixel_registers_not_configured", ":", "logging", ".", "warning", "(", "\"Following pixel register(s) not configured: {}\"", ".", "format", "(", "', '", ".", "join", "(", "'\\''", "+", "reg", "+", "'\\''", "for", "reg", "in", "pixel_registers_not_configured", ")", ")", ")", "if", "register", ".", "miscellaneous", ":", "logging", ".", "warning", "(", "\"Found following unknown parameter(s): {}\"", ".", "format", "(", "', '", ".", "join", "(", "'\\''", "+", "parameter", 
"+", "'\\''", "for", "parameter", "in", "register", ".", "miscellaneous", ".", "iterkeys", "(", ")", ")", ")", ")" ]
Loading configuration from text files to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Full path (directory and filename) of the configuration file. If name is not given, reload configuration from file.
[ "Loading", "configuration", "from", "text", "files", "to", "register", "object", "Parameters", "----------", "register", ":", "pybar", ".", "fei4", ".", "register", "object", "configuration_file", ":", "string", "Full", "path", "(", "directory", "and", "filename", ")", "of", "the", "configuration", "file", ".", "If", "name", "is", "not", "given", "reload", "configuration", "from", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L715-L781
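For orientation, a hedged sketch of the key/value text format this parser consumes, reconstructed from save_configuration_to_text_file further below; the concrete values are illustrative only:

# FEI4 Flavor
Flavor fei4b
# FEI4 Chip ID
Chip_ID 8
# FEI4 Global Registers
Vthin_AltFine 50
# FEI4 Pixel Registers
TDAC ../tdacs/tdac_module_0.dat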
SiLab-Bonn/pyBAR
pybar/fei4/register.py
load_configuration_from_hdf5
def load_configuration_from_hdf5(register, configuration_file, node=''): '''Loading configuration from HDF5 file to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. node : string Additional identifier (subgroup). Useful when more than one configuration is stored inside a HDF5 file. ''' def load_conf(): logging.info("Loading configuration: %s" % h5_file.filename) register.configuration_file = h5_file.filename if node: configuration_group = h5_file.root.configuration.node else: configuration_group = h5_file.root.configuration # miscellaneous for row in configuration_group.miscellaneous: name = row['name'] try: value = literal_eval(row['value']) except ValueError: value = row['value'] if name == 'Flavor': if register.flavor: pass else: register.init_fe_type(value) elif name == 'Chip_ID': if register.chip_address: pass else: register.set_chip_address(chip_address=value & 0x7, broadcast=True if value & 0x8 else False) elif name == 'Chip_Address': if register.chip_address: pass else: register.set_chip_address(chip_address=value, broadcast=False) else: register.miscellaneous[name] = value if register.flavor: pass else: raise ValueError('Flavor not specified') if register.chip_id_initialized: pass else: raise ValueError('Chip address not specified') # calibration parameters for row in configuration_group.calibration_parameters: name = row['name'] value = row['value'] register.calibration_parameters[name] = literal_eval(value) # global for row in configuration_group.global_register: name = row['name'] value = row['value'] register.set_global_register_value(name, literal_eval(value)) # pixels for pixel_reg in h5_file.iter_nodes(configuration_group, 'CArray'): # ['Enable', 'TDAC', 'C_High', 'C_Low', 'Imon', 'FDAC', 'EnableDigInj']: if pixel_reg.name in register.pixel_registers: register.set_pixel_register_value(pixel_reg.name, np.asarray(pixel_reg).T) # np.asarray(h5_file.get_node(configuration_group, name=pixel_reg)).T if isinstance(configuration_file, tb.file.File): h5_file = configuration_file load_conf() else: with tb.open_file(configuration_file, mode="r", title='') as h5_file: load_conf()
python
def load_configuration_from_hdf5(register, configuration_file, node=''): '''Loading configuration from HDF5 file to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. node : string Additional identifier (subgroup). Useful when more than one configuration is stored inside a HDF5 file. ''' def load_conf(): logging.info("Loading configuration: %s" % h5_file.filename) register.configuration_file = h5_file.filename if node: configuration_group = h5_file.root.configuration.node else: configuration_group = h5_file.root.configuration # miscellaneous for row in configuration_group.miscellaneous: name = row['name'] try: value = literal_eval(row['value']) except ValueError: value = row['value'] if name == 'Flavor': if register.flavor: pass else: register.init_fe_type(value) elif name == 'Chip_ID': if register.chip_address: pass else: register.set_chip_address(chip_address=value & 0x7, broadcast=True if value & 0x8 else False) elif name == 'Chip_Address': if register.chip_address: pass else: register.set_chip_address(chip_address=value, broadcast=False) else: register.miscellaneous[name] = value if register.flavor: pass else: raise ValueError('Flavor not specified') if register.chip_id_initialized: pass else: raise ValueError('Chip address not specified') # calibration parameters for row in configuration_group.calibration_parameters: name = row['name'] value = row['value'] register.calibration_parameters[name] = literal_eval(value) # global for row in configuration_group.global_register: name = row['name'] value = row['value'] register.set_global_register_value(name, literal_eval(value)) # pixels for pixel_reg in h5_file.iter_nodes(configuration_group, 'CArray'): # ['Enable', 'TDAC', 'C_High', 'C_Low', 'Imon', 'FDAC', 'EnableDigInj']: if pixel_reg.name in register.pixel_registers: register.set_pixel_register_value(pixel_reg.name, np.asarray(pixel_reg).T) # np.asarray(h5_file.get_node(configuration_group, name=pixel_reg)).T if isinstance(configuration_file, tb.file.File): h5_file = configuration_file load_conf() else: with tb.open_file(configuration_file, mode="r", title='') as h5_file: load_conf()
[ "def", "load_configuration_from_hdf5", "(", "register", ",", "configuration_file", ",", "node", "=", "''", ")", ":", "def", "load_conf", "(", ")", ":", "logging", ".", "info", "(", "\"Loading configuration: %s\"", "%", "h5_file", ".", "filename", ")", "register", ".", "configuration_file", "=", "h5_file", ".", "filename", "if", "node", ":", "configuration_group", "=", "h5_file", ".", "root", ".", "configuration", ".", "node", "else", ":", "configuration_group", "=", "h5_file", ".", "root", ".", "configuration", "# miscellaneous\r", "for", "row", "in", "configuration_group", ".", "miscellaneous", ":", "name", "=", "row", "[", "'name'", "]", "try", ":", "value", "=", "literal_eval", "(", "row", "[", "'value'", "]", ")", "except", "ValueError", ":", "value", "=", "row", "[", "'value'", "]", "if", "name", "==", "'Flavor'", ":", "if", "register", ".", "flavor", ":", "pass", "else", ":", "register", ".", "init_fe_type", "(", "value", ")", "elif", "name", "==", "'Chip_ID'", ":", "if", "register", ".", "chip_address", ":", "pass", "else", ":", "register", ".", "set_chip_address", "(", "chip_address", "=", "value", "&", "0x7", ",", "broadcast", "=", "True", "if", "value", "&", "0x8", "else", "False", ")", "elif", "name", "==", "'Chip_Address'", ":", "if", "register", ".", "chip_address", ":", "pass", "else", ":", "register", ".", "set_chip_address", "(", "chip_address", "=", "value", ",", "broadcast", "=", "False", ")", "else", ":", "register", ".", "miscellaneous", "[", "name", "]", "=", "value", "if", "register", ".", "flavor", ":", "pass", "else", ":", "raise", "ValueError", "(", "'Flavor not specified'", ")", "if", "register", ".", "chip_id_initialized", ":", "pass", "else", ":", "raise", "ValueError", "(", "'Chip address not specified'", ")", "# calibration parameters\r", "for", "row", "in", "configuration_group", ".", "calibration_parameters", ":", "name", "=", "row", "[", "'name'", "]", "value", "=", "row", "[", "'value'", "]", "register", ".", "calibration_parameters", "[", "name", "]", "=", "literal_eval", "(", "value", ")", "# global\r", "for", "row", "in", "configuration_group", ".", "global_register", ":", "name", "=", "row", "[", "'name'", "]", "value", "=", "row", "[", "'value'", "]", "register", ".", "set_global_register_value", "(", "name", ",", "literal_eval", "(", "value", ")", ")", "# pixels\r", "for", "pixel_reg", "in", "h5_file", ".", "iter_nodes", "(", "configuration_group", ",", "'CArray'", ")", ":", "# ['Enable', 'TDAC', 'C_High', 'C_Low', 'Imon', 'FDAC', 'EnableDigInj']:\r", "if", "pixel_reg", ".", "name", "in", "register", ".", "pixel_registers", ":", "register", ".", "set_pixel_register_value", "(", "pixel_reg", ".", "name", ",", "np", ".", "asarray", "(", "pixel_reg", ")", ".", "T", ")", "# np.asarray(h5_file.get_node(configuration_group, name=pixel_reg)).T\r", "if", "isinstance", "(", "configuration_file", ",", "tb", ".", "file", ".", "File", ")", ":", "h5_file", "=", "configuration_file", "load_conf", "(", ")", "else", ":", "with", "tb", ".", "open_file", "(", "configuration_file", ",", "mode", "=", "\"r\"", ",", "title", "=", "''", ")", "as", "h5_file", ":", "load_conf", "(", ")" ]
Loading configuration from HDF5 file to register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. node : string Additional identifier (subgroup). Useful when more than one configuration is stored inside a HDF5 file.
[ "Loading", "configuration", "from", "HDF5", "file", "to", "register", "object", "Parameters", "----------", "register", ":", "pybar", ".", "fei4", ".", "register", "object", "configuration_file", ":", "string", "file", "Filename", "of", "the", "HDF5", "configuration", "file", "or", "file", "object", ".", "node", ":", "string", "Additional", "identifier", "(", "subgroup", ")", ".", "Useful", "when", "more", "than", "one", "configuration", "is", "stored", "inside", "a", "HDF5", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L784-L860
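Both call styles accepted by the isinstance check above, as a hedged sketch; `register` is assumed to be an existing pybar.fei4.register object and the file name is illustrative:

import tables as tb
from pybar.fei4.register import load_configuration_from_hdf5

# Variant 1: pass a file name; the function opens and closes the file itself.
load_configuration_from_hdf5(register, 'module_config.h5')

# Variant 2: pass an already open pytables file object.
with tb.open_file('module_config.h5', mode='r') as h5_file:
    load_configuration_from_hdf5(register, h5_file)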
SiLab-Bonn/pyBAR
pybar/fei4/register.py
save_configuration_to_text_file
def save_configuration_to_text_file(register, configuration_file): '''Saving configuration to text files from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Filename of the configuration file. ''' configuration_path, filename = os.path.split(configuration_file) if os.path.split(configuration_path)[1] == 'configs': configuration_path = os.path.split(configuration_path)[0] filename = os.path.splitext(filename)[0].strip() register.configuration_file = os.path.join(os.path.join(configuration_path, 'configs'), filename + ".cfg") if os.path.isfile(register.configuration_file): logging.warning("Overwriting configuration: %s", register.configuration_file) else: logging.info("Saving configuration: %s" % register.configuration_file) pixel_reg_dict = {} for path in ["tdacs", "fdacs", "masks", "configs"]: configuration_file_path = os.path.join(configuration_path, path) if not os.path.exists(configuration_file_path): os.makedirs(configuration_file_path) if path == "tdacs": dac = register.get_pixel_register_objects(name="TDAC")[0] dac_config_path = os.path.join(configuration_file_path, "_".join([dac['name'].lower(), filename]) + ".dat") write_pixel_dac_config(dac_config_path, dac['value']) pixel_reg_dict[dac['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "fdacs": dac = register.get_pixel_register_objects(name="FDAC")[0] dac_config_path = os.path.join(configuration_file_path, "_".join([dac['name'].lower(), filename]) + ".dat") write_pixel_dac_config(dac_config_path, dac['value']) pixel_reg_dict[dac['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "masks": masks = register.get_pixel_register_objects(bitlength=1) for mask in masks: dac_config_path = os.path.join(configuration_file_path, "_".join([mask['name'].lower(), filename]) + ".dat") write_pixel_mask_config(dac_config_path, mask['value']) pixel_reg_dict[mask['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "configs": with open(register.configuration_file, 'w') as f: lines = [] lines.append("# FEI4 Flavor\n") lines.append('%s %s\n' % ('Flavor', register.flavor)) lines.append("\n# FEI4 Chip ID\n") lines.append('%s %d\n' % ('Chip_ID', register.chip_id)) lines.append("\n# FEI4 Global Registers\n") global_regs = register.get_global_register_objects(readonly=False) for global_reg in sorted(global_regs, key=itemgetter('name')): lines.append('%s %d\n' % (global_reg['name'], global_reg['value'])) lines.append("\n# FEI4 Pixel Registers\n") for key in sorted(pixel_reg_dict): lines.append('%s %s\n' % (key, pixel_reg_dict[key])) lines.append("\n# FEI4 Calibration Parameters\n") for key in register.calibration_parameters: if register.calibration_parameters[key] is None: lines.append('%s %s\n' % (key, register.calibration_parameters[key])) elif isinstance(register.calibration_parameters[key], (float, int, long)): lines.append('%s %s\n' % (key, round(register.calibration_parameters[key], 4))) elif isinstance(register.calibration_parameters[key], list): lines.append('%s %s\n' % (key, [round(elem, 2) for elem in register.calibration_parameters[key]])) else: raise ValueError('type %s not supported' % type(register.calibration_parameters[key])) if register.miscellaneous: lines.append("\n# Miscellaneous\n") for key, value in register.miscellaneous.iteritems(): lines.append('%s %s\n' % (key, value)) f.writelines(lines)
python
def save_configuration_to_text_file(register, configuration_file): '''Saving configuration to text files from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Filename of the configuration file. ''' configuration_path, filename = os.path.split(configuration_file) if os.path.split(configuration_path)[1] == 'configs': configuration_path = os.path.split(configuration_path)[0] filename = os.path.splitext(filename)[0].strip() register.configuration_file = os.path.join(os.path.join(configuration_path, 'configs'), filename + ".cfg") if os.path.isfile(register.configuration_file): logging.warning("Overwriting configuration: %s", register.configuration_file) else: logging.info("Saving configuration: %s" % register.configuration_file) pixel_reg_dict = {} for path in ["tdacs", "fdacs", "masks", "configs"]: configuration_file_path = os.path.join(configuration_path, path) if not os.path.exists(configuration_file_path): os.makedirs(configuration_file_path) if path == "tdacs": dac = register.get_pixel_register_objects(name="TDAC")[0] dac_config_path = os.path.join(configuration_file_path, "_".join([dac['name'].lower(), filename]) + ".dat") write_pixel_dac_config(dac_config_path, dac['value']) pixel_reg_dict[dac['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "fdacs": dac = register.get_pixel_register_objects(name="FDAC")[0] dac_config_path = os.path.join(configuration_file_path, "_".join([dac['name'].lower(), filename]) + ".dat") write_pixel_dac_config(dac_config_path, dac['value']) pixel_reg_dict[dac['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "masks": masks = register.get_pixel_register_objects(bitlength=1) for mask in masks: dac_config_path = os.path.join(configuration_file_path, "_".join([mask['name'].lower(), filename]) + ".dat") write_pixel_mask_config(dac_config_path, mask['value']) pixel_reg_dict[mask['name']] = os.path.relpath(dac_config_path, os.path.dirname(register.configuration_file)) elif path == "configs": with open(register.configuration_file, 'w') as f: lines = [] lines.append("# FEI4 Flavor\n") lines.append('%s %s\n' % ('Flavor', register.flavor)) lines.append("\n# FEI4 Chip ID\n") lines.append('%s %d\n' % ('Chip_ID', register.chip_id)) lines.append("\n# FEI4 Global Registers\n") global_regs = register.get_global_register_objects(readonly=False) for global_reg in sorted(global_regs, key=itemgetter('name')): lines.append('%s %d\n' % (global_reg['name'], global_reg['value'])) lines.append("\n# FEI4 Pixel Registers\n") for key in sorted(pixel_reg_dict): lines.append('%s %s\n' % (key, pixel_reg_dict[key])) lines.append("\n# FEI4 Calibration Parameters\n") for key in register.calibration_parameters: if register.calibration_parameters[key] is None: lines.append('%s %s\n' % (key, register.calibration_parameters[key])) elif isinstance(register.calibration_parameters[key], (float, int, long)): lines.append('%s %s\n' % (key, round(register.calibration_parameters[key], 4))) elif isinstance(register.calibration_parameters[key], list): lines.append('%s %s\n' % (key, [round(elem, 2) for elem in register.calibration_parameters[key]])) else: raise ValueError('type %s not supported' % type(register.calibration_parameters[key])) if register.miscellaneous: lines.append("\n# Miscellaneous\n") for key, value in register.miscellaneous.iteritems(): lines.append('%s %s\n' % (key, value)) f.writelines(lines)
[ "def", "save_configuration_to_text_file", "(", "register", ",", "configuration_file", ")", ":", "configuration_path", ",", "filename", "=", "os", ".", "path", ".", "split", "(", "configuration_file", ")", "if", "os", ".", "path", ".", "split", "(", "configuration_path", ")", "[", "1", "]", "==", "'configs'", ":", "configuration_path", "=", "os", ".", "path", ".", "split", "(", "configuration_path", ")", "[", "0", "]", "filename", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "0", "]", ".", "strip", "(", ")", "register", ".", "configuration_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "join", "(", "configuration_path", ",", "'configs'", ")", ",", "filename", "+", "\".cfg\"", ")", "if", "os", ".", "path", ".", "isfile", "(", "register", ".", "configuration_file", ")", ":", "logging", ".", "warning", "(", "\"Overwriting configuration: %s\"", ",", "register", ".", "configuration_file", ")", "else", ":", "logging", ".", "info", "(", "\"Saving configuration: %s\"", "%", "register", ".", "configuration_file", ")", "pixel_reg_dict", "=", "{", "}", "for", "path", "in", "[", "\"tdacs\"", ",", "\"fdacs\"", ",", "\"masks\"", ",", "\"configs\"", "]", ":", "configuration_file_path", "=", "os", ".", "path", ".", "join", "(", "configuration_path", ",", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "configuration_file_path", ")", ":", "os", ".", "makedirs", "(", "configuration_file_path", ")", "if", "path", "==", "\"tdacs\"", ":", "dac", "=", "register", ".", "get_pixel_register_objects", "(", "name", "=", "\"TDAC\"", ")", "[", "0", "]", "dac_config_path", "=", "os", ".", "path", ".", "join", "(", "configuration_file_path", ",", "\"_\"", ".", "join", "(", "[", "dac", "[", "'name'", "]", ".", "lower", "(", ")", ",", "filename", "]", ")", "+", "\".dat\"", ")", "write_pixel_dac_config", "(", "dac_config_path", ",", "dac", "[", "'value'", "]", ")", "pixel_reg_dict", "[", "dac", "[", "'name'", "]", "]", "=", "os", ".", "path", ".", "relpath", "(", "dac_config_path", ",", "os", ".", "path", ".", "dirname", "(", "register", ".", "configuration_file", ")", ")", "elif", "path", "==", "\"fdacs\"", ":", "dac", "=", "register", ".", "get_pixel_register_objects", "(", "name", "=", "\"FDAC\"", ")", "[", "0", "]", "dac_config_path", "=", "os", ".", "path", ".", "join", "(", "configuration_file_path", ",", "\"_\"", ".", "join", "(", "[", "dac", "[", "'name'", "]", ".", "lower", "(", ")", ",", "filename", "]", ")", "+", "\".dat\"", ")", "write_pixel_dac_config", "(", "dac_config_path", ",", "dac", "[", "'value'", "]", ")", "pixel_reg_dict", "[", "dac", "[", "'name'", "]", "]", "=", "os", ".", "path", ".", "relpath", "(", "dac_config_path", ",", "os", ".", "path", ".", "dirname", "(", "register", ".", "configuration_file", ")", ")", "elif", "path", "==", "\"masks\"", ":", "masks", "=", "register", ".", "get_pixel_register_objects", "(", "bitlength", "=", "1", ")", "for", "mask", "in", "masks", ":", "dac_config_path", "=", "os", ".", "path", ".", "join", "(", "configuration_file_path", ",", "\"_\"", ".", "join", "(", "[", "mask", "[", "'name'", "]", ".", "lower", "(", ")", ",", "filename", "]", ")", "+", "\".dat\"", ")", "write_pixel_mask_config", "(", "dac_config_path", ",", "mask", "[", "'value'", "]", ")", "pixel_reg_dict", "[", "mask", "[", "'name'", "]", "]", "=", "os", ".", "path", ".", "relpath", "(", "dac_config_path", ",", "os", ".", "path", ".", "dirname", "(", "register", ".", "configuration_file", ")", ")", "elif", "path", "==", "\"configs\"", 
":", "with", "open", "(", "register", ".", "configuration_file", ",", "'w'", ")", "as", "f", ":", "lines", "=", "[", "]", "lines", ".", "append", "(", "\"# FEI4 Flavor\\n\"", ")", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "'Flavor'", ",", "register", ".", "flavor", ")", ")", "lines", ".", "append", "(", "\"\\n# FEI4 Chip ID\\n\"", ")", "lines", ".", "append", "(", "'%s %d\\n'", "%", "(", "'Chip_ID'", ",", "register", ".", "chip_id", ")", ")", "lines", ".", "append", "(", "\"\\n# FEI4 Global Registers\\n\"", ")", "global_regs", "=", "register", ".", "get_global_register_objects", "(", "readonly", "=", "False", ")", "for", "global_reg", "in", "sorted", "(", "global_regs", ",", "key", "=", "itemgetter", "(", "'name'", ")", ")", ":", "lines", ".", "append", "(", "'%s %d\\n'", "%", "(", "global_reg", "[", "'name'", "]", ",", "global_reg", "[", "'value'", "]", ")", ")", "lines", ".", "append", "(", "\"\\n# FEI4 Pixel Registers\\n\"", ")", "for", "key", "in", "sorted", "(", "pixel_reg_dict", ")", ":", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "key", ",", "pixel_reg_dict", "[", "key", "]", ")", ")", "lines", ".", "append", "(", "\"\\n# FEI4 Calibration Parameters\\n\"", ")", "for", "key", "in", "register", ".", "calibration_parameters", ":", "if", "register", ".", "calibration_parameters", "[", "key", "]", "is", "None", ":", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "key", ",", "register", ".", "calibration_parameters", "[", "key", "]", ")", ")", "elif", "isinstance", "(", "register", ".", "calibration_parameters", "[", "key", "]", ",", "(", "float", ",", "int", ",", "long", ")", ")", ":", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "key", ",", "round", "(", "register", ".", "calibration_parameters", "[", "key", "]", ",", "4", ")", ")", ")", "elif", "isinstance", "(", "register", ".", "calibration_parameters", "[", "key", "]", ",", "list", ")", ":", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "key", ",", "[", "round", "(", "elem", ",", "2", ")", "for", "elem", "in", "register", ".", "calibration_parameters", "[", "key", "]", "]", ")", ")", "else", ":", "raise", "ValueError", "(", "'type %s not supported'", "%", "type", "(", "register", ".", "calibration_parameters", "[", "key", "]", ")", ")", "if", "register", ".", "miscellaneous", ":", "lines", ".", "append", "(", "\"\\n# Miscellaneous\\n\"", ")", "for", "key", ",", "value", "in", "register", ".", "miscellaneous", ".", "iteritems", "(", ")", ":", "lines", ".", "append", "(", "'%s %s\\n'", "%", "(", "key", ",", "value", ")", ")", "f", ".", "writelines", "(", "lines", ")" ]
Saving configuration to text files from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string Filename of the configuration file.
[ "Saving", "configuration", "to", "text", "files", "from", "register", "object", "Parameters", "----------", "register", ":", "pybar", ".", "fei4", ".", "register", "object", "configuration_file", ":", "string", "Filename", "of", "the", "configuration", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L863-L930
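The directory layout produced by the branches above, sketched for an illustrative base name module_0 (the set of mask files depends on the flavor's 1-bit pixel registers):

<base>/configs/module_0.cfg
<base>/tdacs/tdac_module_0.dat
<base>/fdacs/fdac_module_0.dat
<base>/masks/enable_module_0.dat  (one .dat file per 1-bit pixel mask)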
SiLab-Bonn/pyBAR
pybar/fei4/register.py
save_configuration_to_hdf5
def save_configuration_to_hdf5(register, configuration_file, name=''): '''Saving configuration to HDF5 file from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. name : string Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file. ''' def save_conf(): logging.info("Saving configuration: %s" % h5_file.filename) register.configuration_file = h5_file.filename try: configuration_group = h5_file.create_group(h5_file.root, "configuration") except tb.NodeError: configuration_group = h5_file.root.configuration if name: try: configuration_group = h5_file.create_group(configuration_group, name) except tb.NodeError: configuration_group = h5_file.root.configuration.name # calibration_parameters try: h5_file.remove_node(configuration_group, name='calibration_parameters') except tb.NodeError: pass calibration_data_table = h5_file.create_table(configuration_group, name='calibration_parameters', description=NameValue, title='calibration_parameters') calibration_data_row = calibration_data_table.row for key, value in register.calibration_parameters.iteritems(): calibration_data_row['name'] = key calibration_data_row['value'] = str(value) calibration_data_row.append() calibration_data_table.flush() # miscellaneous try: h5_file.remove_node(configuration_group, name='miscellaneous') except tb.NodeError: pass miscellaneous_data_table = h5_file.create_table(configuration_group, name='miscellaneous', description=NameValue, title='miscellaneous') miscellaneous_data_row = miscellaneous_data_table.row miscellaneous_data_row['name'] = 'Flavor' miscellaneous_data_row['value'] = register.flavor miscellaneous_data_row.append() miscellaneous_data_row['name'] = 'Chip_ID' miscellaneous_data_row['value'] = register.chip_id miscellaneous_data_row.append() for key, value in register.miscellaneous.iteritems(): miscellaneous_data_row['name'] = key miscellaneous_data_row['value'] = value miscellaneous_data_row.append() miscellaneous_data_table.flush() # global try: h5_file.remove_node(configuration_group, name='global_register') except tb.NodeError: pass global_data_table = h5_file.create_table(configuration_group, name='global_register', description=NameValue, title='global_register') global_data_table_row = global_data_table.row global_regs = register.get_global_register_objects(readonly=False) for global_reg in sorted(global_regs, key=itemgetter('name')): global_data_table_row['name'] = global_reg['name'] global_data_table_row['value'] = global_reg['value'] # TODO: some function that converts to bin, hex global_data_table_row.append() global_data_table.flush() # pixel for pixel_reg in register.pixel_registers.itervalues(): try: h5_file.remove_node(configuration_group, name=pixel_reg['name']) except tb.NodeError: pass data = pixel_reg['value'].T atom = tb.Atom.from_dtype(data.dtype) ds = h5_file.create_carray(configuration_group, name=pixel_reg['name'], atom=atom, shape=data.shape, title=pixel_reg['name']) ds[:] = data if isinstance(configuration_file, tb.file.File): h5_file = configuration_file save_conf() else: with tb.open_file(configuration_file, mode="a", title='') as h5_file: save_conf()
python
def save_configuration_to_hdf5(register, configuration_file, name=''):
    '''Saving configuration to HDF5 file from register object

    Parameters
    ----------
    register : pybar.fei4.register object
    configuration_file : string, file
        Filename of the HDF5 configuration file or file object.
    name : string
        Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file.
    '''
    def save_conf():
        logging.info("Saving configuration: %s" % h5_file.filename)
        register.configuration_file = h5_file.filename
        try:
            configuration_group = h5_file.create_group(h5_file.root, "configuration")
        except tb.NodeError:
            configuration_group = h5_file.root.configuration
        if name:
            try:
                configuration_group = h5_file.create_group(configuration_group, name)
            except tb.NodeError:
                # look up the existing subgroup given by the name variable; plain attribute
                # access would search for a child node literally called "name"
                configuration_group = h5_file.get_node(h5_file.root.configuration, name)
        # calibration_parameters
        try:
            h5_file.remove_node(configuration_group, name='calibration_parameters')
        except tb.NodeError:
            pass
        calibration_data_table = h5_file.create_table(configuration_group, name='calibration_parameters', description=NameValue, title='calibration_parameters')
        calibration_data_row = calibration_data_table.row
        for key, value in register.calibration_parameters.iteritems():
            calibration_data_row['name'] = key
            calibration_data_row['value'] = str(value)
            calibration_data_row.append()
        calibration_data_table.flush()
        # miscellaneous
        try:
            h5_file.remove_node(configuration_group, name='miscellaneous')
        except tb.NodeError:
            pass
        miscellaneous_data_table = h5_file.create_table(configuration_group, name='miscellaneous', description=NameValue, title='miscellaneous')
        miscellaneous_data_row = miscellaneous_data_table.row
        miscellaneous_data_row['name'] = 'Flavor'
        miscellaneous_data_row['value'] = register.flavor
        miscellaneous_data_row.append()
        miscellaneous_data_row['name'] = 'Chip_ID'
        miscellaneous_data_row['value'] = register.chip_id
        miscellaneous_data_row.append()
        for key, value in register.miscellaneous.iteritems():
            miscellaneous_data_row['name'] = key
            miscellaneous_data_row['value'] = value
            miscellaneous_data_row.append()
        miscellaneous_data_table.flush()
        # global
        try:
            h5_file.remove_node(configuration_group, name='global_register')
        except tb.NodeError:
            pass
        global_data_table = h5_file.create_table(configuration_group, name='global_register', description=NameValue, title='global_register')
        global_data_table_row = global_data_table.row
        global_regs = register.get_global_register_objects(readonly=False)
        for global_reg in sorted(global_regs, key=itemgetter('name')):
            global_data_table_row['name'] = global_reg['name']
            global_data_table_row['value'] = global_reg['value']  # TODO: some function that converts to bin, hex
            global_data_table_row.append()
        global_data_table.flush()
        # pixel
        for pixel_reg in register.pixel_registers.itervalues():
            try:
                h5_file.remove_node(configuration_group, name=pixel_reg['name'])
            except tb.NodeError:
                pass
            data = pixel_reg['value'].T
            atom = tb.Atom.from_dtype(data.dtype)
            ds = h5_file.create_carray(configuration_group, name=pixel_reg['name'], atom=atom, shape=data.shape, title=pixel_reg['name'])
            ds[:] = data

    if isinstance(configuration_file, tb.file.File):
        h5_file = configuration_file
        save_conf()
    else:
        with tb.open_file(configuration_file, mode="a", title='') as h5_file:
            save_conf()
[ "def", "save_configuration_to_hdf5", "(", "register", ",", "configuration_file", ",", "name", "=", "''", ")", ":", "def", "save_conf", "(", ")", ":", "logging", ".", "info", "(", "\"Saving configuration: %s\"", "%", "h5_file", ".", "filename", ")", "register", ".", "configuration_file", "=", "h5_file", ".", "filename", "try", ":", "configuration_group", "=", "h5_file", ".", "create_group", "(", "h5_file", ".", "root", ",", "\"configuration\"", ")", "except", "tb", ".", "NodeError", ":", "configuration_group", "=", "h5_file", ".", "root", ".", "configuration", "if", "name", ":", "try", ":", "configuration_group", "=", "h5_file", ".", "create_group", "(", "configuration_group", ",", "name", ")", "except", "tb", ".", "NodeError", ":", "configuration_group", "=", "h5_file", ".", "root", ".", "configuration", ".", "name", "# calibration_parameters\r", "try", ":", "h5_file", ".", "remove_node", "(", "configuration_group", ",", "name", "=", "'calibration_parameters'", ")", "except", "tb", ".", "NodeError", ":", "pass", "calibration_data_table", "=", "h5_file", ".", "create_table", "(", "configuration_group", ",", "name", "=", "'calibration_parameters'", ",", "description", "=", "NameValue", ",", "title", "=", "'calibration_parameters'", ")", "calibration_data_row", "=", "calibration_data_table", ".", "row", "for", "key", ",", "value", "in", "register", ".", "calibration_parameters", ".", "iteritems", "(", ")", ":", "calibration_data_row", "[", "'name'", "]", "=", "key", "calibration_data_row", "[", "'value'", "]", "=", "str", "(", "value", ")", "calibration_data_row", ".", "append", "(", ")", "calibration_data_table", ".", "flush", "(", ")", "# miscellaneous\r", "try", ":", "h5_file", ".", "remove_node", "(", "configuration_group", ",", "name", "=", "'miscellaneous'", ")", "except", "tb", ".", "NodeError", ":", "pass", "miscellaneous_data_table", "=", "h5_file", ".", "create_table", "(", "configuration_group", ",", "name", "=", "'miscellaneous'", ",", "description", "=", "NameValue", ",", "title", "=", "'miscellaneous'", ")", "miscellaneous_data_row", "=", "miscellaneous_data_table", ".", "row", "miscellaneous_data_row", "[", "'name'", "]", "=", "'Flavor'", "miscellaneous_data_row", "[", "'value'", "]", "=", "register", ".", "flavor", "miscellaneous_data_row", ".", "append", "(", ")", "miscellaneous_data_row", "[", "'name'", "]", "=", "'Chip_ID'", "miscellaneous_data_row", "[", "'value'", "]", "=", "register", ".", "chip_id", "miscellaneous_data_row", ".", "append", "(", ")", "for", "key", ",", "value", "in", "register", ".", "miscellaneous", ".", "iteritems", "(", ")", ":", "miscellaneous_data_row", "[", "'name'", "]", "=", "key", "miscellaneous_data_row", "[", "'value'", "]", "=", "value", "miscellaneous_data_row", ".", "append", "(", ")", "miscellaneous_data_table", ".", "flush", "(", ")", "# global\r", "try", ":", "h5_file", ".", "remove_node", "(", "configuration_group", ",", "name", "=", "'global_register'", ")", "except", "tb", ".", "NodeError", ":", "pass", "global_data_table", "=", "h5_file", ".", "create_table", "(", "configuration_group", ",", "name", "=", "'global_register'", ",", "description", "=", "NameValue", ",", "title", "=", "'global_register'", ")", "global_data_table_row", "=", "global_data_table", ".", "row", "global_regs", "=", "register", ".", "get_global_register_objects", "(", "readonly", "=", "False", ")", "for", "global_reg", "in", "sorted", "(", "global_regs", ",", "key", "=", "itemgetter", "(", "'name'", ")", ")", ":", "global_data_table_row", "[", "'name'", "]", 
"=", "global_reg", "[", "'name'", "]", "global_data_table_row", "[", "'value'", "]", "=", "global_reg", "[", "'value'", "]", "# TODO: some function that converts to bin, hex\r", "global_data_table_row", ".", "append", "(", ")", "global_data_table", ".", "flush", "(", ")", "# pixel\r", "for", "pixel_reg", "in", "register", ".", "pixel_registers", ".", "itervalues", "(", ")", ":", "try", ":", "h5_file", ".", "remove_node", "(", "configuration_group", ",", "name", "=", "pixel_reg", "[", "'name'", "]", ")", "except", "tb", ".", "NodeError", ":", "pass", "data", "=", "pixel_reg", "[", "'value'", "]", ".", "T", "atom", "=", "tb", ".", "Atom", ".", "from_dtype", "(", "data", ".", "dtype", ")", "ds", "=", "h5_file", ".", "create_carray", "(", "configuration_group", ",", "name", "=", "pixel_reg", "[", "'name'", "]", ",", "atom", "=", "atom", ",", "shape", "=", "data", ".", "shape", ",", "title", "=", "pixel_reg", "[", "'name'", "]", ")", "ds", "[", ":", "]", "=", "data", "if", "isinstance", "(", "configuration_file", ",", "tb", ".", "file", ".", "File", ")", ":", "h5_file", "=", "configuration_file", "save_conf", "(", ")", "else", ":", "with", "tb", ".", "open_file", "(", "configuration_file", ",", "mode", "=", "\"a\"", ",", "title", "=", "''", ")", "as", "h5_file", ":", "save_conf", "(", ")" ]
Saving configuration to HDF5 file from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. name : string Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file.
[ "Saving", "configuration", "to", "HDF5", "file", "from", "register", "object", "Parameters", "----------", "register", ":", "pybar", ".", "fei4", ".", "register", "object", "configuration_file", ":", "string", "file", "Filename", "of", "the", "HDF5", "configuration", "file", "or", "file", "object", ".", "name", ":", "string", "Additional", "identifier", "(", "subgroup", ")", ".", "Useful", "when", "storing", "more", "than", "one", "configuration", "inside", "a", "HDF5", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L933-L1019
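A minimal usage sketch for the writer above (Python 2, like the source): only the save_configuration_to_hdf5 signature and the tb.file.File dispatch are taken from the record; the register instance and the file and subgroup names are assumptions.

import tables as tb
from pybar.fei4.register import save_configuration_to_hdf5

# 'register' is assumed to be an already configured pybar.fei4.register object.
save_configuration_to_hdf5(register, 'module_test.h5', name='after_tuning')  # names are illustrative

# An already open PyTables handle also works; the function detects tb.file.File
# and then skips opening/closing the file itself.
with tb.open_file('module_test.h5', mode='a') as h5_file:
    save_configuration_to_hdf5(register, h5_file, name='after_scan')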
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.load_configuration
def load_configuration(self, configuration_file):
    '''Loading configuration

    Parameters
    ----------
    configuration_file : string
        Path to the configuration file (text or HDF5 file).
    '''
    if os.path.isfile(configuration_file):
        if not isinstance(configuration_file, tb.file.File) and os.path.splitext(configuration_file)[1].strip().lower() != ".h5":
            load_configuration_from_text_file(self, configuration_file)
        else:
            load_configuration_from_hdf5(self, configuration_file)
    else:
        raise ValueError('Cannot find configuration file specified: %s' % configuration_file)
python
def load_configuration(self, configuration_file):
    '''Loading configuration

    Parameters
    ----------
    configuration_file : string
        Path to the configuration file (text or HDF5 file).
    '''
    if os.path.isfile(configuration_file):
        if not isinstance(configuration_file, tb.file.File) and os.path.splitext(configuration_file)[1].strip().lower() != ".h5":
            load_configuration_from_text_file(self, configuration_file)
        else:
            load_configuration_from_hdf5(self, configuration_file)
    else:
        raise ValueError('Cannot find configuration file specified: %s' % configuration_file)
[ "def", "load_configuration", "(", "self", ",", "configuration_file", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "configuration_file", ")", ":", "if", "not", "isinstance", "(", "configuration_file", ",", "tb", ".", "file", ".", "File", ")", "and", "os", ".", "path", ".", "splitext", "(", "configuration_file", ")", "[", "1", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "load_configuration_from_text_file", "(", "self", ",", "configuration_file", ")", "else", ":", "load_configuration_from_hdf5", "(", "self", ",", "configuration_file", ")", "else", ":", "raise", "ValueError", "(", "'Cannot find configuration file specified: %s'", "%", "configuration_file", ")" ]
Loading configuration Parameters ---------- configuration_file : string Path to the configuration file (text or HDF5 file).
[ "Loading", "configuration", "Parameters", "----------", "configuration_file", ":", "string", "Path", "to", "the", "configuration", "file", "(", "text", "or", "HDF5", "file", ")", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L135-L149
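The dispatch above is extension-based: everything that is not an .h5 file is routed to the text-file loader. A short sketch, assuming an existing FEI4Register instance named register; the paths are illustrative.

register.load_configuration('configs/fei4_defaults.cfg')  # no .h5 extension -> text-file loader
register.load_configuration('module_test.h5')             # .h5 extension -> HDF5 loader
# A path that does not exist raises ValueError before any loader is called.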
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.save_configuration
def save_configuration(self, configuration_file):
    '''Saving configuration

    Parameters
    ----------
    configuration_file : string
        Filename of the configuration file.
    '''
    if not isinstance(configuration_file, tb.file.File) and os.path.splitext(configuration_file)[1].strip().lower() != ".h5":
        return save_configuration_to_text_file(self, configuration_file)
    else:
        return save_configuration_to_hdf5(self, configuration_file)
python
def save_configuration(self, configuration_file):
    '''Saving configuration

    Parameters
    ----------
    configuration_file : string
        Filename of the configuration file.
    '''
    if not isinstance(configuration_file, tb.file.File) and os.path.splitext(configuration_file)[1].strip().lower() != ".h5":
        return save_configuration_to_text_file(self, configuration_file)
    else:
        return save_configuration_to_hdf5(self, configuration_file)
[ "def", "save_configuration", "(", "self", ",", "configuration_file", ")", ":", "if", "not", "isinstance", "(", "configuration_file", ",", "tb", ".", "file", ".", "File", ")", "and", "os", ".", "path", ".", "splitext", "(", "configuration_file", ")", "[", "1", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "return", "save_configuration_to_text_file", "(", "self", ",", "configuration_file", ")", "else", ":", "return", "save_configuration_to_hdf5", "(", "self", ",", "configuration_file", ")" ]
Saving configuration Parameters ---------- configuration_file : string Filename of the configuration file.
[ "Saving", "configuration", "Parameters", "----------", "configuration_file", ":", "string", "Filename", "of", "the", "configuration", "file", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L151-L162
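Saving mirrors the loading dispatch: the extension decides the backend, and the backend's return value is passed through. A sketch with the same assumed register instance and illustrative file names.

register.save_configuration('configs/fei4_tuned.cfg')  # text-file backend
register.save_configuration('module_test.h5')          # HDF5 backend (open tb.file.File objects also go here)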
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_commands
def get_commands(self, command_name, **kwargs):
    """get fe_command from command name and keyword arguments

    wrapper for build_command()
    implements FEI4 specific behavior
    """
    chip_id = kwargs.pop("ChipID", self.chip_id_bitarray)
    commands = []
    if command_name == "zeros":
        bv = bitarray(endian='little')
        if "length" in kwargs:
            bv += bitarray(kwargs["length"], endian='little')  # initialized from int, bits may be random
        elif kwargs:
            raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys()))
        bv.setall(0)  # all bits to zero
        commands.append(bv)
    elif command_name == "ones":
        bv = bitarray(endian='little')
        if "length" in kwargs:
            bv += bitarray(kwargs["length"], endian='little')  # initialized from int, bits may be random
        elif kwargs:
            raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys()))
        bv.setall(1)  # all bits to one
        commands.append(bv)
    elif command_name == "WrRegister":
        register_addresses = self.get_global_register_attributes("addresses", **kwargs)
        register_bitsets = self.get_global_register_bitsets(register_addresses)
        commands.extend([self.build_command(command_name, Address=register_address, GlobalData=register_bitset, ChipID=chip_id, **kwargs) for register_address, register_bitset in zip(register_addresses, register_bitsets)])
    elif command_name == "RdRegister":
        register_addresses = self.get_global_register_attributes('addresses', **kwargs)
        commands.extend([self.build_command(command_name, Address=register_address, ChipID=chip_id) for register_address in register_addresses])
    elif command_name == "WrFrontEnd":
        registers = ["S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr"]
        if self.fei4a:
            registers.append("ReadSkipped")
        elif self.fei4b:
            registers.append("SR_Read")
        self.create_restore_point()
        dcs = kwargs.pop("dcs", range(40))  # set the double columns to latch
        # in case of empty list
        if not dcs:
            dcs = range(40)
        joint_write = kwargs.pop("joint_write", False)
        same_mask_for_all_dc = kwargs.pop("same_mask_for_all_dc", False)
        register_objects = self.get_pixel_register_objects(do_sort=['pxstrobe'], **kwargs)
        # prepare for writing pixel registers
        if not self.broadcast:
            self.set_global_register_value("Colpr_Mode", 0)  # write only to the addressed double-column
            self.set_global_register_value("Colpr_Addr", 40)  # invalid address, grounded
            # Broadcasting ConfMode not necessary, writing registers is also possible in RunMode
            # commands.extend(self.get_commands("ConfMode", ChipID=8))  # set all chips to conf mode to receive commands
            # set all other chips to invalid addresses, to make broadcasting of WrRegister command possible
            commands.extend(self.get_commands("WrRegister", name=["Colpr_Mode", "Colpr_Addr"], ChipID=8))  # broadcast
        self.set_global_register_value("S0", 0)
        self.set_global_register_value("S1", 0)
        self.set_global_register_value("SR_Clr", 0)
        self.set_global_register_value("CalEn", 0)
        self.set_global_register_value("DIGHITIN_SEL", 0)
        self.set_global_register_value("GateHitOr", 0)
        self.set_global_register_value("ReadErrorReq", 0)
        self.set_global_register_value("StopClkPulse", 0)
        self.set_global_register_value("SR_Clock", 0)
        self.set_global_register_value("Efuse_Sense", 0)
        self.set_global_register_value("HITLD_IN", 0)
        self.set_global_register_value("Colpr_Mode", 3 if same_mask_for_all_dc else 0)  # write only the addressed double-column
        self.set_global_register_value("Colpr_Addr", 0)
        if self.fei4a:
            self.set_global_register_value("ReadSkipped", 0)
        elif self.fei4b:
            self.set_global_register_value("SR_Read", 0)
        commands.extend(self.get_commands("WrRegister", name=registers))
        if joint_write:
            pxstrobes = 0
            first_read = True
            do_latch = False
            for register_object in register_objects:
                if register_object['bitlength'] != 1:
                    raise ValueError('Pixel register %s: joint write not supported for pixel DACs' % register_object['name'])
                pxstrobe = register_object['pxstrobe']
                if not isinstance(pxstrobe, basestring):
                    do_latch = True
                    pxstrobes += 2 ** register_object['pxstrobe']
                if first_read:
                    pixel_reg_value = register_object['value']
                    first_read = False
                else:
                    if np.array_equal(pixel_reg_value, register_object['value']):
                        pixel_reg_value = register_object['value']
                    else:
                        raise ValueError('Pixel register %s: joint write not supported, pixel register values must be equal' % register_object['name'])
            if do_latch:
                self.set_global_register_value("Latch_En", 1)
            else:
                self.set_global_register_value("Latch_En", 0)
            self.set_global_register_value("Pixel_Strobes", pxstrobes)
            commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"]))
            for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs):
                self.set_global_register_value("Colpr_Addr", dc_no)
                commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                register_bitset = self.get_pixel_register_bitset(register_objects[0], 0, dc_no)
                commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)])  # broadcast
                if do_latch:
                    commands.extend(self.get_commands("GlobalPulse", Width=0))
        else:
            for register_object in register_objects:
                pxstrobe = register_object['pxstrobe']
                if isinstance(pxstrobe, basestring):
                    do_latch = False
                    self.set_global_register_value("Pixel_Strobes", 0)  # no latch
                    self.set_global_register_value("Latch_En", 0)
                    commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"]))
                else:
                    do_latch = True
                    self.set_global_register_value("Latch_En", 1)
                    commands.extend(self.get_commands("WrRegister", name=["Latch_En"]))
                bitlength = register_object['bitlength']
                for bit_no, pxstrobe_bit_no in (enumerate(range(bitlength)) if (register_object['littleendian'] is False) else enumerate(reversed(range(bitlength)))):
                    if do_latch:
                        self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + bit_no))
                        commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"]))
                    for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs):
                        self.set_global_register_value("Colpr_Addr", dc_no)
                        commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                        register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no, dc_no)
                        commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)])  # broadcast
                        if do_latch:
                            commands.extend(self.get_commands("GlobalPulse", Width=0))
        self.restore(pixel_register=False)
        commands.extend(self.get_commands("WrRegister", name=registers))
    elif command_name == "RdFrontEnd":
        registers = ["Conf_AddrEnable", "S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr", "Pixel_Strobes", "Latch_En"]
        if self.fei4a:
            registers.append("ReadSkipped")
        elif self.fei4b:
            registers.append("SR_Read")
        self.create_restore_point()
        dcs = kwargs.pop("dcs", range(40))  # set the double columns to latch
        # in case of empty list
        if not dcs:
            dcs = range(40)
        register_objects = self.get_pixel_register_objects(**kwargs)
        self.set_global_register_value('Conf_AddrEnable', 1)
        self.set_global_register_value("S0", 0)
        self.set_global_register_value("S1", 0)
        self.set_global_register_value("SR_Clr", 0)
        if self.fei4b:
            self.set_global_register_value("SR_Read", 0)
        self.set_global_register_value("CalEn", 0)
        self.set_global_register_value("DIGHITIN_SEL", 0)
        self.set_global_register_value("GateHitOr", 0)
        if self.fei4a:
            self.set_global_register_value("ReadSkipped", 0)
        self.set_global_register_value("ReadErrorReq", 0)
        self.set_global_register_value("StopClkPulse", 0)
        self.set_global_register_value("SR_Clock", 0)
        self.set_global_register_value("Efuse_Sense", 0)
        self.set_global_register_value("HITLD_IN", 0)
        self.set_global_register_value("Colpr_Mode", 0)  # write only the addressed double-column
        self.set_global_register_value("Colpr_Addr", 0)
        self.set_global_register_value("Latch_En", 0)
        self.set_global_register_value("Pixel_Strobes", 0)
        commands.extend(self.get_commands("WrRegister", name=registers))
        for index, register_object in enumerate(register_objects):
            # make sure that EnableDigInj is first read back, because it is not latched
            if register_object['name'] == 'EnableDigInj':
                register_objects[0], register_objects[index] = register_objects[index], register_objects[0]
                break
        for register_object in register_objects:
            pxstrobe = register_object['pxstrobe']
            bitlength = register_object['bitlength']
            for pxstrobe_bit_no in range(bitlength):
                logging.debug('Pixel Register %s Bit %d', register_object['name'], pxstrobe_bit_no)
                do_latch = True
                try:
                    self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + pxstrobe_bit_no))
                except TypeError:  # thrown for not latched digInjection
                    self.set_global_register_value("Pixel_Strobes", 0)  # do not latch
                    do_latch = False
                commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"]))
                for dc_no in dcs:
                    self.set_global_register_value("Colpr_Addr", dc_no)
                    commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                    if do_latch is True:
                        self.set_global_register_value("S0", 1)
                        self.set_global_register_value("S1", 1)
                        self.set_global_register_value("SR_Clock", 1)
                        commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"]))
                        commands.extend(self.get_commands("GlobalPulse", Width=0))
                        self.set_global_register_value("S0", 0)
                        self.set_global_register_value("S1", 0)
                        self.set_global_register_value("SR_Clock", 0)
                        commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"]))
                    register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no if (register_object['littleendian'] is False) else register_object['bitlength'] - pxstrobe_bit_no - 1, dc_no)
                    if self.fei4b:
                        self.set_global_register_value("SR_Read", 1)
                        commands.extend(self.get_commands("WrRegister", name=["SR_Read"]))
                    commands.extend([self.build_command("WrFrontEnd", PixelData=register_bitset, ChipID=chip_id)])
                    if self.fei4b:
                        self.set_global_register_value("SR_Read", 0)
                        commands.extend(self.get_commands("WrRegister", name=["SR_Read"]))
        self.restore(pixel_register=False)
        commands.extend(self.get_commands("WrRegister", name=registers))
    else:
        commands.append(self.build_command(command_name, ChipID=chip_id, **kwargs))
    return commands
python
def get_commands(self, command_name, **kwargs):
    """get fe_command from command name and keyword arguments

    wrapper for build_command()
    implements FEI4 specific behavior
    """
    chip_id = kwargs.pop("ChipID", self.chip_id_bitarray)
    commands = []
    if command_name == "zeros":
        bv = bitarray(endian='little')
        if "length" in kwargs:
            bv += bitarray(kwargs["length"], endian='little')  # initialized from int, bits may be random
        elif kwargs:
            raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys()))
        bv.setall(0)  # all bits to zero
        commands.append(bv)
    elif command_name == "ones":
        bv = bitarray(endian='little')
        if "length" in kwargs:
            bv += bitarray(kwargs["length"], endian='little')  # initialized from int, bits may be random
        elif kwargs:
            raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys()))
        bv.setall(1)  # all bits to one
        commands.append(bv)
    elif command_name == "WrRegister":
        register_addresses = self.get_global_register_attributes("addresses", **kwargs)
        register_bitsets = self.get_global_register_bitsets(register_addresses)
        commands.extend([self.build_command(command_name, Address=register_address, GlobalData=register_bitset, ChipID=chip_id, **kwargs) for register_address, register_bitset in zip(register_addresses, register_bitsets)])
    elif command_name == "RdRegister":
        register_addresses = self.get_global_register_attributes('addresses', **kwargs)
        commands.extend([self.build_command(command_name, Address=register_address, ChipID=chip_id) for register_address in register_addresses])
    elif command_name == "WrFrontEnd":
        registers = ["S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr"]
        if self.fei4a:
            registers.append("ReadSkipped")
        elif self.fei4b:
            registers.append("SR_Read")
        self.create_restore_point()
        dcs = kwargs.pop("dcs", range(40))  # set the double columns to latch
        # in case of empty list
        if not dcs:
            dcs = range(40)
        joint_write = kwargs.pop("joint_write", False)
        same_mask_for_all_dc = kwargs.pop("same_mask_for_all_dc", False)
        register_objects = self.get_pixel_register_objects(do_sort=['pxstrobe'], **kwargs)
        # prepare for writing pixel registers
        if not self.broadcast:
            self.set_global_register_value("Colpr_Mode", 0)  # write only to the addressed double-column
            self.set_global_register_value("Colpr_Addr", 40)  # invalid address, grounded
            # Broadcasting ConfMode not necessary, writing registers is also possible in RunMode
            # commands.extend(self.get_commands("ConfMode", ChipID=8))  # set all chips to conf mode to receive commands
            # set all other chips to invalid addresses, to make broadcasting of WrRegister command possible
            commands.extend(self.get_commands("WrRegister", name=["Colpr_Mode", "Colpr_Addr"], ChipID=8))  # broadcast
        self.set_global_register_value("S0", 0)
        self.set_global_register_value("S1", 0)
        self.set_global_register_value("SR_Clr", 0)
        self.set_global_register_value("CalEn", 0)
        self.set_global_register_value("DIGHITIN_SEL", 0)
        self.set_global_register_value("GateHitOr", 0)
        self.set_global_register_value("ReadErrorReq", 0)
        self.set_global_register_value("StopClkPulse", 0)
        self.set_global_register_value("SR_Clock", 0)
        self.set_global_register_value("Efuse_Sense", 0)
        self.set_global_register_value("HITLD_IN", 0)
        self.set_global_register_value("Colpr_Mode", 3 if same_mask_for_all_dc else 0)  # write only the addressed double-column
        self.set_global_register_value("Colpr_Addr", 0)
        if self.fei4a:
            self.set_global_register_value("ReadSkipped", 0)
        elif self.fei4b:
            self.set_global_register_value("SR_Read", 0)
        commands.extend(self.get_commands("WrRegister", name=registers))
        if joint_write:
            pxstrobes = 0
            first_read = True
            do_latch = False
            for register_object in register_objects:
                if register_object['bitlength'] != 1:
                    raise ValueError('Pixel register %s: joint write not supported for pixel DACs' % register_object['name'])
                pxstrobe = register_object['pxstrobe']
                if not isinstance(pxstrobe, basestring):
                    do_latch = True
                    pxstrobes += 2 ** register_object['pxstrobe']
                if first_read:
                    pixel_reg_value = register_object['value']
                    first_read = False
                else:
                    if np.array_equal(pixel_reg_value, register_object['value']):
                        pixel_reg_value = register_object['value']
                    else:
                        raise ValueError('Pixel register %s: joint write not supported, pixel register values must be equal' % register_object['name'])
            if do_latch:
                self.set_global_register_value("Latch_En", 1)
            else:
                self.set_global_register_value("Latch_En", 0)
            self.set_global_register_value("Pixel_Strobes", pxstrobes)
            commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"]))
            for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs):
                self.set_global_register_value("Colpr_Addr", dc_no)
                commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                register_bitset = self.get_pixel_register_bitset(register_objects[0], 0, dc_no)
                commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)])  # broadcast
                if do_latch:
                    commands.extend(self.get_commands("GlobalPulse", Width=0))
        else:
            for register_object in register_objects:
                pxstrobe = register_object['pxstrobe']
                if isinstance(pxstrobe, basestring):
                    do_latch = False
                    self.set_global_register_value("Pixel_Strobes", 0)  # no latch
                    self.set_global_register_value("Latch_En", 0)
                    commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"]))
                else:
                    do_latch = True
                    self.set_global_register_value("Latch_En", 1)
                    commands.extend(self.get_commands("WrRegister", name=["Latch_En"]))
                bitlength = register_object['bitlength']
                for bit_no, pxstrobe_bit_no in (enumerate(range(bitlength)) if (register_object['littleendian'] is False) else enumerate(reversed(range(bitlength)))):
                    if do_latch:
                        self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + bit_no))
                        commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"]))
                    for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs):
                        self.set_global_register_value("Colpr_Addr", dc_no)
                        commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                        register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no, dc_no)
                        commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)])  # broadcast
                        if do_latch:
                            commands.extend(self.get_commands("GlobalPulse", Width=0))
        self.restore(pixel_register=False)
        commands.extend(self.get_commands("WrRegister", name=registers))
    elif command_name == "RdFrontEnd":
        registers = ["Conf_AddrEnable", "S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr", "Pixel_Strobes", "Latch_En"]
        if self.fei4a:
            registers.append("ReadSkipped")
        elif self.fei4b:
            registers.append("SR_Read")
        self.create_restore_point()
        dcs = kwargs.pop("dcs", range(40))  # set the double columns to latch
        # in case of empty list
        if not dcs:
            dcs = range(40)
        register_objects = self.get_pixel_register_objects(**kwargs)
        self.set_global_register_value('Conf_AddrEnable', 1)
        self.set_global_register_value("S0", 0)
        self.set_global_register_value("S1", 0)
        self.set_global_register_value("SR_Clr", 0)
        if self.fei4b:
            self.set_global_register_value("SR_Read", 0)
        self.set_global_register_value("CalEn", 0)
        self.set_global_register_value("DIGHITIN_SEL", 0)
        self.set_global_register_value("GateHitOr", 0)
        if self.fei4a:
            self.set_global_register_value("ReadSkipped", 0)
        self.set_global_register_value("ReadErrorReq", 0)
        self.set_global_register_value("StopClkPulse", 0)
        self.set_global_register_value("SR_Clock", 0)
        self.set_global_register_value("Efuse_Sense", 0)
        self.set_global_register_value("HITLD_IN", 0)
        self.set_global_register_value("Colpr_Mode", 0)  # write only the addressed double-column
        self.set_global_register_value("Colpr_Addr", 0)
        self.set_global_register_value("Latch_En", 0)
        self.set_global_register_value("Pixel_Strobes", 0)
        commands.extend(self.get_commands("WrRegister", name=registers))
        for index, register_object in enumerate(register_objects):
            # make sure that EnableDigInj is first read back, because it is not latched
            if register_object['name'] == 'EnableDigInj':
                register_objects[0], register_objects[index] = register_objects[index], register_objects[0]
                break
        for register_object in register_objects:
            pxstrobe = register_object['pxstrobe']
            bitlength = register_object['bitlength']
            for pxstrobe_bit_no in range(bitlength):
                logging.debug('Pixel Register %s Bit %d', register_object['name'], pxstrobe_bit_no)
                do_latch = True
                try:
                    self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + pxstrobe_bit_no))
                except TypeError:  # thrown for not latched digInjection
                    self.set_global_register_value("Pixel_Strobes", 0)  # do not latch
                    do_latch = False
                commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"]))
                for dc_no in dcs:
                    self.set_global_register_value("Colpr_Addr", dc_no)
                    commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"]))
                    if do_latch is True:
                        self.set_global_register_value("S0", 1)
                        self.set_global_register_value("S1", 1)
                        self.set_global_register_value("SR_Clock", 1)
                        commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"]))
                        commands.extend(self.get_commands("GlobalPulse", Width=0))
                        self.set_global_register_value("S0", 0)
                        self.set_global_register_value("S1", 0)
                        self.set_global_register_value("SR_Clock", 0)
                        commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"]))
                    register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no if (register_object['littleendian'] is False) else register_object['bitlength'] - pxstrobe_bit_no - 1, dc_no)
                    if self.fei4b:
                        self.set_global_register_value("SR_Read", 1)
                        commands.extend(self.get_commands("WrRegister", name=["SR_Read"]))
                    commands.extend([self.build_command("WrFrontEnd", PixelData=register_bitset, ChipID=chip_id)])
                    if self.fei4b:
                        self.set_global_register_value("SR_Read", 0)
                        commands.extend(self.get_commands("WrRegister", name=["SR_Read"]))
        self.restore(pixel_register=False)
        commands.extend(self.get_commands("WrRegister", name=registers))
    else:
        commands.append(self.build_command(command_name, ChipID=chip_id, **kwargs))
    return commands
[ "def", "get_commands", "(", "self", ",", "command_name", ",", "*", "*", "kwargs", ")", ":", "chip_id", "=", "kwargs", ".", "pop", "(", "\"ChipID\"", ",", "self", ".", "chip_id_bitarray", ")", "commands", "=", "[", "]", "if", "command_name", "==", "\"zeros\"", ":", "bv", "=", "bitarray", "(", "endian", "=", "'little'", ")", "if", "\"length\"", "in", "kwargs", ":", "bv", "+=", "bitarray", "(", "kwargs", "[", "\"length\"", "]", ",", "endian", "=", "'little'", ")", "# initialized from int, bits may be random\r", "elif", "kwargs", ":", "raise", "ValueError", "(", "\"Unknown parameter(s): %s\"", "%", "\", \"", ".", "join", "(", "kwargs", ".", "iterkeys", "(", ")", ")", ")", "bv", ".", "setall", "(", "0", ")", "# all bits to zero\r", "commands", ".", "append", "(", "bv", ")", "elif", "command_name", "==", "\"ones\"", ":", "bv", "=", "bitarray", "(", "endian", "=", "'little'", ")", "if", "\"length\"", "in", "kwargs", ":", "bv", "+=", "bitarray", "(", "kwargs", "[", "\"length\"", "]", ",", "endian", "=", "'little'", ")", "# initialized from int, bits may be random\r", "elif", "kwargs", ":", "raise", "ValueError", "(", "\"Unknown parameter(s): %s\"", "%", "\", \"", ".", "join", "(", "kwargs", ".", "iterkeys", "(", ")", ")", ")", "bv", ".", "setall", "(", "1", ")", "# all bits to one\r", "commands", ".", "append", "(", "bv", ")", "elif", "command_name", "==", "\"WrRegister\"", ":", "register_addresses", "=", "self", ".", "get_global_register_attributes", "(", "\"addresses\"", ",", "*", "*", "kwargs", ")", "register_bitsets", "=", "self", ".", "get_global_register_bitsets", "(", "register_addresses", ")", "commands", ".", "extend", "(", "[", "self", ".", "build_command", "(", "command_name", ",", "Address", "=", "register_address", ",", "GlobalData", "=", "register_bitset", ",", "ChipID", "=", "chip_id", ",", "*", "*", "kwargs", ")", "for", "register_address", ",", "register_bitset", "in", "zip", "(", "register_addresses", ",", "register_bitsets", ")", "]", ")", "elif", "command_name", "==", "\"RdRegister\"", ":", "register_addresses", "=", "self", ".", "get_global_register_attributes", "(", "'addresses'", ",", "*", "*", "kwargs", ")", "commands", ".", "extend", "(", "[", "self", ".", "build_command", "(", "command_name", ",", "Address", "=", "register_address", ",", "ChipID", "=", "chip_id", ")", "for", "register_address", "in", "register_addresses", "]", ")", "elif", "command_name", "==", "\"WrFrontEnd\"", ":", "registers", "=", "[", "\"S0\"", ",", "\"S1\"", ",", "\"SR_Clr\"", ",", "\"CalEn\"", ",", "\"DIGHITIN_SEL\"", ",", "\"GateHitOr\"", ",", "\"ReadErrorReq\"", ",", "\"StopClkPulse\"", ",", "\"SR_Clock\"", ",", "\"Efuse_Sense\"", ",", "\"HITLD_IN\"", ",", "\"Colpr_Mode\"", ",", "\"Colpr_Addr\"", "]", "if", "self", ".", "fei4a", ":", "registers", ".", "append", "(", "\"ReadSkipped\"", ")", "elif", "self", ".", "fei4b", ":", "registers", ".", "append", "(", "\"SR_Read\"", ")", "self", ".", "create_restore_point", "(", ")", "dcs", "=", "kwargs", ".", "pop", "(", "\"dcs\"", ",", "range", "(", "40", ")", ")", "# set the double columns to latch\r", "# in case of empty list\r", "if", "not", "dcs", ":", "dcs", "=", "range", "(", "40", ")", "joint_write", "=", "kwargs", ".", "pop", "(", "\"joint_write\"", ",", "False", ")", "same_mask_for_all_dc", "=", "kwargs", ".", "pop", "(", "\"same_mask_for_all_dc\"", ",", "False", ")", "register_objects", "=", "self", ".", "get_pixel_register_objects", "(", "do_sort", "=", "[", "'pxstrobe'", "]", ",", "*", "*", "kwargs", ")", "# prepare for writing pixel 
registers\r", "if", "not", "self", ".", "broadcast", ":", "self", ".", "set_global_register_value", "(", "\"Colpr_Mode\"", ",", "0", ")", "# write only to the addressed double-column\r", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "40", ")", "# ivalid address, grounded\r", "# Broadcasting ConfMode not necessary, writing registers is also possible in RunMode\r", "# commands.extend(self.get_commands(\"ConfMode\", ChipID=8)) # set all chips to conf mode to receive commands#\r", "# set all other chips to invalid addresses, to make broadcasting of WrRegister command possible\r", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Colpr_Mode\"", ",", "\"Colpr_Addr\"", "]", ",", "ChipID", "=", "8", ")", ")", "# braodcast\r", "self", ".", "set_global_register_value", "(", "\"S0\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"S1\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clr\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"CalEn\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"DIGHITIN_SEL\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"GateHitOr\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"ReadErrorReq\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"StopClkPulse\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clock\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Efuse_Sense\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"HITLD_IN\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Colpr_Mode\"", ",", "3", "if", "same_mask_for_all_dc", "else", "0", ")", "# write only the addressed double-column\r", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "0", ")", "if", "self", ".", "fei4a", ":", "self", ".", "set_global_register_value", "(", "\"ReadSkipped\"", ",", "0", ")", "elif", "self", ".", "fei4b", ":", "self", ".", "set_global_register_value", "(", "\"SR_Read\"", ",", "0", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "registers", ")", ")", "if", "joint_write", ":", "pxstrobes", "=", "0", "first_read", "=", "True", "do_latch", "=", "False", "for", "register_object", "in", "register_objects", ":", "if", "register_object", "[", "'bitlength'", "]", "!=", "1", ":", "raise", "ValueError", "(", "'Pixel register %s: joint write not supported for pixel DACs'", "%", "register_object", "[", "'name'", "]", ")", "pxstrobe", "=", "register_object", "[", "'pxstrobe'", "]", "if", "not", "isinstance", "(", "pxstrobe", ",", "basestring", ")", ":", "do_latch", "=", "True", "pxstrobes", "+=", "2", "**", "register_object", "[", "'pxstrobe'", "]", "if", "first_read", ":", "pixel_reg_value", "=", "register_object", "[", "'value'", "]", "first_read", "=", "False", "else", ":", "if", "np", ".", "array_equal", "(", "pixel_reg_value", ",", "register_object", "[", "'value'", "]", ")", ":", "pixel_reg_value", "=", "register_object", "[", "'value'", "]", "else", ":", "raise", "ValueError", "(", "'Pixel register %s: joint write not supported, pixel register values must be equal'", "%", "register_object", "[", "'name'", "]", ")", "if", "do_latch", ":", "self", ".", "set_global_register_value", "(", "\"Latch_En\"", ",", "1", ")", "else", ":", "self", ".", "set_global_register_value", "(", "\"Latch_En\"", ",", "0", ")", "self", ".", 
"set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "pxstrobes", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Pixel_Strobes\"", ",", "\"Latch_En\"", "]", ")", ")", "for", "dc_no", "in", "(", "dcs", "[", ":", "1", "]", "if", "same_mask_for_all_dc", "else", "dcs", ")", ":", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "dc_no", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Colpr_Addr\"", "]", ")", ")", "register_bitset", "=", "self", ".", "get_pixel_register_bitset", "(", "register_objects", "[", "0", "]", ",", "0", ",", "dc_no", ")", "commands", ".", "extend", "(", "[", "self", ".", "build_command", "(", "command_name", ",", "PixelData", "=", "register_bitset", ",", "ChipID", "=", "8", ",", "*", "*", "kwargs", ")", "]", ")", "# broadcast\r", "if", "do_latch", ":", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"GlobalPulse\"", ",", "Width", "=", "0", ")", ")", "else", ":", "for", "register_object", "in", "register_objects", ":", "pxstrobe", "=", "register_object", "[", "'pxstrobe'", "]", "if", "isinstance", "(", "pxstrobe", ",", "basestring", ")", ":", "do_latch", "=", "False", "self", ".", "set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "0", ")", "# no latch\r", "self", ".", "set_global_register_value", "(", "\"Latch_En\"", ",", "0", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Pixel_Strobes\"", ",", "\"Latch_En\"", "]", ")", ")", "else", ":", "do_latch", "=", "True", "self", ".", "set_global_register_value", "(", "\"Latch_En\"", ",", "1", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Latch_En\"", "]", ")", ")", "bitlength", "=", "register_object", "[", "'bitlength'", "]", "for", "bit_no", ",", "pxstrobe_bit_no", "in", "(", "enumerate", "(", "range", "(", "bitlength", ")", ")", "if", "(", "register_object", "[", "'littleendian'", "]", "is", "False", ")", "else", "enumerate", "(", "reversed", "(", "range", "(", "bitlength", ")", ")", ")", ")", ":", "if", "do_latch", ":", "self", ".", "set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "2", "**", "(", "pxstrobe", "+", "bit_no", ")", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Pixel_Strobes\"", "]", ")", ")", "for", "dc_no", "in", "(", "dcs", "[", ":", "1", "]", "if", "same_mask_for_all_dc", "else", "dcs", ")", ":", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "dc_no", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Colpr_Addr\"", "]", ")", ")", "register_bitset", "=", "self", ".", "get_pixel_register_bitset", "(", "register_object", ",", "pxstrobe_bit_no", ",", "dc_no", ")", "commands", ".", "extend", "(", "[", "self", ".", "build_command", "(", "command_name", ",", "PixelData", "=", "register_bitset", ",", "ChipID", "=", "8", ",", "*", "*", "kwargs", ")", "]", ")", "# broadcast\r", "if", "do_latch", ":", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"GlobalPulse\"", ",", "Width", "=", "0", ")", ")", "self", ".", "restore", "(", "pixel_register", "=", "False", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "registers", ")", ")", "elif", "command_name", 
"==", "\"RdFrontEnd\"", ":", "registers", "=", "[", "\"Conf_AddrEnable\"", ",", "\"S0\"", ",", "\"S1\"", ",", "\"SR_Clr\"", ",", "\"CalEn\"", ",", "\"DIGHITIN_SEL\"", ",", "\"GateHitOr\"", ",", "\"ReadErrorReq\"", ",", "\"StopClkPulse\"", ",", "\"SR_Clock\"", ",", "\"Efuse_Sense\"", ",", "\"HITLD_IN\"", ",", "\"Colpr_Mode\"", ",", "\"Colpr_Addr\"", ",", "\"Pixel_Strobes\"", ",", "\"Latch_En\"", "]", "if", "self", ".", "fei4a", ":", "registers", ".", "append", "(", "\"ReadSkipped\"", ")", "elif", "self", ".", "fei4b", ":", "registers", ".", "append", "(", "\"SR_Read\"", ")", "self", ".", "create_restore_point", "(", ")", "dcs", "=", "kwargs", ".", "pop", "(", "\"dcs\"", ",", "range", "(", "40", ")", ")", "# set the double columns to latch\r", "# in case of empty list\r", "if", "not", "dcs", ":", "dcs", "=", "range", "(", "40", ")", "register_objects", "=", "self", ".", "get_pixel_register_objects", "(", "*", "*", "kwargs", ")", "self", ".", "set_global_register_value", "(", "'Conf_AddrEnable'", ",", "1", ")", "self", ".", "set_global_register_value", "(", "\"S0\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"S1\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clr\"", ",", "0", ")", "if", "self", ".", "fei4b", ":", "self", ".", "set_global_register_value", "(", "\"SR_Read\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"CalEn\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"DIGHITIN_SEL\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"GateHitOr\"", ",", "0", ")", "if", "self", ".", "fei4a", ":", "self", ".", "set_global_register_value", "(", "\"ReadSkipped\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"ReadErrorReq\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"StopClkPulse\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clock\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Efuse_Sense\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"HITLD_IN\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Colpr_Mode\"", ",", "0", ")", "# write only the addressed double-column\r", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Latch_En\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "0", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "registers", ")", ")", "for", "index", ",", "register_object", "in", "enumerate", "(", "register_objects", ")", ":", "# make sure that EnableDigInj is first read back, because it is not latched\r", "if", "register_object", "[", "'name'", "]", "==", "'EnableDigInj'", ":", "register_objects", "[", "0", "]", ",", "register_objects", "[", "index", "]", "=", "register_objects", "[", "index", "]", ",", "register_objects", "[", "0", "]", "break", "for", "register_object", "in", "register_objects", ":", "pxstrobe", "=", "register_object", "[", "'pxstrobe'", "]", "bitlength", "=", "register_object", "[", "'bitlength'", "]", "for", "pxstrobe_bit_no", "in", "range", "(", "bitlength", ")", ":", "logging", ".", "debug", "(", "'Pixel Register %s Bit %d'", ",", "register_object", "[", "'name'", "]", ",", "pxstrobe_bit_no", ")", "do_latch", "=", "True", "try", ":", "self", ".", "set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "2", "**", "(", "pxstrobe", "+", 
"pxstrobe_bit_no", ")", ")", "except", "TypeError", ":", "# thrown for not latched digInjection\r", "self", ".", "set_global_register_value", "(", "\"Pixel_Strobes\"", ",", "0", ")", "# do not latch\r", "do_latch", "=", "False", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Pixel_Strobes\"", "]", ")", ")", "for", "dc_no", "in", "dcs", ":", "self", ".", "set_global_register_value", "(", "\"Colpr_Addr\"", ",", "dc_no", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"Colpr_Addr\"", "]", ")", ")", "if", "do_latch", "is", "True", ":", "self", ".", "set_global_register_value", "(", "\"S0\"", ",", "1", ")", "self", ".", "set_global_register_value", "(", "\"S1\"", ",", "1", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clock\"", ",", "1", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"S0\"", ",", "\"S1\"", ",", "\"SR_Clock\"", "]", ")", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"GlobalPulse\"", ",", "Width", "=", "0", ")", ")", "self", ".", "set_global_register_value", "(", "\"S0\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"S1\"", ",", "0", ")", "self", ".", "set_global_register_value", "(", "\"SR_Clock\"", ",", "0", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"S0\"", ",", "\"S1\"", ",", "\"SR_Clock\"", "]", ")", ")", "register_bitset", "=", "self", ".", "get_pixel_register_bitset", "(", "register_object", ",", "pxstrobe_bit_no", "if", "(", "register_object", "[", "'littleendian'", "]", "is", "False", ")", "else", "register_object", "[", "'bitlength'", "]", "-", "pxstrobe_bit_no", "-", "1", ",", "dc_no", ")", "if", "self", ".", "fei4b", ":", "self", ".", "set_global_register_value", "(", "\"SR_Read\"", ",", "1", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"SR_Read\"", "]", ")", ")", "commands", ".", "extend", "(", "[", "self", ".", "build_command", "(", "\"WrFrontEnd\"", ",", "PixelData", "=", "register_bitset", ",", "ChipID", "=", "chip_id", ")", "]", ")", "if", "self", ".", "fei4b", ":", "self", ".", "set_global_register_value", "(", "\"SR_Read\"", ",", "0", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "[", "\"SR_Read\"", "]", ")", ")", "self", ".", "restore", "(", "pixel_register", "=", "False", ")", "commands", ".", "extend", "(", "self", ".", "get_commands", "(", "\"WrRegister\"", ",", "name", "=", "registers", ")", ")", "else", ":", "commands", ".", "append", "(", "self", ".", "build_command", "(", "command_name", ",", "ChipID", "=", "chip_id", ",", "*", "*", "kwargs", ")", ")", "return", "commands" ]
get fe_command from command name and keyword arguments wrapper for build_command() implements FEI4 specific behavior
[ "get", "fe_command", "from", "command", "name", "and", "keyword", "arguments", "wrapper", "for", "build_commands", "()", "implements", "FEI4", "specific", "behavior" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L205-L410
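A sketch of how command sequences might be assembled with get_commands. The command and register names all appear in the code above; the register instance and the chosen values are assumptions.

# Idle/gap bits: 10 zero bits as a little-endian bitarray.
sync = register.get_commands("zeros", length=10)

# Write two global registers; get_commands resolves their addresses and bitsets.
wr_cmds = register.get_commands("WrRegister", name=["Colpr_Mode", "Colpr_Addr"])

# Shift a pixel register into the front end, using one mask for all double columns
# (EnableDigInj is a pixel register name taken from the RdFrontEnd path above).
pixel_cmds = register.get_commands("WrFrontEnd", name=["EnableDigInj"], same_mask_for_all_dc=True)

# Every call returns a list of bitarrays ready to be concatenated and sent to the chip.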
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.build_command
def build_command(self, command_name, **kwargs):
    """build command from command_name and keyword values

    Returns
    -------
    command_bitvector : bitarray
        The assembled command bits.

    Usage
    -----
    Receives: command name as defined inside xml file, key-value-pairs as defined inside bit stream field for each command
    """
    # command_name = command_name.lower()
    command_bitvector = bitarray(0, endian='little')
    if command_name not in self.commands:
        raise ValueError('Unknown command %s' % command_name)
    command_object = self.commands[command_name]
    command_parts = re.split(r'\s*[+]\s*', command_object['bitstream'])
    # for index, part in enumerate(command_parts, start = 1):  # loop over command parts
    for part in command_parts:  # loop over command parts
        try:
            command_part_object = self.commands[part]
        except KeyError:
            command_part_object = None
        if command_part_object and 'bitstream' in command_part_object:  # command parts of defined content and length, e.g. Slow, ...
            if string_is_binary(command_part_object['bitstream']):
                command_bitvector += bitarray(command_part_object['bitstream'], endian='little')
            else:
                command_bitvector += self.build_command(part, **kwargs)
        elif command_part_object:  # Command parts with any content of defined length, e.g. ChipID, Address, ...
            if part in kwargs:
                value = kwargs[part]
            else:
                raise ValueError('Value of command part %s not given' % part)
            try:
                command_bitvector += value
            except TypeError:  # value is no bitarray
                if string_is_binary(value):
                    value = int(value, 2)
                try:
                    command_bitvector += bitarray_from_value(value=int(value), size=command_part_object['bitlength'], fmt='I')
                except Exception:
                    raise TypeError("Type of value not supported")
        elif string_is_binary(part):
            command_bitvector += bitarray(part, endian='little')
        # elif part in kwargs.keys():
        #     command_bitvector += kwargs[command_name]
        else:
            raise ValueError("Cannot process command part %s" % part)
    if command_bitvector.length() != command_object['bitlength']:
        raise ValueError("Command has unexpected length")
    if command_bitvector.length() == 0:
        raise ValueError("Command has length 0")
    return command_bitvector
python
def build_command(self, command_name, **kwargs):
    """build command from command_name and keyword values

    Returns
    -------
    command_bitvector : bitarray
        The assembled command bits.

    Usage
    -----
    Receives: command name as defined inside xml file, key-value-pairs as defined inside bit stream field for each command
    """
    # command_name = command_name.lower()
    command_bitvector = bitarray(0, endian='little')
    if command_name not in self.commands:
        raise ValueError('Unknown command %s' % command_name)
    command_object = self.commands[command_name]
    command_parts = re.split(r'\s*[+]\s*', command_object['bitstream'])
    # for index, part in enumerate(command_parts, start = 1):  # loop over command parts
    for part in command_parts:  # loop over command parts
        try:
            command_part_object = self.commands[part]
        except KeyError:
            command_part_object = None
        if command_part_object and 'bitstream' in command_part_object:  # command parts of defined content and length, e.g. Slow, ...
            if string_is_binary(command_part_object['bitstream']):
                command_bitvector += bitarray(command_part_object['bitstream'], endian='little')
            else:
                command_bitvector += self.build_command(part, **kwargs)
        elif command_part_object:  # Command parts with any content of defined length, e.g. ChipID, Address, ...
            if part in kwargs:
                value = kwargs[part]
            else:
                raise ValueError('Value of command part %s not given' % part)
            try:
                command_bitvector += value
            except TypeError:  # value is no bitarray
                if string_is_binary(value):
                    value = int(value, 2)
                try:
                    command_bitvector += bitarray_from_value(value=int(value), size=command_part_object['bitlength'], fmt='I')
                except Exception:
                    raise TypeError("Type of value not supported")
        elif string_is_binary(part):
            command_bitvector += bitarray(part, endian='little')
        # elif part in kwargs.keys():
        #     command_bitvector += kwargs[command_name]
        else:
            raise ValueError("Cannot process command part %s" % part)
    if command_bitvector.length() != command_object['bitlength']:
        raise ValueError("Command has unexpected length")
    if command_bitvector.length() == 0:
        raise ValueError("Command has length 0")
    return command_bitvector
[ "def", "build_command", "(", "self", ",", "command_name", ",", "*", "*", "kwargs", ")", ":", "# command_name = command_name.lower()\r", "command_bitvector", "=", "bitarray", "(", "0", ",", "endian", "=", "'little'", ")", "if", "command_name", "not", "in", "self", ".", "commands", ":", "raise", "ValueError", "(", "'Unknown command %s'", "%", "command_name", ")", "command_object", "=", "self", ".", "commands", "[", "command_name", "]", "command_parts", "=", "re", ".", "split", "(", "r'\\s*[+]\\s*'", ",", "command_object", "[", "'bitstream'", "]", ")", "# for index, part in enumerate(command_parts, start = 1): # loop over command parts\r", "for", "part", "in", "command_parts", ":", "# loop over command parts\r", "try", ":", "command_part_object", "=", "self", ".", "commands", "[", "part", "]", "except", "KeyError", ":", "command_part_object", "=", "None", "if", "command_part_object", "and", "'bitstream'", "in", "command_part_object", ":", "# command parts of defined content and length, e.g. Slow, ...\r", "if", "string_is_binary", "(", "command_part_object", "[", "'bitstream'", "]", ")", ":", "command_bitvector", "+=", "bitarray", "(", "command_part_object", "[", "'bitstream'", "]", ",", "endian", "=", "'little'", ")", "else", ":", "command_bitvector", "+=", "self", ".", "build_command", "(", "part", ",", "*", "*", "kwargs", ")", "elif", "command_part_object", ":", "# Command parts with any content of defined length, e.g. ChipID, Address, ...\r", "if", "part", "in", "kwargs", ":", "value", "=", "kwargs", "[", "part", "]", "else", ":", "raise", "ValueError", "(", "'Value of command part %s not given'", "%", "part", ")", "try", ":", "command_bitvector", "+=", "value", "except", "TypeError", ":", "# value is no bitarray\r", "if", "string_is_binary", "(", "value", ")", ":", "value", "=", "int", "(", "value", ",", "2", ")", "try", ":", "command_bitvector", "+=", "bitarray_from_value", "(", "value", "=", "int", "(", "value", ")", ",", "size", "=", "command_part_object", "[", "'bitlength'", "]", ",", "fmt", "=", "'I'", ")", "except", "Exception", ":", "raise", "TypeError", "(", "\"Type of value not supported\"", ")", "elif", "string_is_binary", "(", "part", ")", ":", "command_bitvector", "+=", "bitarray", "(", "part", ",", "endian", "=", "'little'", ")", "# elif part in kwargs.keys():\r", "# command_bitvector += kwargs[command_name]\r", "else", ":", "raise", "ValueError", "(", "\"Cannot process command part %s\"", "%", "part", ")", "if", "command_bitvector", ".", "length", "(", ")", "!=", "command_object", "[", "'bitlength'", "]", ":", "raise", "ValueError", "(", "\"Command has unexpected length\"", ")", "if", "command_bitvector", ".", "length", "(", ")", "==", "0", ":", "raise", "ValueError", "(", "\"Command has length 0\"", ")", "return", "command_bitvector" ]
build command from command_name and keyword values

Returns
-------
command_bitvector : bitarray
    The assembled command.

Usage
-----
Receives: command name as defined inside xml file, key-value-pairs as defined inside bit stream field for each command
[ "build", "command", "from", "command_name", "and", "keyword", "values", "Returns", "-------", "command_bitvector", ":", "list", "List", "of", "bitarrays", ".", "Usage", "-----", "Receives", ":", "command", "name", "as", "defined", "inside", "xml", "file", "key", "-", "value", "-", "pairs", "as", "defined", "inside", "bit", "stream", "filed", "for", "each", "command" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L412-L465
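A minimal usage sketch for build_command (not part of the record above): register is assumed to be a loaded FEI4Register instance, and the command name 'RdRegister' with its fields ChipID and Address is an assumption based on a typical FE-I4 command definition file:

    command = register.build_command('RdRegister', ChipID=8, Address=2)  # assumed command and field names
    print(command.length())  # matches the command's 'bitlength', otherwise a ValueError is raised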
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_global_register_attributes
def get_global_register_attributes(self, register_attribute, do_sort=True, **kwargs):
    """Calculating register attribute values from register names.

    Usage: get_global_register_attributes("attribute_name", name = [regname_1, regname_2, ...], addresses = 2)
    Receives: attribute name to be returned, dictionaries (kwargs) of register attributes and values for making cuts
    Returns: list of attribute values that match the dictionaries of attributes
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_attribute_list = []
    else:
        register_attribute_list = [self.global_registers[reg][register_attribute] for reg in names]
    for keyword in kwargs.keys():
        allowed_values = iterable(kwargs[keyword])
        try:
            register_attribute_list.extend(map(itemgetter(register_attribute), filter(lambda global_register: set(iterable(global_register[keyword])).intersection(allowed_values), self.global_registers.itervalues())))
        except AttributeError:
            pass
    if not register_attribute_list and filter(None, kwargs.itervalues()):
        raise ValueError('Global register attribute %s empty' % register_attribute)
    if do_sort:
        return sorted(set(flatten_iterable(register_attribute_list)))
    else:
        return flatten_iterable(register_attribute_list)
python
def get_global_register_attributes(self, register_attribute, do_sort=True, **kwargs):
    """Calculating register attribute values from register names.

    Usage: get_global_register_attributes("attribute_name", name = [regname_1, regname_2, ...], addresses = 2)
    Receives: attribute name to be returned, dictionaries (kwargs) of register attributes and values for making cuts
    Returns: list of attribute values that match the dictionaries of attributes
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_attribute_list = []
    else:
        register_attribute_list = [self.global_registers[reg][register_attribute] for reg in names]
    for keyword in kwargs.keys():
        allowed_values = iterable(kwargs[keyword])
        try:
            register_attribute_list.extend(map(itemgetter(register_attribute), filter(lambda global_register: set(iterable(global_register[keyword])).intersection(allowed_values), self.global_registers.itervalues())))
        except AttributeError:
            pass
    if not register_attribute_list and filter(None, kwargs.itervalues()):
        raise ValueError('Global register attribute %s empty' % register_attribute)
    if do_sort:
        return sorted(set(flatten_iterable(register_attribute_list)))
    else:
        return flatten_iterable(register_attribute_list)
[ "def", "get_global_register_attributes", "(", "self", ",", "register_attribute", ",", "do_sort", "=", "True", ",", "*", "*", "kwargs", ")", ":", "# speed up of the most often used keyword name\r", "try", ":", "names", "=", "iterable", "(", "kwargs", ".", "pop", "(", "'name'", ")", ")", "except", "KeyError", ":", "register_attribute_list", "=", "[", "]", "else", ":", "register_attribute_list", "=", "[", "self", ".", "global_registers", "[", "reg", "]", "[", "register_attribute", "]", "for", "reg", "in", "names", "]", "for", "keyword", "in", "kwargs", ".", "keys", "(", ")", ":", "allowed_values", "=", "iterable", "(", "kwargs", "[", "keyword", "]", ")", "try", ":", "register_attribute_list", ".", "extend", "(", "map", "(", "itemgetter", "(", "register_attribute", ")", ",", "filter", "(", "lambda", "global_register", ":", "set", "(", "iterable", "(", "global_register", "[", "keyword", "]", ")", ")", ".", "intersection", "(", "allowed_values", ")", ",", "self", ".", "global_registers", ".", "itervalues", "(", ")", ")", ")", ")", "except", "AttributeError", ":", "pass", "if", "not", "register_attribute_list", "and", "filter", "(", "None", ",", "kwargs", ".", "itervalues", "(", ")", ")", ":", "raise", "ValueError", "(", "'Global register attribute %s empty'", "%", "register_attribute", ")", "if", "do_sort", ":", "return", "sorted", "(", "set", "(", "flatten_iterable", "(", "register_attribute_list", ")", ")", ")", "else", ":", "return", "flatten_iterable", "(", "register_attribute_list", ")" ]
Calculating register attribute values from register names.

Usage: get_global_register_attributes("attribute_name", name = [regname_1, regname_2, ...], addresses = 2)
Receives: attribute name to be returned, dictionaries (kwargs) of register attributes and values for making cuts
Returns: list of attribute values that match the dictionaries of attributes
[ "Calculating", "register", "numbers", "from", "register", "names", ".", "Usage", ":", "get_global_register_attributes", "(", "attribute_name", "name", "=", "[", "regname_1", "regname_2", "...", "]", "addresses", "=", "2", ")", "Receives", ":", "attribute", "name", "to", "be", "returned", "dictionaries", "(", "kwargs", ")", "of", "register", "attributes", "and", "values", "for", "making", "cuts", "Returns", ":", "list", "of", "attribute", "values", "that", "matches", "dictionaries", "of", "attributes" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L467-L493
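A hedged usage sketch: register is an assumed FEI4Register instance and 'Vthin_AltFine' an assumed global register name from the loaded definition; the call returns the sorted, deduplicated attribute values:

    # all addresses occupied by the named global register
    addresses = register.get_global_register_attributes('addresses', name=['Vthin_AltFine'])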
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_global_register_objects
def get_global_register_objects(self, do_sort=None, reverse=False, **kwargs):
    """Generate register objects (list) from register name list

    Usage: get_global_register_objects(name = ["Amp2Vbn", "GateHitOr", "DisableColumnCnfg"], address = [2, 3])
    Receives: keyword lists of register names, addresses,... for making cuts
    Returns: list of register objects
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_objects = []
    else:
        register_objects = [self.global_registers[reg] for reg in names]
    for keyword in kwargs.iterkeys():
        allowed_values = iterable(kwargs[keyword])
        register_objects.extend(filter(lambda global_register: set(iterable(global_register[keyword])).intersection(allowed_values), self.global_registers.itervalues()))
    if not register_objects and filter(None, kwargs.itervalues()):
        raise ValueError('Global register objects empty')
    if do_sort:
        return sorted(register_objects, key=itemgetter(*do_sort), reverse=reverse)
    else:
        return register_objects
python
def get_global_register_objects(self, do_sort=None, reverse=False, **kwargs):
    """Generate register objects (list) from register name list

    Usage: get_global_register_objects(name = ["Amp2Vbn", "GateHitOr", "DisableColumnCnfg"], address = [2, 3])
    Receives: keyword lists of register names, addresses,... for making cuts
    Returns: list of register objects
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_objects = []
    else:
        register_objects = [self.global_registers[reg] for reg in names]
    for keyword in kwargs.iterkeys():
        allowed_values = iterable(kwargs[keyword])
        register_objects.extend(filter(lambda global_register: set(iterable(global_register[keyword])).intersection(allowed_values), self.global_registers.itervalues()))
    if not register_objects and filter(None, kwargs.itervalues()):
        raise ValueError('Global register objects empty')
    if do_sort:
        return sorted(register_objects, key=itemgetter(*do_sort), reverse=reverse)
    else:
        return register_objects
[ "def", "get_global_register_objects", "(", "self", ",", "do_sort", "=", "None", ",", "reverse", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# speed up of the most often used keyword name\r", "try", ":", "names", "=", "iterable", "(", "kwargs", ".", "pop", "(", "'name'", ")", ")", "except", "KeyError", ":", "register_objects", "=", "[", "]", "else", ":", "register_objects", "=", "[", "self", ".", "global_registers", "[", "reg", "]", "for", "reg", "in", "names", "]", "for", "keyword", "in", "kwargs", ".", "iterkeys", "(", ")", ":", "allowed_values", "=", "iterable", "(", "kwargs", "[", "keyword", "]", ")", "register_objects", ".", "extend", "(", "filter", "(", "lambda", "global_register", ":", "set", "(", "iterable", "(", "global_register", "[", "keyword", "]", ")", ")", ".", "intersection", "(", "allowed_values", ")", ",", "self", ".", "global_registers", ".", "itervalues", "(", ")", ")", ")", "if", "not", "register_objects", "and", "filter", "(", "None", ",", "kwargs", ".", "itervalues", "(", ")", ")", ":", "raise", "ValueError", "(", "'Global register objects empty'", ")", "if", "do_sort", ":", "return", "sorted", "(", "register_objects", ",", "key", "=", "itemgetter", "(", "*", "do_sort", ")", ",", "reverse", "=", "reverse", ")", "else", ":", "return", "register_objects" ]
Generate register objects (list) from register name list

Usage: get_global_register_objects(name = ["Amp2Vbn", "GateHitOr", "DisableColumnCnfg"], address = [2, 3])
Receives: keyword lists of register names, addresses,... for making cuts
Returns: list of register objects
[ "Generate", "register", "objects", "(", "list", ")", "from", "register", "name", "list", "Usage", ":", "get_global_register_objects", "(", "name", "=", "[", "Amp2Vbn", "GateHitOr", "DisableColumnCnfg", "]", "address", "=", "[", "2", "3", "]", ")", "Receives", ":", "keyword", "lists", "of", "register", "names", "addresses", "...", "for", "making", "cuts", "Returns", ":", "list", "of", "register", "objects" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L495-L518
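A short sketch of the cut-and-sort interface, reusing the register names from the docstring above (register is an assumed FEI4Register instance):

    # select by name, then order the returned register objects by their 'address' attribute
    regs = register.get_global_register_objects(name=["Amp2Vbn", "GateHitOr"], do_sort=['address'])
    for reg in regs:
        print(reg['name'], reg['address'])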
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_global_register_bitsets
def get_global_register_bitsets(self, register_addresses):  # TODO: add sorting
    """Calculating register bitsets from register addresses.

    Usage: get_global_register_bitsets([regaddress_1, regaddress_2, ...])
    Receives: list of register addresses
    Returns: list of register bitsets
    """
    register_bitsets = []
    for register_address in register_addresses:
        register_objects = self.get_global_register_objects(addresses=register_address)
        register_bitset = bitarray(16, endian='little')  # TODO remove hardcoded register size, see also below
        register_bitset.setall(0)
        register_littleendian = False
        for register_object in register_objects:
            if register_object['register_littleendian']:  # check for register endianness
                register_littleendian = True
            if (16 * register_object['address'] + register_object['offset'] < 16 * (register_address + 1) and 16 * register_object['address'] + register_object['offset'] + register_object['bitlength'] > 16 * register_address):
                reg = bitarray_from_value(value=register_object['value'], size=register_object['bitlength'])
                if register_object['littleendian']:
                    reg.reverse()
                # register_bitset[max(0, 16 * (register_object['address'] - register_address) + register_object['offset']):min(16, 16 * (register_object['address'] - register_address) + register_object['offset'] + register_object['bitlength'])] |= reg[max(0, 16 * (register_address - register_object['address']) - register_object['offset']):min(register_object['bitlength'], 16 * (register_address - register_object['address'] + 1) - register_object['offset'])]  # [ bit(n) bit(n-1)... bit(0) ]
                register_bitset[max(0, 16 - 16 * (register_object['address'] - register_address) - register_object['offset'] - register_object['bitlength']):min(16, 16 - 16 * (register_object['address'] - register_address) - register_object['offset'])] |= reg[max(0, register_object['bitlength'] - 16 - 16 * (register_address - register_object['address']) + register_object['offset']):min(register_object['bitlength'], register_object['bitlength'] + 16 - 16 * (register_address - register_object['address'] + 1) + register_object['offset'])]  # [ bit(0)... bit(n-1) bit(n) ]
            else:
                raise Exception("wrong register object")
        if register_littleendian:
            register_bitset.reverse()
        register_bitsets.append(register_bitset)
    return register_bitsets
python
def get_global_register_bitsets(self, register_addresses):  # TODO: add sorting
    """Calculating register bitsets from register addresses.

    Usage: get_global_register_bitsets([regaddress_1, regaddress_2, ...])
    Receives: list of register addresses
    Returns: list of register bitsets
    """
    register_bitsets = []
    for register_address in register_addresses:
        register_objects = self.get_global_register_objects(addresses=register_address)
        register_bitset = bitarray(16, endian='little')  # TODO remove hardcoded register size, see also below
        register_bitset.setall(0)
        register_littleendian = False
        for register_object in register_objects:
            if register_object['register_littleendian']:  # check for register endianness
                register_littleendian = True
            if (16 * register_object['address'] + register_object['offset'] < 16 * (register_address + 1) and 16 * register_object['address'] + register_object['offset'] + register_object['bitlength'] > 16 * register_address):
                reg = bitarray_from_value(value=register_object['value'], size=register_object['bitlength'])
                if register_object['littleendian']:
                    reg.reverse()
                # register_bitset[max(0, 16 * (register_object['address'] - register_address) + register_object['offset']):min(16, 16 * (register_object['address'] - register_address) + register_object['offset'] + register_object['bitlength'])] |= reg[max(0, 16 * (register_address - register_object['address']) - register_object['offset']):min(register_object['bitlength'], 16 * (register_address - register_object['address'] + 1) - register_object['offset'])]  # [ bit(n) bit(n-1)... bit(0) ]
                register_bitset[max(0, 16 - 16 * (register_object['address'] - register_address) - register_object['offset'] - register_object['bitlength']):min(16, 16 - 16 * (register_object['address'] - register_address) - register_object['offset'])] |= reg[max(0, register_object['bitlength'] - 16 - 16 * (register_address - register_object['address']) + register_object['offset']):min(register_object['bitlength'], register_object['bitlength'] + 16 - 16 * (register_address - register_object['address'] + 1) + register_object['offset'])]  # [ bit(0)... bit(n-1) bit(n) ]
            else:
                raise Exception("wrong register object")
        if register_littleendian:
            register_bitset.reverse()
        register_bitsets.append(register_bitset)
    return register_bitsets
[ "def", "get_global_register_bitsets", "(", "self", ",", "register_addresses", ")", ":", "# TOTO: add sorting\r", "register_bitsets", "=", "[", "]", "for", "register_address", "in", "register_addresses", ":", "register_objects", "=", "self", ".", "get_global_register_objects", "(", "addresses", "=", "register_address", ")", "register_bitset", "=", "bitarray", "(", "16", ",", "endian", "=", "'little'", ")", "# TODO remove hardcoded register size, see also below\r", "register_bitset", ".", "setall", "(", "0", ")", "register_littleendian", "=", "False", "for", "register_object", "in", "register_objects", ":", "if", "register_object", "[", "'register_littleendian'", "]", ":", "# check for register endianness\r", "register_littleendian", "=", "True", "if", "(", "16", "*", "register_object", "[", "'address'", "]", "+", "register_object", "[", "'offset'", "]", "<", "16", "*", "(", "register_address", "+", "1", ")", "and", "16", "*", "register_object", "[", "'address'", "]", "+", "register_object", "[", "'offset'", "]", "+", "register_object", "[", "'bitlength'", "]", ">", "16", "*", "register_address", ")", ":", "reg", "=", "bitarray_from_value", "(", "value", "=", "register_object", "[", "'value'", "]", ",", "size", "=", "register_object", "[", "'bitlength'", "]", ")", "if", "register_object", "[", "'littleendian'", "]", ":", "reg", ".", "reverse", "(", ")", "# register_bitset[max(0, 16 * (register_object['address'] - register_address) + register_object['offset']):min(16, 16 * (register_object['address'] - register_address) + register_object['offset'] + register_object['bitlength'])] |= reg[max(0, 16 * (register_address - register_object['address']) - register_object['offset']):min(register_object['bitlength'], 16 * (register_address - register_object['address'] + 1) - register_object['offset'])] # [ bit(n) bit(n-1)... bit(0) ]\r", "register_bitset", "[", "max", "(", "0", ",", "16", "-", "16", "*", "(", "register_object", "[", "'address'", "]", "-", "register_address", ")", "-", "register_object", "[", "'offset'", "]", "-", "register_object", "[", "'bitlength'", "]", ")", ":", "min", "(", "16", ",", "16", "-", "16", "*", "(", "register_object", "[", "'address'", "]", "-", "register_address", ")", "-", "register_object", "[", "'offset'", "]", ")", "]", "|=", "reg", "[", "max", "(", "0", ",", "register_object", "[", "'bitlength'", "]", "-", "16", "-", "16", "*", "(", "register_address", "-", "register_object", "[", "'address'", "]", ")", "+", "register_object", "[", "'offset'", "]", ")", ":", "min", "(", "register_object", "[", "'bitlength'", "]", ",", "register_object", "[", "'bitlength'", "]", "+", "16", "-", "16", "*", "(", "register_address", "-", "register_object", "[", "'address'", "]", "+", "1", ")", "+", "register_object", "[", "'offset'", "]", ")", "]", "# [ bit(0)... bit(n-1) bit(n) ]\r", "else", ":", "raise", "Exception", "(", "\"wrong register object\"", ")", "if", "register_littleendian", ":", "register_bitset", ".", "reverse", "(", ")", "register_bitsets", ".", "append", "(", "register_bitset", ")", "return", "register_bitsets" ]
Calculating register bitsets from register addresses.

Usage: get_global_register_bitsets([regaddress_1, regaddress_2, ...])
Receives: list of register addresses
Returns: list of register bitsets
[ "Calculating", "register", "bitsets", "from", "register", "addresses", ".", "Usage", ":", "get_global_register_bitsets", "(", "[", "regaddress_1", "regaddress_2", "...", "]", ")", "Receives", ":", "list", "of", "register", "addresses", "Returns", ":", "list", "of", "register", "bitsets" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L520-L548
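A sketch of the return value, assuming a FEI4Register instance named register: each list element is one 16-bit bitarray with the packed content of the corresponding register address:

    bitsets = register.get_global_register_bitsets([1, 2])
    for address, bitset in zip([1, 2], bitsets):
        print(address, bitset.to01())  # 16 '0'/'1' characters per address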
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_pixel_register_objects
def get_pixel_register_objects(self, do_sort=None, reverse=False, **kwargs):
    """Generate register objects (list) from register name list

    Usage: get_pixel_register_objects(name = ["TDAC", "FDAC"])
    Receives: keyword lists of register names, addresses,...
    Returns: list of register objects
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_objects = []
    else:
        register_objects = [self.pixel_registers[reg] for reg in names]
    for keyword in kwargs.iterkeys():
        allowed_values = iterable(kwargs[keyword])
        register_objects.extend(filter(lambda pixel_register: pixel_register[keyword] in allowed_values, self.pixel_registers.itervalues()))
    if not register_objects and filter(None, kwargs.itervalues()):
        raise ValueError('Pixel register objects empty')
    if do_sort:
        return sorted(register_objects, key=itemgetter(*do_sort), reverse=reverse)
    else:
        return register_objects
python
def get_pixel_register_objects(self, do_sort=None, reverse=False, **kwargs):
    """Generate register objects (list) from register name list

    Usage: get_pixel_register_objects(name = ["TDAC", "FDAC"])
    Receives: keyword lists of register names, addresses,...
    Returns: list of register objects
    """
    # speed up of the most often used keyword name
    try:
        names = iterable(kwargs.pop('name'))
    except KeyError:
        register_objects = []
    else:
        register_objects = [self.pixel_registers[reg] for reg in names]
    for keyword in kwargs.iterkeys():
        allowed_values = iterable(kwargs[keyword])
        register_objects.extend(filter(lambda pixel_register: pixel_register[keyword] in allowed_values, self.pixel_registers.itervalues()))
    if not register_objects and filter(None, kwargs.itervalues()):
        raise ValueError('Pixel register objects empty')
    if do_sort:
        return sorted(register_objects, key=itemgetter(*do_sort), reverse=reverse)
    else:
        return register_objects
[ "def", "get_pixel_register_objects", "(", "self", ",", "do_sort", "=", "None", ",", "reverse", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# speed up of the most often used keyword name\r", "try", ":", "names", "=", "iterable", "(", "kwargs", ".", "pop", "(", "'name'", ")", ")", "except", "KeyError", ":", "register_objects", "=", "[", "]", "else", ":", "register_objects", "=", "[", "self", ".", "pixel_registers", "[", "reg", "]", "for", "reg", "in", "names", "]", "for", "keyword", "in", "kwargs", ".", "iterkeys", "(", ")", ":", "allowed_values", "=", "iterable", "(", "kwargs", "[", "keyword", "]", ")", "register_objects", ".", "extend", "(", "filter", "(", "lambda", "pixel_register", ":", "pixel_register", "[", "keyword", "]", "in", "allowed_values", ",", "self", ".", "pixel_registers", ".", "itervalues", "(", ")", ")", ")", "if", "not", "register_objects", "and", "filter", "(", "None", ",", "kwargs", ".", "itervalues", "(", ")", ")", ":", "raise", "ValueError", "(", "'Pixel register objects empty'", ")", "if", "do_sort", ":", "return", "sorted", "(", "register_objects", ",", "key", "=", "itemgetter", "(", "*", "do_sort", ")", ",", "reverse", "=", "reverse", ")", "else", ":", "return", "register_objects" ]
Generate register objects (list) from register name list

Usage: get_pixel_register_objects(name = ["TDAC", "FDAC"])
Receives: keyword lists of register names, addresses,...
Returns: list of register objects
[ "Generate", "register", "objects", "(", "list", ")", "from", "register", "name", "list", "Usage", ":", "get_pixel_register_objects", "(", "name", "=", "[", "TDAC", "FDAC", "]", ")", "Receives", ":", "keyword", "lists", "of", "register", "names", "addresses", "...", "Returns", ":", "list", "of", "register", "objects" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L550-L573
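A hedged example mirroring the docstring usage (register is an assumed FEI4Register instance):

    tdac, fdac = register.get_pixel_register_objects(name=["TDAC", "FDAC"])
    print(tdac['bitlength'], tdac['value'].shape)  # per-pixel value array, e.g. shape (80, 336) for the FE-I4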
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.get_pixel_register_bitset
def get_pixel_register_bitset(self, register_object, bit_no, dc_no):
    """Calculating a double column bitset from a pixel register object.

    Usage: get_pixel_register_bitset(object, bit_number, double_column_number)
    Receives: register object, bit number, double column number
    Returns: double column bitset
    """
    if not 0 <= dc_no < 40:
        raise ValueError("Pixel register %s: DC out of range" % register_object['name'])
    if not 0 <= bit_no < register_object['bitlength']:
        raise ValueError("Pixel register %s: bit number out of range" % register_object['name'])
    col0 = register_object['value'][dc_no * 2, :]
    sel0 = (2 ** bit_no == (col0 & 2 ** bit_no))
    bv0 = bitarray(sel0.tolist(), endian='little')
    col1 = register_object['value'][dc_no * 2 + 1, :]
    sel1 = (2 ** bit_no == (col1 & 2 ** bit_no))
    # sel1 = sel1.astype(numpy.uint8)  # copy of array
    # sel1 = sel1.view(dtype=np.uint8)  # in-place type conversion
    bv1 = bitarray(sel1.tolist(), endian='little')
    bv1.reverse()  # shifted first
    # bv = bv1 + bv0
    return bv1 + bv0
python
def get_pixel_register_bitset(self, register_object, bit_no, dc_no):
    """Calculating a double column bitset from a pixel register object.

    Usage: get_pixel_register_bitset(object, bit_number, double_column_number)
    Receives: register object, bit number, double column number
    Returns: double column bitset
    """
    if not 0 <= dc_no < 40:
        raise ValueError("Pixel register %s: DC out of range" % register_object['name'])
    if not 0 <= bit_no < register_object['bitlength']:
        raise ValueError("Pixel register %s: bit number out of range" % register_object['name'])
    col0 = register_object['value'][dc_no * 2, :]
    sel0 = (2 ** bit_no == (col0 & 2 ** bit_no))
    bv0 = bitarray(sel0.tolist(), endian='little')
    col1 = register_object['value'][dc_no * 2 + 1, :]
    sel1 = (2 ** bit_no == (col1 & 2 ** bit_no))
    # sel1 = sel1.astype(numpy.uint8)  # copy of array
    # sel1 = sel1.view(dtype=np.uint8)  # in-place type conversion
    bv1 = bitarray(sel1.tolist(), endian='little')
    bv1.reverse()  # shifted first
    # bv = bv1 + bv0
    return bv1 + bv0
[ "def", "get_pixel_register_bitset", "(", "self", ",", "register_object", ",", "bit_no", ",", "dc_no", ")", ":", "if", "not", "0", "<=", "dc_no", "<", "40", ":", "raise", "ValueError", "(", "\"Pixel register %s: DC out of range\"", "%", "register_object", "[", "'name'", "]", ")", "if", "not", "0", "<=", "bit_no", "<", "register_object", "[", "'bitlength'", "]", ":", "raise", "ValueError", "(", "\"Pixel register %s: bit number out of range\"", "%", "register_object", "[", "'name'", "]", ")", "col0", "=", "register_object", "[", "'value'", "]", "[", "dc_no", "*", "2", ",", ":", "]", "sel0", "=", "(", "2", "**", "bit_no", "==", "(", "col0", "&", "2", "**", "bit_no", ")", ")", "bv0", "=", "bitarray", "(", "sel0", ".", "tolist", "(", ")", ",", "endian", "=", "'little'", ")", "col1", "=", "register_object", "[", "'value'", "]", "[", "dc_no", "*", "2", "+", "1", ",", ":", "]", "sel1", "=", "(", "2", "**", "bit_no", "==", "(", "col1", "&", "2", "**", "bit_no", ")", ")", "# sel1 = sel1.astype(numpy.uint8) # copy of array\r", "# sel1 = sel1.view(dtype=np.uint8) # in-place type conversion\r", "bv1", "=", "bitarray", "(", "sel1", ".", "tolist", "(", ")", ",", "endian", "=", "'little'", ")", "bv1", ".", "reverse", "(", ")", "# shifted first\r", "# bv = bv1+bv0\r", "return", "bv1", "+", "bv0" ]
Calculating a double column bitset from a pixel register object.

Usage: get_pixel_register_bitset(object, bit_number, double_column_number)
Receives: register object, bit number, double column number
Returns: double column bitset
[ "Calculating", "pixel", "register", "bitsets", "from", "pixel", "register", "addresses", ".", "Usage", ":", "get_pixel_register_bitset", "(", "object", "bit_number", "double_column_number", ")", "Receives", ":", "register", "object", "bit", "number", "double", "column", "number", "Returns", ":", "double", "column", "bitset" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L575-L597
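A sketch tying this to get_pixel_register_objects above; register and the pixel register name 'Enable' are assumptions. For one double column the result holds 2 columns x 336 rows = 672 bits:

    enable = register.get_pixel_register_objects(name=['Enable'])[0]
    bv = register.get_pixel_register_bitset(enable, bit_no=0, dc_no=0)
    print(bv.length())  # 672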
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.create_restore_point
def create_restore_point(self, name=None):
    '''Creating a configuration restore point.

    Parameters
    ----------
    name : str
        Name of the restore point. If not given, a unique name based on the current UTC timestamp will be generated.
    '''
    if name is None:
        for i in iter(int, 1):
            name = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f') + '_' + str(i)
            try:
                self.config_state[name]
            except KeyError:
                break
            else:
                pass
    if name in self.config_state:
        raise ValueError('Restore point %s already exists' % name)
    self.config_state[name] = (copy.deepcopy(self.global_registers), copy.deepcopy(self.pixel_registers))
    return name
python
def create_restore_point(self, name=None):
    '''Creating a configuration restore point.

    Parameters
    ----------
    name : str
        Name of the restore point. If not given, a unique name based on the current UTC timestamp will be generated.
    '''
    if name is None:
        for i in iter(int, 1):
            name = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f') + '_' + str(i)
            try:
                self.config_state[name]
            except KeyError:
                break
            else:
                pass
    if name in self.config_state:
        raise ValueError('Restore point %s already exists' % name)
    self.config_state[name] = (copy.deepcopy(self.global_registers), copy.deepcopy(self.pixel_registers))
    return name
[ "def", "create_restore_point", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "for", "i", "in", "iter", "(", "int", ",", "1", ")", ":", "name", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "'%Y%m%d%H%M%S%f'", ")", "+", "'_'", "+", "str", "(", "i", ")", "try", ":", "self", ".", "config_state", "[", "name", "]", "except", "KeyError", ":", "break", "else", ":", "pass", "if", "name", "in", "self", ".", "config_state", ":", "raise", "ValueError", "(", "'Restore point %s already exists'", "%", "name", ")", "self", ".", "config_state", "[", "name", "]", "=", "(", "copy", ".", "deepcopy", "(", "self", ".", "global_registers", ")", ",", "copy", ".", "deepcopy", "(", "self", ".", "pixel_registers", ")", ")", "return", "name" ]
Creating a configuration restore point.

Parameters
----------
name : str
    Name of the restore point. If not given, a unique name based on the current UTC timestamp will be generated.
[ "Creating", "a", "configuration", "restore", "point", ".", "Parameters", "----------", "name", ":", "str", "Name", "of", "the", "restore", "point", ".", "If", "not", "given", "a", "md5", "hash", "will", "be", "generated", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L607-L627
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.restore
def restore(self, name=None, keep=False, last=True, global_register=True, pixel_register=True):
    '''Restoring a configuration restore point.

    Parameters
    ----------
    name : str
        Name of the restore point. If not given, the first or last restore point will be restored (see the last parameter).
    keep : bool
        Keeping restore point for later use.
    last : bool
        If name is not given, the latest restore point will be taken.
    global_register : bool
        Restore global register.
    pixel_register : bool
        Restore pixel register.
    '''
    if name is None:
        if keep:
            name = next(reversed(self.config_state)) if last else next(iter(self.config_state))
            value = self.config_state[name]
        else:
            name, value = self.config_state.popitem(last=last)
    else:
        value = self.config_state[name]
        if not keep:
            value = copy.deepcopy(value)  # make a copy before deleting object
            del self.config_state[name]
    if global_register:
        self.global_registers = copy.deepcopy(value[0])
    if pixel_register:
        self.pixel_registers = copy.deepcopy(value[1])
python
def restore(self, name=None, keep=False, last=True, global_register=True, pixel_register=True):
    '''Restoring a configuration restore point.

    Parameters
    ----------
    name : str
        Name of the restore point. If not given, the first or last restore point will be restored (see the last parameter).
    keep : bool
        Keeping restore point for later use.
    last : bool
        If name is not given, the latest restore point will be taken.
    global_register : bool
        Restore global register.
    pixel_register : bool
        Restore pixel register.
    '''
    if name is None:
        if keep:
            name = next(reversed(self.config_state)) if last else next(iter(self.config_state))
            value = self.config_state[name]
        else:
            name, value = self.config_state.popitem(last=last)
    else:
        value = self.config_state[name]
        if not keep:
            value = copy.deepcopy(value)  # make a copy before deleting object
            del self.config_state[name]
    if global_register:
        self.global_registers = copy.deepcopy(value[0])
    if pixel_register:
        self.pixel_registers = copy.deepcopy(value[1])
[ "def", "restore", "(", "self", ",", "name", "=", "None", ",", "keep", "=", "False", ",", "last", "=", "True", ",", "global_register", "=", "True", ",", "pixel_register", "=", "True", ")", ":", "if", "name", "is", "None", ":", "if", "keep", ":", "name", "=", "next", "(", "reversed", "(", "self", ".", "config_state", ")", ")", "if", "last", "else", "next", "(", "iter", "(", "self", ".", "config_state", ")", ")", "value", "=", "self", ".", "config_state", "[", "name", "]", "else", ":", "name", ",", "value", "=", "self", ".", "config_state", ".", "popitem", "(", "last", "=", "last", ")", "else", ":", "value", "=", "self", ".", "config_state", "[", "name", "]", "if", "not", "keep", ":", "value", "=", "copy", ".", "deepcopy", "(", "value", ")", "# make a copy before deleting object\r", "del", "self", ".", "config_state", "[", "name", "]", "if", "global_register", ":", "self", ".", "global_registers", "=", "copy", ".", "deepcopy", "(", "value", "[", "0", "]", ")", "if", "pixel_register", ":", "self", ".", "pixel_registers", "=", "copy", ".", "deepcopy", "(", "value", "[", "1", "]", ")" ]
Restoring a configuration restore point.

Parameters
----------
name : str
    Name of the restore point. If not given, the first or last restore point will be restored (see the last parameter).
keep : bool
    Keeping restore point for later use.
last : bool
    If name is not given, the latest restore point will be taken.
global_register : bool
    Restore global register.
pixel_register : bool
    Restore pixel register.
[ "Restoring", "a", "configuration", "restore", "point", ".", "Parameters", "----------", "name", ":", "str", "Name", "of", "the", "restore", "point", ".", "If", "not", "given", "a", "md5", "hash", "will", "be", "generated", ".", "keep", ":", "bool", "Keeping", "restore", "point", "for", "later", "use", ".", "last", ":", "bool", "If", "name", "is", "not", "given", "the", "latest", "restore", "point", "will", "be", "taken", ".", "global_register", ":", "bool", "Restore", "global", "register", ".", "pixel_register", ":", "bool", "Restore", "pixel", "register", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L629-L660
SiLab-Bonn/pyBAR
pybar/fei4/register.py
FEI4Register.clear_restore_points
def clear_restore_points(self, name=None):
    '''Deleting one or all configuration restore points.

    Parameters
    ----------
    name : str
        Name of the restore point to be deleted. If not given, all restore points will be deleted.
    '''
    if name is None:
        self.config_state.clear()
    else:
        del self.config_state[name]
python
def clear_restore_points(self, name=None):
    '''Deleting one or all configuration restore points.

    Parameters
    ----------
    name : str
        Name of the restore point to be deleted. If not given, all restore points will be deleted.
    '''
    if name is None:
        self.config_state.clear()
    else:
        del self.config_state[name]
[ "def", "clear_restore_points", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "self", ".", "config_state", ".", "clear", "(", ")", "else", ":", "del", "self", ".", "config_state", "[", "name", "]" ]
Deleting one or all configuration restore points.

Parameters
----------
name : str
    Name of the restore point to be deleted. If not given, all restore points will be deleted.
[ "Deleting", "all", "/", "a", "configuration", "restore", "points", "/", "point", ".", "Parameters", "----------", "name", ":", "str", "Name", "of", "the", "restore", "point", "to", "be", "deleted", ".", "If", "not", "given", "all", "restore", "points", "will", "be", "deleted", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/fei4/register.py#L662-L673
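A round-trip sketch of the three restore point methods above; register is an assumed FEI4Register instance and set_global_register_value an assumed setter used only to modify something between the calls:

    name = register.create_restore_point()  # auto-generated timestamp-based name
    register.set_global_register_value('Vthin_AltFine', 100)  # change the configuration
    register.restore(name=name)  # configuration is back to the saved state
    register.clear_restore_points()  # drop any remaining restore points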
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
save_configuration_dict
def save_configuration_dict(h5_file, configuration_name, configuration, **kwargs):
    '''Stores any configuration dictionary to HDF5 file.

    Parameters
    ----------
    h5_file : string, file
        Filename of the HDF5 configuration file or file object.
    configuration_name : str
        Configuration name. Will be used for table name.
    configuration : dict
        Configuration dictionary.
    '''
    def save_conf():
        try:
            h5_file.remove_node(h5_file.root.configuration, name=configuration_name)
        except tb.NodeError:
            pass
        try:
            configuration_group = h5_file.create_group(h5_file.root, "configuration")
        except tb.NodeError:
            configuration_group = h5_file.root.configuration
        scan_param_table = h5_file.create_table(configuration_group, name=configuration_name, description=NameValue, title=configuration_name)
        row_scan_param = scan_param_table.row
        for key, value in configuration.iteritems():
            row_scan_param['name'] = key
            row_scan_param['value'] = str(value)
            row_scan_param.append()
        scan_param_table.flush()

    if isinstance(h5_file, tb.file.File):
        save_conf()
    else:
        if os.path.splitext(h5_file)[1].strip().lower() != ".h5":
            h5_file = os.path.splitext(h5_file)[0] + ".h5"
        with tb.open_file(h5_file, mode="a", title='', **kwargs) as h5_file:
            save_conf()
python
def save_configuration_dict(h5_file, configuration_name, configuration, **kwargs):
    '''Stores any configuration dictionary to HDF5 file.

    Parameters
    ----------
    h5_file : string, file
        Filename of the HDF5 configuration file or file object.
    configuration_name : str
        Configuration name. Will be used for table name.
    configuration : dict
        Configuration dictionary.
    '''
    def save_conf():
        try:
            h5_file.remove_node(h5_file.root.configuration, name=configuration_name)
        except tb.NodeError:
            pass
        try:
            configuration_group = h5_file.create_group(h5_file.root, "configuration")
        except tb.NodeError:
            configuration_group = h5_file.root.configuration
        scan_param_table = h5_file.create_table(configuration_group, name=configuration_name, description=NameValue, title=configuration_name)
        row_scan_param = scan_param_table.row
        for key, value in configuration.iteritems():
            row_scan_param['name'] = key
            row_scan_param['value'] = str(value)
            row_scan_param.append()
        scan_param_table.flush()

    if isinstance(h5_file, tb.file.File):
        save_conf()
    else:
        if os.path.splitext(h5_file)[1].strip().lower() != ".h5":
            h5_file = os.path.splitext(h5_file)[0] + ".h5"
        with tb.open_file(h5_file, mode="a", title='', **kwargs) as h5_file:
            save_conf()
[ "def", "save_configuration_dict", "(", "h5_file", ",", "configuation_name", ",", "configuration", ",", "*", "*", "kwargs", ")", ":", "def", "save_conf", "(", ")", ":", "try", ":", "h5_file", ".", "remove_node", "(", "h5_file", ".", "root", ".", "configuration", ",", "name", "=", "configuation_name", ")", "except", "tb", ".", "NodeError", ":", "pass", "try", ":", "configuration_group", "=", "h5_file", ".", "create_group", "(", "h5_file", ".", "root", ",", "\"configuration\"", ")", "except", "tb", ".", "NodeError", ":", "configuration_group", "=", "h5_file", ".", "root", ".", "configuration", "scan_param_table", "=", "h5_file", ".", "create_table", "(", "configuration_group", ",", "name", "=", "configuation_name", ",", "description", "=", "NameValue", ",", "title", "=", "configuation_name", ")", "row_scan_param", "=", "scan_param_table", ".", "row", "for", "key", ",", "value", "in", "dict", ".", "iteritems", "(", "configuration", ")", ":", "row_scan_param", "[", "'name'", "]", "=", "key", "row_scan_param", "[", "'value'", "]", "=", "str", "(", "value", ")", "row_scan_param", ".", "append", "(", ")", "scan_param_table", ".", "flush", "(", ")", "if", "isinstance", "(", "h5_file", ",", "tb", ".", "file", ".", "File", ")", ":", "save_conf", "(", ")", "else", ":", "if", "os", ".", "path", ".", "splitext", "(", "h5_file", ")", "[", "1", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "h5_file", "=", "os", ".", "path", ".", "splitext", "(", "h5_file", ")", "[", "0", "]", "+", "\".h5\"", "with", "tb", ".", "open_file", "(", "h5_file", ",", "mode", "=", "\"a\"", ",", "title", "=", "''", ",", "*", "*", "kwargs", ")", "as", "h5_file", ":", "save_conf", "(", ")" ]
Stores any configuration dictionary to HDF5 file.

Parameters
----------
h5_file : string, file
    Filename of the HDF5 configuration file or file object.
configuration_name : str
    Configuration name. Will be used for table name.
configuration : dict
    Configuration dictionary.
[ "Stores", "any", "configuration", "dictionary", "to", "HDF5", "file", ".", "Parameters", "----------", "h5_file", ":", "string", "file", "Filename", "of", "the", "HDF5", "configuration", "file", "or", "file", "object", ".", "configuation_name", ":", "str", "Configuration", "name", ".", "Will", "be", "used", "for", "table", "name", ".", "configuration", ":", "dict", "Configuration", "dictionary", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L13-L49
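A minimal call sketch; the file name and dictionary content are made up for illustration:

    # creates (or replaces) the table /configuration/scan_parameters in scan.h5
    save_configuration_dict('scan.h5', 'scan_parameters', {'PlsrDAC': 100, 'trigger_count': 16})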
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
convert_data_array
def convert_data_array(array, filter_func=None, converter_func=None):  # TODO: add copy parameter, otherwise in-place
    '''Filter and convert raw data numpy array (numpy.ndarray).

    Parameters
    ----------
    array : numpy.array
        Raw data array.
    filter_func : function
        Function that takes array and returns true or false for each item in array.
    converter_func : function
        Function that takes array and returns an array or tuple of arrays.

    Returns
    -------
    data_array : numpy.array
        Data numpy array of specified dimension (converter_func) and content (filter_func)
    '''
    # if filter_func != None:
    #     if not hasattr(filter_func, '__call__'):
    #         raise ValueError('Filter is not callable')
    if filter_func:
        array = array[filter_func(array)]
    # if converter_func != None:
    #     if not hasattr(converter_func, '__call__'):
    #         raise ValueError('Converter is not callable')
    if converter_func:
        array = converter_func(array)
    return array
python
def convert_data_array(array, filter_func=None, converter_func=None):  # TODO: add copy parameter, otherwise in-place
    '''Filter and convert raw data numpy array (numpy.ndarray).

    Parameters
    ----------
    array : numpy.array
        Raw data array.
    filter_func : function
        Function that takes array and returns true or false for each item in array.
    converter_func : function
        Function that takes array and returns an array or tuple of arrays.

    Returns
    -------
    data_array : numpy.array
        Data numpy array of specified dimension (converter_func) and content (filter_func)
    '''
    # if filter_func != None:
    #     if not hasattr(filter_func, '__call__'):
    #         raise ValueError('Filter is not callable')
    if filter_func:
        array = array[filter_func(array)]
    # if converter_func != None:
    #     if not hasattr(converter_func, '__call__'):
    #         raise ValueError('Converter is not callable')
    if converter_func:
        array = converter_func(array)
    return array
[ "def", "convert_data_array", "(", "array", ",", "filter_func", "=", "None", ",", "converter_func", "=", "None", ")", ":", "# TODO: add copy parameter, otherwise in-place\r", "# if filter_func != None:\r", "# if not hasattr(filter_func, '__call__'):\r", "# raise ValueError('Filter is not callable')\r", "if", "filter_func", ":", "array", "=", "array", "[", "filter_func", "(", "array", ")", "]", "# if converter_func != None:\r", "# if not hasattr(converter_func, '__call__'):\r", "# raise ValueError('Converter is not callable')\r", "if", "converter_func", ":", "array", "=", "converter_func", "(", "array", ")", "return", "array" ]
Filter and convert raw data numpy array (numpy.ndarray).

Parameters
----------
array : numpy.array
    Raw data array.
filter_func : function
    Function that takes array and returns true or false for each item in array.
converter_func : function
    Function that takes array and returns an array or tuple of arrays.

Returns
-------
data_array : numpy.array
    Data numpy array of specified dimension (converter_func) and content (filter_func)
[ "Filter", "and", "convert", "raw", "data", "numpy", "array", "(", "numpy", ".", "ndarray", ")", ".", "Parameters", "----------", "array", ":", "numpy", ".", "array", "Raw", "data", "array", ".", "filter_func", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "converter_func", ":", "function", "Function", "that", "takes", "array", "and", "returns", "an", "array", "or", "tuple", "of", "arrays", ".", "Returns", "-------", "data_array", ":", "numpy", ".", "array", "Data", "numpy", "array", "of", "specified", "dimension", "(", "converter_func", ")", "and", "content", "(", "filter_func", ")" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L52-L79
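A filter-only sketch combining convert_data_array with the filter factories defined below; is_data_record is assumed to be one of the word filters of this module (it is referenced in the logical_and docstring further down):

    import numpy as np
    raw_data = np.empty(0, dtype=np.uint32)  # placeholder; normally the raw words of one readout
    dr_ch4 = convert_data_array(raw_data, filter_func=logical_and(is_data_record, is_data_from_channel(4)))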
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
convert_data_iterable
def convert_data_iterable(data_iterable, filter_func=None, converter_func=None):  # TODO: add concatenate parameter
    '''Convert raw data in data iterable.

    Parameters
    ----------
    data_iterable : iterable
        Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).
    filter_func : function
        Function that takes array and returns true or false for each item in array.
    converter_func : function
        Function that takes array and returns an array or tuple of arrays.

    Returns
    -------
    data_list : list
        Data list of the form [(converted data, timestamp_start, timestamp_stop, status), (...), ...]
    '''
    data_list = []
    for item in data_iterable:
        data_list.append((convert_data_array(item[0], filter_func=filter_func, converter_func=converter_func), item[1], item[2], item[3]))
    return data_list
python
def convert_data_iterable(data_iterable, filter_func=None, converter_func=None):  # TODO: add concatenate parameter
    '''Convert raw data in data iterable.

    Parameters
    ----------
    data_iterable : iterable
        Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).
    filter_func : function
        Function that takes array and returns true or false for each item in array.
    converter_func : function
        Function that takes array and returns an array or tuple of arrays.

    Returns
    -------
    data_list : list
        Data list of the form [(converted data, timestamp_start, timestamp_stop, status), (...), ...]
    '''
    data_list = []
    for item in data_iterable:
        data_list.append((convert_data_array(item[0], filter_func=filter_func, converter_func=converter_func), item[1], item[2], item[3]))
    return data_list
[ "def", "convert_data_iterable", "(", "data_iterable", ",", "filter_func", "=", "None", ",", "converter_func", "=", "None", ")", ":", "# TODO: add concatenate parameter\r", "data_list", "=", "[", "]", "for", "item", "in", "data_iterable", ":", "data_list", ".", "append", "(", "(", "convert_data_array", "(", "item", "[", "0", "]", ",", "filter_func", "=", "filter_func", ",", "converter_func", "=", "converter_func", ")", ",", "item", "[", "1", "]", ",", "item", "[", "2", "]", ",", "item", "[", "3", "]", ")", ")", "return", "data_list" ]
Convert raw data in data iterable.

Parameters
----------
data_iterable : iterable
    Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).
filter_func : function
    Function that takes array and returns true or false for each item in array.
converter_func : function
    Function that takes array and returns an array or tuple of arrays.

Returns
-------
data_list : list
    Data list of the form [(converted data, timestamp_start, timestamp_stop, status), (...), ...]
[ "Convert", "raw", "data", "in", "data", "iterable", ".", "Parameters", "----------", "data_iterable", ":", "iterable", "Iterable", "where", "each", "element", "is", "a", "tuple", "with", "following", "content", ":", "(", "raw", "data", "timestamp_start", "timestamp_stop", "status", ")", ".", "filter_func", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "converter_func", ":", "function", "Function", "that", "takes", "array", "and", "returns", "an", "array", "or", "tuple", "of", "arrays", ".", "Returns", "-------", "data_list", ":", "list", "Data", "list", "of", "the", "form", "[", "(", "converted", "data", "timestamp_start", "timestamp_stop", "status", ")", "(", "...", ")", "...", "]" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L82-L102
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
data_array_from_data_iterable
def data_array_from_data_iterable(data_iterable):
    '''Convert data iterable to raw data numpy array.

    Parameters
    ----------
    data_iterable : iterable
        Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).

    Returns
    -------
    data_array : numpy.array
        Concatenated data array.
    '''
    try:
        data_array = np.concatenate([item[0] for item in data_iterable])
    except ValueError:  # length is 0
        data_array = np.empty(0, dtype=np.uint32)
    return data_array
python
def data_array_from_data_iterable(data_iterable):
    '''Convert data iterable to raw data numpy array.

    Parameters
    ----------
    data_iterable : iterable
        Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).

    Returns
    -------
    data_array : numpy.array
        Concatenated data array.
    '''
    try:
        data_array = np.concatenate([item[0] for item in data_iterable])
    except ValueError:  # length is 0
        data_array = np.empty(0, dtype=np.uint32)
    return data_array
[ "def", "data_array_from_data_iterable", "(", "data_iterable", ")", ":", "try", ":", "data_array", "=", "np", ".", "concatenate", "(", "[", "item", "[", "0", "]", "for", "item", "in", "data_iterable", "]", ")", "except", "ValueError", ":", "# length is 0\r", "data_array", "=", "np", ".", "empty", "(", "0", ",", "dtype", "=", "np", ".", "uint32", ")", "return", "data_array" ]
Convert data iterable to raw data numpy array.

Parameters
----------
data_iterable : iterable
    Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).

Returns
-------
data_array : numpy.array
    Concatenated data array.
[ "Convert", "data", "iterable", "to", "raw", "data", "numpy", "array", ".", "Parameters", "----------", "data_iterable", ":", "iterable", "Iterable", "where", "each", "element", "is", "a", "tuple", "with", "following", "content", ":", "(", "raw", "data", "timestamp_start", "timestamp_stop", "status", ")", ".", "Returns", "-------", "data_array", ":", "numpy", ".", "array", "concatenated", "data", "array" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L105-L122
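A tiny self-contained sketch with synthetic readout tuples (timestamps and status are dummies):

    import numpy as np
    data_iterable = [(np.array([0x1, 0x2], dtype=np.uint32), 0.0, 0.1, 0),
                     (np.array([0x3], dtype=np.uint32), 0.1, 0.2, 0)]
    arr = data_array_from_data_iterable(data_iterable)  # -> array([1, 2, 3], dtype=uint32)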
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
convert_tdc_to_channel
def convert_tdc_to_channel(channel):
    '''Converts TDC words at a given channel to common TDC header (0x4).
    '''
    def f(value):
        filter_func = logical_and(is_tdc_word, is_tdc_from_channel(channel))
        select = filter_func(value)
        value[select] = np.bitwise_and(value[select], 0x0FFFFFFF)
        value[select] = np.bitwise_or(value[select], 0x40000000)
        return value
    f.__name__ = "convert_tdc_to_channel_" + str(channel)  # set the name once at factory time, not inside f
    return f
python
def convert_tdc_to_channel(channel):
    '''Converts TDC words at a given channel to common TDC header (0x4).
    '''
    def f(value):
        filter_func = logical_and(is_tdc_word, is_tdc_from_channel(channel))
        select = filter_func(value)
        value[select] = np.bitwise_and(value[select], 0x0FFFFFFF)
        value[select] = np.bitwise_or(value[select], 0x40000000)
        return value
    f.__name__ = "convert_tdc_to_channel_" + str(channel)  # set the name once at factory time, not inside f
    return f
[ "def", "convert_tdc_to_channel", "(", "channel", ")", ":", "def", "f", "(", "value", ")", ":", "filter_func", "=", "logical_and", "(", "is_tdc_word", ",", "is_tdc_from_channel", "(", "channel", ")", ")", "select", "=", "filter_func", "(", "value", ")", "value", "[", "select", "]", "=", "np", ".", "bitwise_and", "(", "value", "[", "select", "]", ",", "0x0FFFFFFF", ")", "value", "[", "select", "]", "=", "np", ".", "bitwise_or", "(", "value", "[", "select", "]", ",", "0x40000000", ")", "f", ".", "__name__", "=", "\"convert_tdc_to_channel_\"", "+", "str", "(", "channel", ")", "return", "value", "return", "f" ]
Converts TDC words at a given channel to common TDC header (0x4).
[ "Converts", "TDC", "words", "at", "a", "given", "channel", "to", "common", "TDC", "header", "(", "0x4", ")", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L151-L161
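The header replacement itself is plain bit arithmetic; a standalone numpy sketch with a made-up word (channel-specific header 0x5 in the upper 4 bits):

    import numpy as np
    word = np.uint32(0x5A00000F)
    common = np.bitwise_or(np.bitwise_and(word, 0x0FFFFFFF), 0x40000000)
    print(hex(int(common)))  # 0x4a00000f: upper 4 bits replaced by the common TDC header 0x4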
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
is_data_from_channel
def is_data_from_channel(channel=4):  # function factory
    '''Selecting FE data from given channel.

    Parameters
    ----------
    channel : int
        Channel number (4 is default channel on Single Chip Card).

    Returns
    -------
    Function.

    Usage:
    1 Selecting FE data from channel 4 (combine with is_fe_word):
      filter_fe_data_from_channel_4 = logical_and(is_fe_word, is_data_from_channel(4))
      fe_data_from_channel_4 = data_array[filter_fe_data_from_channel_4(data_array)]
    2 Selecting data from channel 4:
      filter_data_from_channel_4 = is_data_from_channel(4)
      data_from_channel_4 = data_array[filter_data_from_channel_4(fe_data_array)]
    3 Selecting data from channel 4:
      data_from_channel_4 = is_data_from_channel(4)(fe_raw_data)

    Other usage:
    f_ch4 = functools.partial(is_data_from_channel, channel=4)
    l_ch4 = lambda x: is_data_from_channel(channel=4)(x)
    '''
    if 0 <= channel < 16:
        def f(value):
            return np.equal(np.right_shift(np.bitwise_and(value, 0x0F000000), 24), channel)
        f.__name__ = "is_data_from_channel_" + str(channel)  # or use inspect module: inspect.stack()[0][3]
        return f
    else:
        raise ValueError('Invalid channel number')
python
def is_data_from_channel(channel=4):  # function factory
    '''Selecting FE data from given channel.

    Parameters
    ----------
    channel : int
        Channel number (4 is default channel on Single Chip Card).

    Returns
    -------
    Function.

    Usage:
    1 Selecting FE data from channel 4 (combine with is_fe_word):
      filter_fe_data_from_channel_4 = logical_and(is_fe_word, is_data_from_channel(4))
      fe_data_from_channel_4 = data_array[filter_fe_data_from_channel_4(data_array)]
    2 Selecting data from channel 4:
      filter_data_from_channel_4 = is_data_from_channel(4)
      data_from_channel_4 = data_array[filter_data_from_channel_4(fe_data_array)]
    3 Selecting data from channel 4:
      data_from_channel_4 = is_data_from_channel(4)(fe_raw_data)

    Other usage:
    f_ch4 = functools.partial(is_data_from_channel, channel=4)
    l_ch4 = lambda x: is_data_from_channel(channel=4)(x)
    '''
    if 0 <= channel < 16:
        def f(value):
            return np.equal(np.right_shift(np.bitwise_and(value, 0x0F000000), 24), channel)
        f.__name__ = "is_data_from_channel_" + str(channel)  # or use inspect module: inspect.stack()[0][3]
        return f
    else:
        raise ValueError('Invalid channel number')
[ "def", "is_data_from_channel", "(", "channel", "=", "4", ")", ":", "# function factory\r", "if", "channel", ">=", "0", "and", "channel", "<", "16", ":", "def", "f", "(", "value", ")", ":", "return", "np", ".", "equal", "(", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x0F000000", ")", ",", "24", ")", ",", "channel", ")", "f", ".", "__name__", "=", "\"is_data_from_channel_\"", "+", "str", "(", "channel", ")", "# or use inspect module: inspect.stack()[0][3]\r", "return", "f", "else", ":", "raise", "ValueError", "(", "'Invalid channel number'", ")" ]
Selecting FE data from given channel.

Parameters
----------
channel : int
    Channel number (4 is default channel on Single Chip Card).

Returns
-------
Function.

Usage:
1 Selecting FE data from channel 4 (combine with is_fe_word):
  filter_fe_data_from_channel_4 = logical_and(is_fe_word, is_data_from_channel(4))
  fe_data_from_channel_4 = data_array[filter_fe_data_from_channel_4(data_array)]
2 Selecting data from channel 4:
  filter_data_from_channel_4 = is_data_from_channel(4)
  data_from_channel_4 = data_array[filter_data_from_channel_4(fe_data_array)]
3 Selecting data from channel 4:
  data_from_channel_4 = is_data_from_channel(4)(fe_raw_data)

Other usage:
f_ch4 = functools.partial(is_data_from_channel, channel=4)
l_ch4 = lambda x: is_data_from_channel(channel=4)(x)
[ "Selecting", "FE", "data", "from", "given", "channel", ".", "Parameters", "----------", "channel", ":", "int", "Channel", "number", "(", "4", "is", "default", "channel", "on", "Single", "Chip", "Card", ")", ".", "Returns", "-------", "Function", ".", "Usage", ":", "1", "Selecting", "FE", "data", "from", "channel", "4", "(", "combine", "with", "is_fe_word", ")", ":", "filter_fe_data_from_channel_4", "=", "logical_and", "(", "is_fe_word", "is_data_from_channel", "(", "4", "))", "fe_data_from_channel_4", "=", "data_array", "[", "filter_fe_data_from_channel_4", "(", "data_array", ")", "]", "2", "Sleceting", "data", "from", "channel", "4", ":", "filter_data_from_channel_4", "=", "is_data_from_channel", "(", "4", ")", "data_from_channel_4", "=", "data_array", "[", "filter_data_from_channel_4", "(", "fe_data_array", ")", "]", "3", "Sleceting", "data", "from", "channel", "4", ":", "data_from_channel_4", "=", "is_data_from_channel", "(", "4", ")", "(", "fe_raw_data", ")", "Other", "usage", ":", "f_ch4", "=", "functoools", ".", "partial", "(", "is_data_from_channel", "channel", "=", "4", ")", "l_ch4", "=", "lambda", "x", ":", "is_data_from_channel", "(", "x", "channel", "=", "4", ")" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L164-L196
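The channel number sits in bits 24-27 of each word; a self-contained check with made-up words:

    import numpy as np
    words = np.array([0x04001234, 0x05001234], dtype=np.uint32)
    print(is_data_from_channel(4)(words))  # [ True False]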
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
logical_and
def logical_and(f1, f2):  # function factory
    '''Logical and from functions.

    Parameters
    ----------
    f1, f2 : function
        Function that takes array and returns true or false for each item in array.

    Returns
    -------
    Function.

    Usage:
    filter_func = logical_and(is_data_record, is_data_from_channel(4))  # new filter function
    filter_func(array)  # array that has Data Records from channel 4
    '''
    def f(value):
        return np.logical_and(f1(value), f2(value))
    f.__name__ = "(" + f1.__name__ + "_and_" + f2.__name__ + ")"
    return f
python
def logical_and(f1, f2):  # function factory
    '''Logical and from functions.

    Parameters
    ----------
    f1, f2 : function
        Function that takes array and returns true or false for each item in array.

    Returns
    -------
    Function.

    Usage:
    filter_func = logical_and(is_data_record, is_data_from_channel(4))  # new filter function
    filter_func(array)  # array that has Data Records from channel 4
    '''
    def f(value):
        return np.logical_and(f1(value), f2(value))
    f.__name__ = "(" + f1.__name__ + "_and_" + f2.__name__ + ")"
    return f
[ "def", "logical_and", "(", "f1", ",", "f2", ")", ":", "# function factory\r", "def", "f", "(", "value", ")", ":", "return", "np", ".", "logical_and", "(", "f1", "(", "value", ")", ",", "f2", "(", "value", ")", ")", "f", ".", "__name__", "=", "\"(\"", "+", "f1", ".", "__name__", "+", "\"_and_\"", "+", "f2", ".", "__name__", "+", "\")\"", "return", "f" ]
Logical and from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function. Usage: filter_func=logical_and(is_data_record, is_data_from_channel(4)) # new filter function filter_func(array) # array that has Data Records from channel 4
[ "Logical", "and", "from", "functions", ".", "Parameters", "----------", "f1", "f2", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "Returns", "-------", "Function", ".", "Usage", ":", "filter_func", "=", "logical_and", "(", "is_data_record", "is_data_from_channel", "(", "4", "))", "#", "new", "filter", "function", "filter_func", "(", "array", ")", "#", "array", "that", "has", "Data", "Records", "from", "channel", "4" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L199-L218
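A minimal composition sketch, assuming is_data_record is importable from the same module (it is referenced in the docstring above):

import numpy as np
from pybar.daq.readout_utils import logical_and, is_data_record, is_data_from_channel

filter_func = logical_and(is_data_record, is_data_from_channel(4))
print(filter_func.__name__)  # '(is_data_record_and_is_data_from_channel_4)', handy for logging
# mask = filter_func(raw_data)  # boolean mask over a raw data array, True where both predicates hold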
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
logical_or
def logical_or(f1, f2): # function factory '''Logical or from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def f(value): return np.logical_or(f1(value), f2(value)) f.__name__ = "(" + f1.__name__ + "_or_" + f2.__name__ + ")" return f
python
def logical_or(f1, f2): # function factory '''Logical or from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def f(value): return np.logical_or(f1(value), f2(value)) f.__name__ = "(" + f1.__name__ + "_or_" + f2.__name__ + ")" return f
[ "def", "logical_or", "(", "f1", ",", "f2", ")", ":", "# function factory\r", "def", "f", "(", "value", ")", ":", "return", "np", ".", "logical_or", "(", "f1", "(", "value", ")", ",", "f2", "(", "value", ")", ")", "f", ".", "__name__", "=", "\"(\"", "+", "f1", ".", "__name__", "+", "\"_or_\"", "+", "f2", ".", "__name__", "+", "\")\"", "return", "f" ]
Logical or from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function.
[ "Logical", "or", "from", "functions", ".", "Parameters", "----------", "f1", "f2", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "Returns", "-------", "Function", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L221-L236
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
logical_not
def logical_not(f): # function factory '''Logical not from functions. Parameters ---------- f : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def not_f(value): return np.logical_not(f(value)) not_f.__name__ = "not_" + f.__name__ return not_f
python
def logical_not(f): # function factory '''Logical not from functions. Parameters ---------- f : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def not_f(value): return np.logical_not(f(value)) not_f.__name__ = "not_" + f.__name__ return not_f
[ "def", "logical_not", "(", "f", ")", ":", "# function factory\r", "def", "f", "(", "value", ")", ":", "return", "np", ".", "logical_not", "(", "f", "(", "value", ")", ")", "f", ".", "__name__", "=", "\"not_\"", "+", "f", ".", "__name__", "return", "f" ]
Logical not from functions. Parameters ---------- f : function Function that takes array and returns true or false for each item in array. Returns ------- Function.
[ "Logical", "not", "from", "functions", ".", "Parameters", "----------", "f1", "f2", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "Returns", "-------", "Function", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L239-L254
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
logical_xor
def logical_xor(f1, f2): # function factory '''Logical xor from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def f(value): return np.logical_xor(f1(value), f2(value)) f.__name__ = "(" + f1.__name__ + "_xor_" + f2.__name__ + ")" return f
python
def logical_xor(f1, f2): # function factory '''Logical xor from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function. ''' def f(value): return np.logical_xor(f1(value), f2(value)) f.__name__ = "(" + f1.__name__ + "_xor_" + f2.__name__ + ")" return f
[ "def", "logical_xor", "(", "f1", ",", "f2", ")", ":", "# function factory\r", "def", "f", "(", "value", ")", ":", "return", "np", ".", "logical_xor", "(", "f1", "(", "value", ")", ",", "f2", "(", "value", ")", ")", "f", ".", "__name__", "=", "\"(\"", "+", "f1", ".", "__name__", "+", "\"_xor_\"", "+", "f2", ".", "__name__", "+", "\")\"", "return", "f" ]
Logical xor from functions. Parameters ---------- f1, f2 : function Function that takes array and returns true or false for each item in array. Returns ------- Function.
[ "Logical", "xor", "from", "functions", ".", "Parameters", "----------", "f1", "f2", ":", "function", "Function", "that", "takes", "array", "and", "returns", "true", "or", "false", "for", "each", "item", "in", "array", ".", "Returns", "-------", "Function", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L257-L272
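logical_or, logical_not and logical_xor follow the same factory pattern as logical_and, so one combined sketch covers all three (same import assumption as above; logical_not as corrected in its record no longer shadows the wrapped function):

from pybar.daq.readout_utils import logical_or, logical_not, logical_xor, is_data_from_channel

either = logical_or(is_data_from_channel(4), is_data_from_channel(5))        # words from channel 4 or 5
neither = logical_not(either)                                                # everything else
exactly_one = logical_xor(is_data_from_channel(4), is_data_from_channel(5))  # never both (cannot happen here)
print(neither.__name__)  # 'not_(is_data_from_channel_4_or_is_data_from_channel_5)'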
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
get_trigger_data
def get_trigger_data(value, mode=0): '''Returns 31bit trigger counter (mode=0), 31bit timestamp (mode=1), 15bit timestamp and 16bit trigger counter (mode=2) ''' if mode == 2: return np.right_shift(np.bitwise_and(value, 0x7FFF0000), 16), np.bitwise_and(value, 0x0000FFFF) else: return np.bitwise_and(value, 0x7FFFFFFF)
python
def get_trigger_data(value, mode=0): '''Returns 31bit trigger counter (mode=0), 31bit timestamp (mode=1), 15bit timestamp and 16bit trigger counter (mode=2) ''' if mode == 2: return np.right_shift(np.bitwise_and(value, 0x7FFF0000), 16), np.bitwise_and(value, 0x0000FFFF) else: return np.bitwise_and(value, 0x7FFFFFFF)
[ "def", "get_trigger_data", "(", "value", ",", "mode", "=", "0", ")", ":", "if", "mode", "==", "2", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x7FFF0000", ")", ",", "16", ")", ",", "np", ".", "bitwise_and", "(", "value", ",", "0x0000FFFF", ")", "else", ":", "return", "np", ".", "bitwise_and", "(", "value", ",", "0x7FFFFFFF", ")" ]
Returns 31bit trigger counter (mode=0), 31bit timestamp (mode=1), 15bit timestamp and 16bit trigger counter (mode=2)
[ "Returns", "31bit", "trigger", "counter", "(", "mode", "=", "0", ")", "31bit", "timestamp", "(", "mode", "=", "1", ")", "15bit", "timestamp", "and", "16bit", "trigger", "counter", "(", "mode", "=", "2", ")" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L315-L321
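A worked example for mode=2, where the 31 payload bits split into a 15 bit time stamp (bits 16-30) and a 16 bit trigger number (bits 0-15); the word value below is made up:

import numpy as np
from pybar.daq.readout_utils import get_trigger_data

word = np.uint32((1 << 31) | (0x1234 << 16) | 0x0042)  # hypothetical trigger word
time_stamp, trigger_number = get_trigger_data(word, mode=2)
print(hex(time_stamp))      # 0x1234
print(hex(trigger_number))  # 0x42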
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
get_col_row_tot_array_from_data_record_array
def get_col_row_tot_array_from_data_record_array(array): # TODO: max ToT '''Convert raw data array to column, row, and ToT array. Parameters ---------- array : numpy.array Raw data array. Returns ------- Tuple of arrays. ''' def get_col_row_tot_1_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), np.right_shift(np.bitwise_and(value, 0x000000F0), 4) def get_col_row_tot_2_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.add(np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), 1), np.bitwise_and(value, 0x0000000F) col_row_tot_1_array = np.column_stack(get_col_row_tot_1_array_from_data_record_array(array)) col_row_tot_2_array = np.column_stack(get_col_row_tot_2_array_from_data_record_array(array)) # interweave array here col_row_tot_array = np.vstack((col_row_tot_1_array.T, col_row_tot_2_array.T)).reshape((3, -1), order='F').T # http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays # remove ToT >= 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible) try: col_row_tot_array_filtered = col_row_tot_array[col_row_tot_array[:, 2] < 14] # [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)] except IndexError: # logging.warning('Array is empty') return np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')) return col_row_tot_array_filtered[:, 0], col_row_tot_array_filtered[:, 1], col_row_tot_array_filtered[:, 2]
python
def get_col_row_tot_array_from_data_record_array(array): # TODO: max ToT '''Convert raw data array to column, row, and ToT array. Parameters ---------- array : numpy.array Raw data array. Returns ------- Tuple of arrays. ''' def get_col_row_tot_1_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), np.right_shift(np.bitwise_and(value, 0x000000F0), 4) def get_col_row_tot_2_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.add(np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), 1), np.bitwise_and(value, 0x0000000F) col_row_tot_1_array = np.column_stack(get_col_row_tot_1_array_from_data_record_array(array)) col_row_tot_2_array = np.column_stack(get_col_row_tot_2_array_from_data_record_array(array)) # interweave array here col_row_tot_array = np.vstack((col_row_tot_1_array.T, col_row_tot_2_array.T)).reshape((3, -1), order='F').T # http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays # remove ToT >= 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible) try: col_row_tot_array_filtered = col_row_tot_array[col_row_tot_array[:, 2] < 14] # [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)] except IndexError: # logging.warning('Array is empty') return np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')) return col_row_tot_array_filtered[:, 0], col_row_tot_array_filtered[:, 1], col_row_tot_array_filtered[:, 2]
[ "def", "get_col_row_tot_array_from_data_record_array", "(", "array", ")", ":", "# TODO: max ToT\r", "def", "get_col_row_tot_1_array_from_data_record_array", "(", "value", ")", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x00FE0000", ")", ",", "17", ")", ",", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x0001FF00", ")", ",", "8", ")", ",", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x000000F0", ")", ",", "4", ")", "def", "get_col_row_tot_2_array_from_data_record_array", "(", "value", ")", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x00FE0000", ")", ",", "17", ")", ",", "np", ".", "add", "(", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x0001FF00", ")", ",", "8", ")", ",", "1", ")", ",", "np", ".", "bitwise_and", "(", "value", ",", "0x0000000F", ")", "col_row_tot_1_array", "=", "np", ".", "column_stack", "(", "get_col_row_tot_1_array_from_data_record_array", "(", "array", ")", ")", "col_row_tot_2_array", "=", "np", ".", "column_stack", "(", "get_col_row_tot_2_array_from_data_record_array", "(", "array", ")", ")", "# interweave array here\r", "col_row_tot_array", "=", "np", ".", "vstack", "(", "(", "col_row_tot_1_array", ".", "T", ",", "col_row_tot_2_array", ".", "T", ")", ")", ".", "reshape", "(", "(", "3", ",", "-", "1", ")", ",", "order", "=", "'F'", ")", ".", "T", "# http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays\r", "# remove ToT > 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible)\r", "try", ":", "col_row_tot_array_filtered", "=", "col_row_tot_array", "[", "col_row_tot_array", "[", ":", ",", "2", "]", "<", "14", "]", "# [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)]\r", "except", "IndexError", ":", "# logging.warning('Array is empty')\r", "return", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", ",", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", ",", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", "return", "col_row_tot_array_filtered", "[", ":", ",", "0", "]", ",", "col_row_tot_array_filtered", "[", ":", ",", "1", "]", ",", "col_row_tot_array_filtered", "[", ":", ",", "2", "]" ]
Convert raw data array to column, row, and ToT array. Parameters ---------- array : numpy.array Raw data array. Returns ------- Tuple of arrays.
[ "Convert", "raw", "data", "array", "to", "column", "row", "and", "ToT", "array", ".", "Parameters", "----------", "array", ":", "numpy", ".", "array", "Raw", "data", "array", ".", "Returns", "-------", "Tuple", "of", "arrays", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L342-L370
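A decoding sketch for one hypothetical FE-I4 data record; the bit layout (column in bits 17-23, row in bits 8-16, two 4 bit ToT codes in bits 0-7) is read off the masks in the code above:

import numpy as np
from pybar.daq.readout_utils import get_col_row_tot_array_from_data_record_array

# one data record encoding hit (col=10, row=100, tot=5); the second ToT code 0xF means no hit
word = np.array([(10 << 17) | (100 << 8) | (5 << 4) | 0xF], dtype=np.uint32)
col, row, tot = get_col_row_tot_array_from_data_record_array(word)
print(col)  # [10]
print(row)  # [100]
print(tot)  # [5]; the ToT=15 entry was dropped by the ToT < 14 cut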
SiLab-Bonn/pyBAR
pybar/daq/readout_utils.py
interpret_pixel_data
def interpret_pixel_data(data, dc, pixel_array, invert=True): '''Takes the pixel raw data and interprets them. This includes consistency checks and pixel/data matching. The data has to come from one double column only but can have more than one pixel bit (e.g. TDAC = 5 bit). Parameters ---------- data : numpy.ndarray The raw data words. dc : int The double column where the data is from. pixel_array : numpy.ma.ndarray The masked numpy.ndarray to be filled. The mask is set to zero for pixels with valid data. invert : boolean Invert the read pixel data. ''' # data validity cut, VR has to follow an AR index_value = np.where(is_address_record(data))[0] + 1 # assume value record follows address record index_value = index_value[is_value_record(data[index_value])] # delete all non value records index_address = index_value - 1 # calculate address record indices that are followed by a value record # create the pixel address/value arrays address = get_address_record_address(data[index_address]) value = get_value_record(data[index_address + 1]) # split array for each bit in pixel data, split is done on decreasing address values address_split = np.array_split(address, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) value_split = np.array_split(value, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) if len(address_split) > 5: pixel_array.mask[dc * 2, :] = True pixel_array.mask[dc * 2 + 1, :] = True logging.warning('Invalid pixel data for DC %d', dc) return mask = np.empty_like(pixel_array.data) # BUG in numpy: pixel_array is de-masked if not .data is used mask[:] = len(address_split) for bit, (bit_address, bit_value) in enumerate(zip(address_split, value_split)): # loop over all bits of the pixel data # error output, pixel data is often corrupt for FE-I4A if len(bit_address) == 0: logging.warning('No pixel data for DC %d', dc) continue if len(bit_address) != 42: logging.warning('Some pixel data missing for DC %d', dc) if (np.any(bit_address > 672)): raise RuntimeError('Pixel data corrupt for DC %d' % dc) # set pixel that occurred in the data stream pixel = [] for i in bit_address: pixel.extend(range(i - 15, i + 1)) pixel = np.array(pixel) # create bit set array value_new = bit_value.view(np.uint8) # interpret 32 bit numpy array as uint8 to be able to use bit unpacking; byte unpacking is not supported yet if invert: value_new = np.invert(value_new) # read back values are inverted value_new = np.insert(value_new[::4], np.arange(len(value_new[1::4])), value_new[1::4]) # delete 0 padding value_bit = np.unpackbits(value_new, axis=0) if len(address_split) == 5: # detect TDAC data, here the bit order is flipped bit_set = len(address_split) - bit - 1 else: bit_set = bit pixel_array.data[dc * 2, pixel[pixel >= 336] - 336] = np.bitwise_or(pixel_array.data[dc * 2, pixel[pixel >= 336] - 336], np.left_shift(value_bit[pixel >= 336], bit_set)) pixel_array.data[dc * 2 + 1, pixel[pixel < 336]] = np.bitwise_or(pixel_array.data[dc * 2 + 1, pixel[pixel < 336]], np.left_shift(value_bit[pixel < 336], bit_set)[::-1]) mask[dc * 2, pixel[pixel >= 336] - 336] = mask[dc * 2, pixel[pixel >= 336] - 336] - 1 mask[dc * 2 + 1, pixel[pixel < 336]] = mask[dc * 2 + 1, pixel[pixel < 336]] - 1 pixel_array.mask[np.equal(mask, 0)] = False
python
def interpret_pixel_data(data, dc, pixel_array, invert=True): '''Takes the pixel raw data and interprets them. This includes consistency checks and pixel/data matching. The data has to come from one double column only but can have more than one pixel bit (e.g. TDAC = 5 bit). Parameters ---------- data : numpy.ndarray The raw data words. dc : int The double column where the data is from. pixel_array : numpy.ma.ndarray The masked numpy.ndarray to be filled. The mask is set to zero for pixels with valid data. invert : boolean Invert the read pixel data. ''' # data validity cut, VR has to follow an AR index_value = np.where(is_address_record(data))[0] + 1 # assume value record follows address record index_value = index_value[is_value_record(data[index_value])] # delete all non value records index_address = index_value - 1 # calculate address record indices that are followed by a value record # create the pixel address/value arrays address = get_address_record_address(data[index_address]) value = get_value_record(data[index_address + 1]) # split array for each bit in pixel data, split is done on decreasing address values address_split = np.array_split(address, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) value_split = np.array_split(value, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1) if len(address_split) > 5: pixel_array.mask[dc * 2, :] = True pixel_array.mask[dc * 2 + 1, :] = True logging.warning('Invalid pixel data for DC %d', dc) return mask = np.empty_like(pixel_array.data) # BUG in numpy: pixel_array is de-masked if not .data is used mask[:] = len(address_split) for bit, (bit_address, bit_value) in enumerate(zip(address_split, value_split)): # loop over all bits of the pixel data # error output, pixel data is often corrupt for FE-I4A if len(bit_address) == 0: logging.warning('No pixel data for DC %d', dc) continue if len(bit_address) != 42: logging.warning('Some pixel data missing for DC %d', dc) if (np.any(bit_address > 672)): raise RuntimeError('Pixel data corrupt for DC %d' % dc) # set pixel that occurred in the data stream pixel = [] for i in bit_address: pixel.extend(range(i - 15, i + 1)) pixel = np.array(pixel) # create bit set array value_new = bit_value.view(np.uint8) # interpret 32 bit numpy array as uint8 to be able to use bit unpacking; byte unpacking is not supported yet if invert: value_new = np.invert(value_new) # read back values are inverted value_new = np.insert(value_new[::4], np.arange(len(value_new[1::4])), value_new[1::4]) # delete 0 padding value_bit = np.unpackbits(value_new, axis=0) if len(address_split) == 5: # detect TDAC data, here the bit order is flipped bit_set = len(address_split) - bit - 1 else: bit_set = bit pixel_array.data[dc * 2, pixel[pixel >= 336] - 336] = np.bitwise_or(pixel_array.data[dc * 2, pixel[pixel >= 336] - 336], np.left_shift(value_bit[pixel >= 336], bit_set)) pixel_array.data[dc * 2 + 1, pixel[pixel < 336]] = np.bitwise_or(pixel_array.data[dc * 2 + 1, pixel[pixel < 336]], np.left_shift(value_bit[pixel < 336], bit_set)[::-1]) mask[dc * 2, pixel[pixel >= 336] - 336] = mask[dc * 2, pixel[pixel >= 336] - 336] - 1 mask[dc * 2 + 1, pixel[pixel < 336]] = mask[dc * 2 + 1, pixel[pixel < 336]] - 1 pixel_array.mask[np.equal(mask, 0)] = False
[ "def", "interpret_pixel_data", "(", "data", ",", "dc", ",", "pixel_array", ",", "invert", "=", "True", ")", ":", "# data validity cut, VR has to follow an AR\r", "index_value", "=", "np", ".", "where", "(", "is_address_record", "(", "data", ")", ")", "[", "0", "]", "+", "1", "# assume value record follows address record\r", "index_value", "=", "index_value", "[", "is_value_record", "(", "data", "[", "index_value", "]", ")", "]", "# delete all non value records\r", "index_address", "=", "index_value", "-", "1", "# calculate address record indices that are followed by an value record\r", "# create the pixel address/value arrays\r", "address", "=", "get_address_record_address", "(", "data", "[", "index_address", "]", ")", "value", "=", "get_value_record", "(", "data", "[", "index_address", "+", "1", "]", ")", "# split array for each bit in pixel data, split is done on decreasing address values\r", "address_split", "=", "np", ".", "array_split", "(", "address", ",", "np", ".", "where", "(", "np", ".", "diff", "(", "address", ".", "astype", "(", "np", ".", "int32", ")", ")", "<", "0", ")", "[", "0", "]", "+", "1", ")", "value_split", "=", "np", ".", "array_split", "(", "value", ",", "np", ".", "where", "(", "np", ".", "diff", "(", "address", ".", "astype", "(", "np", ".", "int32", ")", ")", "<", "0", ")", "[", "0", "]", "+", "1", ")", "if", "len", "(", "address_split", ")", ">", "5", ":", "pixel_array", ".", "mask", "[", "dc", "*", "2", ",", ":", "]", "=", "True", "pixel_array", ".", "mask", "[", "dc", "*", "2", "+", "1", ",", ":", "]", "=", "True", "logging", ".", "warning", "(", "'Invalid pixel data for DC %d'", ",", "dc", ")", "return", "mask", "=", "np", ".", "empty_like", "(", "pixel_array", ".", "data", ")", "# BUG in numpy: pixel_array is de-masked if not .data is used\r", "mask", "[", ":", "]", "=", "len", "(", "address_split", ")", "for", "bit", ",", "(", "bit_address", ",", "bit_value", ")", "in", "enumerate", "(", "zip", "(", "address_split", ",", "value_split", ")", ")", ":", "# loop over all bits of the pixel data\r", "# error output, pixel data is often corrupt for FE-I4A\r", "if", "len", "(", "bit_address", ")", "==", "0", ":", "logging", ".", "warning", "(", "'No pixel data for DC %d'", ",", "dc", ")", "continue", "if", "len", "(", "bit_address", ")", "!=", "42", ":", "logging", ".", "warning", "(", "'Some pixel data missing for DC %d'", ",", "dc", ")", "if", "(", "np", ".", "any", "(", "bit_address", ">", "672", ")", ")", ":", "RuntimeError", "(", "'Pixel data corrupt for DC %d'", ",", "dc", ")", "# set pixel that occurred in the data stream\r", "pixel", "=", "[", "]", "for", "i", "in", "bit_address", ":", "pixel", ".", "extend", "(", "range", "(", "i", "-", "15", ",", "i", "+", "1", ")", ")", "pixel", "=", "np", ".", "array", "(", "pixel", ")", "# create bit set array\r", "value_new", "=", "bit_value", ".", "view", "(", "np", ".", "uint8", ")", "# interpret 32 bit numpy array as uint8 to be able to use bit unpacking; byte unpacking is not supported yet\r", "if", "invert", ":", "value_new", "=", "np", ".", "invert", "(", "value_new", ")", "# read back values are inverted\r", "value_new", "=", "np", ".", "insert", "(", "value_new", "[", ":", ":", "4", "]", ",", "np", ".", "arange", "(", "len", "(", "value_new", "[", "1", ":", ":", "4", "]", ")", ")", ",", "value_new", "[", "1", ":", ":", "4", "]", ")", "# delete 0 padding\r", "value_bit", "=", "np", ".", "unpackbits", "(", "value_new", ",", "axis", "=", "0", ")", "if", "len", "(", "address_split", ")", "==", "5", ":", "# 
detect TDAC data, here the bit order is flipped\r", "bit_set", "=", "len", "(", "address_split", ")", "-", "bit", "-", "1", "else", ":", "bit_set", "=", "bit", "pixel_array", ".", "data", "[", "dc", "*", "2", ",", "pixel", "[", "pixel", ">=", "336", "]", "-", "336", "]", "=", "np", ".", "bitwise_or", "(", "pixel_array", ".", "data", "[", "dc", "*", "2", ",", "pixel", "[", "pixel", ">=", "336", "]", "-", "336", "]", ",", "np", ".", "left_shift", "(", "value_bit", "[", "pixel", ">=", "336", "]", ",", "bit_set", ")", ")", "pixel_array", ".", "data", "[", "dc", "*", "2", "+", "1", ",", "pixel", "[", "pixel", "<", "336", "]", "]", "=", "np", ".", "bitwise_or", "(", "pixel_array", ".", "data", "[", "dc", "*", "2", "+", "1", ",", "pixel", "[", "pixel", "<", "336", "]", "]", ",", "np", ".", "left_shift", "(", "value_bit", "[", "pixel", "<", "336", "]", ",", "bit_set", ")", "[", ":", ":", "-", "1", "]", ")", "mask", "[", "dc", "*", "2", ",", "pixel", "[", "pixel", ">=", "336", "]", "-", "336", "]", "=", "mask", "[", "dc", "*", "2", ",", "pixel", "[", "pixel", ">=", "336", "]", "-", "336", "]", "-", "1", "mask", "[", "dc", "*", "2", "+", "1", ",", "pixel", "[", "pixel", "<", "336", "]", "]", "=", "mask", "[", "dc", "*", "2", "+", "1", ",", "pixel", "[", "pixel", "<", "336", "]", "]", "-", "1", "pixel_array", ".", "mask", "[", "np", ".", "equal", "(", "mask", ",", "0", ")", "]", "=", "False" ]
Takes the pixel raw data and interprets them. This includes consistency checks and pixel/data matching. The data has to come from one double column only but can have more than one pixel bit (e.g. TDAC = 5 bit). Parameters ---------- data : numpy.ndarray The raw data words. dc : int The double column where the data is from. pixel_array : numpy.ma.ndarray The masked numpy.ndarray to be filled. The mask is set to zero for pixels with valid data. invert : boolean Invert the read pixel data.
[ "Takes", "the", "pixel", "raw", "data", "and", "interprets", "them", ".", "This", "includes", "consistency", "checks", "and", "pixel", "/", "data", "matching", ".", "The", "data", "has", "to", "come", "from", "one", "double", "column", "only", "but", "can", "have", "more", "than", "one", "pixel", "bit", "(", "e", ".", "g", ".", "TDAC", "=", "5", "bit", ")", ".", "Parameters", "----------", "data", ":", "numpy", ".", "ndarray", "The", "raw", "data", "words", ".", "dc", ":", "int", "The", "double", "column", "where", "the", "data", "is", "from", ".", "pixel_array", ":", "numpy", ".", "ma", ".", "ndarray", "The", "masked", "numpy", ".", "ndarrays", "to", "be", "filled", ".", "The", "masked", "is", "set", "to", "zero", "for", "pixels", "with", "valid", "data", ".", "invert", ":", "boolean", "Invert", "the", "read", "pixel", "data", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/daq/readout_utils.py#L428-L499
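A sketch of preparing the masked target array for interpret_pixel_data; the (80, 336) FE-I4 geometry and the fully masked start state are assumptions read off the dc * 2 indexing above:

import numpy as np

# start fully masked; interpret_pixel_data clears the mask only for pixels with valid data
pixel_array = np.ma.masked_array(np.zeros(shape=(80, 336), dtype=np.uint8), mask=True)
# for dc in range(40):  # hypothetical loop over all double columns
#     interpret_pixel_data(raw_words_of_dc, dc, pixel_array, invert=True)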
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData.set_standard_settings
def set_standard_settings(self): '''Set all settings to their standard values. ''' if self.is_open(self.out_file_h5): self.out_file_h5.close() self.out_file_h5 = None self._setup_clusterizer() self.chunk_size = 3000000 self.n_injections = None self.trig_count = 0 # 0 trig_count = 16 BCID per trigger self.max_tot_value = 13 self.vcal_c0, self.vcal_c1 = None, None self.c_low, self.c_mid, self.c_high = None, None, None self.c_low_mask, self.c_high_mask = None, None self._filter_table = tb.Filters(complib='blosc', complevel=5, fletcher32=False) warnings.simplefilter("ignore", OptimizeWarning) self.meta_event_index = None self.fei4b = False self.create_hit_table = False self.create_empty_event_hits = False self.create_meta_event_index = True self.create_tot_hist = True self.create_mean_tot_hist = False self.create_tot_pixel_hist = True self.create_rel_bcid_hist = True self.correct_corrupted_data = False self.create_error_hist = True self.create_service_record_hist = True self.create_occupancy_hist = True self.create_meta_word_index = False self.create_source_scan_hist = False self.create_tdc_hist = False self.create_tdc_counter_hist = False self.create_tdc_pixel_hist = False self.create_trigger_error_hist = False self.create_threshold_hists = False self.create_threshold_mask = True # Threshold/noise histogram mask: masking all pixels out of bounds self.create_fitted_threshold_mask = True # Fitted threshold/noise histogram mask: masking all pixels out of bounds self.create_fitted_threshold_hists = False self.create_cluster_hit_table = False self.create_cluster_table = False self.create_cluster_size_hist = False self.create_cluster_tot_hist = False self.align_at_trigger = False # use the trigger word to align the events self.align_at_tdc = False # use the tdc word to align the events self.trigger_data_format = 0 # 0: 31bit trigger number, 1: 31bit trigger time stamp, 2: 15bit trigger time stamp + 16bit trigger number self.use_tdc_trigger_time_stamp = False # the tdc time stamp is the difference between trigger and tdc rising edge self.max_tdc_delay = 255 self.max_trigger_number = 2 ** 16 - 1 self.set_stop_mode = False
python
def set_standard_settings(self): '''Set all settings to their standard values. ''' if self.is_open(self.out_file_h5): self.out_file_h5.close() self.out_file_h5 = None self._setup_clusterizer() self.chunk_size = 3000000 self.n_injections = None self.trig_count = 0 # 0 trig_count = 16 BCID per trigger self.max_tot_value = 13 self.vcal_c0, self.vcal_c1 = None, None self.c_low, self.c_mid, self.c_high = None, None, None self.c_low_mask, self.c_high_mask = None, None self._filter_table = tb.Filters(complib='blosc', complevel=5, fletcher32=False) warnings.simplefilter("ignore", OptimizeWarning) self.meta_event_index = None self.fei4b = False self.create_hit_table = False self.create_empty_event_hits = False self.create_meta_event_index = True self.create_tot_hist = True self.create_mean_tot_hist = False self.create_tot_pixel_hist = True self.create_rel_bcid_hist = True self.correct_corrupted_data = False self.create_error_hist = True self.create_service_record_hist = True self.create_occupancy_hist = True self.create_meta_word_index = False self.create_source_scan_hist = False self.create_tdc_hist = False self.create_tdc_counter_hist = False self.create_tdc_pixel_hist = False self.create_trigger_error_hist = False self.create_threshold_hists = False self.create_threshold_mask = True # Threshold/noise histogram mask: masking all pixels out of bounds self.create_fitted_threshold_mask = True # Fitted threshold/noise histogram mask: masking all pixels out of bounds self.create_fitted_threshold_hists = False self.create_cluster_hit_table = False self.create_cluster_table = False self.create_cluster_size_hist = False self.create_cluster_tot_hist = False self.align_at_trigger = False # use the trigger word to align the events self.align_at_tdc = False # use the tdc word to align the events self.trigger_data_format = 0 # 0: 31bit trigger number, 1: 31bit trigger time stamp, 2: 15bit trigger time stamp + 16bit trigger number self.use_tdc_trigger_time_stamp = False # the tdc time stamp is the difference between trigger and tdc rising edge self.max_tdc_delay = 255 self.max_trigger_number = 2 ** 16 - 1 self.set_stop_mode = False
[ "def", "set_standard_settings", "(", "self", ")", ":", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "self", ".", "out_file_h5", ".", "close", "(", ")", "self", ".", "out_file_h5", "=", "None", "self", ".", "_setup_clusterizer", "(", ")", "self", ".", "chunk_size", "=", "3000000", "self", ".", "n_injections", "=", "None", "self", ".", "trig_count", "=", "0", "# 0 trig_count = 16 BCID per trigger", "self", ".", "max_tot_value", "=", "13", "self", ".", "vcal_c0", ",", "self", ".", "vcal_c1", "=", "None", ",", "None", "self", ".", "c_low", ",", "self", ".", "c_mid", ",", "self", ".", "c_high", "=", "None", ",", "None", ",", "None", "self", ".", "c_low_mask", ",", "self", ".", "c_high_mask", "=", "None", ",", "None", "self", ".", "_filter_table", "=", "tb", ".", "Filters", "(", "complib", "=", "'blosc'", ",", "complevel", "=", "5", ",", "fletcher32", "=", "False", ")", "warnings", ".", "simplefilter", "(", "\"ignore\"", ",", "OptimizeWarning", ")", "self", ".", "meta_event_index", "=", "None", "self", ".", "fei4b", "=", "False", "self", ".", "create_hit_table", "=", "False", "self", ".", "create_empty_event_hits", "=", "False", "self", ".", "create_meta_event_index", "=", "True", "self", ".", "create_tot_hist", "=", "True", "self", ".", "create_mean_tot_hist", "=", "False", "self", ".", "create_tot_pixel_hist", "=", "True", "self", ".", "create_rel_bcid_hist", "=", "True", "self", ".", "correct_corrupted_data", "=", "False", "self", ".", "create_error_hist", "=", "True", "self", ".", "create_service_record_hist", "=", "True", "self", ".", "create_occupancy_hist", "=", "True", "self", ".", "create_meta_word_index", "=", "False", "self", ".", "create_source_scan_hist", "=", "False", "self", ".", "create_tdc_hist", "=", "False", "self", ".", "create_tdc_counter_hist", "=", "False", "self", ".", "create_tdc_pixel_hist", "=", "False", "self", ".", "create_trigger_error_hist", "=", "False", "self", ".", "create_threshold_hists", "=", "False", "self", ".", "create_threshold_mask", "=", "True", "# Threshold/noise histogram mask: masking all pixels out of bounds", "self", ".", "create_fitted_threshold_mask", "=", "True", "# Fitted threshold/noise histogram mask: masking all pixels out of bounds", "self", ".", "create_fitted_threshold_hists", "=", "False", "self", ".", "create_cluster_hit_table", "=", "False", "self", ".", "create_cluster_table", "=", "False", "self", ".", "create_cluster_size_hist", "=", "False", "self", ".", "create_cluster_tot_hist", "=", "False", "self", ".", "align_at_trigger", "=", "False", "# use the trigger word to align the events", "self", ".", "align_at_tdc", "=", "False", "# use the trigger word to align the events", "self", ".", "trigger_data_format", "=", "0", "# 0: 31bit trigger number, 1: 31bit trigger time stamp, 2: 15bit trigger time stamp + 16bit trigger number", "self", ".", "use_tdc_trigger_time_stamp", "=", "False", "# the tdc time stamp is the difference between trigger and tdc rising edge", "self", ".", "max_tdc_delay", "=", "255", "self", ".", "max_trigger_number", "=", "2", "**", "16", "-", "1", "self", ".", "set_stop_mode", "=", "False" ]
Set all settings to their standard values.
[ "Set", "all", "settings", "to", "their", "standard", "values", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L263-L312
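A minimal configuration sketch: AnalyzeRawData is typically used as a context manager, with the standard settings above overridden per analysis (file names hypothetical):

from pybar.analysis.analyze_raw_data import AnalyzeRawData

with AnalyzeRawData(raw_data_file='scan.h5', analyzed_data_file='scan_interpreted.h5') as analyze_raw_data:
    analyze_raw_data.create_hit_table = True       # keep interpreted hits, off by default
    analyze_raw_data.create_cluster_table = True   # also store clusterizer output
    analyze_raw_data.interpret_word_table()        # run the interpretation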
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData.trig_count
def trig_count(self, value): """Set the number of BCIDs (usually 16) of one event.""" self._trig_count = 16 if value == 0 else value self.interpreter.set_trig_count(self._trig_count)
python
def trig_count(self, value): """Set the number of BCIDs (usually 16) of one event.""" self._trig_count = 16 if value == 0 else value self.interpreter.set_trig_count(self._trig_count)
[ "def", "trig_count", "(", "self", ",", "value", ")", ":", "self", ".", "_trig_count", "=", "16", "if", "value", "==", "0", "else", "value", "self", ".", "interpreter", ".", "set_trig_count", "(", "self", ".", "_trig_count", ")" ]
Set the number of BCIDs (usually 16) of one event.
[ "Set", "the", "numbers", "of", "BCIDs", "(", "usually", "16", ")", "of", "one", "event", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L531-L534
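The setter mirrors the FE-I4 Trig_Count register convention, where a register value of 0 encodes the maximum of 16 BCIDs per trigger:

analyze_raw_data.trig_count = 0  # interpreter now expects 16 data headers per event
analyze_raw_data.trig_count = 5  # interpreter now expects 5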
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData.max_tot_value
def max_tot_value(self, value): """Set maximum ToT value that is considered to be a hit""" self._max_tot_value = value self.interpreter.set_max_tot(self._max_tot_value) self.histogram.set_max_tot(self._max_tot_value) self.clusterizer.set_max_hit_charge(self._max_tot_value)
python
def max_tot_value(self, value): """Set maximum ToT value that is considered to be a hit""" self._max_tot_value = value self.interpreter.set_max_tot(self._max_tot_value) self.histogram.set_max_tot(self._max_tot_value) self.clusterizer.set_max_hit_charge(self._max_tot_value)
[ "def", "max_tot_value", "(", "self", ",", "value", ")", ":", "self", ".", "_max_tot_value", "=", "value", "self", ".", "interpreter", ".", "set_max_tot", "(", "self", ".", "_max_tot_value", ")", "self", ".", "histogram", ".", "set_max_tot", "(", "self", ".", "_max_tot_value", ")", "self", ".", "clusterizer", ".", "set_max_hit_charge", "(", "self", ".", "_max_tot_value", ")" ]
Set maximum ToT value that is considered to be a hit
[ "Set", "maximum", "ToT", "value", "that", "is", "considered", "to", "be", "a", "hit" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L542-L547
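Usage sketch: in the convention of get_col_row_tot_array_from_data_record_array above, ToT codes of 14 and 15 flag late hits and no hit, so the default cut of 13 keeps only real hits:

analyze_raw_data.max_tot_value = 13  # default: accept ToT codes 0..13
# analyze_raw_data.max_tot_value = 10  # hypothetical tighter cut, also rejecting the highest codes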
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData.interpret_word_table
def interpret_word_table(self, analyzed_data_file=None, use_settings_from_file=True, fei4b=None): '''Interprets the raw data word table of all given raw data files with the C++ library. Creates the h5 output file and PDF plots. Parameters ---------- analyzed_data_file : string The file name of the output analyzed data file. If None, the output analyzed data file specified during initialization is taken. use_settings_from_file : boolean True if the needed parameters should be extracted from the raw data file. fei4b : boolean True if the raw data is from FE-I4B. ''' logging.info('Interpreting raw data file(s): ' + (', ').join(self.files_dict.keys())) if self._create_meta_word_index: meta_word = np.empty((self._chunk_size,), dtype=dtype_from_descr(data_struct.MetaInfoWordTable)) self.interpreter.set_meta_data_word_index(meta_word) self.interpreter.reset_event_variables() self.interpreter.reset_counters() self.meta_data = analysis_utils.combine_meta_data(self.files_dict, meta_data_v2=self.interpreter.meta_table_v2) if self.meta_data is None or self.meta_data.shape[0] == 0: raise analysis_utils.IncompleteInputError('Meta data is empty. Stopping interpretation.') self.interpreter.set_meta_data(self.meta_data) # tell interpreter the word index per readout to be able to calculate the event number per read out meta_data_size = self.meta_data.shape[0] self.meta_event_index = np.zeros((meta_data_size,), dtype=[('metaEventIndex', np.uint64)]) # this array is filled by the interpreter and holds the event number per read out self.interpreter.set_meta_event_data(self.meta_event_index) # tell the interpreter the data container to write the meta event index to if self.scan_parameters is None: self.histogram.set_no_scan_parameter() else: self.scan_parameter_index = analysis_utils.get_scan_parameters_index(self.scan_parameters) # an array that labels unique scan parameter combinations self.histogram.add_scan_parameter(self.scan_parameter_index) # just add an index for the different scan parameter combinations if self._create_cluster_size_hist: # Cluster size result histogram self._cluster_size_hist = np.zeros(shape=(6, ), dtype=np.uint32) if self._create_cluster_tot_hist: # Cluster tot/size result histogram self._cluster_tot_hist = np.zeros(shape=(16, 6), dtype=np.uint32) close_analyzed_data_file = False if analyzed_data_file is not None: # if an output file name is specified create new file for analyzed data if self.is_open(self.out_file_h5) and os.path.abspath(analyzed_data_file) == os.path.abspath(self.out_file_h5.filename): out_file_h5 = self.out_file_h5 else: # normalize path analyzed_data_file = os.path.abspath(analyzed_data_file) if os.path.splitext(analyzed_data_file)[1].lower() != ".h5": analyzed_data_file = os.path.splitext(analyzed_data_file)[0] + ".h5" out_file_h5 = tb.open_file(analyzed_data_file, mode="w", title="Interpreted FE-I4 raw data") close_analyzed_data_file = True elif self.is_open(self.out_file_h5): out_file_h5 = self.out_file_h5 else: out_file_h5 = None tmp_out_file_h5 = self.out_file_h5 if not self.is_open(self.out_file_h5) and self.is_open(out_file_h5): close_analyzed_data_file = False tmp_out_file_h5 = out_file_h5 self.out_file_h5 = out_file_h5 if self.is_open(self.out_file_h5): self._analyzed_data_file = self.out_file_h5.filename else: self._analyzed_data_file = None if self._analyzed_data_file is not None: if self._create_hit_table is True: description = data_struct.HitInfoTable().columns.copy() hit_table = self.out_file_h5.create_table(self.out_file_h5.root, name='Hits',
description=description, title='hit_data', filters=self._filter_table, chunkshape=(self._chunk_size / 100,)) if self._create_meta_word_index is True: meta_word_index_table = self.out_file_h5.create_table(self.out_file_h5.root, name='EventMetaData', description=data_struct.MetaInfoWordTable, title='event_meta_data', filters=self._filter_table, chunkshape=(self._chunk_size / 10,)) if self._create_cluster_table: cluster_table = self.out_file_h5.create_table(self.out_file_h5.root, name='Cluster', description=data_struct.ClusterInfoTable, title='Cluster data', filters=self._filter_table, expectedrows=self._chunk_size) if self._create_cluster_hit_table: description = data_struct.ClusterHitInfoTable().columns.copy() cluster_hit_table = self.out_file_h5.create_table(self.out_file_h5.root, name='ClusterHits', description=description, title='cluster_hit_data', filters=self._filter_table, expectedrows=self._chunk_size) logging.info("Interpreting raw data...") progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=analysis_utils.get_total_n_data_words(self.files_dict), term_width=80) progress_bar.start() total_words = 0 for file_index, raw_data_file in enumerate(self.files_dict.keys()): # loop over all raw data files self.interpreter.reset_meta_data_counter() with tb.open_file(raw_data_file, mode="r") as in_file_h5: if use_settings_from_file: self._deduce_settings_from_file(in_file_h5) else: self.fei4b = fei4b if self.interpreter.meta_table_v2: index_start = in_file_h5.root.meta_data.read(field='index_start') index_stop = in_file_h5.root.meta_data.read(field='index_stop') else: index_start = in_file_h5.root.meta_data.read(field='start_index') index_stop = in_file_h5.root.meta_data.read(field='stop_index') bad_word_index = set() # Check for bad data if self._correct_corrupted_data: tw = 2147483648 # trigger word dh = 15269888 # data header is_fe_data_header = logical_and(is_fe_word, is_data_header) found_first_trigger = False readout_slices = np.column_stack((index_start, index_stop)) previous_prepend_data_headers = None prepend_data_headers = None last_good_readout_index = None last_index_with_event_data = None for read_out_index, (index_start, index_stop) in enumerate(readout_slices): try: raw_data = in_file_h5.root.raw_data.read(index_start, index_stop) except OverflowError, e: pass except tb.exceptions.HDF5ExtError: break # previous data chunk had bad data, check for good data if (index_start - 1) in bad_word_index: bad_data, current_prepend_data_headers, _ , _ = check_bad_data(raw_data, prepend_data_headers=1, trig_count=None) if bad_data: bad_word_index = bad_word_index.union(range(index_start, index_stop)) else: # logging.info("found good data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) if last_good_readout_index + 1 == read_out_index - 1: logging.warning("found bad data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, readout_slices[last_good_readout_index][1], readout_slices[read_out_index - 1][1], last_good_readout_index + 1, (readout_slices[read_out_index - 1][1] - readout_slices[last_good_readout_index][1]))) else: logging.warning("found bad data in %s from index %d to %d (chunk %d to %d, length %d)" % (in_file_h5.filename, readout_slices[last_good_readout_index][1], readout_slices[read_out_index - 1][1], last_good_readout_index + 1, read_out_index - 1, 
(readout_slices[read_out_index - 1][1] - readout_slices[last_good_readout_index][1]))) previous_good_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_good_readout_index][0], readout_slices[last_good_readout_index][1] - 1) previous_bad_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_good_readout_index][1] - 1, readout_slices[read_out_index - 1][1]) fixed_raw_data, _ = fix_raw_data(previous_bad_raw_data, lsb_byte=None) fixed_raw_data = np.r_[previous_good_raw_data, fixed_raw_data, raw_data] _, prepend_data_headers, n_triggers, n_dh = check_bad_data(fixed_raw_data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) last_good_readout_index = read_out_index if n_triggers != 0 or n_dh != 0: last_index_with_event_data = read_out_index last_event_data_prepend_data_headers = prepend_data_headers fixed_previous_raw_data = np.r_[previous_good_raw_data, fixed_raw_data] _, previous_prepend_data_headers, _ , _ = check_bad_data(fixed_previous_raw_data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) # check for bad data else: # workaround for first data chunk, might have missing trigger in some rare cases (already fixed in firmware) if read_out_index == 0 and (np.any(is_trigger_word(raw_data) >= 1) or np.any(is_fe_data_header(raw_data) >= 1)): bad_data, current_prepend_data_headers, n_triggers , n_dh = check_bad_data(raw_data, prepend_data_headers=1, trig_count=None) # check for full last event in data if current_prepend_data_headers == self.trig_count: current_prepend_data_headers = None # usually check for bad data happens here else: bad_data, current_prepend_data_headers, n_triggers , n_dh = check_bad_data(raw_data, prepend_data_headers=prepend_data_headers, trig_count=self.trig_count) # do additional check with follow up data chunk and decide whether current chunk is defective or not if bad_data: if read_out_index == 0: fixed_raw_data_chunk, _ = fix_raw_data(raw_data, lsb_byte=None) fixed_raw_data_list = [fixed_raw_data_chunk] else: previous_raw_data = in_file_h5.root.raw_data.read(*readout_slices[read_out_index - 1]) raw_data_with_previous_data_word = np.r_[previous_raw_data[-1], raw_data] fixed_raw_data_chunk, _ = fix_raw_data(raw_data_with_previous_data_word, lsb_byte=None) fixed_raw_data = np.r_[previous_raw_data[:-1], fixed_raw_data_chunk] # last data word of chunk before broken chunk might be a trigger word or data header which cannot be recovered fixed_raw_data_with_tw = np.r_[previous_raw_data[:-1], tw, fixed_raw_data_chunk] fixed_raw_data_with_dh = np.r_[previous_raw_data[:-1], dh, fixed_raw_data_chunk] fixed_raw_data_list = [fixed_raw_data, fixed_raw_data_with_tw, fixed_raw_data_with_dh] bad_fixed_data, _, _ , _ = check_bad_data(fixed_raw_data_with_dh, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) bad_fixed_data = map(lambda data: check_bad_data(data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count)[0], fixed_raw_data_list) if not all(bad_fixed_data): # good fixed data # last word in chunk before current chunk is also bad if index_start != 0: bad_word_index.add(index_start - 1) # adding all words from current chunk bad_word_index = bad_word_index.union(range(index_start, index_stop)) last_good_readout_index = read_out_index - 1 else: # a previous chunk might be broken and the last data word becomes a trigger word, so do additional checks if last_index_with_event_data and last_event_data_prepend_data_headers != read_out_index: before_bad_raw_data
= in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data - 1][0], readout_slices[last_index_with_event_data - 1][1] - 1) previous_bad_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data][0] - 1, readout_slices[last_index_with_event_data][1]) fixed_raw_data, _ = fix_raw_data(previous_bad_raw_data, lsb_byte=None) previous_good_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data][1], readout_slices[read_out_index - 1][1]) fixed_raw_data = np.r_[before_bad_raw_data, fixed_raw_data, previous_good_raw_data, raw_data] bad_fixed_previous_data, current_prepend_data_headers, _, _ = check_bad_data(fixed_raw_data, prepend_data_headers=last_event_data_prepend_data_headers, trig_count=self.trig_count) if not bad_fixed_previous_data: logging.warning("found bad data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, readout_slices[last_index_with_event_data][0], readout_slices[last_index_with_event_data][1], last_index_with_event_data, (readout_slices[last_index_with_event_data][1] - readout_slices[last_index_with_event_data][0]))) bad_word_index = bad_word_index.union(range(readout_slices[last_index_with_event_data][0] - 1, readout_slices[last_index_with_event_data][1])) else: logging.warning("found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) else: logging.warning("found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) if n_triggers != 0 or n_dh != 0: last_index_with_event_data = read_out_index last_event_data_prepend_data_headers = prepend_data_headers if not bad_data or (bad_data and bad_fixed_data): previous_prepend_data_headers = prepend_data_headers prepend_data_headers = current_prepend_data_headers consecutive_bad_words_list = consecutive(sorted(bad_word_index)) lsb_byte = None # Loop over raw data in chunks for word_index in range(0, in_file_h5.root.raw_data.shape[0], self._chunk_size): # loop over all words in the actual raw data file try: raw_data = in_file_h5.root.raw_data.read(word_index, word_index + self._chunk_size) except OverflowError, e: logging.error('%s: 2^31 xrange() limitation in 32-bit Python', e) except tb.exceptions.HDF5ExtError: logging.warning('Raw data file %s has missing raw data. 
Continue raw data analysis.', in_file_h5.filename) break total_words += raw_data.shape[0] # fix bad data if self._correct_corrupted_data: # increase word shift for every bad data chunk in raw data chunk word_shift = 0 chunk_indices = np.arange(word_index, word_index + self._chunk_size) for consecutive_bad_word_indices in consecutive_bad_words_list: selected_words = np.intersect1d(consecutive_bad_word_indices, chunk_indices, assume_unique=True) if selected_words.shape[0]: fixed_raw_data, lsb_byte = fix_raw_data(raw_data[selected_words - word_index - word_shift], lsb_byte=lsb_byte) raw_data = np.r_[raw_data[:selected_words[0] - word_index - word_shift], fixed_raw_data, raw_data[selected_words[-1] - word_index + 1 - word_shift:]] # check if last word of bad data chunk in current raw data chunk if consecutive_bad_word_indices[-1] in selected_words: lsb_byte = None # word shift by removing data word at the beginning of each defect chunk word_shift += 1 # bad data chunk is at the end of current raw data chunk else: break self.interpreter.interpret_raw_data(raw_data) # interpret the raw data # store remaining buffered event in the interpreter at the end of the last file if file_index == len(self.files_dict.keys()) - 1 and word_index == range(0, in_file_h5.root.raw_data.shape[0], self._chunk_size)[-1]: # store hits of the latest event of the last file self.interpreter.store_event() hits = self.interpreter.get_hits() if self.scan_parameters is not None: nEventIndex = self.interpreter.get_n_meta_data_event() self.histogram.add_meta_event_index(self.meta_event_index, nEventIndex) if self.is_histogram_hits(): self.histogram_hits(hits) if self.is_cluster_hits(): cluster_hits, clusters = self.cluster_hits(hits) if self._create_cluster_hit_table: cluster_hit_table.append(cluster_hits) if self._create_cluster_table: cluster_table.append(clusters) if self._create_cluster_size_hist: if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_size_hist.shape[0]: self._cluster_size_hist.resize(np.max(clusters['size']) + 1) self._cluster_size_hist += fast_analysis_utils.hist_1d_index(clusters['size'], shape=self._cluster_size_hist.shape) if self._create_cluster_tot_hist: if clusters['tot'].shape[0] > 0 and np.max(clusters['tot']) + 1 > self._cluster_tot_hist.shape[0]: self._cluster_tot_hist.resize((np.max(clusters['tot']) + 1, self._cluster_tot_hist.shape[1])) if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_tot_hist.shape[1]: self._cluster_tot_hist.resize((self._cluster_tot_hist.shape[0], np.max(clusters['size']) + 1)) self._cluster_tot_hist += fast_analysis_utils.hist_2d_index(clusters['tot'], clusters['size'], shape=self._cluster_tot_hist.shape) if self._analyzed_data_file is not None and self._create_hit_table: hit_table.append(hits) if self._analyzed_data_file is not None and self._create_meta_word_index: size = self.interpreter.get_n_meta_data_word() meta_word_index_table.append(meta_word[:size]) if total_words <= progress_bar.maxval: # Otherwise exception is thrown progress_bar.update(total_words) self.out_file_h5.flush() progress_bar.finish() self._create_additional_data() if close_analyzed_data_file: self.out_file_h5.close() self.out_file_h5 = None self.out_file_h5 = out_file_h5 if self.is_open(self.out_file_h5): self._analyzed_data_file = self.out_file_h5.filename else: self._analyzed_data_file = None
python
def interpret_word_table(self, analyzed_data_file=None, use_settings_from_file=True, fei4b=None): '''Interprets the raw data word table of all given raw data files with the C++ library. Creates the h5 output file and PDF plots. Parameters ---------- analyzed_data_file : string The file name of the output analyzed data file. If None, the output analyzed data file specified during initialization is taken. use_settings_from_file : boolean True if the needed parameters should be extracted from the raw data file. fei4b : boolean True if the raw data is from FE-I4B. ''' logging.info('Interpreting raw data file(s): ' + (', ').join(self.files_dict.keys())) if self._create_meta_word_index: meta_word = np.empty((self._chunk_size,), dtype=dtype_from_descr(data_struct.MetaInfoWordTable)) self.interpreter.set_meta_data_word_index(meta_word) self.interpreter.reset_event_variables() self.interpreter.reset_counters() self.meta_data = analysis_utils.combine_meta_data(self.files_dict, meta_data_v2=self.interpreter.meta_table_v2) if self.meta_data is None or self.meta_data.shape[0] == 0: raise analysis_utils.IncompleteInputError('Meta data is empty. Stopping interpretation.') self.interpreter.set_meta_data(self.meta_data) # tell interpreter the word index per readout to be able to calculate the event number per read out meta_data_size = self.meta_data.shape[0] self.meta_event_index = np.zeros((meta_data_size,), dtype=[('metaEventIndex', np.uint64)]) # this array is filled by the interpreter and holds the event number per read out self.interpreter.set_meta_event_data(self.meta_event_index) # tell the interpreter the data container to write the meta event index to if self.scan_parameters is None: self.histogram.set_no_scan_parameter() else: self.scan_parameter_index = analysis_utils.get_scan_parameters_index(self.scan_parameters) # an array that labels unique scan parameter combinations self.histogram.add_scan_parameter(self.scan_parameter_index) # just add an index for the different scan parameter combinations if self._create_cluster_size_hist: # Cluster size result histogram self._cluster_size_hist = np.zeros(shape=(6, ), dtype=np.uint32) if self._create_cluster_tot_hist: # Cluster tot/size result histogram self._cluster_tot_hist = np.zeros(shape=(16, 6), dtype=np.uint32) close_analyzed_data_file = False if analyzed_data_file is not None: # if an output file name is specified create new file for analyzed data if self.is_open(self.out_file_h5) and os.path.abspath(analyzed_data_file) == os.path.abspath(self.out_file_h5.filename): out_file_h5 = self.out_file_h5 else: # normalize path analyzed_data_file = os.path.abspath(analyzed_data_file) if os.path.splitext(analyzed_data_file)[1].lower() != ".h5": analyzed_data_file = os.path.splitext(analyzed_data_file)[0] + ".h5" out_file_h5 = tb.open_file(analyzed_data_file, mode="w", title="Interpreted FE-I4 raw data") close_analyzed_data_file = True elif self.is_open(self.out_file_h5): out_file_h5 = self.out_file_h5 else: out_file_h5 = None tmp_out_file_h5 = self.out_file_h5 if not self.is_open(self.out_file_h5) and self.is_open(out_file_h5): close_analyzed_data_file = False tmp_out_file_h5 = out_file_h5 self.out_file_h5 = out_file_h5 if self.is_open(self.out_file_h5): self._analyzed_data_file = self.out_file_h5.filename else: self._analyzed_data_file = None if self._analyzed_data_file is not None: if self._create_hit_table is True: description = data_struct.HitInfoTable().columns.copy() hit_table = self.out_file_h5.create_table(self.out_file_h5.root, name='Hits',
description=description, title='hit_data', filters=self._filter_table, chunkshape=(self._chunk_size / 100,)) if self._create_meta_word_index is True: meta_word_index_table = self.out_file_h5.create_table(self.out_file_h5.root, name='EventMetaData', description=data_struct.MetaInfoWordTable, title='event_meta_data', filters=self._filter_table, chunkshape=(self._chunk_size / 10,)) if self._create_cluster_table: cluster_table = self.out_file_h5.create_table(self.out_file_h5.root, name='Cluster', description=data_struct.ClusterInfoTable, title='Cluster data', filters=self._filter_table, expectedrows=self._chunk_size) if self._create_cluster_hit_table: description = data_struct.ClusterHitInfoTable().columns.copy() cluster_hit_table = self.out_file_h5.create_table(self.out_file_h5.root, name='ClusterHits', description=description, title='cluster_hit_data', filters=self._filter_table, expectedrows=self._chunk_size) logging.info("Interpreting raw data...") progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=analysis_utils.get_total_n_data_words(self.files_dict), term_width=80) progress_bar.start() total_words = 0 for file_index, raw_data_file in enumerate(self.files_dict.keys()): # loop over all raw data files self.interpreter.reset_meta_data_counter() with tb.open_file(raw_data_file, mode="r") as in_file_h5: if use_settings_from_file: self._deduce_settings_from_file(in_file_h5) else: self.fei4b = fei4b if self.interpreter.meta_table_v2: index_start = in_file_h5.root.meta_data.read(field='index_start') index_stop = in_file_h5.root.meta_data.read(field='index_stop') else: index_start = in_file_h5.root.meta_data.read(field='start_index') index_stop = in_file_h5.root.meta_data.read(field='stop_index') bad_word_index = set() # Check for bad data if self._correct_corrupted_data: tw = 2147483648 # trigger word dh = 15269888 # data header is_fe_data_header = logical_and(is_fe_word, is_data_header) found_first_trigger = False readout_slices = np.column_stack((index_start, index_stop)) previous_prepend_data_headers = None prepend_data_headers = None last_good_readout_index = None last_index_with_event_data = None for read_out_index, (index_start, index_stop) in enumerate(readout_slices): try: raw_data = in_file_h5.root.raw_data.read(index_start, index_stop) except OverflowError, e: pass except tb.exceptions.HDF5ExtError: break # previous data chunk had bad data, check for good data if (index_start - 1) in bad_word_index: bad_data, current_prepend_data_headers, _ , _ = check_bad_data(raw_data, prepend_data_headers=1, trig_count=None) if bad_data: bad_word_index = bad_word_index.union(range(index_start, index_stop)) else: # logging.info("found good data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) if last_good_readout_index + 1 == read_out_index - 1: logging.warning("found bad data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, readout_slices[last_good_readout_index][1], readout_slices[read_out_index - 1][1], last_good_readout_index + 1, (readout_slices[read_out_index - 1][1] - readout_slices[last_good_readout_index][1]))) else: logging.warning("found bad data in %s from index %d to %d (chunk %d to %d, length %d)" % (in_file_h5.filename, readout_slices[last_good_readout_index][1], readout_slices[read_out_index - 1][1], last_good_readout_index + 1, read_out_index - 1, 
(readout_slices[read_out_index - 1][1] - readout_slices[last_good_readout_index][1]))) previous_good_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_good_readout_index][0], readout_slices[last_good_readout_index][1] - 1) previous_bad_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_good_readout_index][1] - 1, readout_slices[read_out_index - 1][1]) fixed_raw_data, _ = fix_raw_data(previous_bad_raw_data, lsb_byte=None) fixed_raw_data = np.r_[previous_good_raw_data, fixed_raw_data, raw_data] _, prepend_data_headers, n_triggers, n_dh = check_bad_data(fixed_raw_data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) last_good_readout_index = read_out_index if n_triggers != 0 or n_dh != 0: last_index_with_event_data = read_out_index last_event_data_prepend_data_headers = prepend_data_headers fixed_previous_raw_data = np.r_[previous_good_raw_data, fixed_raw_data] _, previous_prepend_data_headers, _ , _ = check_bad_data(fixed_previous_raw_data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) # check for bad data else: # workaround for first data chunk, might have missing trigger in some rare cases (already fixed in firmware) if read_out_index == 0 and (np.any(is_trigger_word(raw_data) >= 1) or np.any(is_fe_data_header(raw_data) >= 1)): bad_data, current_prepend_data_headers, n_triggers , n_dh = check_bad_data(raw_data, prepend_data_headers=1, trig_count=None) # check for full last event in data if current_prepend_data_headers == self.trig_count: current_prepend_data_headers = None # usually check for bad data happens here else: bad_data, current_prepend_data_headers, n_triggers , n_dh = check_bad_data(raw_data, prepend_data_headers=prepend_data_headers, trig_count=self.trig_count) # do additional check with follow up data chunk and decide whether current chunk is defect or not if bad_data: if read_out_index == 0: fixed_raw_data_chunk, _ = fix_raw_data(raw_data, lsb_byte=None) fixed_raw_data_list = [fixed_raw_data_chunk] else: previous_raw_data = in_file_h5.root.raw_data.read(*readout_slices[read_out_index - 1]) raw_data_with_previous_data_word = np.r_[previous_raw_data[-1], raw_data] fixed_raw_data_chunk, _ = fix_raw_data(raw_data_with_previous_data_word, lsb_byte=None) fixed_raw_data = np.r_[previous_raw_data[:-1], fixed_raw_data_chunk] # last data word of chunk before broken chunk migh be a trigger word or data header which cannot be recovered fixed_raw_data_with_tw = np.r_[previous_raw_data[:-1], tw, fixed_raw_data_chunk] fixed_raw_data_with_dh = np.r_[previous_raw_data[:-1], dh, fixed_raw_data_chunk] fixed_raw_data_list = [fixed_raw_data, fixed_raw_data_with_tw, fixed_raw_data_with_dh] bad_fixed_data, _, _ , _ = check_bad_data(fixed_raw_data_with_dh, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count) bad_fixed_data = map(lambda data: check_bad_data(data, prepend_data_headers=previous_prepend_data_headers, trig_count=self.trig_count)[0], fixed_raw_data_list) if not all(bad_fixed_data): # good fixed data # last word in chunk before currrent chunk is also bad if index_start != 0: bad_word_index.add(index_start - 1) # adding all word from current chunk bad_word_index = bad_word_index.union(range(index_start, index_stop)) last_good_readout_index = read_out_index - 1 else: # a previous chunk might be broken and the last data word becomes a trigger word, so do additional checks if last_index_with_event_data and last_event_data_prepend_data_headers != read_out_index: before_bad_raw_data 
= in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data - 1][0], readout_slices[last_index_with_event_data - 1][1] - 1) previous_bad_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data][0] - 1, readout_slices[last_index_with_event_data][1]) fixed_raw_data, _ = fix_raw_data(previous_bad_raw_data, lsb_byte=None) previous_good_raw_data = in_file_h5.root.raw_data.read(readout_slices[last_index_with_event_data][1], readout_slices[read_out_index - 1][1]) fixed_raw_data = np.r_[before_bad_raw_data, fixed_raw_data, previous_good_raw_data, raw_data] bad_fixed_previous_data, current_prepend_data_headers, _, _ = check_bad_data(fixed_raw_data, prepend_data_headers=last_event_data_prepend_data_headers, trig_count=self.trig_count) if not bad_fixed_previous_data: logging.warning("found bad data in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, readout_slices[last_index_with_event_data][0], readout_slices[last_index_with_event_data][1], last_index_with_event_data, (readout_slices[last_index_with_event_data][1] - readout_slices[last_index_with_event_data][0]))) bad_word_index = bad_word_index.union(range(readout_slices[last_index_with_event_data][0] - 1, readout_slices[last_index_with_event_data][1])) else: logging.warning("found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) else: logging.warning("found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start))) if n_triggers != 0 or n_dh != 0: last_index_with_event_data = read_out_index last_event_data_prepend_data_headers = prepend_data_headers if not bad_data or (bad_data and bad_fixed_data): previous_prepend_data_headers = prepend_data_headers prepend_data_headers = current_prepend_data_headers consecutive_bad_words_list = consecutive(sorted(bad_word_index)) lsb_byte = None # Loop over raw data in chunks for word_index in range(0, in_file_h5.root.raw_data.shape[0], self._chunk_size): # loop over all words in the actual raw data file try: raw_data = in_file_h5.root.raw_data.read(word_index, word_index + self._chunk_size) except OverflowError, e: logging.error('%s: 2^31 xrange() limitation in 32-bit Python', e) except tb.exceptions.HDF5ExtError: logging.warning('Raw data file %s has missing raw data. 
Continue raw data analysis.', in_file_h5.filename) break total_words += raw_data.shape[0] # fix bad data if self._correct_corrupted_data: # increase word shift for every bad data chunk in raw data chunk word_shift = 0 chunk_indices = np.arange(word_index, word_index + self._chunk_size) for consecutive_bad_word_indices in consecutive_bad_words_list: selected_words = np.intersect1d(consecutive_bad_word_indices, chunk_indices, assume_unique=True) if selected_words.shape[0]: fixed_raw_data, lsb_byte = fix_raw_data(raw_data[selected_words - word_index - word_shift], lsb_byte=lsb_byte) raw_data = np.r_[raw_data[:selected_words[0] - word_index - word_shift], fixed_raw_data, raw_data[selected_words[-1] - word_index + 1 - word_shift:]] # check if last word of bad data chunk in current raw data chunk if consecutive_bad_word_indices[-1] in selected_words: lsb_byte = None # word shift by removing data word at the beginning of each defect chunk word_shift += 1 # bad data chunk is at the end of current raw data chunk else: break self.interpreter.interpret_raw_data(raw_data) # interpret the raw data # store remaining buffered event in the interpreter at the end of the last file if file_index == len(self.files_dict.keys()) - 1 and word_index == range(0, in_file_h5.root.raw_data.shape[0], self._chunk_size)[-1]: # store hits of the latest event of the last file self.interpreter.store_event() hits = self.interpreter.get_hits() if self.scan_parameters is not None: nEventIndex = self.interpreter.get_n_meta_data_event() self.histogram.add_meta_event_index(self.meta_event_index, nEventIndex) if self.is_histogram_hits(): self.histogram_hits(hits) if self.is_cluster_hits(): cluster_hits, clusters = self.cluster_hits(hits) if self._create_cluster_hit_table: cluster_hit_table.append(cluster_hits) if self._create_cluster_table: cluster_table.append(clusters) if self._create_cluster_size_hist: if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_size_hist.shape[0]: self._cluster_size_hist.resize(np.max(clusters['size']) + 1) self._cluster_size_hist += fast_analysis_utils.hist_1d_index(clusters['size'], shape=self._cluster_size_hist.shape) if self._create_cluster_tot_hist: if clusters['tot'].shape[0] > 0 and np.max(clusters['tot']) + 1 > self._cluster_tot_hist.shape[0]: self._cluster_tot_hist.resize((np.max(clusters['tot']) + 1, self._cluster_tot_hist.shape[1])) if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_tot_hist.shape[1]: self._cluster_tot_hist.resize((self._cluster_tot_hist.shape[0], np.max(clusters['size']) + 1)) self._cluster_tot_hist += fast_analysis_utils.hist_2d_index(clusters['tot'], clusters['size'], shape=self._cluster_tot_hist.shape) if self._analyzed_data_file is not None and self._create_hit_table: hit_table.append(hits) if self._analyzed_data_file is not None and self._create_meta_word_index: size = self.interpreter.get_n_meta_data_word() meta_word_index_table.append(meta_word[:size]) if total_words <= progress_bar.maxval: # Otherwise exception is thrown progress_bar.update(total_words) self.out_file_h5.flush() progress_bar.finish() self._create_additional_data() if close_analyzed_data_file: self.out_file_h5.close() self.out_file_h5 = None self.out_file_h5 = out_file_h5 if self.is_open(self.out_file_h5): self._analyzed_data_file = self.out_file_h5.filename else: self._analyzed_data_file = None
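The _correct_corrupted_data branch above collects the indices of defective raw data words in bad_word_index and then groups them into runs with consecutive(sorted(bad_word_index)), so that each defective chunk can later be repaired by one fix_raw_data pass. Below is a self-contained stand-in for that grouping step; pyBAR's own analysis_utils.consecutive may differ in detail, this sketch only illustrates the run-splitting idea:

import numpy as np

def consecutive(indices):
    # Split a sorted sequence of word indices into runs of consecutive values,
    # one run per defective data chunk (stand-in for analysis_utils.consecutive).
    indices = np.asarray(indices)
    if indices.size == 0:
        return []
    return np.split(indices, np.where(np.diff(indices) != 1)[0] + 1)

# Words 10..12 and 40..41 were flagged as bad -> two runs that are fixed independently.
runs = consecutive(sorted({11, 10, 12, 41, 40}))
assert [list(run) for run in runs] == [[10, 11, 12], [40, 41]]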
[ "def", "interpret_word_table", "(", "self", ",", "analyzed_data_file", "=", "None", ",", "use_settings_from_file", "=", "True", ",", "fei4b", "=", "None", ")", ":", "logging", ".", "info", "(", "'Interpreting raw data file(s): '", "+", "(", "', '", ")", ".", "join", "(", "self", ".", "files_dict", ".", "keys", "(", ")", ")", ")", "if", "self", ".", "_create_meta_word_index", ":", "meta_word", "=", "np", ".", "empty", "(", "(", "self", ".", "_chunk_size", ",", ")", ",", "dtype", "=", "dtype_from_descr", "(", "data_struct", ".", "MetaInfoWordTable", ")", ")", "self", ".", "interpreter", ".", "set_meta_data_word_index", "(", "meta_word", ")", "self", ".", "interpreter", ".", "reset_event_variables", "(", ")", "self", ".", "interpreter", ".", "reset_counters", "(", ")", "self", ".", "meta_data", "=", "analysis_utils", ".", "combine_meta_data", "(", "self", ".", "files_dict", ",", "meta_data_v2", "=", "self", ".", "interpreter", ".", "meta_table_v2", ")", "if", "self", ".", "meta_data", "is", "None", "or", "self", ".", "meta_data", ".", "shape", "[", "0", "]", "==", "0", ":", "raise", "analysis_utils", ".", "IncompleteInputError", "(", "'Meta data is empty. Stopping interpretation.'", ")", "self", ".", "interpreter", ".", "set_meta_data", "(", "self", ".", "meta_data", ")", "# tell interpreter the word index per readout to be able to calculate the event number per read out", "meta_data_size", "=", "self", ".", "meta_data", ".", "shape", "[", "0", "]", "self", ".", "meta_event_index", "=", "np", ".", "zeros", "(", "(", "meta_data_size", ",", ")", ",", "dtype", "=", "[", "(", "'metaEventIndex'", ",", "np", ".", "uint64", ")", "]", ")", "# this array is filled by the interpreter and holds the event number per read out", "self", ".", "interpreter", ".", "set_meta_event_data", "(", "self", ".", "meta_event_index", ")", "# tell the interpreter the data container to write the meta event index to", "if", "self", ".", "scan_parameters", "is", "None", ":", "self", ".", "histogram", ".", "set_no_scan_parameter", "(", ")", "else", ":", "self", ".", "scan_parameter_index", "=", "analysis_utils", ".", "get_scan_parameters_index", "(", "self", ".", "scan_parameters", ")", "# a array that labels unique scan parameter combinations", "self", ".", "histogram", ".", "add_scan_parameter", "(", "self", ".", "scan_parameter_index", ")", "# just add an index for the different scan parameter combinations", "if", "self", ".", "_create_cluster_size_hist", ":", "# Cluster size result histogram", "self", ".", "_cluster_size_hist", "=", "np", ".", "zeros", "(", "shape", "=", "(", "6", ",", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "if", "self", ".", "_create_cluster_tot_hist", ":", "# Cluster tot/size result histogram", "self", ".", "_cluster_tot_hist", "=", "np", ".", "zeros", "(", "shape", "=", "(", "16", ",", "6", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "close_analyzed_data_file", "=", "False", "if", "analyzed_data_file", "is", "not", "None", ":", "# if an output file name is specified create new file for analyzed data", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", "and", "os", ".", "path", ".", "abspath", "(", "analyzed_data_file", ")", "==", "os", ".", "path", ".", "abspath", "(", "self", ".", "out_file_h5", ".", "filename", ")", ":", "out_file_h5", "=", "self", ".", "out_file_h5", "else", ":", "# normalize path", "analyzed_data_file", "=", "os", ".", "path", ".", "abspath", "(", "analyzed_data_file", ")", "if", "os", ".", "path", ".", "splitext", "(", "analyzed_data_file", 
")", "[", "1", "]", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "analyzed_data_file", "=", "os", ".", "path", ".", "splitext", "(", "analyzed_data_file", ")", "[", "0", "]", "+", "\".h5\"", "out_file_h5", "=", "tb", ".", "open_file", "(", "analyzed_data_file", ",", "mode", "=", "\"w\"", ",", "title", "=", "\"Interpreted FE-I4 raw data\"", ")", "close_analyzed_data_file", "=", "True", "elif", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "out_file_h5", "=", "self", ".", "out_file_h5", "else", ":", "out_file_h5", "=", "None", "tmp_out_file_h5", "=", "self", ".", "out_file_h5", "if", "not", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", "and", "self", ".", "is_open", "(", "out_file_h5", ")", ":", "close_analyzed_data_file", "=", "False", "tmp_out_file_h5", "=", "out_file_h5", "self", ".", "out_file_h5", "=", "out_file_h5", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "self", ".", "_analyzed_data_file", "=", "self", ".", "out_file_h5", ".", "filename", "else", ":", "self", ".", "_analyzed_data_file", "is", "None", "if", "self", ".", "_analyzed_data_file", "is", "not", "None", ":", "if", "self", ".", "_create_hit_table", "is", "True", ":", "description", "=", "data_struct", ".", "HitInfoTable", "(", ")", ".", "columns", ".", "copy", "(", ")", "hit_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'Hits'", ",", "description", "=", "description", ",", "title", "=", "'hit_data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "chunkshape", "=", "(", "self", ".", "_chunk_size", "/", "100", ",", ")", ")", "if", "self", ".", "_create_meta_word_index", "is", "True", ":", "meta_word_index_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'EventMetaData'", ",", "description", "=", "data_struct", ".", "MetaInfoWordTable", ",", "title", "=", "'event_meta_data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "chunkshape", "=", "(", "self", ".", "_chunk_size", "/", "10", ",", ")", ")", "if", "self", ".", "_create_cluster_table", ":", "cluster_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'Cluster'", ",", "description", "=", "data_struct", ".", "ClusterInfoTable", ",", "title", "=", "'Cluster data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "expectedrows", "=", "self", ".", "_chunk_size", ")", "if", "self", ".", "_create_cluster_hit_table", ":", "description", "=", "data_struct", ".", "ClusterHitInfoTable", "(", ")", ".", "columns", ".", "copy", "(", ")", "cluster_hit_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'ClusterHits'", ",", "description", "=", "description", ",", "title", "=", "'cluster_hit_data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "expectedrows", "=", "self", ".", "_chunk_size", ")", "logging", ".", "info", "(", "\"Interpreting raw data...\"", ")", "progress_bar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "[", "''", ",", "progressbar", ".", "Percentage", "(", ")", ",", "' '", ",", "progressbar", ".", "Bar", "(", "marker", "=", "'*'", ",", "left", "=", "'|'", ",", "right", "=", "'|'", ")", ",", "' '", ",", "progressbar", ".", "AdaptiveETA", "(", ")", "]", ",", "maxval", "=", "analysis_utils", ".", "get_total_n_data_words", "(", "self", ".", 
"files_dict", ")", ",", "term_width", "=", "80", ")", "progress_bar", ".", "start", "(", ")", "total_words", "=", "0", "for", "file_index", ",", "raw_data_file", "in", "enumerate", "(", "self", ".", "files_dict", ".", "keys", "(", ")", ")", ":", "# loop over all raw data files", "self", ".", "interpreter", ".", "reset_meta_data_counter", "(", ")", "with", "tb", ".", "open_file", "(", "raw_data_file", ",", "mode", "=", "\"r\"", ")", "as", "in_file_h5", ":", "if", "use_settings_from_file", ":", "self", ".", "_deduce_settings_from_file", "(", "in_file_h5", ")", "else", ":", "self", ".", "fei4b", "=", "fei4b", "if", "self", ".", "interpreter", ".", "meta_table_v2", ":", "index_start", "=", "in_file_h5", ".", "root", ".", "meta_data", ".", "read", "(", "field", "=", "'index_start'", ")", "index_stop", "=", "in_file_h5", ".", "root", ".", "meta_data", ".", "read", "(", "field", "=", "'index_stop'", ")", "else", ":", "index_start", "=", "in_file_h5", ".", "root", ".", "meta_data", ".", "read", "(", "field", "=", "'start_index'", ")", "index_stop", "=", "in_file_h5", ".", "root", ".", "meta_data", ".", "read", "(", "field", "=", "'stop_index'", ")", "bad_word_index", "=", "set", "(", ")", "# Check for bad data", "if", "self", ".", "_correct_corrupted_data", ":", "tw", "=", "2147483648", "# trigger word", "dh", "=", "15269888", "# data header", "is_fe_data_header", "=", "logical_and", "(", "is_fe_word", ",", "is_data_header", ")", "found_first_trigger", "=", "False", "readout_slices", "=", "np", ".", "column_stack", "(", "(", "index_start", ",", "index_stop", ")", ")", "previous_prepend_data_headers", "=", "None", "prepend_data_headers", "=", "None", "last_good_readout_index", "=", "None", "last_index_with_event_data", "=", "None", "for", "read_out_index", ",", "(", "index_start", ",", "index_stop", ")", "in", "enumerate", "(", "readout_slices", ")", ":", "try", ":", "raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "index_start", ",", "index_stop", ")", "except", "OverflowError", ",", "e", ":", "pass", "except", "tb", ".", "exceptions", ".", "HDF5ExtError", ":", "break", "# previous data chunk had bad data, check for good data", "if", "(", "index_start", "-", "1", ")", "in", "bad_word_index", ":", "bad_data", ",", "current_prepend_data_headers", ",", "_", ",", "_", "=", "check_bad_data", "(", "raw_data", ",", "prepend_data_headers", "=", "1", ",", "trig_count", "=", "None", ")", "if", "bad_data", ":", "bad_word_index", "=", "bad_word_index", ".", "union", "(", "range", "(", "index_start", ",", "index_stop", ")", ")", "else", ":", "# logging.info(\"found good data in %s from index %d to %d (chunk %d, length %d)\" % (in_file_h5.filename, index_start, index_stop, read_out_index, (index_stop - index_start)))", "if", "last_good_readout_index", "+", "1", "==", "read_out_index", "-", "1", ":", "logging", ".", "warning", "(", "\"found bad data in %s from index %d to %d (chunk %d, length %d)\"", "%", "(", "in_file_h5", ".", "filename", ",", "readout_slices", "[", "last_good_readout_index", "]", "[", "1", "]", ",", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", ",", "last_good_readout_index", "+", "1", ",", "(", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", "-", "readout_slices", "[", "last_good_readout_index", "]", "[", "1", "]", ")", ")", ")", "else", ":", "logging", ".", "warning", "(", "\"found bad data in %s from index %d to %d (chunk %d to %d, length %d)\"", "%", "(", "in_file_h5", ".", "filename", ",", "readout_slices", 
"[", "last_good_readout_index", "]", "[", "1", "]", ",", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", ",", "last_good_readout_index", "+", "1", ",", "read_out_index", "-", "1", ",", "(", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", "-", "readout_slices", "[", "last_good_readout_index", "]", "[", "1", "]", ")", ")", ")", "previous_good_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "readout_slices", "[", "last_good_readout_index", "]", "[", "0", "]", ",", "readout_slices", "[", "last_good_readout_index", "]", "[", "1", "]", "-", "1", ")", "previous_bad_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "readout_slices", "[", "last_good_readout_index", "]", "[", "1", "]", "-", "1", ",", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", ")", "fixed_raw_data", ",", "_", "=", "fix_raw_data", "(", "previous_bad_raw_data", ",", "lsb_byte", "=", "None", ")", "fixed_raw_data", "=", "np", ".", "r_", "[", "previous_good_raw_data", ",", "fixed_raw_data", ",", "raw_data", "]", "_", ",", "prepend_data_headers", ",", "n_triggers", ",", "n_dh", "=", "check_bad_data", "(", "fixed_raw_data", ",", "prepend_data_headers", "=", "previous_prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "last_good_readout_index", "=", "read_out_index", "if", "n_triggers", "!=", "0", "or", "n_dh", "!=", "0", ":", "last_index_with_event_data", "=", "read_out_index", "last_event_data_prepend_data_headers", "=", "prepend_data_headers", "fixed_previous_raw_data", "=", "np", ".", "r_", "[", "previous_good_raw_data", ",", "fixed_raw_data", "]", "_", ",", "previous_prepend_data_headers", ",", "_", ",", "_", "=", "check_bad_data", "(", "fixed_previous_raw_data", ",", "prepend_data_headers", "=", "previous_prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "# check for bad data", "else", ":", "# workaround for first data chunk, might have missing trigger in some rare cases (already fixed in firmware)", "if", "read_out_index", "==", "0", "and", "(", "np", ".", "any", "(", "is_trigger_word", "(", "raw_data", ")", ">=", "1", ")", "or", "np", ".", "any", "(", "is_fe_data_header", "(", "raw_data", ")", ">=", "1", ")", ")", ":", "bad_data", ",", "current_prepend_data_headers", ",", "n_triggers", ",", "n_dh", "=", "check_bad_data", "(", "raw_data", ",", "prepend_data_headers", "=", "1", ",", "trig_count", "=", "None", ")", "# check for full last event in data", "if", "current_prepend_data_headers", "==", "self", ".", "trig_count", ":", "current_prepend_data_headers", "=", "None", "# usually check for bad data happens here", "else", ":", "bad_data", ",", "current_prepend_data_headers", ",", "n_triggers", ",", "n_dh", "=", "check_bad_data", "(", "raw_data", ",", "prepend_data_headers", "=", "prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "# do additional check with follow up data chunk and decide whether current chunk is defect or not", "if", "bad_data", ":", "if", "read_out_index", "==", "0", ":", "fixed_raw_data_chunk", ",", "_", "=", "fix_raw_data", "(", "raw_data", ",", "lsb_byte", "=", "None", ")", "fixed_raw_data_list", "=", "[", "fixed_raw_data_chunk", "]", "else", ":", "previous_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "*", "readout_slices", "[", "read_out_index", "-", "1", "]", ")", "raw_data_with_previous_data_word", "=", "np", ".", "r_", "[", "previous_raw_data", "[", "-", "1", "]", 
",", "raw_data", "]", "fixed_raw_data_chunk", ",", "_", "=", "fix_raw_data", "(", "raw_data_with_previous_data_word", ",", "lsb_byte", "=", "None", ")", "fixed_raw_data", "=", "np", ".", "r_", "[", "previous_raw_data", "[", ":", "-", "1", "]", ",", "fixed_raw_data_chunk", "]", "# last data word of chunk before broken chunk migh be a trigger word or data header which cannot be recovered", "fixed_raw_data_with_tw", "=", "np", ".", "r_", "[", "previous_raw_data", "[", ":", "-", "1", "]", ",", "tw", ",", "fixed_raw_data_chunk", "]", "fixed_raw_data_with_dh", "=", "np", ".", "r_", "[", "previous_raw_data", "[", ":", "-", "1", "]", ",", "dh", ",", "fixed_raw_data_chunk", "]", "fixed_raw_data_list", "=", "[", "fixed_raw_data", ",", "fixed_raw_data_with_tw", ",", "fixed_raw_data_with_dh", "]", "bad_fixed_data", ",", "_", ",", "_", ",", "_", "=", "check_bad_data", "(", "fixed_raw_data_with_dh", ",", "prepend_data_headers", "=", "previous_prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "bad_fixed_data", "=", "map", "(", "lambda", "data", ":", "check_bad_data", "(", "data", ",", "prepend_data_headers", "=", "previous_prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "[", "0", "]", ",", "fixed_raw_data_list", ")", "if", "not", "all", "(", "bad_fixed_data", ")", ":", "# good fixed data", "# last word in chunk before currrent chunk is also bad", "if", "index_start", "!=", "0", ":", "bad_word_index", ".", "add", "(", "index_start", "-", "1", ")", "# adding all word from current chunk", "bad_word_index", "=", "bad_word_index", ".", "union", "(", "range", "(", "index_start", ",", "index_stop", ")", ")", "last_good_readout_index", "=", "read_out_index", "-", "1", "else", ":", "# a previous chunk might be broken and the last data word becomes a trigger word, so do additional checks", "if", "last_index_with_event_data", "and", "last_event_data_prepend_data_headers", "!=", "read_out_index", ":", "before_bad_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "readout_slices", "[", "last_index_with_event_data", "-", "1", "]", "[", "0", "]", ",", "readout_slices", "[", "last_index_with_event_data", "-", "1", "]", "[", "1", "]", "-", "1", ")", "previous_bad_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "readout_slices", "[", "last_index_with_event_data", "]", "[", "0", "]", "-", "1", ",", "readout_slices", "[", "last_index_with_event_data", "]", "[", "1", "]", ")", "fixed_raw_data", ",", "_", "=", "fix_raw_data", "(", "previous_bad_raw_data", ",", "lsb_byte", "=", "None", ")", "previous_good_raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "readout_slices", "[", "last_index_with_event_data", "]", "[", "1", "]", ",", "readout_slices", "[", "read_out_index", "-", "1", "]", "[", "1", "]", ")", "fixed_raw_data", "=", "np", ".", "r_", "[", "before_bad_raw_data", ",", "fixed_raw_data", ",", "previous_good_raw_data", ",", "raw_data", "]", "bad_fixed_previous_data", ",", "current_prepend_data_headers", ",", "_", ",", "_", "=", "check_bad_data", "(", "fixed_raw_data", ",", "prepend_data_headers", "=", "last_event_data_prepend_data_headers", ",", "trig_count", "=", "self", ".", "trig_count", ")", "if", "not", "bad_fixed_previous_data", ":", "logging", ".", "warning", "(", "\"found bad data in %s from index %d to %d (chunk %d, length %d)\"", "%", "(", "in_file_h5", ".", "filename", ",", "readout_slices", "[", "last_index_with_event_data", "]", "[", "0", "]", ",", 
"readout_slices", "[", "last_index_with_event_data", "]", "[", "1", "]", ",", "last_index_with_event_data", ",", "(", "readout_slices", "[", "last_index_with_event_data", "]", "[", "1", "]", "-", "readout_slices", "[", "last_index_with_event_data", "]", "[", "0", "]", ")", ")", ")", "bad_word_index", "=", "bad_word_index", ".", "union", "(", "range", "(", "readout_slices", "[", "last_index_with_event_data", "]", "[", "0", "]", "-", "1", ",", "readout_slices", "[", "last_index_with_event_data", "]", "[", "1", "]", ")", ")", "else", ":", "logging", ".", "warning", "(", "\"found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)\"", "%", "(", "in_file_h5", ".", "filename", ",", "index_start", ",", "index_stop", ",", "read_out_index", ",", "(", "index_stop", "-", "index_start", ")", ")", ")", "else", ":", "logging", ".", "warning", "(", "\"found bad data which cannot be corrected in %s from index %d to %d (chunk %d, length %d)\"", "%", "(", "in_file_h5", ".", "filename", ",", "index_start", ",", "index_stop", ",", "read_out_index", ",", "(", "index_stop", "-", "index_start", ")", ")", ")", "if", "n_triggers", "!=", "0", "or", "n_dh", "!=", "0", ":", "last_index_with_event_data", "=", "read_out_index", "last_event_data_prepend_data_headers", "=", "prepend_data_headers", "if", "not", "bad_data", "or", "(", "bad_data", "and", "bad_fixed_data", ")", ":", "previous_prepend_data_headers", "=", "prepend_data_headers", "prepend_data_headers", "=", "current_prepend_data_headers", "consecutive_bad_words_list", "=", "consecutive", "(", "sorted", "(", "bad_word_index", ")", ")", "lsb_byte", "=", "None", "# Loop over raw data in chunks", "for", "word_index", "in", "range", "(", "0", ",", "in_file_h5", ".", "root", ".", "raw_data", ".", "shape", "[", "0", "]", ",", "self", ".", "_chunk_size", ")", ":", "# loop over all words in the actual raw data file", "try", ":", "raw_data", "=", "in_file_h5", ".", "root", ".", "raw_data", ".", "read", "(", "word_index", ",", "word_index", "+", "self", ".", "_chunk_size", ")", "except", "OverflowError", ",", "e", ":", "logging", ".", "error", "(", "'%s: 2^31 xrange() limitation in 32-bit Python'", ",", "e", ")", "except", "tb", ".", "exceptions", ".", "HDF5ExtError", ":", "logging", ".", "warning", "(", "'Raw data file %s has missing raw data. 
Continue raw data analysis.'", ",", "in_file_h5", ".", "filename", ")", "break", "total_words", "+=", "raw_data", ".", "shape", "[", "0", "]", "# fix bad data", "if", "self", ".", "_correct_corrupted_data", ":", "# increase word shift for every bad data chunk in raw data chunk", "word_shift", "=", "0", "chunk_indices", "=", "np", ".", "arange", "(", "word_index", ",", "word_index", "+", "self", ".", "_chunk_size", ")", "for", "consecutive_bad_word_indices", "in", "consecutive_bad_words_list", ":", "selected_words", "=", "np", ".", "intersect1d", "(", "consecutive_bad_word_indices", ",", "chunk_indices", ",", "assume_unique", "=", "True", ")", "if", "selected_words", ".", "shape", "[", "0", "]", ":", "fixed_raw_data", ",", "lsb_byte", "=", "fix_raw_data", "(", "raw_data", "[", "selected_words", "-", "word_index", "-", "word_shift", "]", ",", "lsb_byte", "=", "lsb_byte", ")", "raw_data", "=", "np", ".", "r_", "[", "raw_data", "[", ":", "selected_words", "[", "0", "]", "-", "word_index", "-", "word_shift", "]", ",", "fixed_raw_data", ",", "raw_data", "[", "selected_words", "[", "-", "1", "]", "-", "word_index", "+", "1", "-", "word_shift", ":", "]", "]", "# check if last word of bad data chunk in current raw data chunk", "if", "consecutive_bad_word_indices", "[", "-", "1", "]", "in", "selected_words", ":", "lsb_byte", "=", "None", "# word shift by removing data word at the beginning of each defect chunk", "word_shift", "+=", "1", "# bad data chunk is at the end of current raw data chunk", "else", ":", "break", "self", ".", "interpreter", ".", "interpret_raw_data", "(", "raw_data", ")", "# interpret the raw data", "# store remaining buffered event in the interpreter at the end of the last file", "if", "file_index", "==", "len", "(", "self", ".", "files_dict", ".", "keys", "(", ")", ")", "-", "1", "and", "word_index", "==", "range", "(", "0", ",", "in_file_h5", ".", "root", ".", "raw_data", ".", "shape", "[", "0", "]", ",", "self", ".", "_chunk_size", ")", "[", "-", "1", "]", ":", "# store hits of the latest event of the last file", "self", ".", "interpreter", ".", "store_event", "(", ")", "hits", "=", "self", ".", "interpreter", ".", "get_hits", "(", ")", "if", "self", ".", "scan_parameters", "is", "not", "None", ":", "nEventIndex", "=", "self", ".", "interpreter", ".", "get_n_meta_data_event", "(", ")", "self", ".", "histogram", ".", "add_meta_event_index", "(", "self", ".", "meta_event_index", ",", "nEventIndex", ")", "if", "self", ".", "is_histogram_hits", "(", ")", ":", "self", ".", "histogram_hits", "(", "hits", ")", "if", "self", ".", "is_cluster_hits", "(", ")", ":", "cluster_hits", ",", "clusters", "=", "self", ".", "cluster_hits", "(", "hits", ")", "if", "self", ".", "_create_cluster_hit_table", ":", "cluster_hit_table", ".", "append", "(", "cluster_hits", ")", "if", "self", ".", "_create_cluster_table", ":", "cluster_table", ".", "append", "(", "clusters", ")", "if", "self", ".", "_create_cluster_size_hist", ":", "if", "clusters", "[", "'size'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ">", "self", ".", "_cluster_size_hist", ".", "shape", "[", "0", "]", ":", "self", ".", "_cluster_size_hist", ".", "resize", "(", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ")", "self", ".", "_cluster_size_hist", "+=", "fast_analysis_utils", ".", "hist_1d_index", "(", "clusters", "[", "'size'", "]", ",", "shape", "=", "self", ".", "_cluster_size_hist", ".", "shape", ")", "if", "self", ".", 
"_create_cluster_tot_hist", ":", "if", "clusters", "[", "'tot'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'tot'", "]", ")", "+", "1", ">", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "0", "]", ":", "self", ".", "_cluster_tot_hist", ".", "resize", "(", "(", "np", ".", "max", "(", "clusters", "[", "'tot'", "]", ")", "+", "1", ",", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "1", "]", ")", ")", "if", "clusters", "[", "'size'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ">", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "1", "]", ":", "self", ".", "_cluster_tot_hist", ".", "resize", "(", "(", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "0", "]", ",", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ")", ")", "self", ".", "_cluster_tot_hist", "+=", "fast_analysis_utils", ".", "hist_2d_index", "(", "clusters", "[", "'tot'", "]", ",", "clusters", "[", "'size'", "]", ",", "shape", "=", "self", ".", "_cluster_tot_hist", ".", "shape", ")", "if", "self", ".", "_analyzed_data_file", "is", "not", "None", "and", "self", ".", "_create_hit_table", ":", "hit_table", ".", "append", "(", "hits", ")", "if", "self", ".", "_analyzed_data_file", "is", "not", "None", "and", "self", ".", "_create_meta_word_index", ":", "size", "=", "self", ".", "interpreter", ".", "get_n_meta_data_word", "(", ")", "meta_word_index_table", ".", "append", "(", "meta_word", "[", ":", "size", "]", ")", "if", "total_words", "<=", "progress_bar", ".", "maxval", ":", "# Otherwise exception is thrown", "progress_bar", ".", "update", "(", "total_words", ")", "self", ".", "out_file_h5", ".", "flush", "(", ")", "progress_bar", ".", "finish", "(", ")", "self", ".", "_create_additional_data", "(", ")", "if", "close_analyzed_data_file", ":", "self", ".", "out_file_h5", ".", "close", "(", ")", "self", ".", "out_file_h5", "=", "None", "self", ".", "out_file_h5", "=", "out_file_h5", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "self", ".", "_analyzed_data_file", "=", "self", ".", "out_file_h5", ".", "filename", "else", ":", "self", ".", "_analyzed_data_file", "=", "None" ]
Interprets the raw data word table of all given raw data files with the c++ library.
Creates the h5 output file and PDF plots.

Parameters
----------
analyzed_data_file : string
    The file name of the output analyzed data file. If None, the output analyzed data file
    specified during initialization is taken.
use_settings_from_file : boolean
    True if the needed parameters should be extracted from the raw data file.
fei4b : boolean
    True if the raw data is from FE-I4B.
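For orientation, a minimal usage sketch of this entry point. The file names are illustrative assumptions, not taken from this record; AnalyzeRawData is typically used as a context manager so that the HDF5 handles are closed on exit:

from pybar.analysis.analyze_raw_data import AnalyzeRawData

# Illustrative file names; with use_settings_from_file=True (the default) the
# interpretation settings, e.g. FE-I4A vs. FE-I4B, are deduced from the raw data file.
with AnalyzeRawData(raw_data_file='ext_trigger_scan.h5',
                    analyzed_data_file='ext_trigger_scan_interpreted.h5') as analyze_raw_data:
    analyze_raw_data.create_hit_table = True  # fills the 'Hits' table created above
    analyze_raw_data.create_cluster_table = True
    analyze_raw_data.interpret_word_table()
    analyze_raw_data.interpreter.print_summary()
    analyze_raw_data.plot_histograms()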
[ "Interprets", "the", "raw", "data", "word", "table", "of", "all", "given", "raw", "data", "files", "with", "the", "c", "++", "library", ".", "Creates", "the", "h5", "output", "file", "and", "PDF", "plots", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L643-L921
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData.analyze_hit_table
def analyze_hit_table(self, analyzed_data_file=None, analyzed_data_out_file=None):
    '''Analyzes a hit table with the c++ histogramming/clusterizer.

    Parameters
    ----------
    analyzed_data_file : string
        The filename of the analyzed data file. If None, the analyzed data file
        specified during initialization is taken. The filename extension (.h5)
        does not need to be provided.
    analyzed_data_out_file : string
        The filename of the new analyzed data file. If None, the analyzed data file
        specified during initialization is taken. The filename extension (.h5)
        does not need to be provided.
    '''
    close_analyzed_data_file = False
    if analyzed_data_file is not None:  # if an input file name is specified, open the existing analyzed data file
        if self.is_open(self.out_file_h5) and os.path.abspath(analyzed_data_file) == os.path.abspath(self.out_file_h5.filename):
            in_file_h5 = self.out_file_h5
        else:
            # normalize path
            analyzed_data_file = os.path.abspath(analyzed_data_file)
            if os.path.splitext(analyzed_data_file)[1].lower() != ".h5":
                analyzed_data_file = os.path.splitext(analyzed_data_file)[0] + ".h5"
            in_file_h5 = tb.open_file(analyzed_data_file, mode="r+")
            close_analyzed_data_file = True
    elif self.is_open(self.out_file_h5):
        in_file_h5 = self.out_file_h5
    else:
        raise ValueError('Parameter "analyzed_data_file" not specified.')

    # set the output file if an output file name is given, otherwise check if an output file is already opened
    close_analyzed_data_out_file = False
    if analyzed_data_out_file is not None:  # if an output file name is specified, create a new file for the analyzed data
        if self.is_open(self.out_file_h5) and os.path.abspath(analyzed_data_out_file) == os.path.abspath(self.out_file_h5.filename):
            out_file_h5 = self.out_file_h5
        elif self.is_open(in_file_h5) and os.path.abspath(analyzed_data_out_file) == os.path.abspath(in_file_h5.filename):
            out_file_h5 = in_file_h5
        else:
            # normalize path
            analyzed_data_out_file = os.path.abspath(analyzed_data_out_file)
            if os.path.splitext(analyzed_data_out_file)[1].lower() != ".h5":
                analyzed_data_out_file = os.path.splitext(analyzed_data_out_file)[0] + ".h5"
            out_file_h5 = tb.open_file(analyzed_data_out_file, mode="w", title="Analyzed FE-I4 hits")
            close_analyzed_data_out_file = True
    elif self.is_open(self.out_file_h5):
        out_file_h5 = self.out_file_h5
    else:
        raise ValueError('Parameter "analyzed_data_out_file" not specified.')
    tmp_out_file_h5 = self.out_file_h5
    if not self.is_open(self.out_file_h5):
        if os.path.abspath(in_file_h5.filename) == os.path.abspath(out_file_h5.filename):
            close_analyzed_data_file = False
            tmp_out_file_h5 = in_file_h5
    self.out_file_h5 = out_file_h5
    self._analyzed_data_file = self.out_file_h5.filename

    if self._create_cluster_table:
        cluster_table = self.out_file_h5.create_table(self.out_file_h5.root, name='Cluster', description=data_struct.ClusterInfoTable, title='cluster_hit_data', filters=self._filter_table, expectedrows=self._chunk_size)
    if self._create_cluster_hit_table:
        cluster_hit_table = self.out_file_h5.create_table(self.out_file_h5.root, name='ClusterHits', description=data_struct.ClusterHitInfoTable, title='cluster_hit_data', filters=self._filter_table, expectedrows=self._chunk_size)
    if self._create_cluster_size_hist:  # cluster size result histogram
        self._cluster_size_hist = np.zeros(shape=(6, ), dtype=np.uint32)
    if self._create_cluster_tot_hist:  # cluster tot/size result histogram
        self._cluster_tot_hist = np.zeros(shape=(16, 6), dtype=np.uint32)

    try:
        meta_data_table = in_file_h5.root.meta_data
        meta_data = meta_data_table[:]
        self.scan_parameters = analysis_utils.get_unique_scan_parameter_combinations(meta_data, scan_parameter_columns_only=True)
        if self.scan_parameters is not None:  # check if there is an additional column after the error code column, if yes this column has scan parameter infos
            meta_event_index = np.ascontiguousarray(analysis_utils.get_unique_scan_parameter_combinations(meta_data)['event_number'].astype(np.uint64))
            self.histogram.add_meta_event_index(meta_event_index, array_length=len(meta_event_index))
            self.scan_parameter_index = analysis_utils.get_scan_parameters_index(self.scan_parameters)  # an array that labels unique scan parameter combinations
            self.histogram.add_scan_parameter(self.scan_parameter_index)  # just add an index for the different scan parameter combinations
            scan_parameter_names = analysis_utils.get_scan_parameter_names(self.scan_parameters)
            logging.info('Adding scan parameter(s) for analysis: %s', (', ').join(scan_parameter_names) if scan_parameter_names else 'None')
        else:
            logging.info("No scan parameter data provided")
            self.histogram.set_no_scan_parameter()
    except tb.exceptions.NoSuchNodeError:
        logging.info("No meta data provided")
        self.histogram.set_no_scan_parameter()

    table_size = in_file_h5.root.Hits.nrows
    n_hits = 0  # number of hits in the actual chunk

    logging.info('Analyzing hits...')
    progress_bar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.AdaptiveETA()], maxval=table_size, term_width=80)
    progress_bar.start()

    for hits, index in analysis_utils.data_aligned_at_events(in_file_h5.root.Hits, chunk_size=self._chunk_size):
        n_hits += hits.shape[0]
        if self.is_cluster_hits():
            cluster_hits, clusters = self.cluster_hits(hits)
        if self.is_histogram_hits():
            self.histogram_hits(hits)
        if self._analyzed_data_file is not None and self._create_cluster_hit_table:
            cluster_hit_table.append(cluster_hits)
        if self._analyzed_data_file is not None and self._create_cluster_table:
            cluster_table.append(clusters)
        if self._create_cluster_size_hist:
            if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_size_hist.shape[0]:
                self._cluster_size_hist.resize(np.max(clusters['size']) + 1)
            self._cluster_size_hist += fast_analysis_utils.hist_1d_index(clusters['size'], shape=self._cluster_size_hist.shape)
        if self._create_cluster_tot_hist:
            if clusters['tot'].shape[0] > 0 and np.max(clusters['tot']) + 1 > self._cluster_tot_hist.shape[0]:
                self._cluster_tot_hist.resize((np.max(clusters['tot']) + 1, self._cluster_tot_hist.shape[1]))
            if clusters['size'].shape[0] > 0 and np.max(clusters['size']) + 1 > self._cluster_tot_hist.shape[1]:
                self._cluster_tot_hist.resize((self._cluster_tot_hist.shape[0], np.max(clusters['size']) + 1))
            self._cluster_tot_hist += fast_analysis_utils.hist_2d_index(clusters['tot'], clusters['size'], shape=self._cluster_tot_hist.shape)
        self.out_file_h5.flush()
        progress_bar.update(index)
    progress_bar.finish()

    if table_size == 0:
        logging.warning('Found no hits')
    if n_hits != table_size:
        raise analysis_utils.AnalysisError('Tables have different sizes. Not all hits were analyzed.')

    self._create_additional_hit_data()
    self._create_additional_cluster_data()

    if close_analyzed_data_out_file:
        out_file_h5.close()
    if close_analyzed_data_file:
        in_file_h5.close()
    else:
        self.out_file_h5 = tmp_out_file_h5
    if self.is_open(self.out_file_h5):
        self._analyzed_data_file = self.out_file_h5.filename
    else:
        self._analyzed_data_file = None
python
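Both interpret_word_table and analyze_hit_table grow their cluster result histograms on demand before accumulating each chunk (the resize calls guarded by np.max(clusters['size']) + 1 above). The following is a self-contained numpy sketch of that resize-then-accumulate pattern; np.bincount stands in for pyBAR's fast_analysis_utils.hist_1d_index and the sample cluster sizes are made up:

import numpy as np

# Default 6-bin cluster size histogram, as initialized in the functions above.
cluster_size_hist = np.zeros(shape=(6,), dtype=np.uint32)

def accumulate_cluster_sizes(hist, sizes):
    # Grow the histogram if a cluster is larger than the current axis,
    # keeping the previously accumulated counts, then add the new chunk.
    if sizes.shape[0] > 0 and sizes.max() + 1 > hist.shape[0]:
        grown = np.zeros(sizes.max() + 1, dtype=hist.dtype)
        grown[:hist.shape[0]] = hist
        hist = grown
    hist += np.bincount(sizes, minlength=hist.shape[0]).astype(hist.dtype)
    return hist

# Example chunk of cluster sizes: the size-8 cluster forces a resize to 9 bins.
cluster_size_hist = accumulate_cluster_sizes(cluster_size_hist, np.array([1, 1, 2, 8]))
assert cluster_size_hist.shape[0] == 9 and cluster_size_hist[1] == 2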
[ "def", "analyze_hit_table", "(", "self", ",", "analyzed_data_file", "=", "None", ",", "analyzed_data_out_file", "=", "None", ")", ":", "close_analyzed_data_file", "=", "False", "if", "analyzed_data_file", "is", "not", "None", ":", "# if an output file name is specified create new file for analyzed data", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", "and", "os", ".", "path", ".", "abspath", "(", "analyzed_data_file", ")", "==", "os", ".", "path", ".", "abspath", "(", "self", ".", "out_file_h5", ".", "filename", ")", ":", "in_file_h5", "=", "self", ".", "out_file_h5", "else", ":", "# normalize path", "analyzed_data_file", "=", "os", ".", "path", ".", "abspath", "(", "analyzed_data_file", ")", "if", "os", ".", "path", ".", "splitext", "(", "analyzed_data_file", ")", "[", "1", "]", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "analyzed_data_file", "=", "os", ".", "path", ".", "splitext", "(", "analyzed_data_file", ")", "[", "0", "]", "+", "\".h5\"", "in_file_h5", "=", "tb", ".", "open_file", "(", "analyzed_data_file", ",", "mode", "=", "\"r+\"", ")", "close_analyzed_data_file", "=", "True", "elif", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "in_file_h5", "=", "self", ".", "out_file_h5", "else", ":", "raise", "ValueError", "(", "'Parameter \"analyzed_data_file\" not specified.'", ")", "# set output file if an output file name is given, otherwise check if an output file is already opened", "close_analyzed_data_out_file", "=", "False", "if", "analyzed_data_out_file", "is", "not", "None", ":", "# if an output file name is specified create new file for analyzed data", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", "and", "os", ".", "path", ".", "abspath", "(", "analyzed_data_out_file", ")", "==", "os", ".", "path", ".", "abspath", "(", "self", ".", "out_file_h5", ".", "filename", ")", ":", "out_file_h5", "=", "self", ".", "out_file_h5", "elif", "self", ".", "is_open", "(", "in_file_h5", ")", "and", "os", ".", "path", ".", "abspath", "(", "analyzed_data_out_file", ")", "==", "os", ".", "path", ".", "abspath", "(", "in_file_h5", ".", "filename", ")", ":", "out_file_h5", "=", "in_file_h5", "else", ":", "# normalize path", "analyzed_data_out_file", "=", "os", ".", "path", ".", "abspath", "(", "analyzed_data_out_file", ")", "if", "os", ".", "path", ".", "splitext", "(", "analyzed_data_out_file", ")", "[", "1", "]", ".", "lower", "(", ")", "!=", "\".h5\"", ":", "analyzed_data_out_file", "=", "os", ".", "path", ".", "splitext", "(", "analyzed_data_out_file", ")", "[", "0", "]", "+", "\".h5\"", "out_file_h5", "=", "tb", ".", "open_file", "(", "analyzed_data_out_file", ",", "mode", "=", "\"w\"", ",", "title", "=", "\"Analyzed FE-I4 hits\"", ")", "close_analyzed_data_out_file", "=", "True", "elif", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "out_file_h5", "=", "self", ".", "out_file_h5", "else", ":", "raise", "ValueError", "(", "'Parameter \"analyzed_data_out_file\" not specified.'", ")", "tmp_out_file_h5", "=", "self", ".", "out_file_h5", "if", "not", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "if", "os", ".", "path", ".", "abspath", "(", "in_file_h5", ".", "filename", ")", "==", "os", ".", "path", ".", "abspath", "(", "out_file_h5", ".", "filename", ")", ":", "close_analyzed_data_file", "=", "False", "tmp_out_file_h5", "=", "in_file_h5", "self", ".", "out_file_h5", "=", "out_file_h5", "self", ".", "_analyzed_data_file", "=", "self", ".", "out_file_h5", ".", "filename", "if", "self", ".", 
"_create_cluster_table", ":", "cluster_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'Cluster'", ",", "description", "=", "data_struct", ".", "ClusterInfoTable", ",", "title", "=", "'cluster_hit_data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "expectedrows", "=", "self", ".", "_chunk_size", ")", "if", "self", ".", "_create_cluster_hit_table", ":", "cluster_hit_table", "=", "self", ".", "out_file_h5", ".", "create_table", "(", "self", ".", "out_file_h5", ".", "root", ",", "name", "=", "'ClusterHits'", ",", "description", "=", "data_struct", ".", "ClusterHitInfoTable", ",", "title", "=", "'cluster_hit_data'", ",", "filters", "=", "self", ".", "_filter_table", ",", "expectedrows", "=", "self", ".", "_chunk_size", ")", "if", "self", ".", "_create_cluster_size_hist", ":", "# Cluster size result histogram", "self", ".", "_cluster_size_hist", "=", "np", ".", "zeros", "(", "shape", "=", "(", "6", ",", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "if", "self", ".", "_create_cluster_tot_hist", ":", "# Cluster tot/size result histogram", "self", ".", "_cluster_tot_hist", "=", "np", ".", "zeros", "(", "shape", "=", "(", "16", ",", "6", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "try", ":", "meta_data_table", "=", "in_file_h5", ".", "root", ".", "meta_data", "meta_data", "=", "meta_data_table", "[", ":", "]", "self", ".", "scan_parameters", "=", "analysis_utils", ".", "get_unique_scan_parameter_combinations", "(", "meta_data", ",", "scan_parameter_columns_only", "=", "True", ")", "if", "self", ".", "scan_parameters", "is", "not", "None", ":", "# check if there is an additional column after the error code column, if yes this column has scan parameter infos", "meta_event_index", "=", "np", ".", "ascontiguousarray", "(", "analysis_utils", ".", "get_unique_scan_parameter_combinations", "(", "meta_data", ")", "[", "'event_number'", "]", ".", "astype", "(", "np", ".", "uint64", ")", ")", "self", ".", "histogram", ".", "add_meta_event_index", "(", "meta_event_index", ",", "array_length", "=", "len", "(", "meta_event_index", ")", ")", "self", ".", "scan_parameter_index", "=", "analysis_utils", ".", "get_scan_parameters_index", "(", "self", ".", "scan_parameters", ")", "# a array that labels unique scan parameter combinations", "self", ".", "histogram", ".", "add_scan_parameter", "(", "self", ".", "scan_parameter_index", ")", "# just add an index for the different scan parameter combinations", "scan_parameter_names", "=", "analysis_utils", ".", "get_scan_parameter_names", "(", "self", ".", "scan_parameters", ")", "logging", ".", "info", "(", "'Adding scan parameter(s) for analysis: %s'", ",", "(", "', '", ")", ".", "join", "(", "scan_parameter_names", ")", "if", "scan_parameter_names", "else", "'None'", ",", ")", "else", ":", "logging", ".", "info", "(", "\"No scan parameter data provided\"", ")", "self", ".", "histogram", ".", "set_no_scan_parameter", "(", ")", "except", "tb", ".", "exceptions", ".", "NoSuchNodeError", ":", "logging", ".", "info", "(", "\"No meta data provided\"", ")", "self", ".", "histogram", ".", "set_no_scan_parameter", "(", ")", "table_size", "=", "in_file_h5", ".", "root", ".", "Hits", ".", "nrows", "n_hits", "=", "0", "# number of hits in actual chunk", "logging", ".", "info", "(", "'Analyzing hits...'", ")", "progress_bar", "=", "progressbar", ".", "ProgressBar", "(", "widgets", "=", "[", "''", ",", "progressbar", ".", "Percentage", "(", ")", ",", "' '", ",", "progressbar", ".", 
"Bar", "(", "marker", "=", "'*'", ",", "left", "=", "'|'", ",", "right", "=", "'|'", ")", ",", "' '", ",", "progressbar", ".", "AdaptiveETA", "(", ")", "]", ",", "maxval", "=", "table_size", ",", "term_width", "=", "80", ")", "progress_bar", ".", "start", "(", ")", "for", "hits", ",", "index", "in", "analysis_utils", ".", "data_aligned_at_events", "(", "in_file_h5", ".", "root", ".", "Hits", ",", "chunk_size", "=", "self", ".", "_chunk_size", ")", ":", "n_hits", "+=", "hits", ".", "shape", "[", "0", "]", "if", "self", ".", "is_cluster_hits", "(", ")", ":", "cluster_hits", ",", "clusters", "=", "self", ".", "cluster_hits", "(", "hits", ")", "if", "self", ".", "is_histogram_hits", "(", ")", ":", "self", ".", "histogram_hits", "(", "hits", ")", "if", "self", ".", "_analyzed_data_file", "is", "not", "None", "and", "self", ".", "_create_cluster_hit_table", ":", "cluster_hit_table", ".", "append", "(", "cluster_hits", ")", "if", "self", ".", "_analyzed_data_file", "is", "not", "None", "and", "self", ".", "_create_cluster_table", ":", "cluster_table", ".", "append", "(", "clusters", ")", "if", "self", ".", "_create_cluster_size_hist", ":", "if", "clusters", "[", "'size'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ">", "self", ".", "_cluster_size_hist", ".", "shape", "[", "0", "]", ":", "self", ".", "_cluster_size_hist", ".", "resize", "(", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ")", "self", ".", "_cluster_size_hist", "+=", "fast_analysis_utils", ".", "hist_1d_index", "(", "clusters", "[", "'size'", "]", ",", "shape", "=", "self", ".", "_cluster_size_hist", ".", "shape", ")", "if", "self", ".", "_create_cluster_tot_hist", ":", "if", "clusters", "[", "'tot'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'tot'", "]", ")", "+", "1", ">", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "0", "]", ":", "self", ".", "_cluster_tot_hist", ".", "resize", "(", "(", "np", ".", "max", "(", "clusters", "[", "'tot'", "]", ")", "+", "1", ",", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "1", "]", ")", ")", "if", "clusters", "[", "'size'", "]", ".", "shape", "[", "0", "]", ">", "0", "and", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ">", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "1", "]", ":", "self", ".", "_cluster_tot_hist", ".", "resize", "(", "(", "self", ".", "_cluster_tot_hist", ".", "shape", "[", "0", "]", ",", "np", ".", "max", "(", "clusters", "[", "'size'", "]", ")", "+", "1", ")", ")", "self", ".", "_cluster_tot_hist", "+=", "fast_analysis_utils", ".", "hist_2d_index", "(", "clusters", "[", "'tot'", "]", ",", "clusters", "[", "'size'", "]", ",", "shape", "=", "self", ".", "_cluster_tot_hist", ".", "shape", ")", "self", ".", "out_file_h5", ".", "flush", "(", ")", "progress_bar", ".", "update", "(", "index", ")", "progress_bar", ".", "finish", "(", ")", "if", "table_size", "==", "0", ":", "logging", ".", "warning", "(", "'Found no hits'", ")", "if", "n_hits", "!=", "table_size", ":", "raise", "analysis_utils", ".", "AnalysisError", "(", "'Tables have different sizes. 
Not all hits were analyzed.'", ")", "self", ".", "_create_additional_hit_data", "(", ")", "self", ".", "_create_additional_cluster_data", "(", ")", "if", "close_analyzed_data_out_file", ":", "out_file_h5", ".", "close", "(", ")", "if", "close_analyzed_data_file", ":", "in_file_h5", ".", "close", "(", ")", "else", ":", "self", ".", "out_file_h5", "=", "tmp_out_file_h5", "if", "self", ".", "is_open", "(", "self", ".", "out_file_h5", ")", ":", "self", ".", "_analyzed_data_file", "=", "self", ".", "out_file_h5", ".", "filename", "else", ":", "self", ".", "_analyzed_data_file", "=", "None" ]
Analyzes a hit table with the c++ histogramming/clusterizer.

Parameters
----------
analyzed_data_file : string
    The filename of the analyzed data file. If None, the analyzed data file specified during initialization is taken. The filename extension (.h5) does not need to be provided.
analyzed_data_out_file : string
    The filename of the new analyzed data file. If None, the analyzed data file specified during initialization is taken. The filename extension (.h5) does not need to be provided.
[ "Analyzes", "a", "hit", "table", "with", "the", "c", "++", "histogrammming", "/", "clusterizer", "." ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L1067-L1204
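A minimal usage sketch for analyze_hit_table above. The import path follows the func_code_url; treating AnalyzeRawData as a context manager and the public create_cluster_size_hist/create_cluster_tot_hist toggles are assumptions inferred from the private _create_* flags checked inside the method, and all file names are hypothetical.

from pybar.analysis.analyze_raw_data import AnalyzeRawData

# Hypothetical file names; the create_* attributes are assumed public
# counterparts of the _create_* flags seen in the code above.
with AnalyzeRawData(raw_data_file='run_42.h5') as analyze_raw_data:
    analyze_raw_data.create_cluster_size_hist = True  # fill the 1D cluster size histogram
    analyze_raw_data.create_cluster_tot_hist = True   # fill the 2D ToT vs. cluster size histogram
    # Streams the Hits table chunk-wise, clusters and histograms the hits
    # and writes the Cluster/ClusterHits tables to the output file.
    analyze_raw_data.analyze_hit_table(analyzed_data_out_file='run_42_analyzed.h5')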
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData._deduce_settings_from_file
def _deduce_settings_from_file(self, opened_raw_data_file):  # TODO: parse better
    '''Tries to get the scan parameters needed for analysis from the raw data file
    '''
    try:  # take info from the raw data file (not available in old files)
        flavor = opened_raw_data_file.root.configuration.miscellaneous[:][np.where(opened_raw_data_file.root.configuration.miscellaneous[:]['name'] == 'Flavor')]['value'][0]
        self._settings_from_file_set = True
        # adding this for special cases, e.g. stop-mode scan
        if "trig_count" in opened_raw_data_file.root.configuration.run_conf[:]['name']:
            trig_count = opened_raw_data_file.root.configuration.run_conf[:][np.where(opened_raw_data_file.root.configuration.run_conf[:]['name'] == 'trig_count')]['value'][0]
        else:
            trig_count = opened_raw_data_file.root.configuration.global_register[:][np.where(opened_raw_data_file.root.configuration.global_register[:]['name'] == 'Trig_Count')]['value'][0]
        vcal_c0 = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'Vcal_Coeff_0')]['value'][0]
        vcal_c1 = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'Vcal_Coeff_1')]['value'][0]
        c_low = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_Low')]['value'][0]
        c_mid = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_Med')]['value'][0]
        c_high = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_High')]['value'][0]
        self.c_low_mask = opened_raw_data_file.root.configuration.C_Low[:]
        self.c_high_mask = opened_raw_data_file.root.configuration.C_High[:]
        self.fei4b = False if str(flavor) == 'fei4a' else True
        self.trig_count = int(trig_count)
        self.vcal_c0 = float(vcal_c0)
        self.vcal_c1 = float(vcal_c1)
        self.c_low = float(c_low)
        self.c_mid = float(c_mid)
        self.c_high = float(c_high)
        self.n_injections = int(opened_raw_data_file.root.configuration.run_conf[:][np.where(opened_raw_data_file.root.configuration.run_conf[:]['name'] == 'n_injections')]['value'][0])
    except tb.exceptions.NoSuchNodeError:
        if not self._settings_from_file_set:
            logging.warning('No settings stored in raw data file %s, using standard settings', opened_raw_data_file.filename)
        else:
            logging.info('No settings provided in raw data file %s, using already set settings', opened_raw_data_file.filename)
    except IndexError:  # happens if a setting is not available (e.g. repeat_command)
        pass
python
def _deduce_settings_from_file(self, opened_raw_data_file):  # TODO: parse better
    '''Tries to get the scan parameters needed for analysis from the raw data file
    '''
    try:  # take info from the raw data file (not available in old files)
        flavor = opened_raw_data_file.root.configuration.miscellaneous[:][np.where(opened_raw_data_file.root.configuration.miscellaneous[:]['name'] == 'Flavor')]['value'][0]
        self._settings_from_file_set = True
        # adding this for special cases, e.g. stop-mode scan
        if "trig_count" in opened_raw_data_file.root.configuration.run_conf[:]['name']:
            trig_count = opened_raw_data_file.root.configuration.run_conf[:][np.where(opened_raw_data_file.root.configuration.run_conf[:]['name'] == 'trig_count')]['value'][0]
        else:
            trig_count = opened_raw_data_file.root.configuration.global_register[:][np.where(opened_raw_data_file.root.configuration.global_register[:]['name'] == 'Trig_Count')]['value'][0]
        vcal_c0 = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'Vcal_Coeff_0')]['value'][0]
        vcal_c1 = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'Vcal_Coeff_1')]['value'][0]
        c_low = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_Low')]['value'][0]
        c_mid = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_Med')]['value'][0]
        c_high = opened_raw_data_file.root.configuration.calibration_parameters[:][np.where(opened_raw_data_file.root.configuration.calibration_parameters[:]['name'] == 'C_Inj_High')]['value'][0]
        self.c_low_mask = opened_raw_data_file.root.configuration.C_Low[:]
        self.c_high_mask = opened_raw_data_file.root.configuration.C_High[:]
        self.fei4b = False if str(flavor) == 'fei4a' else True
        self.trig_count = int(trig_count)
        self.vcal_c0 = float(vcal_c0)
        self.vcal_c1 = float(vcal_c1)
        self.c_low = float(c_low)
        self.c_mid = float(c_mid)
        self.c_high = float(c_high)
        self.n_injections = int(opened_raw_data_file.root.configuration.run_conf[:][np.where(opened_raw_data_file.root.configuration.run_conf[:]['name'] == 'n_injections')]['value'][0])
    except tb.exceptions.NoSuchNodeError:
        if not self._settings_from_file_set:
            logging.warning('No settings stored in raw data file %s, using standard settings', opened_raw_data_file.filename)
        else:
            logging.info('No settings provided in raw data file %s, using already set settings', opened_raw_data_file.filename)
    except IndexError:  # happens if a setting is not available (e.g. repeat_command)
        pass
[ "def", "_deduce_settings_from_file", "(", "self", ",", "opened_raw_data_file", ")", ":", "# TODO: parse better", "try", ":", "# take infos raw data files (not avalable in old files)", "flavor", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "miscellaneous", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "miscellaneous", "[", ":", "]", "[", "'name'", "]", "==", "'Flavor'", ")", "]", "[", "'value'", "]", "[", "0", "]", "self", ".", "_settings_from_file_set", "=", "True", "# adding this for special cases e.g., stop-mode scan", "if", "\"trig_count\"", "in", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "run_conf", "[", ":", "]", "[", "'name'", "]", ":", "trig_count", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "run_conf", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "run_conf", "[", ":", "]", "[", "'name'", "]", "==", "'trig_count'", ")", "]", "[", "'value'", "]", "[", "0", "]", "else", ":", "trig_count", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "global_register", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "global_register", "[", ":", "]", "[", "'name'", "]", "==", "'Trig_Count'", ")", "]", "[", "'value'", "]", "[", "0", "]", "vcal_c0", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "'name'", "]", "==", "'Vcal_Coeff_0'", ")", "]", "[", "'value'", "]", "[", "0", "]", "vcal_c1", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "'name'", "]", "==", "'Vcal_Coeff_1'", ")", "]", "[", "'value'", "]", "[", "0", "]", "c_low", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "'name'", "]", "==", "'C_Inj_Low'", ")", "]", "[", "'value'", "]", "[", "0", "]", "c_mid", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "'name'", "]", "==", "'C_Inj_Med'", ")", "]", "[", "'value'", "]", "[", "0", "]", "c_high", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "calibration_parameters", "[", ":", "]", "[", "'name'", "]", "==", "'C_Inj_High'", ")", "]", "[", "'value'", "]", "[", "0", "]", "self", ".", "c_low_mask", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "C_Low", "[", ":", "]", "self", ".", "c_high_mask", "=", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "C_High", "[", ":", "]", "self", ".", "fei4b", "=", "False", "if", "str", "(", "flavor", ")", "==", "'fei4a'", "else", "True", "self", ".", "trig_count", "=", "int", "(", "trig_count", ")", 
"self", ".", "vcal_c0", "=", "float", "(", "vcal_c0", ")", "self", ".", "vcal_c1", "=", "float", "(", "vcal_c1", ")", "self", ".", "c_low", "=", "float", "(", "c_low", ")", "self", ".", "c_mid", "=", "float", "(", "c_mid", ")", "self", ".", "c_high", "=", "float", "(", "c_high", ")", "self", ".", "n_injections", "=", "int", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "run_conf", "[", ":", "]", "[", "np", ".", "where", "(", "opened_raw_data_file", ".", "root", ".", "configuration", ".", "run_conf", "[", ":", "]", "[", "'name'", "]", "==", "'n_injections'", ")", "]", "[", "'value'", "]", "[", "0", "]", ")", "except", "tb", ".", "exceptions", ".", "NoSuchNodeError", ":", "if", "not", "self", ".", "_settings_from_file_set", ":", "logging", ".", "warning", "(", "'No settings stored in raw data file %s, use standard settings'", ",", "opened_raw_data_file", ".", "filename", ")", "else", ":", "logging", ".", "info", "(", "'No settings provided in raw data file %s, use already set settings'", ",", "opened_raw_data_file", ".", "filename", ")", "except", "IndexError", ":", "# happens if setting is not available (e.g. repeat_command)", "pass" ]
Tries to get the scan parameters needed for analysis from the raw data file
[ "Tries", "to", "get", "the", "scan", "parameters", "needed", "for", "analysis", "from", "the", "raw", "data", "file" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L1399-L1431
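_deduce_settings_from_file repeats one idiom: read a PyTables configuration node into a structured array with 'name' and 'value' columns, then select one row with np.where. A standalone sketch of that idiom on synthetic data; the helper name lookup_setting and the example values are hypothetical.

import numpy as np

def lookup_setting(config_array, name):
    # Mirrors node[:][np.where(node[:]['name'] == name)]['value'][0] from above;
    # a missing setting raises IndexError, which the caller silently ignores.
    return config_array[np.where(config_array['name'] == name)]['value'][0]

# Synthetic stand-in for e.g. root.configuration.calibration_parameters[:]
calibration = np.array([(b'Vcal_Coeff_0', b'0.5'), (b'Vcal_Coeff_1', b'1.5')],
                       dtype=[('name', 'S32'), ('value', 'S32')])
vcal_c0 = float(lookup_setting(calibration, b'Vcal_Coeff_0').decode())  # 0.5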
SiLab-Bonn/pyBAR
pybar/analysis/analyze_raw_data.py
AnalyzeRawData._get_plsr_dac_charge
def _get_plsr_dac_charge(self, plsr_dac_array, no_offset=False):
    '''Takes the PlsrDAC calibration and the stored C-high/C-low mask to calculate the charge from the PlsrDAC array on a pixel basis
    '''
    charge = np.zeros_like(self.c_low_mask, dtype=np.float16)  # charge in electrons
    if self.vcal_c0 is not None and self.vcal_c1 is not None and self.c_low is not None and self.c_mid is not None and self.c_high is not None:
        voltage = self.vcal_c1 * plsr_dac_array if no_offset else self.vcal_c0 + self.vcal_c1 * plsr_dac_array
        sel_low = np.logical_and(self.c_low_mask, ~self.c_high_mask)  # only C_Low enabled
        sel_mid = np.logical_and(~self.c_low_mask, self.c_high_mask)  # only C_High enabled
        sel_high = np.logical_and(self.c_low_mask, self.c_high_mask)  # both capacitors enabled
        # Each selection indexes both sides of the assignment; the original code
        # reused the C_Low-only selection on the right-hand side for all three cases.
        charge[sel_low] = voltage[sel_low] * self.c_low / 0.16022
        charge[sel_mid] = voltage[sel_mid] * self.c_mid / 0.16022
        charge[sel_high] = voltage[sel_high] * self.c_high / 0.16022
    return charge
python
def _get_plsr_dac_charge(self, plsr_dac_array, no_offset=False):
    '''Takes the PlsrDAC calibration and the stored C-high/C-low mask to calculate the charge from the PlsrDAC array on a pixel basis
    '''
    charge = np.zeros_like(self.c_low_mask, dtype=np.float16)  # charge in electrons
    if self.vcal_c0 is not None and self.vcal_c1 is not None and self.c_low is not None and self.c_mid is not None and self.c_high is not None:
        voltage = self.vcal_c1 * plsr_dac_array if no_offset else self.vcal_c0 + self.vcal_c1 * plsr_dac_array
        sel_low = np.logical_and(self.c_low_mask, ~self.c_high_mask)  # only C_Low enabled
        sel_mid = np.logical_and(~self.c_low_mask, self.c_high_mask)  # only C_High enabled
        sel_high = np.logical_and(self.c_low_mask, self.c_high_mask)  # both capacitors enabled
        # Each selection indexes both sides of the assignment; the original code
        # reused the C_Low-only selection on the right-hand side for all three cases.
        charge[sel_low] = voltage[sel_low] * self.c_low / 0.16022
        charge[sel_mid] = voltage[sel_mid] * self.c_mid / 0.16022
        charge[sel_high] = voltage[sel_high] * self.c_high / 0.16022
    return charge
[ "def", "_get_plsr_dac_charge", "(", "self", ",", "plsr_dac_array", ",", "no_offset", "=", "False", ")", ":", "charge", "=", "np", ".", "zeros_like", "(", "self", ".", "c_low_mask", ",", "dtype", "=", "np", ".", "float16", ")", "# charge in electrons", "if", "self", ".", "vcal_c0", "is", "not", "None", "and", "self", ".", "vcal_c1", "is", "not", "None", "and", "self", ".", "c_low", "is", "not", "None", "and", "self", ".", "c_mid", "is", "not", "None", "and", "self", ".", "c_high", "is", "not", "None", ":", "voltage", "=", "self", ".", "vcal_c1", "*", "plsr_dac_array", "if", "no_offset", "else", "self", ".", "vcal_c0", "+", "self", ".", "vcal_c1", "*", "plsr_dac_array", "charge", "[", "np", ".", "logical_and", "(", "self", ".", "c_low_mask", ",", "~", "self", ".", "c_high_mask", ")", "]", "=", "voltage", "[", "np", ".", "logical_and", "(", "self", ".", "c_low_mask", ",", "~", "self", ".", "c_high_mask", ")", "]", "*", "self", ".", "c_low", "/", "0.16022", "charge", "[", "np", ".", "logical_and", "(", "~", "self", ".", "c_low_mask", ",", "self", ".", "c_high_mask", ")", "]", "=", "voltage", "[", "np", ".", "logical_and", "(", "self", ".", "c_low_mask", ",", "~", "self", ".", "c_high_mask", ")", "]", "*", "self", ".", "c_mid", "/", "0.16022", "charge", "[", "np", ".", "logical_and", "(", "self", ".", "c_low_mask", ",", "self", ".", "c_high_mask", ")", "]", "=", "voltage", "[", "np", ".", "logical_and", "(", "self", ".", "c_low_mask", ",", "~", "self", ".", "c_high_mask", ")", "]", "*", "self", ".", "c_high", "/", "0.16022", "return", "charge" ]
Takes the PlsrDAC calibration and the stored C-high/C-low mask to calculate the charge from the PlsrDAC array on a pixel basis
[ "Takes", "the", "PlsrDAC", "calibration", "and", "the", "stored", "C", "-", "high", "/", "C", "-", "low", "mask", "to", "calculate", "the", "charge", "from", "the", "PlsrDAC", "array", "on", "a", "pixel", "basis" ]
train
https://github.com/SiLab-Bonn/pyBAR/blob/5ad95bbcd41cd358825823fb78f396cfce23593e/pybar/analysis/analyze_raw_data.py#L1433-L1442
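The conversion above amounts to charge = (vcal_c0 + vcal_c1 * PlsrDAC) * C / 0.16022 per pixel, with C chosen from the injection-capacitor masks; the divisor 0.16022 is taken from the source and presumably folds in the elementary charge (1.6022e-19 C) for the voltage/capacitance units used, but that is an inference. A standalone sketch, with the boolean selection applied identically on both sides of each assignment:

import numpy as np

def plsr_dac_to_charge(plsr_dac, vcal_c0, vcal_c1, c_low, c_mid, c_high,
                       c_low_mask, c_high_mask, no_offset=False):
    # plsr_dac, c_low_mask and c_high_mask are per-pixel arrays of one shape.
    voltage = vcal_c1 * plsr_dac if no_offset else vcal_c0 + vcal_c1 * plsr_dac
    charge = np.zeros(c_low_mask.shape, dtype=np.float64)
    # One capacitance per mask combination; the same selection indexes both
    # charge and voltage so the pixels line up.
    for cap, sel in ((c_low, c_low_mask & ~c_high_mask),
                     (c_mid, ~c_low_mask & c_high_mask),
                     (c_high, c_low_mask & c_high_mask)):
        charge[sel] = voltage[sel] * cap / 0.16022
    return charge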
yougov/solr-doc-manager
mongo_connector/doc_managers/solr_doc_manager.py
DocManager._parse_fields
def _parse_fields(self, result, field_name):
    """ If Schema access, parse fields and build respective lists """
    field_list = []
    for key, value in result.get('schema', {}).get(field_name, {}).items():
        if key not in field_list:
            field_list.append(key)
    return field_list
python
def _parse_fields(self, result, field_name):
    """ If Schema access, parse fields and build respective lists """
    field_list = []
    for key, value in result.get('schema', {}).get(field_name, {}).items():
        if key not in field_list:
            field_list.append(key)
    return field_list
[ "def", "_parse_fields", "(", "self", ",", "result", ",", "field_name", ")", ":", "field_list", "=", "[", "]", "for", "key", ",", "value", "in", "result", ".", "get", "(", "'schema'", ",", "{", "}", ")", ".", "get", "(", "field_name", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "key", "not", "in", "field_list", ":", "field_list", ".", "append", "(", "key", ")", "return", "field_list" ]
If Schema access, parse fields and build respective lists
[ "If", "Schema", "access", "parse", "fields", "and", "build", "respective", "lists" ]
train
https://github.com/yougov/solr-doc-manager/blob/1978bf6f3387b1afd6dd6b41a1bbaea9932d60fd/mongo_connector/doc_managers/solr_doc_manager.py#L83-L90
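A quick standalone check of the parsing logic above against a synthetic response shaped like a Solr schema payload. Only the result['schema'][field_name] nesting is taken from the code; the example field entries are made up.

# Synthetic schema response; nesting inferred from _parse_fields above.
result = {'schema': {'fields': {'_id': {'type': 'string'},
                                'title': {'type': 'text_general'}}}}

def parse_fields(result, field_name):
    # Standalone equivalent of DocManager._parse_fields: collect the keys
    # found under result['schema'][field_name], skipping duplicates.
    field_list = []
    for key in result.get('schema', {}).get(field_name, {}):
        if key not in field_list:
            field_list.append(key)
    return field_list

print(parse_fields(result, 'fields'))         # ['_id', 'title']
print(parse_fields(result, 'dynamicFields'))  # [] when the section is absent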