| source_code (string, 45–7.24k chars) | function_name (string, 1–111 chars) | messages (list) |
|---|---|---|
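Each row pairs an anonymized Python function (its name replaced by `x`) with the reference `function_name` and a ready-made `messages` chat triplet: a system instruction, a user turn carrying the code, and an assistant turn carrying the name. A minimal sketch of consuming the rows, assuming the table is exported as JSON Lines; the file name is a hypothetical placeholder:

```python
import json

# Iterate rows of the table below; "function_naming.jsonl" is a placeholder path.
with open("function_naming.jsonl") as f:
    for line in f:
        row = json.loads(line)
        code = row["source_code"]      # function body, name anonymized to `x`
        name = row["function_name"]    # reference answer, e.g. "stop"
        messages = row["messages"]     # system / user / assistant turns
        # The assistant turn always repeats the reference name.
        assert messages[-1]["role"] == "assistant"
        assert messages[-1]["content"] == name
```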
def x(self, msg=None):
if not self.stop_run.is_set():
if msg:
logging.info('%s%s Stopping run...', msg, '' if msg[-1] in
punctuation else '.')
else:
logging.info('Stopping run...')
self.stop_run.set()
|
stop
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, msg=None):\n if not self.stop_run.is_set():\n if msg:\n logging.info('%s%s Stopping run...', msg, '' if msg[-1] in\n punctuation else '.')\n else:\n logging.info('Stopping run...')\n self.stop_run.set()\n",
"role": "user"
},
{
"content": "stop",
"role": "assistant"
}
] |
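As the row above shows, the `messages` column already follows the common system/user/assistant chat schema, so a row can be flattened directly into a single supervised training example. A sketch under that assumption; the turn delimiters below are illustrative, not any specific model's chat template:

```python
def to_training_text(messages):
    """Join chat turns into one string (illustrative delimiters)."""
    return "\n".join("<|{}|>\n{}".format(turn["role"], turn["content"])
                     for turn in messages)
```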
def x(self, msg=None):
if not self.abort_run.is_set():
if msg:
logging.error('%s%s Aborting run...', msg, '' if msg[-1] in
punctuation else '.')
else:
logging.error('Aborting run...')
self.abort_run.set()
self.stop_run.set()
|
abort
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, msg=None):\n if not self.abort_run.is_set():\n if msg:\n logging.error('%s%s Aborting run...', msg, '' if msg[-1] in\n punctuation else '.')\n else:\n logging.error('Aborting run...')\n self.abort_run.set()\n self.stop_run.set()\n",
"role": "user"
},
{
"content": "abort",
"role": "assistant"
}
] |
def x(self, run, conf=None, run_conf=None, use_thread=False,
catch_exception=True):
if isinstance(conf, basestring) and os.path.isfile(conf):
logging.info('Updating configuration from file %s', os.path.abspath
(conf))
elif conf is not None:
logging.info('Updating configuration')
conf = self.open_conf(conf)
self._conf.update(conf)
if isclass(run):
run = run(conf=self._conf)
local_run_conf = {}
if 'run_conf' in self._conf:
logging.info(
'Updating run configuration using run_conf key from configuration')
local_run_conf.update(self._conf['run_conf'])
if run.__class__.__name__ in self._conf:
logging.info(
'Updating run configuration using %s key from configuration' %
(run.__class__.__name__,))
local_run_conf.update(self._conf[run.__class__.__name__])
if isinstance(run_conf, basestring) and os.path.isfile(run_conf):
logging.info('Updating run configuration from file %s', os.path.
abspath(run_conf))
elif run_conf is not None:
logging.info('Updating run configuration')
run_conf = self.open_conf(run_conf)
if run.__class__.__name__ in run_conf:
run_conf = run_conf[run.__class__.__name__]
local_run_conf.update(run_conf)
if use_thread:
self.current_run = run
@thunkify(thread_name='RunThread', daemon=True, default_func=self.
current_run.get_run_status)
def x():
return run.run(run_conf=local_run_conf)
signal.signal(signal.SIGINT, self._signal_handler)
logging.info('Press Ctrl-C to stop run')
return run_run_in_thread()
else:
self.current_run = run
status = run.run(run_conf=local_run_conf)
if not catch_exception and status != run_status.finished:
raise RuntimeError('Exception occurred. Please read the log.')
return status
|
run_run
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, run, conf=None, run_conf=None, use_thread=False,\n catch_exception=True):\n if isinstance(conf, basestring) and os.path.isfile(conf):\n logging.info('Updating configuration from file %s', os.path.abspath\n (conf))\n elif conf is not None:\n logging.info('Updating configuration')\n conf = self.open_conf(conf)\n self._conf.update(conf)\n if isclass(run):\n run = run(conf=self._conf)\n local_run_conf = {}\n if 'run_conf' in self._conf:\n logging.info(\n 'Updating run configuration using run_conf key from configuration')\n local_run_conf.update(self._conf['run_conf'])\n if run.__class__.__name__ in self._conf:\n logging.info(\n 'Updating run configuration using %s key from configuration' %\n (run.__class__.__name__,))\n local_run_conf.update(self._conf[run.__class__.__name__])\n if isinstance(run_conf, basestring) and os.path.isfile(run_conf):\n logging.info('Updating run configuration from file %s', os.path.\n abspath(run_conf))\n elif run_conf is not None:\n logging.info('Updating run configuration')\n run_conf = self.open_conf(run_conf)\n if run.__class__.__name__ in run_conf:\n run_conf = run_conf[run.__class__.__name__]\n local_run_conf.update(run_conf)\n if use_thread:\n self.current_run = run\n\n @thunkify(thread_name='RunThread', daemon=True, default_func=self.\n current_run.get_run_status)\n def x():\n return run.run(run_conf=local_run_conf)\n signal.signal(signal.SIGINT, self._signal_handler)\n logging.info('Press Ctrl-C to stop run')\n return run_run_in_thread()\n else:\n self.current_run = run\n status = run.run(run_conf=local_run_conf)\n if not catch_exception and status != run_status.finished:\n raise RuntimeError('Exception occurred. Please read the log.')\n return status\n",
"role": "user"
},
{
"content": "run_run",
"role": "assistant"
}
] |
def x(self, primlist, skip_remaining=False):
runlist = self.open_primlist(primlist)
for index, run in enumerate(runlist):
logging.info('Progressing with run %i out of %i...', index + 1, len
(runlist))
join = self.run_run(run, use_thread=True)
status = join()
if skip_remaining and not status == run_status.finished:
logging.error(
'Exited run %i with status %s: Skipping all remaining runs.',
run.run_number, status)
break
|
run_primlist
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, primlist, skip_remaining=False):\n runlist = self.open_primlist(primlist)\n for index, run in enumerate(runlist):\n logging.info('Progressing with run %i out of %i...', index + 1, len\n (runlist))\n join = self.run_run(run, use_thread=True)\n status = join()\n if skip_remaining and not status == run_status.finished:\n logging.error(\n 'Exited run %i with status %s: Skipping all remaining runs.',\n run.run_number, status)\n break\n",
"role": "user"
},
{
"content": "run_primlist",
"role": "assistant"
}
] |
def x(scan_base, combine_n_readouts=1000, chunk_size=10000000,
plot_occupancy_hists=False, output_pdf=None, output_file=None):
time_stamp = []
x = []
y = []
for data_file in scan_base:
with tb.open_file(data_file + '_interpreted.h5', mode='r+'
) as in_hit_file_h5:
meta_data_array = in_hit_file_h5.root.meta_data[:]
hit_table = in_hit_file_h5.root.Hits
parameter_ranges = np.column_stack((analysis_utils.
get_ranges_from_array(meta_data_array['timestamp_start'][::
combine_n_readouts]), analysis_utils.get_ranges_from_array(
meta_data_array['event_number'][::combine_n_readouts])))
analysis_utils.index_event_number(hit_table)
analyze_data = AnalyzeRawData()
analyze_data.create_tot_hist = False
analyze_data.create_bcid_hist = False
analyze_data.histogram.set_no_scan_parameter()
index = 0
best_chunk_size = chunk_size
progress_bar = progressbar.ProgressBar(widgets=['', progressbar
.Percentage(), ' ', progressbar.Bar(marker='*', left='|',
right='|'), ' ', progressbar.AdaptiveETA()], maxval=
hit_table.shape[0], term_width=80)
progress_bar.start()
for parameter_index, parameter_range in enumerate(parameter_ranges
):
logging.debug('Analyze time stamp ' + str(parameter_range[0
]) + ' and data from events = [' + str(parameter_range[
2]) + ',' + str(parameter_range[3]) + '[ ' + str(int(
float(float(parameter_index) / float(len(
parameter_ranges)) * 100.0))) + '%')
analyze_data.reset()
readout_hit_len = 0
for hits, index in analysis_utils.data_aligned_at_events(
hit_table, start_event_number=parameter_range[2],
stop_event_number=parameter_range[3], start_index=index,
chunk_size=best_chunk_size):
analyze_data.analyze_hits(hits)
readout_hit_len += hits.shape[0]
progress_bar.update(index)
best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *
readout_hit_len) < chunk_size else chunk_size
occupancy_array = analyze_data.histogram.get_occupancy()
projection_x = np.sum(occupancy_array, axis=0).ravel()
projection_y = np.sum(occupancy_array, axis=1).ravel()
x.append(analysis_utils.get_mean_from_histogram(
projection_x, bin_positions=range(0, 80)))
y.append(analysis_utils.get_mean_from_histogram(
projection_y, bin_positions=range(0, 336)))
time_stamp.append(parameter_range[0])
if plot_occupancy_hists:
plotting.plot_occupancy(occupancy_array[:, :, 0], title
='Occupancy for events between ' + time.strftime(
'%H:%M:%S', time.localtime(parameter_range[0])) +
' and ' + time.strftime('%H:%M:%S', time.localtime(
parameter_range[1])), filename=output_pdf)
progress_bar.finish()
plotting.plot_scatter([(i * 250) for i in x], [(i * 50) for i in y],
title='Mean beam position', x_label='x [um]', y_label='y [um]',
marker_style='-o', filename=output_pdf)
if output_file:
with tb.open_file(output_file, mode='a') as out_file_h5:
rec_array = np.array(zip(time_stamp, x, y), dtype=[(
'time_stamp', float), ('x', float), ('y', float)])
try:
beam_spot_table = out_file_h5.create_table(out_file_h5.root,
name='Beamspot', description=rec_array, title=
'Beam spot position', filters=tb.Filters(complib=
'blosc', complevel=5, fletcher32=False))
beam_spot_table[:] = rec_array
except tb.exceptions.NodeError:
logging.warning(output_file +
' has already a Beamspot note, do not overwrite existing.')
return time_stamp, x, y
|
analyze_beam_spot
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(scan_base, combine_n_readouts=1000, chunk_size=10000000,\n plot_occupancy_hists=False, output_pdf=None, output_file=None):\n time_stamp = []\n x = []\n y = []\n for data_file in scan_base:\n with tb.open_file(data_file + '_interpreted.h5', mode='r+'\n ) as in_hit_file_h5:\n meta_data_array = in_hit_file_h5.root.meta_data[:]\n hit_table = in_hit_file_h5.root.Hits\n parameter_ranges = np.column_stack((analysis_utils.\n get_ranges_from_array(meta_data_array['timestamp_start'][::\n combine_n_readouts]), analysis_utils.get_ranges_from_array(\n meta_data_array['event_number'][::combine_n_readouts])))\n analysis_utils.index_event_number(hit_table)\n analyze_data = AnalyzeRawData()\n analyze_data.create_tot_hist = False\n analyze_data.create_bcid_hist = False\n analyze_data.histogram.set_no_scan_parameter()\n index = 0\n best_chunk_size = chunk_size\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar\n .Percentage(), ' ', progressbar.Bar(marker='*', left='|',\n right='|'), ' ', progressbar.AdaptiveETA()], maxval=\n hit_table.shape[0], term_width=80)\n progress_bar.start()\n for parameter_index, parameter_range in enumerate(parameter_ranges\n ):\n logging.debug('Analyze time stamp ' + str(parameter_range[0\n ]) + ' and data from events = [' + str(parameter_range[\n 2]) + ',' + str(parameter_range[3]) + '[ ' + str(int(\n float(float(parameter_index) / float(len(\n parameter_ranges)) * 100.0))) + '%')\n analyze_data.reset()\n readout_hit_len = 0\n for hits, index in analysis_utils.data_aligned_at_events(\n hit_table, start_event_number=parameter_range[2],\n stop_event_number=parameter_range[3], start_index=index,\n chunk_size=best_chunk_size):\n analyze_data.analyze_hits(hits)\n readout_hit_len += hits.shape[0]\n progress_bar.update(index)\n best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *\n readout_hit_len) < chunk_size else chunk_size\n occupancy_array = analyze_data.histogram.get_occupancy()\n projection_x = np.sum(occupancy_array, axis=0).ravel()\n projection_y = np.sum(occupancy_array, axis=1).ravel()\n x.append(analysis_utils.get_mean_from_histogram(\n projection_x, bin_positions=range(0, 80)))\n y.append(analysis_utils.get_mean_from_histogram(\n projection_y, bin_positions=range(0, 336)))\n time_stamp.append(parameter_range[0])\n if plot_occupancy_hists:\n plotting.plot_occupancy(occupancy_array[:, :, 0], title\n ='Occupancy for events between ' + time.strftime(\n '%H:%M:%S', time.localtime(parameter_range[0])) +\n ' and ' + time.strftime('%H:%M:%S', time.localtime(\n parameter_range[1])), filename=output_pdf)\n progress_bar.finish()\n plotting.plot_scatter([(i * 250) for i in x], [(i * 50) for i in y],\n title='Mean beam position', x_label='x [um]', y_label='y [um]',\n marker_style='-o', filename=output_pdf)\n if output_file:\n with tb.open_file(output_file, mode='a') as out_file_h5:\n rec_array = np.array(zip(time_stamp, x, y), dtype=[(\n 'time_stamp', float), ('x', float), ('y', float)])\n try:\n beam_spot_table = out_file_h5.create_table(out_file_h5.root,\n name='Beamspot', description=rec_array, title=\n 'Beam spot position', filters=tb.Filters(complib=\n 'blosc', complevel=5, fletcher32=False))\n beam_spot_table[:] = rec_array\n except tb.exceptions.NodeError:\n logging.warning(output_file +\n ' has already a Beamspot note, do not overwrite existing.')\n return time_stamp, x, y\n",
"role": "user"
},
{
"content": "analyze_beam_spot",
"role": "assistant"
}
] |
def x(scan_base, combine_n_readouts=1000, time_line_absolute=True,
output_pdf=None, output_file=None):
time_stamp = []
rate = []
start_time_set = False
for data_file in scan_base:
with tb.open_file(data_file + '_interpreted.h5', mode='r'
) as in_file_h5:
meta_data_array = in_file_h5.root.meta_data[:]
parameter_ranges = np.column_stack((analysis_utils.
get_ranges_from_array(meta_data_array['timestamp_start'][::
combine_n_readouts]), analysis_utils.get_ranges_from_array(
meta_data_array['event_number'][::combine_n_readouts])))
if time_line_absolute:
time_stamp.extend(parameter_ranges[:-1, 0])
else:
if not start_time_set:
start_time = parameter_ranges[0, 0]
start_time_set = True
time_stamp.extend((parameter_ranges[:-1, 0] - start_time) /
60.0)
rate.extend((parameter_ranges[:-1, 3] - parameter_ranges[:-1, 2
]) / (parameter_ranges[:-1, 1] - parameter_ranges[:-1, 0]))
if time_line_absolute:
plotting.plot_scatter_time(time_stamp, rate, title=
'Event rate [Hz]', marker_style='o', filename=output_pdf)
else:
plotting.plot_scatter(time_stamp, rate, title='Events per time',
x_label='Progressed time [min.]', y_label='Events rate [Hz]',
marker_style='o', filename=output_pdf)
if output_file:
with tb.open_file(output_file, mode='a') as out_file_h5:
rec_array = np.array(zip(time_stamp, rate), dtype=[(
'time_stamp', float), ('rate', float)]).view(np.recarray)
try:
rate_table = out_file_h5.create_table(out_file_h5.root,
name='Eventrate', description=rec_array, title=
'Event rate', filters=tb.Filters(complib='blosc',
complevel=5, fletcher32=False))
rate_table[:] = rec_array
except tb.exceptions.NodeError:
logging.warning(output_file +
' has already a Eventrate note, do not overwrite existing.'
)
return time_stamp, rate
|
analyze_event_rate
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(scan_base, combine_n_readouts=1000, time_line_absolute=True,\n output_pdf=None, output_file=None):\n time_stamp = []\n rate = []\n start_time_set = False\n for data_file in scan_base:\n with tb.open_file(data_file + '_interpreted.h5', mode='r'\n ) as in_file_h5:\n meta_data_array = in_file_h5.root.meta_data[:]\n parameter_ranges = np.column_stack((analysis_utils.\n get_ranges_from_array(meta_data_array['timestamp_start'][::\n combine_n_readouts]), analysis_utils.get_ranges_from_array(\n meta_data_array['event_number'][::combine_n_readouts])))\n if time_line_absolute:\n time_stamp.extend(parameter_ranges[:-1, 0])\n else:\n if not start_time_set:\n start_time = parameter_ranges[0, 0]\n start_time_set = True\n time_stamp.extend((parameter_ranges[:-1, 0] - start_time) /\n 60.0)\n rate.extend((parameter_ranges[:-1, 3] - parameter_ranges[:-1, 2\n ]) / (parameter_ranges[:-1, 1] - parameter_ranges[:-1, 0]))\n if time_line_absolute:\n plotting.plot_scatter_time(time_stamp, rate, title=\n 'Event rate [Hz]', marker_style='o', filename=output_pdf)\n else:\n plotting.plot_scatter(time_stamp, rate, title='Events per time',\n x_label='Progressed time [min.]', y_label='Events rate [Hz]',\n marker_style='o', filename=output_pdf)\n if output_file:\n with tb.open_file(output_file, mode='a') as out_file_h5:\n rec_array = np.array(zip(time_stamp, rate), dtype=[(\n 'time_stamp', float), ('rate', float)]).view(np.recarray)\n try:\n rate_table = out_file_h5.create_table(out_file_h5.root,\n name='Eventrate', description=rec_array, title=\n 'Event rate', filters=tb.Filters(complib='blosc',\n complevel=5, fletcher32=False))\n rate_table[:] = rec_array\n except tb.exceptions.NodeError:\n logging.warning(output_file +\n ' has already a Eventrate note, do not overwrite existing.'\n )\n return time_stamp, rate\n",
"role": "user"
},
{
"content": "analyze_event_rate",
"role": "assistant"
}
] |
def x(scan_base, include_no_cluster=False, time_line_absolute=True,
combine_n_readouts=1000, chunk_size=10000000, plot_n_cluster_hists=
False, output_pdf=None, output_file=None):
time_stamp = []
n_cluster = []
start_time_set = False
for data_file in scan_base:
with tb.open_file(data_file + '_interpreted.h5', mode='r+'
) as in_cluster_file_h5:
meta_data_array = in_cluster_file_h5.root.meta_data[:]
cluster_table = in_cluster_file_h5.root.Cluster
parameter_ranges = np.column_stack((analysis_utils.
get_ranges_from_array(meta_data_array['timestamp_start'][::
combine_n_readouts]), analysis_utils.get_ranges_from_array(
meta_data_array['event_number'][::combine_n_readouts])))
analysis_utils.index_event_number(cluster_table)
analyze_data = AnalyzeRawData()
analyze_data.create_tot_hist = False
analyze_data.create_bcid_hist = False
index = 0
best_chunk_size = chunk_size
total_cluster = cluster_table.shape[0]
progress_bar = progressbar.ProgressBar(widgets=['', progressbar
.Percentage(), ' ', progressbar.Bar(marker='*', left='|',
right='|'), ' ', progressbar.AdaptiveETA()], maxval=
total_cluster, term_width=80)
progress_bar.start()
for parameter_index, parameter_range in enumerate(parameter_ranges
):
logging.debug('Analyze time stamp ' + str(parameter_range[0
]) + ' and data from events = [' + str(parameter_range[
2]) + ',' + str(parameter_range[3]) + '[ ' + str(int(
float(float(parameter_index) / float(len(
parameter_ranges)) * 100.0))) + '%')
analyze_data.reset()
readout_cluster_len = 0
hist = None
for clusters, index in analysis_utils.data_aligned_at_events(
cluster_table, start_event_number=parameter_range[2],
stop_event_number=parameter_range[3], start_index=index,
chunk_size=best_chunk_size):
n_cluster_per_event = (analysis_utils.
get_n_cluster_in_events(clusters['event_number'])[:, 1]
)
if hist is None:
hist = np.histogram(n_cluster_per_event, bins=10,
range=(0, 10))[0]
else:
hist = np.add(hist, np.histogram(
n_cluster_per_event, bins=10, range=(0, 10))[0])
if include_no_cluster and parameter_range[3] is not None:
hist[0] = parameter_range[3] - parameter_range[2
] - len(n_cluster_per_event)
readout_cluster_len += clusters.shape[0]
total_cluster -= len(clusters)
progress_bar.update(index)
best_chunk_size = int(1.5 * readout_cluster_len) if int(
1.05 * readout_cluster_len) < chunk_size else chunk_size
if plot_n_cluster_hists:
plotting.plot_1d_hist(hist, title=
'Number of cluster per event at ' + str(
parameter_range[0]), x_axis_title=
'Number of cluster', y_axis_title='#', log_y=True,
filename=output_pdf)
hist = hist.astype('f4') / np.sum(hist)
if time_line_absolute:
time_stamp.append(parameter_range[0])
else:
if not start_time_set:
start_time = parameter_ranges[0, 0]
start_time_set = True
time_stamp.append((parameter_range[0] - start_time) / 60.0)
n_cluster.append(hist)
progress_bar.finish()
if total_cluster != 0:
logging.warning(
'Not all clusters were selected during analysis. Analysis is therefore not exact'
)
if time_line_absolute:
plotting.plot_scatter_time(time_stamp, n_cluster, title=
'Number of cluster per event as a function of time',
marker_style='o', filename=output_pdf, legend=('0 cluster',
'1 cluster', '2 cluster', '3 cluster') if include_no_cluster else
('0 cluster not plotted', '1 cluster', '2 cluster', '3 cluster'))
else:
plotting.plot_scatter(time_stamp, n_cluster, title=
'Number of cluster per event as a function of time', x_label=
'time [min.]', marker_style='o', filename=output_pdf, legend=(
'0 cluster', '1 cluster', '2 cluster', '3 cluster') if
include_no_cluster else ('0 cluster not plotted', '1 cluster',
'2 cluster', '3 cluster'))
if output_file:
with tb.open_file(output_file, mode='a') as out_file_h5:
cluster_array = np.array(n_cluster)
rec_array = np.array(zip(time_stamp, cluster_array[:, 0],
cluster_array[:, 1], cluster_array[:, 2], cluster_array[:,
3], cluster_array[:, 4], cluster_array[:, 5]), dtype=[(
'time_stamp', float), ('cluster_0', float), ('cluster_1',
float), ('cluster_2', float), ('cluster_3', float), (
'cluster_4', float), ('cluster_5', float)]).view(np.recarray)
try:
n_cluster_table = out_file_h5.create_table(out_file_h5.root,
name='n_cluster', description=rec_array, title=
'Cluster per event', filters=tb.Filters(complib='blosc',
complevel=5, fletcher32=False))
n_cluster_table[:] = rec_array
except tb.exceptions.NodeError:
logging.warning(output_file +
' has already a Beamspot note, do not overwrite existing.')
return time_stamp, n_cluster
|
analyse_n_cluster_per_event
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(scan_base, include_no_cluster=False, time_line_absolute=True,\n combine_n_readouts=1000, chunk_size=10000000, plot_n_cluster_hists=\n False, output_pdf=None, output_file=None):\n time_stamp = []\n n_cluster = []\n start_time_set = False\n for data_file in scan_base:\n with tb.open_file(data_file + '_interpreted.h5', mode='r+'\n ) as in_cluster_file_h5:\n meta_data_array = in_cluster_file_h5.root.meta_data[:]\n cluster_table = in_cluster_file_h5.root.Cluster\n parameter_ranges = np.column_stack((analysis_utils.\n get_ranges_from_array(meta_data_array['timestamp_start'][::\n combine_n_readouts]), analysis_utils.get_ranges_from_array(\n meta_data_array['event_number'][::combine_n_readouts])))\n analysis_utils.index_event_number(cluster_table)\n analyze_data = AnalyzeRawData()\n analyze_data.create_tot_hist = False\n analyze_data.create_bcid_hist = False\n index = 0\n best_chunk_size = chunk_size\n total_cluster = cluster_table.shape[0]\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar\n .Percentage(), ' ', progressbar.Bar(marker='*', left='|',\n right='|'), ' ', progressbar.AdaptiveETA()], maxval=\n total_cluster, term_width=80)\n progress_bar.start()\n for parameter_index, parameter_range in enumerate(parameter_ranges\n ):\n logging.debug('Analyze time stamp ' + str(parameter_range[0\n ]) + ' and data from events = [' + str(parameter_range[\n 2]) + ',' + str(parameter_range[3]) + '[ ' + str(int(\n float(float(parameter_index) / float(len(\n parameter_ranges)) * 100.0))) + '%')\n analyze_data.reset()\n readout_cluster_len = 0\n hist = None\n for clusters, index in analysis_utils.data_aligned_at_events(\n cluster_table, start_event_number=parameter_range[2],\n stop_event_number=parameter_range[3], start_index=index,\n chunk_size=best_chunk_size):\n n_cluster_per_event = (analysis_utils.\n get_n_cluster_in_events(clusters['event_number'])[:, 1]\n )\n if hist is None:\n hist = np.histogram(n_cluster_per_event, bins=10,\n range=(0, 10))[0]\n else:\n hist = np.add(hist, np.histogram(\n n_cluster_per_event, bins=10, range=(0, 10))[0])\n if include_no_cluster and parameter_range[3] is not None:\n hist[0] = parameter_range[3] - parameter_range[2\n ] - len(n_cluster_per_event)\n readout_cluster_len += clusters.shape[0]\n total_cluster -= len(clusters)\n progress_bar.update(index)\n best_chunk_size = int(1.5 * readout_cluster_len) if int(\n 1.05 * readout_cluster_len) < chunk_size else chunk_size\n if plot_n_cluster_hists:\n plotting.plot_1d_hist(hist, title=\n 'Number of cluster per event at ' + str(\n parameter_range[0]), x_axis_title=\n 'Number of cluster', y_axis_title='#', log_y=True,\n filename=output_pdf)\n hist = hist.astype('f4') / np.sum(hist)\n if time_line_absolute:\n time_stamp.append(parameter_range[0])\n else:\n if not start_time_set:\n start_time = parameter_ranges[0, 0]\n start_time_set = True\n time_stamp.append((parameter_range[0] - start_time) / 60.0)\n n_cluster.append(hist)\n progress_bar.finish()\n if total_cluster != 0:\n logging.warning(\n 'Not all clusters were selected during analysis. 
Analysis is therefore not exact'\n )\n if time_line_absolute:\n plotting.plot_scatter_time(time_stamp, n_cluster, title=\n 'Number of cluster per event as a function of time',\n marker_style='o', filename=output_pdf, legend=('0 cluster',\n '1 cluster', '2 cluster', '3 cluster') if include_no_cluster else\n ('0 cluster not plotted', '1 cluster', '2 cluster', '3 cluster'))\n else:\n plotting.plot_scatter(time_stamp, n_cluster, title=\n 'Number of cluster per event as a function of time', x_label=\n 'time [min.]', marker_style='o', filename=output_pdf, legend=(\n '0 cluster', '1 cluster', '2 cluster', '3 cluster') if\n include_no_cluster else ('0 cluster not plotted', '1 cluster',\n '2 cluster', '3 cluster'))\n if output_file:\n with tb.open_file(output_file, mode='a') as out_file_h5:\n cluster_array = np.array(n_cluster)\n rec_array = np.array(zip(time_stamp, cluster_array[:, 0],\n cluster_array[:, 1], cluster_array[:, 2], cluster_array[:, \n 3], cluster_array[:, 4], cluster_array[:, 5]), dtype=[(\n 'time_stamp', float), ('cluster_0', float), ('cluster_1',\n float), ('cluster_2', float), ('cluster_3', float), (\n 'cluster_4', float), ('cluster_5', float)]).view(np.recarray)\n try:\n n_cluster_table = out_file_h5.create_table(out_file_h5.root,\n name='n_cluster', description=rec_array, title=\n 'Cluster per event', filters=tb.Filters(complib='blosc',\n complevel=5, fletcher32=False))\n n_cluster_table[:] = rec_array\n except tb.exceptions.NodeError:\n logging.warning(output_file +\n ' has already a Beamspot note, do not overwrite existing.')\n return time_stamp, n_cluster\n",
"role": "user"
},
{
"content": "analyse_n_cluster_per_event",
"role": "assistant"
}
] |
def x(input_file_hits, output_file_hits, cluster_size_condition,
n_cluster_condition, chunk_size=4000000):
logging.info('Write hits of events from ' + str(input_file_hits) +
' with ' + cluster_size_condition + ' and ' + n_cluster_condition +
' into ' + str(output_file_hits))
with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:
analysis_utils.index_event_number(in_hit_file_h5.root.Hits)
analysis_utils.index_event_number(in_hit_file_h5.root.Cluster)
with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:
hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5.
root, name='Hits', description=data_struct.HitInfoTable,
title='hit_data', filters=tb.Filters(complib='blosc',
complevel=5, fletcher32=False))
cluster_table = in_hit_file_h5.root.Cluster
last_word_number = 0
progress_bar = progressbar.ProgressBar(widgets=['', progressbar
.Percentage(), ' ', progressbar.Bar(marker='*', left='|',
right='|'), ' ', progressbar.AdaptiveETA()], maxval=
cluster_table.shape[0], term_width=80)
progress_bar.start()
for data, index in analysis_utils.data_aligned_at_events(
cluster_table, chunk_size=chunk_size):
selected_events_1 = (analysis_utils.
get_events_with_cluster_size(event_number=data[
'event_number'], cluster_size=data['size'], condition=
cluster_size_condition))
selected_events_2 = analysis_utils.get_events_with_n_cluster(
event_number=data['event_number'], condition=
n_cluster_condition)
selected_events = analysis_utils.get_events_in_both_arrays(
selected_events_1, selected_events_2)
logging.debug('Selected ' + str(len(selected_events)) +
' events with ' + n_cluster_condition + ' and ' +
cluster_size_condition)
last_word_number = analysis_utils.write_hits_in_events(
hit_table_in=in_hit_file_h5.root.Hits, hit_table_out=
hit_table_out, events=selected_events, start_hit_word=
last_word_number)
progress_bar.update(index)
progress_bar.finish()
in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)
|
select_hits_from_cluster_info
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_file_hits, output_file_hits, cluster_size_condition,\n n_cluster_condition, chunk_size=4000000):\n logging.info('Write hits of events from ' + str(input_file_hits) +\n ' with ' + cluster_size_condition + ' and ' + n_cluster_condition +\n ' into ' + str(output_file_hits))\n with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:\n analysis_utils.index_event_number(in_hit_file_h5.root.Hits)\n analysis_utils.index_event_number(in_hit_file_h5.root.Cluster)\n with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:\n hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5.\n root, name='Hits', description=data_struct.HitInfoTable,\n title='hit_data', filters=tb.Filters(complib='blosc',\n complevel=5, fletcher32=False))\n cluster_table = in_hit_file_h5.root.Cluster\n last_word_number = 0\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar\n .Percentage(), ' ', progressbar.Bar(marker='*', left='|',\n right='|'), ' ', progressbar.AdaptiveETA()], maxval=\n cluster_table.shape[0], term_width=80)\n progress_bar.start()\n for data, index in analysis_utils.data_aligned_at_events(\n cluster_table, chunk_size=chunk_size):\n selected_events_1 = (analysis_utils.\n get_events_with_cluster_size(event_number=data[\n 'event_number'], cluster_size=data['size'], condition=\n cluster_size_condition))\n selected_events_2 = analysis_utils.get_events_with_n_cluster(\n event_number=data['event_number'], condition=\n n_cluster_condition)\n selected_events = analysis_utils.get_events_in_both_arrays(\n selected_events_1, selected_events_2)\n logging.debug('Selected ' + str(len(selected_events)) +\n ' events with ' + n_cluster_condition + ' and ' +\n cluster_size_condition)\n last_word_number = analysis_utils.write_hits_in_events(\n hit_table_in=in_hit_file_h5.root.Hits, hit_table_out=\n hit_table_out, events=selected_events, start_hit_word=\n last_word_number)\n progress_bar.update(index)\n progress_bar.finish()\n in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)\n",
"role": "user"
},
{
"content": "select_hits_from_cluster_info",
"role": "assistant"
}
] |
def x(input_file_hits, output_file_hits, condition=None,
cluster_size_condition=None, n_cluster_condition=None, chunk_size=5000000):
logging.info('Write hits with ' + condition + ' into ' + str(
output_file_hits))
if cluster_size_condition is None and n_cluster_condition is None:
with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:
analysis_utils.index_event_number(in_hit_file_h5.root.Hits)
with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:
hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5
.root, name='Hits', description=data_struct.
HitInfoTable, title='hit_data', filters=tb.Filters(
complib='blosc', complevel=5, fletcher32=False))
analysis_utils.write_hits_in_event_range(hit_table_in=
in_hit_file_h5.root.Hits, hit_table_out=hit_table_out,
condition=condition)
in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)
else:
with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:
analysis_utils.index_event_number(in_hit_file_h5.root.Hits)
analysis_utils.index_event_number(in_hit_file_h5.root.Cluster)
with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:
hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5
.root, name='Hits', description=data_struct.
HitInfoTable, title='hit_data', filters=tb.Filters(
complib='blosc', complevel=5, fletcher32=False))
cluster_table = in_hit_file_h5.root.Cluster
last_word_number = 0
progress_bar = progressbar.ProgressBar(widgets=['',
progressbar.Percentage(), ' ', progressbar.Bar(marker=
'*', left='|', right='|'), ' ', progressbar.AdaptiveETA
()], maxval=cluster_table.shape[0], term_width=80)
progress_bar.start()
for data, index in analysis_utils.data_aligned_at_events(
cluster_table, chunk_size=chunk_size):
if cluster_size_condition is not None:
selected_events = (analysis_utils.
get_events_with_cluster_size(event_number=data[
'event_number'], cluster_size=data['size'],
condition='cluster_size == ' + str(
cluster_size_condition)))
if n_cluster_condition is not None:
selected_events_2 = (analysis_utils.
get_events_with_n_cluster(event_number=data
['event_number'], condition='n_cluster == ' +
str(n_cluster_condition)))
selected_events = selected_events[analysis_utils
.in1d_events(selected_events,
selected_events_2)]
elif n_cluster_condition is not None:
selected_events = (analysis_utils.
get_events_with_n_cluster(event_number=data[
'event_number'], condition='n_cluster == ' +
str(n_cluster_condition)))
else:
raise RuntimeError(
'Cannot understand cluster selection criterion')
last_word_number = analysis_utils.write_hits_in_events(
hit_table_in=in_hit_file_h5.root.Hits,
hit_table_out=hit_table_out, events=selected_events,
start_hit_word=last_word_number, condition=
condition, chunk_size=chunk_size)
progress_bar.update(index)
progress_bar.finish()
in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)
|
select_hits
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_file_hits, output_file_hits, condition=None,\n cluster_size_condition=None, n_cluster_condition=None, chunk_size=5000000):\n logging.info('Write hits with ' + condition + ' into ' + str(\n output_file_hits))\n if cluster_size_condition is None and n_cluster_condition is None:\n with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:\n analysis_utils.index_event_number(in_hit_file_h5.root.Hits)\n with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:\n hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5\n .root, name='Hits', description=data_struct.\n HitInfoTable, title='hit_data', filters=tb.Filters(\n complib='blosc', complevel=5, fletcher32=False))\n analysis_utils.write_hits_in_event_range(hit_table_in=\n in_hit_file_h5.root.Hits, hit_table_out=hit_table_out,\n condition=condition)\n in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)\n else:\n with tb.open_file(input_file_hits, mode='r+') as in_hit_file_h5:\n analysis_utils.index_event_number(in_hit_file_h5.root.Hits)\n analysis_utils.index_event_number(in_hit_file_h5.root.Cluster)\n with tb.open_file(output_file_hits, mode='w') as out_hit_file_h5:\n hit_table_out = out_hit_file_h5.create_table(out_hit_file_h5\n .root, name='Hits', description=data_struct.\n HitInfoTable, title='hit_data', filters=tb.Filters(\n complib='blosc', complevel=5, fletcher32=False))\n cluster_table = in_hit_file_h5.root.Cluster\n last_word_number = 0\n progress_bar = progressbar.ProgressBar(widgets=['',\n progressbar.Percentage(), ' ', progressbar.Bar(marker=\n '*', left='|', right='|'), ' ', progressbar.AdaptiveETA\n ()], maxval=cluster_table.shape[0], term_width=80)\n progress_bar.start()\n for data, index in analysis_utils.data_aligned_at_events(\n cluster_table, chunk_size=chunk_size):\n if cluster_size_condition is not None:\n selected_events = (analysis_utils.\n get_events_with_cluster_size(event_number=data[\n 'event_number'], cluster_size=data['size'],\n condition='cluster_size == ' + str(\n cluster_size_condition)))\n if n_cluster_condition is not None:\n selected_events_2 = (analysis_utils.\n get_events_with_n_cluster(event_number=data\n ['event_number'], condition='n_cluster == ' +\n str(n_cluster_condition)))\n selected_events = selected_events[analysis_utils\n .in1d_events(selected_events,\n selected_events_2)]\n elif n_cluster_condition is not None:\n selected_events = (analysis_utils.\n get_events_with_n_cluster(event_number=data[\n 'event_number'], condition='n_cluster == ' +\n str(n_cluster_condition)))\n else:\n raise RuntimeError(\n 'Cannot understand cluster selection criterion')\n last_word_number = analysis_utils.write_hits_in_events(\n hit_table_in=in_hit_file_h5.root.Hits,\n hit_table_out=hit_table_out, events=selected_events,\n start_hit_word=last_word_number, condition=\n condition, chunk_size=chunk_size)\n progress_bar.update(index)\n progress_bar.finish()\n in_hit_file_h5.root.meta_data.copy(out_hit_file_h5.root)\n",
"role": "user"
},
{
"content": "select_hits",
"role": "assistant"
}
] |
def x(analyzed_data_file, output_file, chunk_size=10000000):
with tb.open_file(analyzed_data_file, mode='r') as in_file_h5:
with tb.open_file(output_file, mode='w') as out_file_h5:
histogram = PyDataHistograming()
histogram.create_occupancy_hist(True)
scan_parameters = None
event_number_indices = None
scan_parameter_indices = None
try:
meta_data = in_file_h5.root.meta_data[:]
scan_parameters = (analysis_utils.
get_unique_scan_parameter_combinations(meta_data))
if scan_parameters is not None:
scan_parameter_indices = np.array(range(0, len(
scan_parameters)), dtype='u4')
event_number_indices = np.ascontiguousarray(scan_parameters
['event_number']).astype(np.uint64)
histogram.add_meta_event_index(event_number_indices,
array_length=len(scan_parameters['event_number']))
histogram.add_scan_parameter(scan_parameter_indices)
logging.info(
'Add %d different scan parameter(s) for analysis',
len(scan_parameters))
else:
logging.info('No scan parameter data provided')
histogram.set_no_scan_parameter()
except tb.exceptions.NoSuchNodeError:
logging.info('No meta data provided, use no scan parameter')
histogram.set_no_scan_parameter()
logging.info('Histogram cluster seeds...')
progress_bar = progressbar.ProgressBar(widgets=['', progressbar
.Percentage(), ' ', progressbar.Bar(marker='*', left='|',
right='|'), ' ', progressbar.AdaptiveETA()], maxval=
in_file_h5.root.Cluster.shape[0], term_width=80)
progress_bar.start()
total_cluster = 0
for cluster, index in analysis_utils.data_aligned_at_events(
in_file_h5.root.Cluster, chunk_size=chunk_size):
total_cluster += len(cluster)
histogram.add_cluster_seed_hits(cluster, len(cluster))
progress_bar.update(index)
progress_bar.finish()
filter_table = tb.Filters(complib='blosc', complevel=5,
fletcher32=False)
occupancy_array = histogram.get_occupancy().T
occupancy_array_table = out_file_h5.create_carray(out_file_h5.
root, name='HistOcc', title='Occupancy Histogram', atom=tb.
Atom.from_dtype(occupancy_array.dtype), shape=
occupancy_array.shape, filters=filter_table)
occupancy_array_table[:] = occupancy_array
if total_cluster != np.sum(occupancy_array):
logging.warning(
'Analysis shows inconsistent number of cluster used. Check needed!'
)
in_file_h5.root.meta_data.copy(out_file_h5.root)
|
histogram_cluster_table
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(analyzed_data_file, output_file, chunk_size=10000000):\n with tb.open_file(analyzed_data_file, mode='r') as in_file_h5:\n with tb.open_file(output_file, mode='w') as out_file_h5:\n histogram = PyDataHistograming()\n histogram.create_occupancy_hist(True)\n scan_parameters = None\n event_number_indices = None\n scan_parameter_indices = None\n try:\n meta_data = in_file_h5.root.meta_data[:]\n scan_parameters = (analysis_utils.\n get_unique_scan_parameter_combinations(meta_data))\n if scan_parameters is not None:\n scan_parameter_indices = np.array(range(0, len(\n scan_parameters)), dtype='u4')\n event_number_indices = np.ascontiguousarray(scan_parameters\n ['event_number']).astype(np.uint64)\n histogram.add_meta_event_index(event_number_indices,\n array_length=len(scan_parameters['event_number']))\n histogram.add_scan_parameter(scan_parameter_indices)\n logging.info(\n 'Add %d different scan parameter(s) for analysis',\n len(scan_parameters))\n else:\n logging.info('No scan parameter data provided')\n histogram.set_no_scan_parameter()\n except tb.exceptions.NoSuchNodeError:\n logging.info('No meta data provided, use no scan parameter')\n histogram.set_no_scan_parameter()\n logging.info('Histogram cluster seeds...')\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar\n .Percentage(), ' ', progressbar.Bar(marker='*', left='|',\n right='|'), ' ', progressbar.AdaptiveETA()], maxval=\n in_file_h5.root.Cluster.shape[0], term_width=80)\n progress_bar.start()\n total_cluster = 0\n for cluster, index in analysis_utils.data_aligned_at_events(\n in_file_h5.root.Cluster, chunk_size=chunk_size):\n total_cluster += len(cluster)\n histogram.add_cluster_seed_hits(cluster, len(cluster))\n progress_bar.update(index)\n progress_bar.finish()\n filter_table = tb.Filters(complib='blosc', complevel=5,\n fletcher32=False)\n occupancy_array = histogram.get_occupancy().T\n occupancy_array_table = out_file_h5.create_carray(out_file_h5.\n root, name='HistOcc', title='Occupancy Histogram', atom=tb.\n Atom.from_dtype(occupancy_array.dtype), shape=\n occupancy_array.shape, filters=filter_table)\n occupancy_array_table[:] = occupancy_array\n if total_cluster != np.sum(occupancy_array):\n logging.warning(\n 'Analysis shows inconsistent number of cluster used. Check needed!'\n )\n in_file_h5.root.meta_data.copy(out_file_h5.root)\n",
"role": "user"
},
{
"content": "histogram_cluster_table",
"role": "assistant"
}
] |
def x(analyze_data, scan_parameters=None, chunk_size=50000):
if (analyze_data.out_file_h5 is None or analyze_data.out_file_h5.isopen ==
0):
in_hit_file_h5 = tb.open_file(analyze_data._analyzed_data_file, 'r+')
close_file = True
else:
in_hit_file_h5 = analyze_data.out_file_h5
close_file = False
meta_data = in_hit_file_h5.root.meta_data[:]
try:
hit_table = in_hit_file_h5.root.Hits
except tb.NoSuchNodeError:
logging.error(
'analyze_hits_per_scan_parameter needs a hit table, but no hit table found.'
)
return
meta_data_table_at_scan_parameter = (analysis_utils.
get_unique_scan_parameter_combinations(meta_data, scan_parameters=
scan_parameters))
parameter_values = analysis_utils.get_scan_parameters_table_from_meta_data(
meta_data_table_at_scan_parameter, scan_parameters)
event_number_ranges = analysis_utils.get_ranges_from_array(
meta_data_table_at_scan_parameter['event_number'])
analysis_utils.index_event_number(hit_table)
index = 0
best_chunk_size = chunk_size
for parameter_index, (start_event_number, stop_event_number) in enumerate(
event_number_ranges):
logging.info('Analyze hits for ' + str(scan_parameters) + ' = ' +
str(parameter_values[parameter_index]))
analyze_data.reset()
readout_hit_len = 0
for hits, index in analysis_utils.data_aligned_at_events(hit_table,
start_event_number=start_event_number, stop_event_number=
stop_event_number, start_index=index, chunk_size=best_chunk_size):
analyze_data.analyze_hits(hits, scan_parameter=False)
readout_hit_len += hits.shape[0]
best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *
readout_hit_len) < chunk_size and int(1.05 * readout_hit_len
) > 1000.0 else chunk_size
file_name = ' '.join(re.findall('[a-zA-Z0-9]+', str(scan_parameters))
) + '_' + ' '.join(re.findall('[a-zA-Z0-9]+', str(
parameter_values[parameter_index])))
analyze_data._create_additional_hit_data(safe_to_file=False)
analyze_data._create_additional_cluster_data(safe_to_file=False)
yield analyze_data, file_name
if close_file:
in_hit_file_h5.close()
|
analyze_hits_per_scan_parameter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(analyze_data, scan_parameters=None, chunk_size=50000):\n if (analyze_data.out_file_h5 is None or analyze_data.out_file_h5.isopen ==\n 0):\n in_hit_file_h5 = tb.open_file(analyze_data._analyzed_data_file, 'r+')\n close_file = True\n else:\n in_hit_file_h5 = analyze_data.out_file_h5\n close_file = False\n meta_data = in_hit_file_h5.root.meta_data[:]\n try:\n hit_table = in_hit_file_h5.root.Hits\n except tb.NoSuchNodeError:\n logging.error(\n 'analyze_hits_per_scan_parameter needs a hit table, but no hit table found.'\n )\n return\n meta_data_table_at_scan_parameter = (analysis_utils.\n get_unique_scan_parameter_combinations(meta_data, scan_parameters=\n scan_parameters))\n parameter_values = analysis_utils.get_scan_parameters_table_from_meta_data(\n meta_data_table_at_scan_parameter, scan_parameters)\n event_number_ranges = analysis_utils.get_ranges_from_array(\n meta_data_table_at_scan_parameter['event_number'])\n analysis_utils.index_event_number(hit_table)\n index = 0\n best_chunk_size = chunk_size\n for parameter_index, (start_event_number, stop_event_number) in enumerate(\n event_number_ranges):\n logging.info('Analyze hits for ' + str(scan_parameters) + ' = ' +\n str(parameter_values[parameter_index]))\n analyze_data.reset()\n readout_hit_len = 0\n for hits, index in analysis_utils.data_aligned_at_events(hit_table,\n start_event_number=start_event_number, stop_event_number=\n stop_event_number, start_index=index, chunk_size=best_chunk_size):\n analyze_data.analyze_hits(hits, scan_parameter=False)\n readout_hit_len += hits.shape[0]\n best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *\n readout_hit_len) < chunk_size and int(1.05 * readout_hit_len\n ) > 1000.0 else chunk_size\n file_name = ' '.join(re.findall('[a-zA-Z0-9]+', str(scan_parameters))\n ) + '_' + ' '.join(re.findall('[a-zA-Z0-9]+', str(\n parameter_values[parameter_index])))\n analyze_data._create_additional_hit_data(safe_to_file=False)\n analyze_data._create_additional_cluster_data(safe_to_file=False)\n yield analyze_data, file_name\n if close_file:\n in_hit_file_h5.close()\n",
"role": "user"
},
{
"content": "analyze_hits_per_scan_parameter",
"role": "assistant"
}
] |
def x(preamble, data):
voltage = np.array(data, dtype=np.float)
meta_data = preamble.split(',')[5].split(';')
time_unit = meta_data[3][1:-1]
XZEro = float(meta_data[5])
XINcr = float(meta_data[4])
PT_Off = float(meta_data[6])
voltage_unit = meta_data[7][1:-1]
YZEro = float(meta_data[10])
YMUlt = float(meta_data[8])
YOFf = float(meta_data[9])
time = XZEro + XINcr * (np.arange(0, voltage.size) - PT_Off)
voltage = YZEro + YMUlt * (voltage - YOFf)
return time, voltage, time_unit, voltage_unit
|
interpret_data_from_tektronix
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(preamble, data):\n voltage = np.array(data, dtype=np.float)\n meta_data = preamble.split(',')[5].split(';')\n time_unit = meta_data[3][1:-1]\n XZEro = float(meta_data[5])\n XINcr = float(meta_data[4])\n PT_Off = float(meta_data[6])\n voltage_unit = meta_data[7][1:-1]\n YZEro = float(meta_data[10])\n YMUlt = float(meta_data[8])\n YOFf = float(meta_data[9])\n time = XZEro + XINcr * (np.arange(0, voltage.size) - PT_Off)\n voltage = YZEro + YMUlt * (voltage - YOFf)\n return time, voltage, time_unit, voltage_unit\n",
"role": "user"
},
{
"content": "interpret_data_from_tektronix",
"role": "assistant"
}
] |
def x(self):
commands = []
commands.extend(self.register.get_commands('ConfMode'))
self.register_utils.send_commands(commands)
with self.readout(fill_buffer=True, callback=None, errback=None):
if self.register.fei4b:
commands = []
self.register.set_global_register_value('Efuse_Sense', 1)
commands.extend(self.register.get_commands('WrRegister', name=[
'Efuse_Sense']))
commands.extend(self.register.get_commands('GlobalPulse', Width=0))
self.register.set_global_register_value('Efuse_Sense', 0)
commands.extend(self.register.get_commands('WrRegister', name=[
'Efuse_Sense']))
self.register_utils.send_commands(commands)
commands = []
self.register.set_global_register_value('Conf_AddrEnable', 1)
commands.extend(self.register.get_commands('WrRegister', name=[
'Conf_AddrEnable']))
chip_sn_address = self.register.get_global_register_attributes(
'addresses', name='Chip_SN')
commands.extend(self.register.get_commands('RdRegister', addresses=
chip_sn_address))
self.register_utils.send_commands(commands)
data = self.read_data()
if data.shape[0] == 0:
logging.error('Chip S/N: No data')
return
read_values = []
for index, word in enumerate(np.nditer(data)):
fei4_data_word = FEI4Record(word, self.register.chip_flavor)
if fei4_data_word == 'AR':
fei4_next_data_word = FEI4Record(data[index + 1], self.register
.chip_flavor)
if fei4_next_data_word == 'VR':
read_value = fei4_next_data_word['value']
read_values.append(read_value)
if len(read_values) == 0:
logging.error('No Chip S/N was found')
elif len(read_values) == 1:
logging.info('Chip S/N: %d', read_values[0])
else:
logging.warning('Ambiguous Chip S/N: %s', read_values)
|
read_chip_sn
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n self.register_utils.send_commands(commands)\n with self.readout(fill_buffer=True, callback=None, errback=None):\n if self.register.fei4b:\n commands = []\n self.register.set_global_register_value('Efuse_Sense', 1)\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'Efuse_Sense']))\n commands.extend(self.register.get_commands('GlobalPulse', Width=0))\n self.register.set_global_register_value('Efuse_Sense', 0)\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'Efuse_Sense']))\n self.register_utils.send_commands(commands)\n commands = []\n self.register.set_global_register_value('Conf_AddrEnable', 1)\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'Conf_AddrEnable']))\n chip_sn_address = self.register.get_global_register_attributes(\n 'addresses', name='Chip_SN')\n commands.extend(self.register.get_commands('RdRegister', addresses=\n chip_sn_address))\n self.register_utils.send_commands(commands)\n data = self.read_data()\n if data.shape[0] == 0:\n logging.error('Chip S/N: No data')\n return\n read_values = []\n for index, word in enumerate(np.nditer(data)):\n fei4_data_word = FEI4Record(word, self.register.chip_flavor)\n if fei4_data_word == 'AR':\n fei4_next_data_word = FEI4Record(data[index + 1], self.register\n .chip_flavor)\n if fei4_next_data_word == 'VR':\n read_value = fei4_next_data_word['value']\n read_values.append(read_value)\n if len(read_values) == 0:\n logging.error('No Chip S/N was found')\n elif len(read_values) == 1:\n logging.info('Chip S/N: %d', read_values[0])\n else:\n logging.warning('Ambiguous Chip S/N: %s', read_values)\n",
"role": "user"
},
{
"content": "read_chip_sn",
"role": "assistant"
}
] |
def x(self, name, overwrite_config=False):
self.register_utils.send_commands(self.register.get_commands('ConfMode'))
with self.readout(fill_buffer=True, callback=None, errback=None):
commands = []
commands.extend(self.register.get_commands('RdRegister', name=name))
self.register_utils.send_commands(commands)
data = self.read_data()
register_object = self.register.get_global_register_objects(name=[name])[0]
value = BitLogic(register_object['addresses'] * 16)
index = 0
vr_count = 0
for word in np.nditer(data):
fei4_data_word = FEI4Record(word, self.register.chip_flavor)
if fei4_data_word == 'AR':
address_value = fei4_data_word['address']
if address_value != register_object['address'] + index:
raise Exception(
'Unexpected address from Address Record: read: %d, expected: %d'
% (address_value, register_object['address'] + index))
elif fei4_data_word == 'VR':
vr_count += 1
if vr_count >= 2:
raise RuntimeError('Read more than 2 value records')
read_value = BitLogic.from_value(fei4_data_word['value'], size=16)
if register_object['register_littleendian']:
read_value.reverse()
value[index * 16 + 15:index * 16] = read_value
index += 1
value = value[register_object['bitlength'] + register_object['offset'] -
1:register_object['offset']]
if register_object['littleendian']:
value.reverse()
value = value.tovalue()
if overwrite_config:
self.register.set_global_register(name, value)
return value
|
read_global_register
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, name, overwrite_config=False):\n self.register_utils.send_commands(self.register.get_commands('ConfMode'))\n with self.readout(fill_buffer=True, callback=None, errback=None):\n commands = []\n commands.extend(self.register.get_commands('RdRegister', name=name))\n self.register_utils.send_commands(commands)\n data = self.read_data()\n register_object = self.register.get_global_register_objects(name=[name])[0]\n value = BitLogic(register_object['addresses'] * 16)\n index = 0\n vr_count = 0\n for word in np.nditer(data):\n fei4_data_word = FEI4Record(word, self.register.chip_flavor)\n if fei4_data_word == 'AR':\n address_value = fei4_data_word['address']\n if address_value != register_object['address'] + index:\n raise Exception(\n 'Unexpected address from Address Record: read: %d, expected: %d'\n % (address_value, register_object['address'] + index))\n elif fei4_data_word == 'VR':\n vr_count += 1\n if vr_count >= 2:\n raise RuntimeError('Read more than 2 value records')\n read_value = BitLogic.from_value(fei4_data_word['value'], size=16)\n if register_object['register_littleendian']:\n read_value.reverse()\n value[index * 16 + 15:index * 16] = read_value\n index += 1\n value = value[register_object['bitlength'] + register_object['offset'] -\n 1:register_object['offset']]\n if register_object['littleendian']:\n value.reverse()\n value = value.tovalue()\n if overwrite_config:\n self.register.set_global_register(name, value)\n return value\n",
"role": "user"
},
{
"content": "read_global_register",
"role": "assistant"
}
] |
def x(self, pix_regs=None, dcs=range(40), overwrite_config=False):
if pix_regs is None:
pix_regs = ['EnableDigInj', 'Imon', 'Enable', 'C_High', 'C_Low',
'TDAC', 'FDAC']
self.register_utils.send_commands(self.register.get_commands('ConfMode'))
result = []
for pix_reg in pix_regs:
pixel_data = np.ma.masked_array(np.zeros(shape=(80, 336), dtype=np.
uint32), mask=True)
for dc in dcs:
with self.readout(fill_buffer=True, callback=None, errback=None):
self.register_utils.send_commands(self.register.
get_commands('RdFrontEnd', name=[pix_reg], dcs=[dc]))
data = self.read_data()
interpret_pixel_data(data, dc, pixel_data, invert=False if
pix_reg == 'EnableDigInj' else True)
if overwrite_config:
self.register.set_pixel_register(pix_reg, pixel_data.data)
result.append(pixel_data)
return result
|
read_pixel_register
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pix_regs=None, dcs=range(40), overwrite_config=False):\n if pix_regs is None:\n pix_regs = ['EnableDigInj', 'Imon', 'Enable', 'C_High', 'C_Low',\n 'TDAC', 'FDAC']\n self.register_utils.send_commands(self.register.get_commands('ConfMode'))\n result = []\n for pix_reg in pix_regs:\n pixel_data = np.ma.masked_array(np.zeros(shape=(80, 336), dtype=np.\n uint32), mask=True)\n for dc in dcs:\n with self.readout(fill_buffer=True, callback=None, errback=None):\n self.register_utils.send_commands(self.register.\n get_commands('RdFrontEnd', name=[pix_reg], dcs=[dc]))\n data = self.read_data()\n interpret_pixel_data(data, dc, pixel_data, invert=False if \n pix_reg == 'EnableDigInj' else True)\n if overwrite_config:\n self.register.set_pixel_register(pix_reg, pixel_data.data)\n result.append(pixel_data)\n return result\n",
"role": "user"
},
{
"content": "read_pixel_register",
"role": "assistant"
}
] |
def x(self):
with self.readout(fill_buffer=True, callback=None, errback=None):
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('RdRegister', address=[1]))
self.register_utils.send_commands(commands)
data = self.read_data()
if len(data) != 0:
return True if FEI4Record(data[-1], self.register.chip_flavor
) == 'VR' else False
else:
return False
|
is_fe_ready
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n with self.readout(fill_buffer=True, callback=None, errback=None):\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('RdRegister', address=[1]))\n self.register_utils.send_commands(commands)\n data = self.read_data()\n if len(data) != 0:\n return True if FEI4Record(data[-1], self.register.chip_flavor\n ) == 'VR' else False\n else:\n return False\n",
"role": "user"
},
{
"content": "is_fe_ready",
"role": "assistant"
}
] |
def x(mask):
inverted_mask = np.ones(shape=(80, 336), dtype=np.dtype('>u1'))
inverted_mask[mask >= 1] = 0
return inverted_mask
|
invert_pixel_mask
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mask):\n inverted_mask = np.ones(shape=(80, 336), dtype=np.dtype('>u1'))\n inverted_mask[mask >= 1] = 0\n return inverted_mask\n",
"role": "user"
},
{
"content": "invert_pixel_mask",
"role": "assistant"
}
] |
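A minimal usage sketch for the inverter above, assuming it is in scope under its suggested name invert_pixel_mask; the input mask is illustrative:

import numpy as np

mask = np.zeros((80, 336), dtype=np.uint8)
mask[0, 0] = 1
inverted = invert_pixel_mask(mask)  # 0 where mask >= 1, 1 everywhere else
assert inverted[0, 0] == 0 and inverted[0, 1] == 1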
def x(steps, shift, default=0, value=1, enable_columns=None, mask=None):
shape = 80, 336
mask_array = np.full(shape, default, dtype=np.uint8)
if enable_columns:
odd_columns = [(odd - 1) for odd in enable_columns if odd % 2 != 0]
even_columns = [(even - 1) for even in enable_columns if even % 2 == 0]
else:
odd_columns = range(0, 80, 2)
even_columns = range(1, 80, 2)
odd_rows = np.arange(shift % steps, 336, steps)
even_row_offset = (steps // 2 + shift) % steps
even_rows = np.arange(even_row_offset, 336, steps)
if odd_columns:
odd_col_rows = itertools.product(odd_columns, odd_rows)
for odd_col_row in odd_col_rows:
mask_array[odd_col_row[0], odd_col_row[1]] = value
if even_columns:
even_col_rows = itertools.product(even_columns, even_rows)
for even_col_row in even_col_rows:
mask_array[even_col_row[0], even_col_row[1]] = value
if mask is not None:
mask_array = np.ma.array(mask_array, mask=mask, fill_value=default)
mask_array = mask_array.filled()
return mask_array
|
make_pixel_mask
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(steps, shift, default=0, value=1, enable_columns=None, mask=None):\n shape = 80, 336\n mask_array = np.full(shape, default, dtype=np.uint8)\n if enable_columns:\n odd_columns = [(odd - 1) for odd in enable_columns if odd % 2 != 0]\n even_columns = [(even - 1) for even in enable_columns if even % 2 == 0]\n else:\n odd_columns = range(0, 80, 2)\n even_columns = range(1, 80, 2)\n odd_rows = np.arange(shift % steps, 336, steps)\n even_row_offset = (steps // 2 + shift) % steps\n even_rows = np.arange(even_row_offset, 336, steps)\n if odd_columns:\n odd_col_rows = itertools.product(odd_columns, odd_rows)\n for odd_col_row in odd_col_rows:\n mask_array[odd_col_row[0], odd_col_row[1]] = value\n if even_columns:\n even_col_rows = itertools.product(even_columns, even_rows)\n for even_col_row in even_col_rows:\n mask_array[even_col_row[0], even_col_row[1]] = value\n if mask is not None:\n mask_array = np.ma.array(mask_array, mask=mask, fill_value=default)\n mask_array = mask_array.filled()\n return mask_array\n",
"role": "user"
},
{
"content": "make_pixel_mask",
"role": "assistant"
}
] |
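A usage sketch for the mask generator above, assuming it is in scope as make_pixel_mask; the steps/shift values are illustrative:

# Enable every 6th row; even (1-based) columns are staggered by steps // 2.
mask = make_pixel_mask(steps=6, shift=0)
assert mask.shape == (80, 336)
assert mask[0, 0] == 1  # column 1, row 1 (1-based)
assert mask[1, 3] == 1  # column 2 is shifted down by 3 rows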
def x(column, row, default=0, value=1):
col_array = np.array(column) - 1
row_array = np.array(row) - 1
if np.any(col_array >= 80) or np.any(col_array < 0) or np.any(row_array >=
336) or np.any(row_array < 0):
raise ValueError('Column and/or row out of range')
shape = 80, 336
mask = np.full(shape, default, dtype=np.uint8)
mask[col_array, row_array] = value
return mask
|
make_pixel_mask_from_col_row
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(column, row, default=0, value=1):\n col_array = np.array(column) - 1\n row_array = np.array(row) - 1\n if np.any(col_array >= 80) or np.any(col_array < 0) or np.any(row_array >=\n 336) or np.any(row_array < 0):\n raise ValueError('Column and/or row out of range')\n shape = 80, 336\n mask = np.full(shape, default, dtype=np.uint8)\n mask[col_array, row_array] = value\n return mask\n",
"role": "user"
},
{
"content": "make_pixel_mask_from_col_row",
"role": "assistant"
}
] |
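A short example for the function above, assuming it is in scope as make_pixel_mask_from_col_row; the coordinates are 1-based and paired element-wise:

mask = make_pixel_mask_from_col_row(column=[1, 80], row=[1, 336])
assert mask[0, 0] == 1 and mask[79, 335] == 1
assert mask.sum() == 2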
def x(column, row, default=0, value=1):
col_array = np.array(column) - 1
row_array = np.array(row) - 1
if np.any(col_array >= 80) or np.any(col_array < 0) or np.any(row_array >=
336) or np.any(row_array < 0):
raise ValueError('Column and/or row out of range')
shape = 80, 336
mask = np.full(shape, default, dtype=np.uint8)
if column and row:
mask[col_array.min():col_array.max() + 1, row_array.min():row_array
.max() + 1] = value
return mask
|
make_box_pixel_mask_from_col_row
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(column, row, default=0, value=1):\n col_array = np.array(column) - 1\n row_array = np.array(row) - 1\n if np.any(col_array >= 80) or np.any(col_array < 0) or np.any(row_array >=\n 336) or np.any(row_array < 0):\n raise ValueError('Column and/or row out of range')\n shape = 80, 336\n mask = np.full(shape, default, dtype=np.uint8)\n if column and row:\n mask[col_array.min():col_array.max() + 1, row_array.min():row_array\n .max() + 1] = value\n return mask\n",
"role": "user"
},
{
"content": "make_box_pixel_mask_from_col_row",
"role": "assistant"
}
] |
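A sketch for the box variant above, assuming it is in scope as make_box_pixel_mask_from_col_row; the two column/row values span the box corners (1-based, inclusive):

mask = make_box_pixel_mask_from_col_row(column=[10, 12], row=[100, 101])
assert mask.sum() == 3 * 2  # columns 10..12 times rows 100..101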
def x(mask):
col, row = mask.nonzero()
row_plus_one = row + 1
del_index = np.where(row_plus_one > 335)
row_plus_one = np.delete(row_plus_one, del_index)
col_plus_one = np.delete(col.copy(), del_index)
row_minus_one = row - 1
    del_index = np.where(row_minus_one < 0)  # pixels in the first row have no lower neighbour
row_minus_one = np.delete(row_minus_one, del_index)
col_minus_one = np.delete(col.copy(), del_index)
col = np.concatenate((col_plus_one, col_minus_one))
row = np.concatenate((row_plus_one, row_minus_one))
return make_pixel_mask_from_col_row(col + 1, row + 1)
|
make_xtalk_mask
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(mask):\n col, row = mask.nonzero()\n row_plus_one = row + 1\n del_index = np.where(row_plus_one > 335)\n row_plus_one = np.delete(row_plus_one, del_index)\n col_plus_one = np.delete(col.copy(), del_index)\n row_minus_one = row - 1\n del_index = np.where(row_minus_one > 335)\n row_minus_one = np.delete(row_minus_one, del_index)\n col_minus_one = np.delete(col.copy(), del_index)\n col = np.concatenate((col_plus_one, col_minus_one))\n row = np.concatenate((row_plus_one, row_minus_one))\n return make_pixel_mask_from_col_row(col + 1, row + 1)\n",
"role": "user"
},
{
"content": "make_xtalk_mask",
"role": "assistant"
}
] |
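A sketch of the neighbour selection above, assuming make_xtalk_mask and make_pixel_mask_from_col_row are both in scope; the seed pixel is illustrative:

seed = make_pixel_mask_from_col_row(column=[5], row=[100])
xtalk = make_xtalk_mask(seed)
# The pixels directly above and below the seed are selected, the seed itself is not.
assert xtalk[4, 98] == 1 and xtalk[4, 100] == 1 and xtalk[4, 99] == 0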
def x(column_distance, row_distance, column_offset=0, row_offset=0, default
=0, value=1):
col_shape = 336,
col = np.full(col_shape, fill_value=default, dtype=np.uint8)
col[::row_distance] = value
shape = 80, 336
chessboard_mask = np.full(shape, fill_value=default, dtype=np.uint8)
chessboard_mask[column_offset::column_distance * 2] = np.roll(col,
row_offset)
chessboard_mask[column_distance + column_offset::column_distance * 2
] = np.roll(col, row_distance / 2 + row_offset)
return chessboard_mask
|
make_checkerboard_mask
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(column_distance, row_distance, column_offset=0, row_offset=0, default\n =0, value=1):\n col_shape = 336,\n col = np.full(col_shape, fill_value=default, dtype=np.uint8)\n col[::row_distance] = value\n shape = 80, 336\n chessboard_mask = np.full(shape, fill_value=default, dtype=np.uint8)\n chessboard_mask[column_offset::column_distance * 2] = np.roll(col,\n row_offset)\n chessboard_mask[column_distance + column_offset::column_distance * 2\n ] = np.roll(col, row_distance / 2 + row_offset)\n return chessboard_mask\n",
"role": "user"
},
{
"content": "make_checkerboard_mask",
"role": "assistant"
}
] |
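A usage sketch, assuming the function above is in scope as make_checkerboard_mask and the module's Python 2 semantics (integer division in row_distance / 2); distances of one column and two rows give the classic checkerboard:

mask = make_checkerboard_mask(column_distance=1, row_distance=2)
assert mask[0, 0] == 1 and mask[0, 1] == 0
assert mask[1, 0] == 0 and mask[1, 1] == 1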
def x(self):
logging.info('Resetting Service Records')
commands = []
commands.extend(self.register.get_commands('ConfMode'))
self.register.set_global_register_value('ReadErrorReq', 1)
commands.extend(self.register.get_commands('WrRegister', name=[
'ReadErrorReq']))
commands.extend(self.register.get_commands('GlobalPulse', Width=0))
self.register.set_global_register_value('ReadErrorReq', 0)
commands.extend(self.register.get_commands('WrRegister', name=[
'ReadErrorReq']))
commands.extend(self.register.get_commands('RunMode'))
commands.extend(self.register.get_commands('ConfMode'))
self.send_commands(commands)
|
reset_service_records
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n logging.info('Resetting Service Records')\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n self.register.set_global_register_value('ReadErrorReq', 1)\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'ReadErrorReq']))\n commands.extend(self.register.get_commands('GlobalPulse', Width=0))\n self.register.set_global_register_value('ReadErrorReq', 0)\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'ReadErrorReq']))\n commands.extend(self.register.get_commands('RunMode'))\n commands.extend(self.register.get_commands('ConfMode'))\n self.send_commands(commands)\n",
"role": "user"
},
{
"content": "reset_service_records",
"role": "assistant"
}
] |
def x(self):
logging.info('Resetting Bunch Counter')
commands = []
commands.extend(self.register.get_commands('RunMode'))
commands.extend(self.register.get_commands('BCR'))
self.send_commands(commands)
time.sleep(0.1)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
self.send_commands(commands)
|
reset_bunch_counter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n logging.info('Resetting Bunch Counter')\n commands = []\n commands.extend(self.register.get_commands('RunMode'))\n commands.extend(self.register.get_commands('BCR'))\n self.send_commands(commands)\n time.sleep(0.1)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n self.send_commands(commands)\n",
"role": "user"
},
{
"content": "reset_bunch_counter",
"role": "assistant"
}
] |
def x(hist):
masked_array = np.ma.masked_values(hist, 0)
masked_array = np.ma.masked_greater(masked_array, 10 * np.ma.median(hist))
logging.info('Masking %d pixel(s)', np.ma.count_masked(masked_array))
return np.ma.getmaskarray(masked_array)
|
generate_threshold_mask
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hist):\n masked_array = np.ma.masked_values(hist, 0)\n masked_array = np.ma.masked_greater(masked_array, 10 * np.ma.median(hist))\n logging.info('Masking %d pixel(s)', np.ma.count_masked(masked_array))\n return np.ma.getmaskarray(masked_array)\n",
"role": "user"
},
{
"content": "generate_threshold_mask",
"role": "assistant"
}
] |
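A toy example of the masking rule above, assuming the name generate_threshold_mask; zeros and entries above ten times the median are masked:

import numpy as np

hist = np.array([[1.0, 2.0, 0.0, 100.0]])  # median is 1.5, so the cut is at 15
mask = generate_threshold_mask(hist)
assert mask.tolist() == [[False, False, True, True]]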
def x(array, use_columns=None, selected_columns_only=False):
if array.dtype.names is None:
if use_columns is not None:
a_cut = array[:, use_columns]
else:
a_cut = array
        if a_cut.ndim > 1 and a_cut.shape[1] > 1:  # view each row as one void item so np.unique works row-wise
b = np.ascontiguousarray(a_cut).view(np.dtype((np.void, a_cut.
dtype.itemsize * a_cut.shape[1])))
else:
b = np.ascontiguousarray(a_cut)
_, index = np.unique(b, return_index=True)
if not selected_columns_only:
return array[np.sort(index)]
else:
return a_cut[np.sort(index)]
else:
names = list(array.dtype.names)
if use_columns is not None:
new_names = [names[i] for i in use_columns]
else:
new_names = names
a_cut, index = np.unique(array[new_names], return_index=True)
if not selected_columns_only:
return array[np.sort(index)]
else:
return array[np.sort(index)][new_names]
|
unique_row
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(array, use_columns=None, selected_columns_only=False):\n if array.dtype.names is None:\n if use_columns is not None:\n a_cut = array[:, use_columns]\n else:\n a_cut = array\n if len(use_columns) > 1:\n b = np.ascontiguousarray(a_cut).view(np.dtype((np.void, a_cut.\n dtype.itemsize * a_cut.shape[1])))\n else:\n b = np.ascontiguousarray(a_cut)\n _, index = np.unique(b, return_index=True)\n if not selected_columns_only:\n return array[np.sort(index)]\n else:\n return a_cut[np.sort(index)]\n else:\n names = list(array.dtype.names)\n if use_columns is not None:\n new_names = [names[i] for i in use_columns]\n else:\n new_names = names\n a_cut, index = np.unique(array[new_names], return_index=True)\n if not selected_columns_only:\n return array[np.sort(index)]\n else:\n return array[np.sort(index)][new_names]\n",
"role": "user"
},
{
"content": "unique_row",
"role": "assistant"
}
] |
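A small example of the row deduplication above, assuming it is in scope as unique_row; shown for a plain (unstructured) 2D array:

import numpy as np

a = np.array([[1, 2], [1, 2], [3, 4]])
assert unique_row(a, use_columns=[0, 1]).tolist() == [[1, 2], [3, 4]]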
def x(arr, append_last=True):
right = arr[1:]
if append_last:
left = arr[:]
right = np.append(right, None)
else:
left = arr[:-1]
return np.column_stack((left, right))
|
get_ranges_from_array
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(arr, append_last=True):\n right = arr[1:]\n if append_last:\n left = arr[:]\n right = np.append(right, None)\n else:\n left = arr[:-1]\n return np.column_stack((left, right))\n",
"role": "user"
},
{
"content": "get_ranges_from_array",
"role": "assistant"
}
] |
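A quick example for get_ranges_from_array, assuming it is in scope under that name; each value is paired with its successor, and the last range is open-ended when append_last is True:

import numpy as np

arr = np.array([10, 20, 30])
assert get_ranges_from_array(arr).tolist() == [[10, 20], [20, 30], [30, None]]
assert get_ranges_from_array(arr, append_last=False).tolist() == [[10, 20], [20, 30]]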
def x(ar1, ar2):
if ar1.shape[0] == 0 or ar2.shape[0] == 0:
return []
inds = ar2.searchsorted(ar1)
inds[inds == len(ar2)] = 0
return ar2[inds] == ar1
|
in1d_sorted
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ar1, ar2):\n if ar1.shape[0] == 0 or ar2.shape[0] == 0:\n return []\n inds = ar2.searchsorted(ar1)\n inds[inds == len(ar2)] = 0\n return ar2[inds] == ar1\n",
"role": "user"
},
{
"content": "in1d_sorted",
"role": "assistant"
}
] |
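A short example for the sorted membership test above, assuming the name in1d_sorted; both inputs must already be sorted:

import numpy as np

ar1 = np.array([1, 2, 3, 7])
ar2 = np.array([2, 3, 9])
assert in1d_sorted(ar1, ar2).tolist() == [False, True, True, False]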
def x(x, y):
if len(x) != len(y):
raise ValueError('x, y must have the same length')
z1 = np.hstack((y[0], y[:-1]))
z2 = np.hstack((y[1:], y[-1]))
dx1 = np.hstack((0, np.diff(x)))
dx2 = np.hstack((np.diff(x), 0))
return (z2 - z1) / (dx2 + dx1)
|
central_difference
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, y):\n if len(x) != len(y):\n raise ValueError('x, y must have the same length')\n z1 = np.hstack((y[0], y[:-1]))\n z2 = np.hstack((y[1:], y[-1]))\n dx1 = np.hstack((0, np.diff(x)))\n dx2 = np.hstack((np.diff(x), 0))\n return (z2 - z1) / (dx2 + dx1)\n",
"role": "user"
},
{
"content": "central_difference",
"role": "assistant"
}
] |
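A worked example for the two-sided difference above, assuming the name central_difference; for y = x**2 the interior points recover dy/dx = 2x, while the end points fall back to one-sided differences:

import numpy as np

x = np.array([0.0, 1.0, 2.0, 3.0])
y = x ** 2
assert central_difference(x, y).tolist() == [1.0, 2.0, 4.0, 5.0]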
def x(x, y, n_bins=100):
if len(x) != len(y):
raise ValueError('x and y dimensions have to be the same')
y = y.astype(np.float32)
n, bin_edges = np.histogram(x, bins=n_bins)
sy = np.histogram(x, bins=n_bins, weights=y)[0]
sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]
bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2
mean = sy / n
std = np.sqrt(sy2 / n - mean * mean)
std_mean = std / np.sqrt(n - 1)
mean[np.isnan(mean)] = 0.0
std_mean[np.isnan(std_mean)] = 0.0
return bin_centers, mean, std_mean
|
get_profile_histogram
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, y, n_bins=100):\n if len(x) != len(y):\n raise ValueError('x and y dimensions have to be the same')\n y = y.astype(np.float32)\n n, bin_edges = np.histogram(x, bins=n_bins)\n sy = np.histogram(x, bins=n_bins, weights=y)[0]\n sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]\n bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2\n mean = sy / n\n std = np.sqrt(sy2 / n - mean * mean)\n std_mean = std / np.sqrt(n - 1)\n mean[np.isnan(mean)] = 0.0\n std_mean[np.isnan(std_mean)] = 0.0\n return bin_centers, mean, std_mean\n",
"role": "user"
},
{
"content": "get_profile_histogram",
"role": "assistant"
}
] |
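A usage sketch for the profile histogram above, assuming the name get_profile_histogram; the data below are synthetic:

import numpy as np

x = np.random.uniform(0, 10, 10000)
y = 2 * x + np.random.normal(0, 1, 10000)
bin_centers, mean, std_mean = get_profile_histogram(x, y, n_bins=20)
# mean follows 2 * bin_centers within std_mean for well-filled bins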
def x(hit_file, parameter, reference='event', cluster_file=None, plot=False,
chunk_size=500000):
logging.info('Calculate the rate normalization')
with tb.open_file(hit_file, mode='r+') as in_hit_file_h5:
meta_data = in_hit_file_h5.root.meta_data[:]
scan_parameter = get_scan_parameter(meta_data)[parameter]
event_numbers = get_meta_data_at_scan_parameter(meta_data, parameter)[
'event_number']
event_range = get_ranges_from_array(event_numbers)
normalization_rate = []
normalization_multiplicity = []
try:
event_range[-1, 1] = in_hit_file_h5.root.Hits[-1]['event_number'
] + 1
except tb.NoSuchNodeError:
logging.error('Cannot find hits table')
return
if reference == 'event':
n_events = event_range[:, 1] - event_range[:, 0]
normalization_rate.extend(n_events)
elif reference == 'time':
time_start = get_meta_data_at_scan_parameter(meta_data, parameter)[
'timestamp_start']
time_spend = np.diff(time_start)
time_spend = np.append(time_spend, meta_data[-1][
'timestamp_stop'] - time_start[-1])
normalization_rate.extend(time_spend)
else:
raise NotImplementedError('The normalization reference ' +
reference + ' is not implemented')
if cluster_file:
cluster_table = in_hit_file_h5.root.Cluster
index_event_number(cluster_table)
index = 0
best_chunk_size = chunk_size
total_cluster = 0
progress_bar = progressbar.ProgressBar(widgets=['', progressbar
.Percentage(), ' ', progressbar.Bar(marker='*', left='|',
right='|'), ' ', progressbar.AdaptiveETA()], maxval=
cluster_table.shape[0], term_width=80)
progress_bar.start()
for start_event, stop_event in event_range:
readout_cluster_len = 0
n_cluster_per_event = None
for clusters, index in data_aligned_at_events(cluster_table,
start_event_number=start_event, stop_event_number=
stop_event, start_index=index, chunk_size=best_chunk_size):
if n_cluster_per_event is None:
n_cluster_per_event = (analysis_utils.
get_n_cluster_in_events(clusters['event_number'
])[:, 1])
else:
n_cluster_per_event = np.append(n_cluster_per_event,
analysis_utils.get_n_cluster_in_events(clusters
['event_number'])[:, 1])
readout_cluster_len += clusters.shape[0]
total_cluster += clusters.shape[0]
progress_bar.update(index)
best_chunk_size = int(1.5 * readout_cluster_len) if int(
1.05 * readout_cluster_len) < chunk_size else chunk_size
normalization_multiplicity.append(np.mean(n_cluster_per_event))
progress_bar.finish()
if total_cluster != cluster_table.shape[0]:
                logging.warning(
                    'Analysis shows an inconsistent number of clusters (%d != %d). Check needed!',
                    total_cluster, cluster_table.shape[0])
if plot:
x = scan_parameter
if reference == 'event':
plotting.plot_scatter(x, normalization_rate, title=
'Events per ' + parameter + ' setting', x_label=parameter,
y_label='# events', log_x=True, filename=os.path.splitext(
hit_file)[0] + '_n_event_normalization.pdf')
elif reference == 'time':
plotting.plot_scatter(x, normalization_rate, title=
'Measuring time per GDAC setting', x_label=parameter,
y_label='time [s]', log_x=True, filename=os.path.splitext(
hit_file)[0] + '_time_normalization.pdf')
if cluster_file:
plotting.plot_scatter(x, normalization_multiplicity, title=
'Mean number of particles per event', x_label=parameter,
y_label='number of hits per event', log_x=True, filename=os
.path.splitext(hit_file)[0] + '_n_particles_normalization.pdf')
if cluster_file:
normalization_rate = np.array(normalization_rate)
normalization_multiplicity = np.array(normalization_multiplicity)
return np.amax(normalization_rate * normalization_multiplicity).astype(
'f16') / (normalization_rate * normalization_multiplicity)
return np.amax(np.array(normalization_rate)).astype('f16') / np.array(
normalization_rate)
|
get_rate_normalization
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hit_file, parameter, reference='event', cluster_file=None, plot=False,\n chunk_size=500000):\n logging.info('Calculate the rate normalization')\n with tb.open_file(hit_file, mode='r+') as in_hit_file_h5:\n meta_data = in_hit_file_h5.root.meta_data[:]\n scan_parameter = get_scan_parameter(meta_data)[parameter]\n event_numbers = get_meta_data_at_scan_parameter(meta_data, parameter)[\n 'event_number']\n event_range = get_ranges_from_array(event_numbers)\n normalization_rate = []\n normalization_multiplicity = []\n try:\n event_range[-1, 1] = in_hit_file_h5.root.Hits[-1]['event_number'\n ] + 1\n except tb.NoSuchNodeError:\n logging.error('Cannot find hits table')\n return\n if reference == 'event':\n n_events = event_range[:, 1] - event_range[:, 0]\n normalization_rate.extend(n_events)\n elif reference == 'time':\n time_start = get_meta_data_at_scan_parameter(meta_data, parameter)[\n 'timestamp_start']\n time_spend = np.diff(time_start)\n time_spend = np.append(time_spend, meta_data[-1][\n 'timestamp_stop'] - time_start[-1])\n normalization_rate.extend(time_spend)\n else:\n raise NotImplementedError('The normalization reference ' +\n reference + ' is not implemented')\n if cluster_file:\n cluster_table = in_hit_file_h5.root.Cluster\n index_event_number(cluster_table)\n index = 0\n best_chunk_size = chunk_size\n total_cluster = 0\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar\n .Percentage(), ' ', progressbar.Bar(marker='*', left='|',\n right='|'), ' ', progressbar.AdaptiveETA()], maxval=\n cluster_table.shape[0], term_width=80)\n progress_bar.start()\n for start_event, stop_event in event_range:\n readout_cluster_len = 0\n n_cluster_per_event = None\n for clusters, index in data_aligned_at_events(cluster_table,\n start_event_number=start_event, stop_event_number=\n stop_event, start_index=index, chunk_size=best_chunk_size):\n if n_cluster_per_event is None:\n n_cluster_per_event = (analysis_utils.\n get_n_cluster_in_events(clusters['event_number'\n ])[:, 1])\n else:\n n_cluster_per_event = np.append(n_cluster_per_event,\n analysis_utils.get_n_cluster_in_events(clusters\n ['event_number'])[:, 1])\n readout_cluster_len += clusters.shape[0]\n total_cluster += clusters.shape[0]\n progress_bar.update(index)\n best_chunk_size = int(1.5 * readout_cluster_len) if int(\n 1.05 * readout_cluster_len) < chunk_size else chunk_size\n normalization_multiplicity.append(np.mean(n_cluster_per_event))\n progress_bar.finish()\n if total_cluster != cluster_table.shape[0]:\n logging.warning(\n 'Analysis shows inconsistent number of cluster (%d != %d). 
Check needed!'\n , total_cluster, cluster_table.shape[0])\n if plot:\n x = scan_parameter\n if reference == 'event':\n plotting.plot_scatter(x, normalization_rate, title=\n 'Events per ' + parameter + ' setting', x_label=parameter,\n y_label='# events', log_x=True, filename=os.path.splitext(\n hit_file)[0] + '_n_event_normalization.pdf')\n elif reference == 'time':\n plotting.plot_scatter(x, normalization_rate, title=\n 'Measuring time per GDAC setting', x_label=parameter,\n y_label='time [s]', log_x=True, filename=os.path.splitext(\n hit_file)[0] + '_time_normalization.pdf')\n if cluster_file:\n plotting.plot_scatter(x, normalization_multiplicity, title=\n 'Mean number of particles per event', x_label=parameter,\n y_label='number of hits per event', log_x=True, filename=os\n .path.splitext(hit_file)[0] + '_n_particles_normalization.pdf')\n if cluster_file:\n normalization_rate = np.array(normalization_rate)\n normalization_multiplicity = np.array(normalization_multiplicity)\n return np.amax(normalization_rate * normalization_multiplicity).astype(\n 'f16') / (normalization_rate * normalization_multiplicity)\n return np.amax(np.array(normalization_rate)).astype('f16') / np.array(\n normalization_rate)\n",
"role": "user"
},
{
"content": "get_rate_normalization",
"role": "assistant"
}
] |
def x(files, parameters=None, unique=False, sort=True):
logging.debug('Get the parameter: ' + str(parameters) +
' values from the file names of ' + str(len(files)) + ' files')
files_dict = collections.OrderedDict()
if parameters is None:
return files_dict
if isinstance(parameters, basestring):
parameters = parameters,
search_string = '_'.join(parameters)
for _ in parameters:
search_string += '_(-?\\d+)'
result = {}
for one_file in files:
parameter_values = re.findall(search_string, one_file)
if parameter_values:
if isinstance(parameter_values[0], tuple):
parameter_values = list(reduce(lambda t1, t2: t1 + t2,
parameter_values))
parameter_values = [[int(i)] for i in parameter_values]
files_dict[one_file] = dict(zip(parameters, parameter_values))
if unique:
for key, value in files_dict.items():
if value not in result.values():
result[key] = value
else:
result[one_file] = files_dict[one_file]
return collections.OrderedDict(sorted(result.iteritems(), key=
itemgetter(1)) if sort else files_dict)
|
get_parameter_value_from_file_names
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(files, parameters=None, unique=False, sort=True):\n logging.debug('Get the parameter: ' + str(parameters) +\n ' values from the file names of ' + str(len(files)) + ' files')\n files_dict = collections.OrderedDict()\n if parameters is None:\n return files_dict\n if isinstance(parameters, basestring):\n parameters = parameters,\n search_string = '_'.join(parameters)\n for _ in parameters:\n search_string += '_(-?\\\\d+)'\n result = {}\n for one_file in files:\n parameter_values = re.findall(search_string, one_file)\n if parameter_values:\n if isinstance(parameter_values[0], tuple):\n parameter_values = list(reduce(lambda t1, t2: t1 + t2,\n parameter_values))\n parameter_values = [[int(i)] for i in parameter_values]\n files_dict[one_file] = dict(zip(parameters, parameter_values))\n if unique:\n for key, value in files_dict.items():\n if value not in result.values():\n result[key] = value\n else:\n result[one_file] = files_dict[one_file]\n return collections.OrderedDict(sorted(result.iteritems(), key=\n itemgetter(1)) if sort else files_dict)\n",
"role": "user"
},
{
"content": "get_parameter_value_from_file_names",
"role": "assistant"
}
] |
def x(scan_base, filter_str=['_analyzed.h5', '_interpreted.h5', '_cut.h5',
'_result.h5', '_hists.h5'], sort_by_time=True, meta_data_v2=True):
data_files = []
if scan_base is None:
return data_files
if isinstance(scan_base, basestring):
scan_base = [scan_base]
for scan_base_str in scan_base:
if '.h5' == os.path.splitext(scan_base_str)[1]:
data_files.append(scan_base_str)
else:
data_files.extend(glob.glob(scan_base_str + '*.h5'))
if filter_str:
if isinstance(filter_str, basestring):
filter_str = [filter_str]
data_files = filter(lambda data_file: not any([(True if x in
data_file else False) for x in filter_str]), data_files)
if sort_by_time and len(data_files) > 1:
f_list = {}
for data_file in data_files:
with tb.open_file(data_file, mode='r') as h5_file:
try:
meta_data = h5_file.root.meta_data
except tb.NoSuchNodeError:
logging.warning('File %s is missing meta_data' %
h5_file.filename)
else:
try:
if meta_data_v2:
timestamp = meta_data[0]['timestamp_start']
else:
timestamp = meta_data[0]['timestamp']
except IndexError:
logging.info('File %s has empty meta_data' %
h5_file.filename)
else:
f_list[data_file] = timestamp
data_files = list(sorted(f_list, key=f_list.__getitem__, reverse=False)
)
return data_files
|
get_data_file_names_from_scan_base
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(scan_base, filter_str=['_analyzed.h5', '_interpreted.h5', '_cut.h5',\n '_result.h5', '_hists.h5'], sort_by_time=True, meta_data_v2=True):\n data_files = []\n if scan_base is None:\n return data_files\n if isinstance(scan_base, basestring):\n scan_base = [scan_base]\n for scan_base_str in scan_base:\n if '.h5' == os.path.splitext(scan_base_str)[1]:\n data_files.append(scan_base_str)\n else:\n data_files.extend(glob.glob(scan_base_str + '*.h5'))\n if filter_str:\n if isinstance(filter_str, basestring):\n filter_str = [filter_str]\n data_files = filter(lambda data_file: not any([(True if x in\n data_file else False) for x in filter_str]), data_files)\n if sort_by_time and len(data_files) > 1:\n f_list = {}\n for data_file in data_files:\n with tb.open_file(data_file, mode='r') as h5_file:\n try:\n meta_data = h5_file.root.meta_data\n except tb.NoSuchNodeError:\n logging.warning('File %s is missing meta_data' %\n h5_file.filename)\n else:\n try:\n if meta_data_v2:\n timestamp = meta_data[0]['timestamp_start']\n else:\n timestamp = meta_data[0]['timestamp']\n except IndexError:\n logging.info('File %s has empty meta_data' %\n h5_file.filename)\n else:\n f_list[data_file] = timestamp\n data_files = list(sorted(f_list, key=f_list.__getitem__, reverse=False)\n )\n return data_files\n",
"role": "user"
},
{
"content": "get_data_file_names_from_scan_base",
"role": "assistant"
}
] |
def x(files, parameters=None, unique=False, sort=True):
logging.debug('Get the parameter ' + str(parameters) + ' values from ' +
str(len(files)) + ' files')
files_dict = collections.OrderedDict()
if isinstance(files, basestring):
files = files,
if isinstance(parameters, basestring):
parameters = parameters,
parameter_values_from_file_names_dict = (
get_parameter_value_from_file_names(files, parameters, unique=
unique, sort=sort))
for file_name in files:
with tb.open_file(file_name, mode='r') as in_file_h5:
scan_parameter_values = collections.OrderedDict()
try:
scan_parameters = in_file_h5.root.scan_parameters[:]
if parameters is None:
parameters = get_scan_parameter_names(scan_parameters)
for parameter in parameters:
try:
scan_parameter_values[parameter] = np.unique(
scan_parameters[parameter]).tolist()
except ValueError:
pass
except tb.NoSuchNodeError:
try:
scan_parameters = get_scan_parameter(in_file_h5.root.
meta_data[:])
if scan_parameters:
try:
scan_parameter_values = np.unique(scan_parameters
[parameters]).tolist()
except ValueError:
pass
except tb.NoSuchNodeError:
pass
if not scan_parameter_values:
try:
scan_parameter_values = (
parameter_values_from_file_names_dict[file_name])
except KeyError:
scan_parameter_values = None
else:
try:
for key, value in scan_parameter_values.items():
if value and value[0
] != parameter_values_from_file_names_dict[
file_name][key][0]:
logging.warning(
'Parameter values in the file name and in the file differ. Take '
+ str(key) + ' parameters ' + str(value) +
' found in %s.', file_name)
except KeyError:
pass
except IndexError:
                    raise IncompleteInputError('Something is wrong, check the scan parameter values!')
if unique and scan_parameter_values is not None:
existing = False
for parameter in scan_parameter_values:
all_par_values = [values[parameter] for values in
files_dict.values()]
if any(x in [scan_parameter_values[parameter]] for x in
all_par_values):
existing = True
break
if not existing:
files_dict[file_name] = scan_parameter_values
else:
logging.warning(
'Scan parameter value(s) from %s exists already, do not add to result'
, file_name)
else:
files_dict[file_name] = scan_parameter_values
return collections.OrderedDict(sorted(files_dict.iteritems(), key=
itemgetter(1)) if sort else files_dict)
|
get_parameter_from_files
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(files, parameters=None, unique=False, sort=True):\n logging.debug('Get the parameter ' + str(parameters) + ' values from ' +\n str(len(files)) + ' files')\n files_dict = collections.OrderedDict()\n if isinstance(files, basestring):\n files = files,\n if isinstance(parameters, basestring):\n parameters = parameters,\n parameter_values_from_file_names_dict = (\n get_parameter_value_from_file_names(files, parameters, unique=\n unique, sort=sort))\n for file_name in files:\n with tb.open_file(file_name, mode='r') as in_file_h5:\n scan_parameter_values = collections.OrderedDict()\n try:\n scan_parameters = in_file_h5.root.scan_parameters[:]\n if parameters is None:\n parameters = get_scan_parameter_names(scan_parameters)\n for parameter in parameters:\n try:\n scan_parameter_values[parameter] = np.unique(\n scan_parameters[parameter]).tolist()\n except ValueError:\n pass\n except tb.NoSuchNodeError:\n try:\n scan_parameters = get_scan_parameter(in_file_h5.root.\n meta_data[:])\n if scan_parameters:\n try:\n scan_parameter_values = np.unique(scan_parameters\n [parameters]).tolist()\n except ValueError:\n pass\n except tb.NoSuchNodeError:\n pass\n if not scan_parameter_values:\n try:\n scan_parameter_values = (\n parameter_values_from_file_names_dict[file_name])\n except KeyError:\n scan_parameter_values = None\n else:\n try:\n for key, value in scan_parameter_values.items():\n if value and value[0\n ] != parameter_values_from_file_names_dict[\n file_name][key][0]:\n logging.warning(\n 'Parameter values in the file name and in the file differ. Take '\n + str(key) + ' parameters ' + str(value) +\n ' found in %s.', file_name)\n except KeyError:\n pass\n except IndexError:\n raise IncompleteInputError('Something wrong check!')\n if unique and scan_parameter_values is not None:\n existing = False\n for parameter in scan_parameter_values:\n all_par_values = [values[parameter] for values in\n files_dict.values()]\n if any(x in [scan_parameter_values[parameter]] for x in\n all_par_values):\n existing = True\n break\n if not existing:\n files_dict[file_name] = scan_parameter_values\n else:\n logging.warning(\n 'Scan parameter value(s) from %s exists already, do not add to result'\n , file_name)\n else:\n files_dict[file_name] = scan_parameter_values\n return collections.OrderedDict(sorted(files_dict.iteritems(), key=\n itemgetter(1)) if sort else files_dict)\n",
"role": "user"
},
{
"content": "get_parameter_from_files",
"role": "assistant"
}
] |
def x(files_dict):
try:
parameter_names = files_dict.itervalues().next().keys()
except AttributeError:
if any(i is not None for i in files_dict.itervalues()):
return False
else:
return True
if any(parameter_names != i.keys() for i in files_dict.itervalues()):
return False
return True
|
check_parameter_similarity
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(files_dict):\n try:\n parameter_names = files_dict.itervalues().next().keys()\n except AttributeError:\n if any(i is not None for i in files_dict.itervalues()):\n return False\n else:\n return True\n if any(parameter_names != i.keys() for i in files_dict.itervalues()):\n return False\n return True\n",
"role": "user"
},
{
"content": "check_parameter_similarity",
"role": "assistant"
}
] |
def x(files_dict, meta_data_v2=True):
if len(files_dict) > 10:
logging.info('Combine the meta data from %d files', len(files_dict))
total_length = 0
for file_name in files_dict.iterkeys():
with tb.open_file(file_name, mode='r') as in_file_h5:
total_length += in_file_h5.root.meta_data.shape[0]
if meta_data_v2:
meta_data_combined = np.empty((total_length,), dtype=[(
'index_start', np.uint32), ('index_stop', np.uint32), (
'data_length', np.uint32), ('timestamp_start', np.float64), (
'timestamp_stop', np.float64), ('error', np.uint32)])
else:
meta_data_combined = np.empty((total_length,), dtype=[(
'start_index', np.uint32), ('stop_index', np.uint32), ('length',
np.uint32), ('timestamp', np.float64), ('error', np.uint32)])
if len(files_dict) > 10:
progress_bar = progressbar.ProgressBar(widgets=['', progressbar.
Percentage(), ' ', progressbar.Bar(marker='*', left='|', right=
'|'), ' ', progressbar.AdaptiveETA()], maxval=total_length,
term_width=80)
progress_bar.start()
index = 0
for file_name in files_dict.iterkeys():
with tb.open_file(file_name, mode='r') as in_file_h5:
array_length = in_file_h5.root.meta_data.shape[0]
meta_data_combined[index:index + array_length
] = in_file_h5.root.meta_data[:]
index += array_length
if len(files_dict) > 10:
progress_bar.update(index)
if len(files_dict) > 10:
progress_bar.finish()
return meta_data_combined
|
combine_meta_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(files_dict, meta_data_v2=True):\n if len(files_dict) > 10:\n logging.info('Combine the meta data from %d files', len(files_dict))\n total_length = 0\n for file_name in files_dict.iterkeys():\n with tb.open_file(file_name, mode='r') as in_file_h5:\n total_length += in_file_h5.root.meta_data.shape[0]\n if meta_data_v2:\n meta_data_combined = np.empty((total_length,), dtype=[(\n 'index_start', np.uint32), ('index_stop', np.uint32), (\n 'data_length', np.uint32), ('timestamp_start', np.float64), (\n 'timestamp_stop', np.float64), ('error', np.uint32)])\n else:\n meta_data_combined = np.empty((total_length,), dtype=[(\n 'start_index', np.uint32), ('stop_index', np.uint32), ('length',\n np.uint32), ('timestamp', np.float64), ('error', np.uint32)])\n if len(files_dict) > 10:\n progress_bar = progressbar.ProgressBar(widgets=['', progressbar.\n Percentage(), ' ', progressbar.Bar(marker='*', left='|', right=\n '|'), ' ', progressbar.AdaptiveETA()], maxval=total_length,\n term_width=80)\n progress_bar.start()\n index = 0\n for file_name in files_dict.iterkeys():\n with tb.open_file(file_name, mode='r') as in_file_h5:\n array_length = in_file_h5.root.meta_data.shape[0]\n meta_data_combined[index:index + array_length\n ] = in_file_h5.root.meta_data[:]\n index += array_length\n if len(files_dict) > 10:\n progress_bar.update(index)\n if len(files_dict) > 10:\n progress_bar.finish()\n return meta_data_combined\n",
"role": "user"
},
{
"content": "combine_meta_data",
"role": "assistant"
}
] |
def x(x, y, weights=None, order=5, smoothness=3, derivation=1):
    if len(x) != len(y):
        raise ValueError('x, y must have the same length')
    f = splrep(x, y, w=weights, k=order, s=smoothness)
    return splev(x, f, der=derivation)
|
smooth_differentiation
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, y, weigths=None, order=5, smoothness=3, derivation=1):\n if len(x) != len(y):\n raise ValueError('x, y must have the same length')\n f = splrep(x, y, w=weigths, k=order, s=smoothness)\n return splev(x, f, der=derivation)\n",
"role": "user"
},
{
"content": "smooth_differentiation",
"role": "assistant"
}
] |
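A usage sketch for the spline-based derivative above, assuming the name smooth_differentiation; SciPy's splrep needs more data points than the spline order, and the smoothness value is illustrative:

import numpy as np

x = np.linspace(0, 2 * np.pi, 50)
y = np.sin(x) + np.random.normal(0, 0.01, 50)
dy = smooth_differentiation(x, y, order=3, smoothness=0.1, derivation=1)
# dy approximates cos(x)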
def x(ar1, ar2):
ar1 = np.asarray(ar1).ravel()
ar2 = np.asarray(ar2).ravel()
ar1_biggest_value = ar1[-1]
ar1_smallest_value = ar1[0]
ar2_biggest_value = ar2[-1]
ar2_smallest_value = ar2[0]
if (ar1_biggest_value < ar2_smallest_value or ar1_smallest_value >
ar2_biggest_value):
return ar1[0:0], ar2[0:0]
min_index_ar1 = np.argmin(ar1 < ar2_smallest_value)
max_index_ar1 = np.argmax(ar1 > ar2_biggest_value)
min_index_ar2 = np.argmin(ar2 < ar1_smallest_value)
max_index_ar2 = np.argmax(ar2 > ar1_biggest_value)
if min_index_ar1 < 0:
min_index_ar1 = 0
if min_index_ar2 < 0:
min_index_ar2 = 0
if max_index_ar1 == 0 or max_index_ar1 > ar1.shape[0]:
max_index_ar1 = ar1.shape[0]
if max_index_ar2 == 0 or max_index_ar2 > ar2.shape[0]:
max_index_ar2 = ar2.shape[0]
return ar1[min_index_ar1:max_index_ar1], ar2[min_index_ar2:max_index_ar2]
|
reduce_sorted_to_intersect
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ar1, ar2):\n ar1 = np.asarray(ar1).ravel()\n ar2 = np.asarray(ar2).ravel()\n ar1_biggest_value = ar1[-1]\n ar1_smallest_value = ar1[0]\n ar2_biggest_value = ar2[-1]\n ar2_smallest_value = ar2[0]\n if (ar1_biggest_value < ar2_smallest_value or ar1_smallest_value >\n ar2_biggest_value):\n return ar1[0:0], ar2[0:0]\n min_index_ar1 = np.argmin(ar1 < ar2_smallest_value)\n max_index_ar1 = np.argmax(ar1 > ar2_biggest_value)\n min_index_ar2 = np.argmin(ar2 < ar1_smallest_value)\n max_index_ar2 = np.argmax(ar2 > ar1_biggest_value)\n if min_index_ar1 < 0:\n min_index_ar1 = 0\n if min_index_ar2 < 0:\n min_index_ar2 = 0\n if max_index_ar1 == 0 or max_index_ar1 > ar1.shape[0]:\n max_index_ar1 = ar1.shape[0]\n if max_index_ar2 == 0 or max_index_ar2 > ar2.shape[0]:\n max_index_ar2 = ar2.shape[0]\n return ar1[min_index_ar1:max_index_ar1], ar2[min_index_ar2:max_index_ar2]\n",
"role": "user"
},
{
"content": "reduce_sorted_to_intersect",
"role": "assistant"
}
] |
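A short example for the intersection cut above, assuming the name reduce_sorted_to_intersect; both arrays must be sorted, and each is trimmed to the other's value range:

import numpy as np

ar1 = np.array([1, 2, 3, 4, 5])
ar2 = np.array([3, 4, 5, 6, 7])
i1, i2 = reduce_sorted_to_intersect(ar1, ar2)
assert i1.tolist() == [3, 4, 5] and i2.tolist() == [3, 4, 5]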
def x(array):
    s = np.sort(array, axis=None)
    s = s[:-1][s[1:] == s[:-1]]  # keep each value that is followed by an equal neighbour
    return np.unique(s)
|
get_not_unique_values
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(array):\n s = np.sort(array, axis=None)\n s = s[s[1:] == s[:-1]]\n return np.unique(s)\n",
"role": "user"
},
{
"content": "get_not_unique_values",
"role": "assistant"
}
] |
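A one-line example for the duplicate finder above, assuming the name get_not_unique_values:

import numpy as np

assert get_not_unique_values(np.array([1, 2, 2, 3, 3, 3])).tolist() == [2, 3]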
def x(meta_data_array, scan_parameter_name):
scan_parameter_values = meta_data_array[scan_parameter_name]
diff = np.concatenate(([1], np.diff(scan_parameter_values)))
idx = np.concatenate((np.where(diff)[0], [len(scan_parameter_values)]))
index = np.empty(len(idx) - 1, dtype={'names': [scan_parameter_name,
'index'], 'formats': ['u4', 'u4']})
index[scan_parameter_name] = scan_parameter_values[idx[:-1]]
index['index'] = idx[:-1]
return index
|
get_meta_data_index_at_scan_parameter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(meta_data_array, scan_parameter_name):\n scan_parameter_values = meta_data_array[scan_parameter_name]\n diff = np.concatenate(([1], np.diff(scan_parameter_values)))\n idx = np.concatenate((np.where(diff)[0], [len(scan_parameter_values)]))\n index = np.empty(len(idx) - 1, dtype={'names': [scan_parameter_name,\n 'index'], 'formats': ['u4', 'u4']})\n index[scan_parameter_name] = scan_parameter_values[idx[:-1]]\n index['index'] = idx[:-1]\n return index\n",
"role": "user"
},
{
"content": "get_meta_data_index_at_scan_parameter",
"role": "assistant"
}
] |
def x(hits_array, condition=None):
    if condition is None:
        return hits_array
    # Bind every identifier used in the condition to the matching hit-array
    # column, so that numexpr can resolve it from the local namespace.
    for variable in set(re.findall('[a-zA-Z_]+', condition)):
        exec(variable + " = hits_array['" + variable + "']")
    return hits_array[ne.evaluate(condition)]
|
select_hits
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hits_array, condition=None):\n if condition is None:\n return hits_array\n for variable in set(re.findall('[a-zA-Z_]+', condition)):\n exec(variable + \" = hits_array['\" + variable + \"']\")\n return hits_array[ne.evaluate(condition)]\n",
"role": "user"
},
{
"content": "select_hits",
"role": "assistant"
}
] |
def x(hits_array, events, assume_sorted=True, condition=None):
    logging.debug('Calculate hits that exist in the given %d events.' %
        len(events))
if assume_sorted:
events, _ = reduce_sorted_to_intersect(events, hits_array[
'event_number'])
if events.shape[0] == 0:
return hits_array[0:0]
try:
if assume_sorted:
selection = analysis_utils.in1d_events(hits_array[
'event_number'], events)
else:
logging.warning(
'Events are usually sorted. Are you sure you want this?')
selection = np.in1d(hits_array['event_number'], events)
if condition is None:
hits_in_events = hits_array[selection]
else:
for variable in set(re.findall('[a-zA-Z_]+', condition)):
exec(variable + " = hits_array['" + variable + "']")
hits_in_events = hits_array[ne.evaluate(condition + ' & selection')
]
except MemoryError:
logging.error(
'There are too many hits to do in RAM operations. Consider decreasing chunk size and use the write_hits_in_events function instead.'
)
raise MemoryError
return hits_in_events
|
get_hits_in_events
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hits_array, events, assume_sorted=True, condition=None):\n logging.debug('Calculate hits that exists in the given %d events.' %\n len(events))\n if assume_sorted:\n events, _ = reduce_sorted_to_intersect(events, hits_array[\n 'event_number'])\n if events.shape[0] == 0:\n return hits_array[0:0]\n try:\n if assume_sorted:\n selection = analysis_utils.in1d_events(hits_array[\n 'event_number'], events)\n else:\n logging.warning(\n 'Events are usually sorted. Are you sure you want this?')\n selection = np.in1d(hits_array['event_number'], events)\n if condition is None:\n hits_in_events = hits_array[selection]\n else:\n for variable in set(re.findall('[a-zA-Z_]+', condition)):\n exec(variable + \" = hits_array['\" + variable + \"']\")\n hits_in_events = hits_array[ne.evaluate(condition + ' & selection')\n ]\n except MemoryError:\n logging.error(\n 'There are too many hits to do in RAM operations. Consider decreasing chunk size and use the write_hits_in_events function instead.'\n )\n raise MemoryError\n return hits_in_events\n",
"role": "user"
},
{
"content": "get_hits_in_events",
"role": "assistant"
}
] |
def x(input_file_hits, scan_parameters=None, try_speedup=False, chunk_size=
10000000):
with tb.open_file(input_file_hits, mode='r+') as in_file_h5:
hit_table = in_file_h5.root.Hits
meta_data = in_file_h5.root.meta_data[:]
meta_data_table_at_scan_parameter = (
get_unique_scan_parameter_combinations(meta_data,
scan_parameters=scan_parameters))
parameter_values = get_scan_parameters_table_from_meta_data(
meta_data_table_at_scan_parameter, scan_parameters)
event_number_ranges = get_ranges_from_array(
meta_data_table_at_scan_parameter['event_number'])
index_event_number(hit_table)
index = 0
best_chunk_size = chunk_size
for parameter_index, (start_event_number, stop_event_number
) in enumerate(event_number_ranges):
logging.debug('Read hits for ' + str(scan_parameters) + ' = ' +
str(parameter_values[parameter_index]))
readout_hit_len = 0
for hits, index in data_aligned_at_events(hit_table,
start_event_number=start_event_number, stop_event_number=
stop_event_number, start_index=index, try_speedup=
try_speedup, chunk_size=best_chunk_size):
yield parameter_values[parameter_index], hits
readout_hit_len += hits.shape[0]
best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *
readout_hit_len) < chunk_size and int(1.05 * readout_hit_len
) > 1000.0 else chunk_size
|
get_hits_of_scan_parameter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(input_file_hits, scan_parameters=None, try_speedup=False, chunk_size=\n 10000000):\n with tb.open_file(input_file_hits, mode='r+') as in_file_h5:\n hit_table = in_file_h5.root.Hits\n meta_data = in_file_h5.root.meta_data[:]\n meta_data_table_at_scan_parameter = (\n get_unique_scan_parameter_combinations(meta_data,\n scan_parameters=scan_parameters))\n parameter_values = get_scan_parameters_table_from_meta_data(\n meta_data_table_at_scan_parameter, scan_parameters)\n event_number_ranges = get_ranges_from_array(\n meta_data_table_at_scan_parameter['event_number'])\n index_event_number(hit_table)\n index = 0\n best_chunk_size = chunk_size\n for parameter_index, (start_event_number, stop_event_number\n ) in enumerate(event_number_ranges):\n logging.debug('Read hits for ' + str(scan_parameters) + ' = ' +\n str(parameter_values[parameter_index]))\n readout_hit_len = 0\n for hits, index in data_aligned_at_events(hit_table,\n start_event_number=start_event_number, stop_event_number=\n stop_event_number, start_index=index, try_speedup=\n try_speedup, chunk_size=best_chunk_size):\n yield parameter_values[parameter_index], hits\n readout_hit_len += hits.shape[0]\n best_chunk_size = int(1.5 * readout_hit_len) if int(1.05 *\n readout_hit_len) < chunk_size and int(1.05 * readout_hit_len\n ) > 1000.0 else chunk_size\n",
"role": "user"
},
{
"content": "get_hits_of_scan_parameter",
"role": "assistant"
}
] |
def x(array, event_start=None, event_stop=None, assume_sorted=True):
    logging.debug('Calculate data of the given event range [' + str(
        event_start) + ', ' + str(event_stop) + '[')
event_number = array['event_number']
if assume_sorted:
data_event_start = event_number[0]
data_event_stop = event_number[-1]
if (event_start is not None and event_stop is not None) and (
data_event_stop < event_start or data_event_start > event_stop or
event_start == event_stop):
return array[0:0]
if event_start is None:
min_index_data = 0
else:
min_index_data = np.argmin(event_number < event_start)
if event_stop is None:
max_index_data = event_number.shape[0]
else:
max_index_data = np.argmax(event_number >= event_stop)
if min_index_data < 0:
min_index_data = 0
if max_index_data == 0 or max_index_data > event_number.shape[0]:
max_index_data = event_number.shape[0]
return array[min_index_data:max_index_data]
else:
return array[ne.evaluate(
'event_number >= event_start & event_number < event_stop')]
|
get_data_in_event_range
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(array, event_start=None, event_stop=None, assume_sorted=True):\n logging.debug('Calculate data of the the given event range [' + str(\n event_start) + ', ' + str(event_stop) + '[')\n event_number = array['event_number']\n if assume_sorted:\n data_event_start = event_number[0]\n data_event_stop = event_number[-1]\n if (event_start is not None and event_stop is not None) and (\n data_event_stop < event_start or data_event_start > event_stop or\n event_start == event_stop):\n return array[0:0]\n if event_start is None:\n min_index_data = 0\n else:\n min_index_data = np.argmin(event_number < event_start)\n if event_stop is None:\n max_index_data = event_number.shape[0]\n else:\n max_index_data = np.argmax(event_number >= event_stop)\n if min_index_data < 0:\n min_index_data = 0\n if max_index_data == 0 or max_index_data > event_number.shape[0]:\n max_index_data = event_number.shape[0]\n return array[min_index_data:max_index_data]\n else:\n return array[ne.evaluate(\n 'event_number >= event_start & event_number < event_stop')]\n",
"role": "user"
},
{
"content": "get_data_in_event_range",
"role": "assistant"
}
] |
def x(hit_table_in, hit_table_out, events, start_hit_word=0, chunk_size=
5000000, condition=None):
if len(events) > 0:
min_event = np.amin(events)
max_event = np.amax(events)
        logging.debug(
            'Write hits from hit number >= %d that exist in the selected %d events with %d <= event number <= %d into a new hit table.'
            % (start_hit_word, len(events), min_event, max_event))
table_size = hit_table_in.shape[0]
iHit = 0
for iHit in range(start_hit_word, table_size, chunk_size):
hits = hit_table_in.read(iHit, iHit + chunk_size)
last_event_number = hits[-1]['event_number']
hit_table_out.append(get_hits_in_events(hits, events=events,
condition=condition))
if last_event_number > max_event:
return iHit
return start_hit_word
|
write_hits_in_events
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hit_table_in, hit_table_out, events, start_hit_word=0, chunk_size=\n 5000000, condition=None):\n if len(events) > 0:\n min_event = np.amin(events)\n max_event = np.amax(events)\n logging.debug(\n 'Write hits from hit number >= %d that exists in the selected %d events with %d <= event number <= %d into a new hit table.'\n % (start_hit_word, len(events), min_event, max_event))\n table_size = hit_table_in.shape[0]\n iHit = 0\n for iHit in range(start_hit_word, table_size, chunk_size):\n hits = hit_table_in.read(iHit, iHit + chunk_size)\n last_event_number = hits[-1]['event_number']\n hit_table_out.append(get_hits_in_events(hits, events=events,\n condition=condition))\n if last_event_number > max_event:\n return iHit\n return start_hit_word\n",
"role": "user"
},
{
"content": "write_hits_in_events",
"role": "assistant"
}
] |
def x(hit_table_in, hit_table_out, event_start=None, event_stop=None,
start_hit_word=0, chunk_size=5000000, condition=None):
    logging.debug('Write hits that exist in the given event range from ' +
        str(event_start) + ' to ' + str(event_stop) + ' into a new hit table')
table_size = hit_table_in.shape[0]
for iHit in range(0, table_size, chunk_size):
hits = hit_table_in.read(iHit, iHit + chunk_size)
last_event_number = hits[-1]['event_number']
selected_hits = get_data_in_event_range(hits, event_start=
event_start, event_stop=event_stop)
if condition is not None:
for variable in set(re.findall('[a-zA-Z_]+', condition)):
exec(variable + " = hits['" + variable + "']")
selected_hits = selected_hits[ne.evaluate(condition)]
hit_table_out.append(selected_hits)
if last_event_number > event_stop:
return iHit + chunk_size
return start_hit_word
|
write_hits_in_event_range
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hit_table_in, hit_table_out, event_start=None, event_stop=None,\n start_hit_word=0, chunk_size=5000000, condition=None):\n logging.debug('Write hits that exists in the given event range from + ' +\n str(event_start) + ' to ' + str(event_stop) + ' into a new hit table')\n table_size = hit_table_in.shape[0]\n for iHit in range(0, table_size, chunk_size):\n hits = hit_table_in.read(iHit, iHit + chunk_size)\n last_event_number = hits[-1]['event_number']\n selected_hits = get_data_in_event_range(hits, event_start=\n event_start, event_stop=event_stop)\n if condition is not None:\n for variable in set(re.findall('[a-zA-Z_]+', condition)):\n exec(variable + \" = hits['\" + variable + \"']\")\n selected_hits = selected_hits[ne.evaluate(condition)]\n hit_table_out.append(selected_hits)\n if last_event_number > event_stop:\n return iHit + chunk_size\n return start_hit_word\n",
"role": "user"
},
{
"content": "write_hits_in_event_range",
"role": "assistant"
}
] |
def x(event_number, condition='n_cluster==1'):
    logging.debug('Calculate events with clusters where ' + condition)
    n_cluster_in_events = analysis_utils.get_n_cluster_in_events(event_number)
    n_cluster = n_cluster_in_events[:, 1]  # referenced by name inside the numexpr condition
    return n_cluster_in_events[ne.evaluate(condition), 0]
|
get_events_with_n_cluster
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(event_number, condition='n_cluster==1'):\n logging.debug('Calculate events with clusters where ' + condition)\n n_cluster_in_events = analysis_utils.get_n_cluster_in_events(event_number)\n n_cluster = n_cluster_in_events[:, 1]\n return n_cluster_in_events[ne.evaluate(condition), 0]\n",
"role": "user"
},
{
"content": "get_events_with_n_cluster",
"role": "assistant"
}
] |
def x(event_number, cluster_size, condition='cluster_size==1'):
logging.debug('Calculate events with clusters with ' + condition)
return np.unique(event_number[ne.evaluate(condition)])
|
get_events_with_cluster_size
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(event_number, cluster_size, condition='cluster_size==1'):\n logging.debug('Calculate events with clusters with ' + condition)\n return np.unique(event_number[ne.evaluate(condition)])\n",
"role": "user"
},
{
"content": "get_events_with_cluster_size",
"role": "assistant"
}
] |
def x(event_number, event_status, select_mask=65535, condition=0):
logging.debug('Calculate events with certain error code')
return np.unique(event_number[event_status & select_mask == condition])
|
get_events_with_error_code
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(event_number, event_status, select_mask=65535, condition=0):\n logging.debug('Calculate events with certain error code')\n return np.unique(event_number[event_status & select_mask == condition])\n",
"role": "user"
},
{
"content": "get_events_with_error_code",
"role": "assistant"
}
] |
def x(meta_data_array, unique=True):
try:
last_not_parameter_column = meta_data_array.dtype.names.index(
'error_code')
except ValueError:
last_not_parameter_column = meta_data_array.dtype.names.index('error')
if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:
return
scan_parameters = collections.OrderedDict()
for scan_par_name in meta_data_array.dtype.names[4:]:
scan_parameters[scan_par_name] = np.unique(meta_data_array[
scan_par_name]) if unique else meta_data_array[scan_par_name]
return scan_parameters
|
get_scan_parameter
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(meta_data_array, unique=True):\n try:\n last_not_parameter_column = meta_data_array.dtype.names.index(\n 'error_code')\n except ValueError:\n last_not_parameter_column = meta_data_array.dtype.names.index('error')\n if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:\n return\n scan_parameters = collections.OrderedDict()\n for scan_par_name in meta_data_array.dtype.names[4:]:\n scan_parameters[scan_par_name] = np.unique(meta_data_array[\n scan_par_name]) if unique else meta_data_array[scan_par_name]\n return scan_parameters\n",
"role": "user"
},
{
"content": "get_scan_parameter",
"role": "assistant"
}
] |
def x(meta_data_array, scan_parameters=None):
if scan_parameters is None:
try:
last_not_parameter_column = meta_data_array.dtype.names.index(
'error_code')
except ValueError:
return
if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:
return
scan_par_data = {name: meta_data_array.dtype.fields[name] for name in
meta_data_array.dtype.names[last_not_parameter_column + 1:]}
else:
scan_par_data = collections.OrderedDict()
for name in scan_parameters:
scan_par_data[name] = meta_data_array.dtype.fields[name]
return np.ndarray(meta_data_array.shape, np.dtype(scan_par_data),
meta_data_array, 0, meta_data_array.strides)
|
get_scan_parameters_table_from_meta_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(meta_data_array, scan_parameters=None):\n if scan_parameters is None:\n try:\n last_not_parameter_column = meta_data_array.dtype.names.index(\n 'error_code')\n except ValueError:\n return\n if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:\n return\n scan_par_data = {name: meta_data_array.dtype.fields[name] for name in\n meta_data_array.dtype.names[last_not_parameter_column + 1:]}\n else:\n scan_par_data = collections.OrderedDict()\n for name in scan_parameters:\n scan_par_data[name] = meta_data_array.dtype.fields[name]\n return np.ndarray(meta_data_array.shape, np.dtype(scan_par_data),\n meta_data_array, 0, meta_data_array.strides)\n",
"role": "user"
},
{
"content": "get_scan_parameters_table_from_meta_data",
"role": "assistant"
}
] |
def x(scan_parameter):
_, index = np.unique(scan_parameter, return_index=True)
index = np.sort(index)
values = np.array(range(0, len(index)), dtype='i4')
index = np.append(index, len(scan_parameter))
counts = np.diff(index)
return np.repeat(values, counts)
|
get_scan_parameters_index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(scan_parameter):\n _, index = np.unique(scan_parameter, return_index=True)\n index = np.sort(index)\n values = np.array(range(0, len(index)), dtype='i4')\n index = np.append(index, len(scan_parameter))\n counts = np.diff(index)\n return np.repeat(values, counts)\n",
"role": "user"
},
{
"content": "get_scan_parameters_index",
"role": "assistant"
}
] |
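A small worked example of the get_scan_parameters_index logic, with an invented scan parameter column:

import numpy as np

# Hypothetical scan parameter column with three distinct settings.
scan_parameter = np.array([10, 10, 20, 20, 20, 5])

_, index = np.unique(scan_parameter, return_index=True)
index = np.sort(index)                      # first occurrence of each setting
values = np.arange(len(index), dtype='i4')  # running index per setting
counts = np.diff(np.append(index, len(scan_parameter)))
print(np.repeat(values, counts))  # -> [0 0 1 1 1 2]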
def x(meta_data_array, scan_parameters=None, scan_parameter_columns_only=False
):
try:
last_not_parameter_column = meta_data_array.dtype.names.index(
'error_code')
except ValueError:
last_not_parameter_column = meta_data_array.dtype.names.index('error')
if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:
return
if scan_parameters is None:
return unique_row(meta_data_array, use_columns=range(4, len(
meta_data_array.dtype.names)), selected_columns_only=
scan_parameter_columns_only)
else:
use_columns = []
for scan_parameter in scan_parameters:
try:
use_columns.append(meta_data_array.dtype.names.index(
scan_parameter))
except ValueError:
logging.error('No scan parameter ' + scan_parameter + ' found')
raise RuntimeError('Scan parameter not found')
return unique_row(meta_data_array, use_columns=use_columns,
selected_columns_only=scan_parameter_columns_only)
|
get_unique_scan_parameter_combinations
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(meta_data_array, scan_parameters=None, scan_parameter_columns_only=False\n ):\n try:\n last_not_parameter_column = meta_data_array.dtype.names.index(\n 'error_code')\n except ValueError:\n last_not_parameter_column = meta_data_array.dtype.names.index('error')\n if last_not_parameter_column == len(meta_data_array.dtype.names) - 1:\n return\n if scan_parameters is None:\n return unique_row(meta_data_array, use_columns=range(4, len(\n meta_data_array.dtype.names)), selected_columns_only=\n scan_parameter_columns_only)\n else:\n use_columns = []\n for scan_parameter in scan_parameters:\n try:\n use_columns.append(meta_data_array.dtype.names.index(\n scan_parameter))\n except ValueError:\n logging.error('No scan parameter ' + scan_parameter + ' found')\n raise RuntimeError('Scan parameter not found')\n return unique_row(meta_data_array, use_columns=use_columns,\n selected_columns_only=scan_parameter_columns_only)\n",
"role": "user"
},
{
"content": "get_unique_scan_parameter_combinations",
"role": "assistant"
}
] |
def x(hits, col_span, row_span, min_cut_threshold=0.2, max_cut_threshold=2.0):
hits = np.sum(hits, axis=-1).astype('u8')
mask = np.ones(shape=(80, 336), dtype=np.uint8)
mask[min(col_span):max(col_span) + 1, min(row_span):max(row_span) + 1] = 0
ma = np.ma.masked_where(mask, hits)
if max_cut_threshold is not None:
return np.ma.masked_where(np.logical_or(ma < min_cut_threshold * np
.ma.median(ma), ma > max_cut_threshold * np.ma.median(ma)), ma)
else:
return np.ma.masked_where(ma < min_cut_threshold * np.ma.median(ma), ma
)
|
select_good_pixel_region
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hits, col_span, row_span, min_cut_threshold=0.2, max_cut_threshold=2.0):\n hits = np.sum(hits, axis=-1).astype('u8')\n mask = np.ones(shape=(80, 336), dtype=np.uint8)\n mask[min(col_span):max(col_span) + 1, min(row_span):max(row_span) + 1] = 0\n ma = np.ma.masked_where(mask, hits)\n if max_cut_threshold is not None:\n return np.ma.masked_where(np.logical_or(ma < min_cut_threshold * np\n .ma.median(ma), ma > max_cut_threshold * np.ma.median(ma)), ma)\n else:\n return np.ma.masked_where(ma < min_cut_threshold * np.ma.median(ma), ma\n )\n",
"role": "user"
},
{
"content": "select_good_pixel_region",
"role": "assistant"
}
] |
def x(gdacs, calibration_gdacs, cluster_size_histogram):
logging.info(
'Calculate the correction factor for the single hit cluster rate at %d given GDAC settings'
, len(gdacs))
if len(calibration_gdacs) != cluster_size_histogram.shape[0]:
raise ValueError(
'Length of the provided pixel GDACs does not match the dimension of the cluster size array'
)
hist_sum = np.sum(cluster_size_histogram, axis=1)
hist_rel = cluster_size_histogram / hist_sum[:, np.newaxis].astype('f4'
) * 100.0
maximum_rate = np.amax(hist_rel[:, 1])
correction_factor = maximum_rate / hist_rel[:, 1]
calibration_gdacs_sorted = np.array(calibration_gdacs)
correction_factor_sorted = correction_factor[np.argsort(
calibration_gdacs_sorted)]
calibration_gdacs_sorted = np.sort(calibration_gdacs_sorted)
interpolation = interp1d(calibration_gdacs_sorted.tolist(),
correction_factor_sorted.tolist(), kind='cubic', bounds_error=True)
return interpolation(gdacs)
|
get_hit_rate_correction
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(gdacs, calibration_gdacs, cluster_size_histogram):\n logging.info(\n 'Calculate the correction factor for the single hit cluster rate at %d given GDAC settings'\n , len(gdacs))\n if len(calibration_gdacs) != cluster_size_histogram.shape[0]:\n raise ValueError(\n 'Length of the provided pixel GDACs does not match the dimension of the cluster size array'\n )\n hist_sum = np.sum(cluster_size_histogram, axis=1)\n hist_rel = cluster_size_histogram / hist_sum[:, np.newaxis].astype('f4'\n ) * 100.0\n maximum_rate = np.amax(hist_rel[:, 1])\n correction_factor = maximum_rate / hist_rel[:, 1]\n calibration_gdacs_sorted = np.array(calibration_gdacs)\n correction_factor_sorted = correction_factor[np.argsort(\n calibration_gdacs_sorted)]\n calibration_gdacs_sorted = np.sort(calibration_gdacs_sorted)\n interpolation = interp1d(calibration_gdacs_sorted.tolist(),\n correction_factor_sorted.tolist(), kind='cubic', bounds_error=True)\n return interpolation(gdacs)\n",
"role": "user"
},
{
"content": "get_hit_rate_correction",
"role": "assistant"
}
] |
def x(gdac, mean_threshold_calibration):
interpolation = interp1d(mean_threshold_calibration['parameter_value'],
mean_threshold_calibration['mean_threshold'], kind='slinear',
bounds_error=True)
return interpolation(gdac)
|
get_mean_threshold_from_calibration
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(gdac, mean_threshold_calibration):\n interpolation = interp1d(mean_threshold_calibration['parameter_value'],\n mean_threshold_calibration['mean_threshold'], kind='slinear',\n bounds_error=True)\n return interpolation(gdac)\n",
"role": "user"
},
{
"content": "get_mean_threshold_from_calibration",
"role": "assistant"
}
] |
def x(gdacs, calibration_gdacs, threshold_calibration_array, bounds_error=True
):
if len(calibration_gdacs) != threshold_calibration_array.shape[2]:
raise ValueError(
'Length of the provided pixel GDACs does not match the third dimension of the calibration array'
)
interpolation = interp1d(x=calibration_gdacs, y=
threshold_calibration_array, kind='slinear', bounds_error=bounds_error)
return interpolation(gdacs)
|
get_pixel_thresholds_from_calibration_array
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(gdacs, calibration_gdacs, threshold_calibration_array, bounds_error=True\n ):\n if len(calibration_gdacs) != threshold_calibration_array.shape[2]:\n raise ValueError(\n 'Length of the provided pixel GDACs does not match the third dimension of the calibration array'\n )\n interpolation = interp1d(x=calibration_gdacs, y=\n threshold_calibration_array, kind='slinear', bounds_error=bounds_error)\n return interpolation(gdacs)\n",
"role": "user"
},
{
"content": "get_pixel_thresholds_from_calibration_array",
"role": "assistant"
}
] |
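A sketch of get_pixel_thresholds_from_calibration_array with hypothetical calibration data; interp1d interpolates along the last axis by default, which here is the GDAC dimension:

import numpy as np
from scipy.interpolate import interp1d

# Hypothetical calibration: thresholds of a 2x2 pixel array at three GDAC settings.
calibration_gdacs = [100, 150, 200]
threshold_calibration_array = np.linspace(30.0, 60.0, 12).reshape(2, 2, 3)

interpolation = interp1d(x=calibration_gdacs, y=threshold_calibration_array,
    kind='slinear', bounds_error=True)
print(interpolation([125, 175]).shape)  # -> (2, 2, 2): per-pixel thresholds at two new GDACs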
def x(cluster_table):
logging.info('Histogram number of cluster per event')
cluster_in_events = analysis_utils.get_n_cluster_in_events(cluster_table)[
:, 1]
return np.histogram(cluster_in_events, bins=range(0, np.max(
cluster_in_events) + 2))
|
get_n_cluster_per_event_hist
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cluster_table):\n logging.info('Histogram number of cluster per event')\n cluster_in_events = analysis_utils.get_n_cluster_in_events(cluster_table)[\n :, 1]\n return np.histogram(cluster_in_events, bins=range(0, np.max(\n cluster_in_events) + 2))\n",
"role": "user"
},
{
"content": "get_n_cluster_per_event_hist",
"role": "assistant"
}
] |
def x(condition):
d = np.diff(condition, n=1)
idx, = d.nonzero()
idx += 1
if condition[0]:
idx = np.r_[0, idx]
if condition[-1]:
idx = np.r_[idx, condition.size]
idx.shape = -1, 2
return idx
|
contiguous_regions
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(condition):\n d = np.diff(condition, n=1)\n idx, = d.nonzero()\n idx += 1\n if condition[0]:\n idx = np.r_[0, idx]\n if condition[-1]:\n idx = np.r_[idx, condition.size]\n idx.shape = -1, 2\n return idx\n",
"role": "user"
},
{
"content": "contiguous_regions",
"role": "assistant"
}
] |
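A minimal sketch of what contiguous_regions returns, using a hypothetical boolean condition array (casting to int8 before np.diff sidesteps the boolean-diff subtlety of newer NumPy versions):

import numpy as np

condition = np.array([False, True, True, False, True])

d = np.diff(condition.astype(np.int8))  # +1/-1 at region borders
idx, = d.nonzero()
idx += 1
if condition[0]:
    idx = np.r_[0, idx]
if condition[-1]:
    idx = np.r_[idx, condition.size]
idx.shape = -1, 2
print(idx)  # -> [[1 3], [4 5]]: half-open [start, stop) runs of True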
def x(raw_data, prepend_data_headers=None, trig_count=None):
consecutive_triggers = 16 if trig_count == 0 else trig_count
is_fe_data_header = logical_and(is_fe_word, is_data_header)
trigger_idx = np.where(is_trigger_word(raw_data) >= 1)[0]
fe_dh_idx = np.where(is_fe_data_header(raw_data) >= 1)[0]
n_triggers = trigger_idx.shape[0]
n_dh = fe_dh_idx.shape[0]
if n_triggers:
last_event_data_headers_cnt = np.where(fe_dh_idx > trigger_idx[-1])[0
].shape[0]
if (consecutive_triggers and last_event_data_headers_cnt ==
consecutive_triggers):
if not np.all(trigger_idx[-1] > fe_dh_idx):
trigger_idx = np.r_[trigger_idx, raw_data.shape]
last_event_data_headers_cnt = None
elif last_event_data_headers_cnt != 0:
fe_dh_idx = fe_dh_idx[:-last_event_data_headers_cnt]
elif not np.all(trigger_idx[-1] > fe_dh_idx):
trigger_idx = np.r_[trigger_idx, raw_data.shape]
elif n_dh:
trigger_idx = np.r_[trigger_idx, raw_data.shape]
last_event_data_headers_cnt = None
else:
return False, prepend_data_headers, n_triggers, n_dh
n_triggers_cleaned = trigger_idx.shape[0]
n_dh_cleaned = fe_dh_idx.shape[0]
if (prepend_data_headers is None and n_triggers_cleaned and
n_dh_cleaned and not trigger_idx[0] < fe_dh_idx[0]):
return True, last_event_data_headers_cnt, n_triggers, n_dh
elif consecutive_triggers and prepend_data_headers is not None and n_triggers_cleaned and n_dh_cleaned and trigger_idx[
0] < fe_dh_idx[0]:
return True, last_event_data_headers_cnt, n_triggers, n_dh
elif consecutive_triggers is None and prepend_data_headers == 0 and n_triggers_cleaned and n_dh_cleaned and trigger_idx[
0] < fe_dh_idx[0]:
return True, last_event_data_headers_cnt, n_triggers, n_dh
elif prepend_data_headers is not None:
trigger_idx += prepend_data_headers + 1
fe_dh_idx += prepend_data_headers + 1
trigger_idx = np.r_[0, trigger_idx]
fe_dh_idx = np.r_[range(1, prepend_data_headers + 1), fe_dh_idx]
event_hist, bins = np.histogram(fe_dh_idx, trigger_idx)
if consecutive_triggers is None and np.any(event_hist == 0):
return True, last_event_data_headers_cnt, n_triggers, n_dh
elif consecutive_triggers and np.any(event_hist != consecutive_triggers):
return True, last_event_data_headers_cnt, n_triggers, n_dh
return False, last_event_data_headers_cnt, n_triggers, n_dh
|
check_bad_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(raw_data, prepend_data_headers=None, trig_count=None):\n consecutive_triggers = 16 if trig_count == 0 else trig_count\n is_fe_data_header = logical_and(is_fe_word, is_data_header)\n trigger_idx = np.where(is_trigger_word(raw_data) >= 1)[0]\n fe_dh_idx = np.where(is_fe_data_header(raw_data) >= 1)[0]\n n_triggers = trigger_idx.shape[0]\n n_dh = fe_dh_idx.shape[0]\n if n_triggers:\n last_event_data_headers_cnt = np.where(fe_dh_idx > trigger_idx[-1])[0\n ].shape[0]\n if (consecutive_triggers and last_event_data_headers_cnt ==\n consecutive_triggers):\n if not np.all(trigger_idx[-1] > fe_dh_idx):\n trigger_idx = np.r_[trigger_idx, raw_data.shape]\n last_event_data_headers_cnt = None\n elif last_event_data_headers_cnt != 0:\n fe_dh_idx = fe_dh_idx[:-last_event_data_headers_cnt]\n elif not np.all(trigger_idx[-1] > fe_dh_idx):\n trigger_idx = np.r_[trigger_idx, raw_data.shape]\n elif n_dh:\n trigger_idx = np.r_[trigger_idx, raw_data.shape]\n last_event_data_headers_cnt = None\n else:\n return False, prepend_data_headers, n_triggers, n_dh\n n_triggers_cleaned = trigger_idx.shape[0]\n n_dh_cleaned = fe_dh_idx.shape[0]\n if (prepend_data_headers is None and n_triggers_cleaned and\n n_dh_cleaned and not trigger_idx[0] < fe_dh_idx[0]):\n return True, last_event_data_headers_cnt, n_triggers, n_dh\n elif consecutive_triggers and prepend_data_headers is not None and n_triggers_cleaned and n_dh_cleaned and trigger_idx[\n 0] < fe_dh_idx[0]:\n return True, last_event_data_headers_cnt, n_triggers, n_dh\n elif consecutive_triggers is None and prepend_data_headers == 0 and n_triggers_cleaned and n_dh_cleaned and trigger_idx[\n 0] < fe_dh_idx[0]:\n return True, last_event_data_headers_cnt, n_triggers, n_dh\n elif prepend_data_headers is not None:\n trigger_idx += prepend_data_headers + 1\n fe_dh_idx += prepend_data_headers + 1\n trigger_idx = np.r_[0, trigger_idx]\n fe_dh_idx = np.r_[range(1, prepend_data_headers + 1), fe_dh_idx]\n event_hist, bins = np.histogram(fe_dh_idx, trigger_idx)\n if consecutive_triggers is None and np.any(event_hist == 0):\n return True, last_event_data_headers_cnt, n_triggers, n_dh\n elif consecutive_triggers and np.any(event_hist != consecutive_triggers):\n return True, last_event_data_headers_cnt, n_triggers, n_dh\n return False, last_event_data_headers_cnt, n_triggers, n_dh\n",
"role": "user"
},
{
"content": "check_bad_data",
"role": "assistant"
}
] |
def x(data, stepsize=1):
return np.split(data, np.where(np.diff(data) != stepsize)[0] + 1)
|
consecutive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data, stepsize=1):\n return np.split(data, np.where(np.diff(data) != stepsize)[0] + 1)\n",
"role": "user"
},
{
"content": "consecutive",
"role": "assistant"
}
] |
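A one-line usage sketch for consecutive with invented data:

import numpy as np

data = np.array([1, 2, 3, 7, 8, 12])
# Split wherever the step between neighbours is not 1.
print(np.split(data, np.where(np.diff(data) != 1)[0] + 1))
# -> [array([1, 2, 3]), array([7, 8]), array([12])]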
def x(self, pbar):
self.n_refresh += 1
if pbar.currval == 0:
return 'ETA: --:--:--'
elif pbar.finished:
return 'Time: %s' % self.format_time(pbar.seconds_elapsed)
else:
elapsed = pbar.seconds_elapsed
try:
speed = pbar.currval / elapsed
if self.speed_smooth is not None:
self.speed_smooth = self.speed_smooth * (1 - self.SMOOTHING
) + speed * self.SMOOTHING
else:
self.speed_smooth = speed
eta = float(pbar.maxval
) / self.speed_smooth - elapsed + 1 if float(pbar.maxval
) / self.speed_smooth - elapsed + 1 > 0 else 0
if float(pbar.currval) / pbar.maxval > 0.3 or self.n_refresh > 10:
return 'ETA: %s' % self.format_time(eta)
if self.old_eta is not None and self.old_eta < eta:
return 'ETA: ~%s' % self.format_time(self.old_eta)
else:
self.old_eta = eta
return 'ETA: ~%s' % self.format_time(eta)
except ZeroDivisionError:
speed = 0
|
update
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, pbar):\n self.n_refresh += 1\n if pbar.currval == 0:\n return 'ETA: --:--:--'\n elif pbar.finished:\n return 'Time: %s' % self.format_time(pbar.seconds_elapsed)\n else:\n elapsed = pbar.seconds_elapsed\n try:\n speed = pbar.currval / elapsed\n if self.speed_smooth is not None:\n self.speed_smooth = self.speed_smooth * (1 - self.SMOOTHING\n ) + speed * self.SMOOTHING\n else:\n self.speed_smooth = speed\n eta = float(pbar.maxval\n ) / self.speed_smooth - elapsed + 1 if float(pbar.maxval\n ) / self.speed_smooth - elapsed + 1 > 0 else 0\n if float(pbar.currval) / pbar.maxval > 0.3 or self.n_refresh > 10:\n return 'ETA: %s' % self.format_time(eta)\n if self.old_eta is not None and self.old_eta < eta:\n return 'ETA: ~%s' % self.format_time(self.old_eta)\n else:\n self.old_eta = eta\n return 'ETA: ~%s' % self.format_time(eta)\n except ZeroDivisionError:\n speed = 0\n",
"role": "user"
},
{
"content": "update",
"role": "assistant"
}
] |
def x(x_p, y_p, y_p_e, smoothed_data, smoothed_data_diff, filename=None):
logging.info('Plot results')
plt.close()
p1 = plt.errorbar(x_p * analysis_configuration['vcal_calibration'], y_p,
yerr=y_p_e, fmt='o')
p2, = plt.plot(x_p * analysis_configuration['vcal_calibration'],
smoothed_data, '-r')
factor = np.amax(y_p) / np.amin(smoothed_data_diff) * 1.1
p3, = plt.plot(x_p * analysis_configuration['vcal_calibration'], factor *
smoothed_data_diff, '-', lw=2)
mpv_index = np.argmax(-analysis_utils.smooth_differentiation(x_p, y_p,
weigths=1 / y_p_e, order=3, smoothness=analysis_configuration[
'smoothness'], derivation=1))
p4, = plt.plot([x_p[mpv_index] * analysis_configuration[
'vcal_calibration'], x_p[mpv_index] * analysis_configuration[
'vcal_calibration']], [0, factor * smoothed_data_diff[mpv_index]],
'k-', lw=2)
text = 'MPV ' + str(int(x_p[mpv_index] * analysis_configuration[
'vcal_calibration'])) + ' e'
plt.text(1.01 * x_p[mpv_index] * analysis_configuration[
'vcal_calibration'], -10.0 * smoothed_data_diff[mpv_index], text,
ha='left')
plt.legend([p1, p2, p3, p4], ['data', 'smoothed spline',
'spline differentiation', text], prop={'size': 12}, loc=0)
plt.title("'Single hit cluster'-occupancy for different pixel thresholds")
plt.xlabel('Pixel threshold [e]')
plt.ylabel('Single hit cluster occupancy [a.u.]')
plt.ylim(0, np.amax(y_p) * 1.15)
if filename is None:
plt.show()
else:
filename.savefig(plt.gcf())
return smoothed_data_diff
|
plot_result
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x_p, y_p, y_p_e, smoothed_data, smoothed_data_diff, filename=None):\n logging.info('Plot results')\n plt.close()\n p1 = plt.errorbar(x_p * analysis_configuration['vcal_calibration'], y_p,\n yerr=y_p_e, fmt='o')\n p2, = plt.plot(x_p * analysis_configuration['vcal_calibration'],\n smoothed_data, '-r')\n factor = np.amax(y_p) / np.amin(smoothed_data_diff) * 1.1\n p3, = plt.plot(x_p * analysis_configuration['vcal_calibration'], factor *\n smoothed_data_diff, '-', lw=2)\n mpv_index = np.argmax(-analysis_utils.smooth_differentiation(x_p, y_p,\n weigths=1 / y_p_e, order=3, smoothness=analysis_configuration[\n 'smoothness'], derivation=1))\n p4, = plt.plot([x_p[mpv_index] * analysis_configuration[\n 'vcal_calibration'], x_p[mpv_index] * analysis_configuration[\n 'vcal_calibration']], [0, factor * smoothed_data_diff[mpv_index]],\n 'k-', lw=2)\n text = 'MPV ' + str(int(x_p[mpv_index] * analysis_configuration[\n 'vcal_calibration'])) + ' e'\n plt.text(1.01 * x_p[mpv_index] * analysis_configuration[\n 'vcal_calibration'], -10.0 * smoothed_data_diff[mpv_index], text,\n ha='left')\n plt.legend([p1, p2, p3, p4], ['data', 'smoothed spline',\n 'spline differentiation', text], prop={'size': 12}, loc=0)\n plt.title(\"'Single hit cluster'-occupancy for different pixel thresholds\")\n plt.xlabel('Pixel threshold [e]')\n plt.ylabel('Single hit cluster occupancy [a.u.]')\n plt.ylim(0, np.amax(y_p) * 1.15)\n if filename is None:\n plt.show()\n else:\n filename.savefig(plt.gcf())\n return smoothed_data_diff\n",
"role": "user"
},
{
"content": "plot_result",
"role": "assistant"
}
] |
def x(interval):
def x(f):
@wraps(f)
def x(*args, **kwargs):
stopped = Event()
def x():
while not stopped.wait(interval):
f(*args, **kwargs)
t = Thread(name='IntervalTimerThread', target=loop)
t.daemon = True
t.start()
return stopped.set
return wrapper
return decorator
|
interval_timed
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(interval):\n\n def x(f):\n\n @wraps(f)\n def x(*args, **kwargs):\n stopped = Event()\n\n def x():\n while not stopped.wait(interval):\n f(*args, **kwargs)\n t = Thread(name='IntervalTimerThread', target=loop)\n t.daemon = True\n t.start()\n return stopped.set\n return wrapper\n return decorator\n",
"role": "user"
},
{
"content": "interval_timed",
"role": "assistant"
}
] |
def x(interval, func, *args, **kwargs):
stopped = Event()
def x():
while not stopped.wait(interval):
func(*args, **kwargs)
Thread(name='IntervalTimerThread', target=loop).start()
return stopped.set
|
interval_timer
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(interval, func, *args, **kwargs):\n stopped = Event()\n\n def x():\n while not stopped.wait(interval):\n func(*args, **kwargs)\n Thread(name='IntervalTimerThread', target=loop).start()\n return stopped.set\n",
"role": "user"
},
{
"content": "interval_timer",
"role": "assistant"
}
] |
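A minimal sketch of driving interval_timer; note the thread is not daemonized, so the returned setter must be called for the process to exit cleanly (the heartbeat function is a hypothetical example):

import time
from threading import Event, Thread

def interval_timer(interval, func, *args, **kwargs):
    stopped = Event()

    def loop():
        while not stopped.wait(interval):
            func(*args, **kwargs)
    Thread(name='IntervalTimerThread', target=loop).start()
    return stopped.set

def heartbeat():
    print('heartbeat')

stop = interval_timer(0.5, heartbeat)  # fires roughly every 0.5 s
time.sleep(2)
stop()  # stops the timer thread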
def x(subject, body, smtp_server, user, password, from_addr, to_addrs):
logging.info('Send status E-Mail (' + subject + ')')
content = string.join(('From: %s' % from_addr, 'To: %s' % ','.join(
to_addrs), 'Subject: %s' % subject, '', body), '\r\n')
server = smtplib.SMTP_SSL(smtp_server)
server.login(user, password)
server.sendmail(from_addr, to_addrs, content)
server.quit()
|
send_mail
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(subject, body, smtp_server, user, password, from_addr, to_addrs):\n logging.info('Send status E-Mail (' + subject + ')')\n content = string.join(('From: %s' % from_addr, 'To: %s' % ','.join(\n to_addrs), 'Subject: %s' % subject, '', body), '\\r\\n')\n server = smtplib.SMTP_SSL(smtp_server)\n server.login(user, password)\n server.sendmail(from_addr, to_addrs, content)\n server.quit()\n",
"role": "user"
},
{
"content": "send_mail",
"role": "assistant"
}
] |
def x(self):
if 'dut' not in self._conf or self._conf['dut'] is None:
raise ValueError('Parameter "dut" not defined.')
if 'dut_configuration' not in self._conf or self._conf['dut_configuration'
] is None:
raise ValueError('Parameter "dut_configuration" not defined.')
self._conf.setdefault('working_dir', None)
if 'modules' in self._conf and self._conf['modules']:
for module_id, module_cfg in [(key, value) for key, value in self.
_conf['modules'].items() if 'activate' not in value or
'activate' in value and value['activate'] is True]:
if 'module_group' in module_id:
raise ValueError(
'The module ID "%s" contains the reserved name "module_group".'
% module_id)
if 'flavor' not in module_cfg or module_cfg['flavor'] is None:
raise ValueError(
'No parameter "flavor" defined for module "%s".' %
module_id)
if module_cfg['flavor'] in fe_flavors:
for driver_name in _reserved_driver_names:
if driver_name == 'TDC':
module_cfg.setdefault('TDC', None)
continue
if driver_name not in module_cfg or module_cfg[driver_name
] is None:
raise ValueError(
'No parameter "%s" defined for module "%s".' %
(driver_name, module_id))
if 'rx_channel' not in module_cfg or module_cfg['rx_channel'
] is None:
raise ValueError(
'No parameter "rx_channel" defined for module "%s".' %
module_id)
if 'tx_channel' not in module_cfg or module_cfg['tx_channel'
] is None:
raise ValueError(
'No parameter "tx_channel" defined for module "%s".' %
module_id)
if 'chip_address' not in module_cfg:
raise ValueError(
'No parameter "chip_address" defined for module "%s".'
% module_id)
module_cfg.setdefault('tdc_channel', None)
module_cfg.setdefault('configuration', None)
module_cfg.setdefault('send_data', None)
module_cfg.setdefault('activate', True)
self._module_cfgs[module_id] = module_cfg
self._modules[module_id] = [module_id]
else:
raise ValueError('No module configuration specified')
|
parse_module_cfgs
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if 'dut' not in self._conf or self._conf['dut'] is None:\n raise ValueError('Parameter \"dut\" not defined.')\n if 'dut_configuration' not in self._conf or self._conf['dut_configuration'\n ] is None:\n raise ValueError('Parameter \"dut_configuration\" not defined.')\n self._conf.setdefault('working_dir', None)\n if 'modules' in self._conf and self._conf['modules']:\n for module_id, module_cfg in [(key, value) for key, value in self.\n _conf['modules'].items() if 'activate' not in value or \n 'activate' in value and value['activate'] is True]:\n if 'module_group' in module_id:\n raise ValueError(\n 'The module ID \"%s\" contains the reserved name \"module_group\".'\n % module_id)\n if 'flavor' not in module_cfg or module_cfg['flavor'] is None:\n raise ValueError(\n 'No parameter \"flavor\" defined for module \"%s\".' %\n module_id)\n if module_cfg['flavor'] in fe_flavors:\n for driver_name in _reserved_driver_names:\n if driver_name == 'TDC':\n module_cfg.setdefault('TDC', None)\n continue\n if driver_name not in module_cfg or module_cfg[driver_name\n ] is None:\n raise ValueError(\n 'No parameter \"%s\" defined for module \"%s\".' %\n (driver_name, module_id))\n if 'rx_channel' not in module_cfg or module_cfg['rx_channel'\n ] is None:\n raise ValueError(\n 'No parameter \"rx_channel\" defined for module \"%s\".' %\n module_id)\n if 'tx_channel' not in module_cfg or module_cfg['tx_channel'\n ] is None:\n raise ValueError(\n 'No parameter \"tx_channel\" defined for module \"%s\".' %\n module_id)\n if 'chip_address' not in module_cfg:\n raise ValueError(\n 'No parameter \"chip_address\" defined for module \"%s\".'\n % module_id)\n module_cfg.setdefault('tdc_channel', None)\n module_cfg.setdefault('configuration', None)\n module_cfg.setdefault('send_data', None)\n module_cfg.setdefault('activate', True)\n self._module_cfgs[module_id] = module_cfg\n self._modules[module_id] = [module_id]\n else:\n raise ValueError('No module configuration specified')\n",
"role": "user"
},
{
"content": "parse_module_cfgs",
"role": "assistant"
}
] |
def x(self):
self._module_cfgs[None] = {'flavor': None, 'chip_address': None, 'FIFO':
list(set([self._module_cfgs[module_id]['FIFO'] for module_id in
self._modules])), 'RX': list(set([self._module_cfgs[module_id]['RX'
] for module_id in self._modules])), 'rx_channel': list(set([self.
_module_cfgs[module_id]['rx_channel'] for module_id in self.
_modules])), 'TX': list(set([self._module_cfgs[module_id]['TX'] for
module_id in self._modules])), 'tx_channel': list(set([self.
_module_cfgs[module_id]['tx_channel'] for module_id in self.
_modules])), 'TDC': list(set([self._module_cfgs[module_id]['TDC'] for
module_id in self._modules])), 'tdc_channel': list(set([self.
_module_cfgs[module_id]['tdc_channel'] for module_id in self.
_modules])), 'TLU': list(set([self._module_cfgs[module_id]['TLU'] for
module_id in self._modules])), 'configuration': None, 'send_data': None
}
tx_groups = groupby_dict({key: value for key, value in self.
_module_cfgs.items() if key in self._modules}, 'TX')
for tx, module_group in tx_groups.items():
flavors = list(set([module_cfg['flavor'] for module_id, module_cfg in
self._module_cfgs.items() if module_id in module_group]))
if len(flavors) != 1:
raise ValueError(
"Parameter 'flavor' must be the same for module group TX=%s." %
tx)
chip_addresses = list(set([module_cfg['chip_address'] for module_id,
module_cfg in self._module_cfgs.items() if module_id in
module_group]))
if len(module_group) != len(chip_addresses) or len(module_group
) != 1 and None in chip_addresses:
raise ValueError(
"Parameter 'chip_address' must be different for each module in module group TX=%s."
% tx)
self._module_cfgs['module_group_TX=' + tx] = {'flavor': flavors[0],
'chip_address': None, 'FIFO': list(set([module_cfg['FIFO'] for
module_id, module_cfg in self._module_cfgs.items() if module_id in
module_group])), 'RX': list(set([module_cfg['RX'] for module_id,
module_cfg in self._module_cfgs.items() if module_id in
module_group])), 'rx_channel': list(set([module_cfg[
'rx_channel'] for module_id, module_cfg in self._module_cfgs.
items() if module_id in module_group])), 'TX': tx, 'tx_channel':
list(set([module_cfg['tx_channel'] for module_id, module_cfg in
self._module_cfgs.items() if module_id in module_group])),
'TDC': list(set([module_cfg['TDC'] for module_id, module_cfg in
self._module_cfgs.items() if module_id in module_group])),
'tdc_channel': list(set([module_cfg['tdc_channel'] for
module_id, module_cfg in self._module_cfgs.items() if module_id in
module_group])), 'TLU': list(set([module_cfg['TLU'] for
module_id, module_cfg in self._module_cfgs.items() if module_id in
module_group])), 'configuration': None, 'send_data': None}
self._tx_module_groups['module_group_TX=' + tx] = module_group
self._module_attr = {key: {} for key in self._module_cfgs}
for module_id in self._module_cfgs:
sc = namedtuple('run_configuration', field_names=self.
_default_run_conf.keys())
run_conf = sc(**self._run_conf)
if (module_id in self._modules and self.__class__.__name__ in self.
_conf['modules'][module_id] and self._conf['modules'][module_id
][self.__class__.__name__] is not None):
self._module_run_conf[module_id] = run_conf._replace(**self.
_conf['modules'][module_id][self.__class__.__name__])._asdict()
else:
self._module_run_conf[module_id] = run_conf._asdict()
if module_id in self._tx_module_groups and self._tx_module_groups[
module_id]:
selected_module_id = self._tx_module_groups[module_id][0]
if self.__class__.__name__ in self._conf['modules'][
selected_module_id] and self._conf['modules'][
selected_module_id][self.__class__.__name__] is not None:
self._module_run_conf[module_id] = run_conf._replace(**
self._conf['modules'][selected_module_id][self.
__class__.__name__])._asdict()
|
set_default_cfg
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._module_cfgs[None] = {'flavor': None, 'chip_address': None, 'FIFO':\n list(set([self._module_cfgs[module_id]['FIFO'] for module_id in\n self._modules])), 'RX': list(set([self._module_cfgs[module_id]['RX'\n ] for module_id in self._modules])), 'rx_channel': list(set([self.\n _module_cfgs[module_id]['rx_channel'] for module_id in self.\n _modules])), 'TX': list(set([self._module_cfgs[module_id]['TX'] for\n module_id in self._modules])), 'tx_channel': list(set([self.\n _module_cfgs[module_id]['tx_channel'] for module_id in self.\n _modules])), 'TDC': list(set([self._module_cfgs[module_id]['TDC'] for\n module_id in self._modules])), 'tdc_channel': list(set([self.\n _module_cfgs[module_id]['tdc_channel'] for module_id in self.\n _modules])), 'TLU': list(set([self._module_cfgs[module_id]['TLU'] for\n module_id in self._modules])), 'configuration': None, 'send_data': None\n }\n tx_groups = groupby_dict({key: value for key, value in self.\n _module_cfgs.items() if key in self._modules}, 'TX')\n for tx, module_group in tx_groups.items():\n flavors = list(set([module_cfg['flavor'] for module_id, module_cfg in\n self._module_cfgs.items() if module_id in module_group]))\n if len(flavors) != 1:\n raise ValueError(\n \"Parameter 'flavor' must be the same for module group TX=%s.\" %\n tx)\n chip_addresses = list(set([module_cfg['chip_address'] for module_id,\n module_cfg in self._module_cfgs.items() if module_id in\n module_group]))\n if len(module_group) != len(chip_addresses) or len(module_group\n ) != 1 and None in chip_addresses:\n raise ValueError(\n \"Parameter 'chip_address' must be different for each module in module group TX=%s.\"\n % tx)\n self._module_cfgs['module_group_TX=' + tx] = {'flavor': flavors[0],\n 'chip_address': None, 'FIFO': list(set([module_cfg['FIFO'] for \n module_id, module_cfg in self._module_cfgs.items() if module_id in\n module_group])), 'RX': list(set([module_cfg['RX'] for module_id,\n module_cfg in self._module_cfgs.items() if module_id in\n module_group])), 'rx_channel': list(set([module_cfg[\n 'rx_channel'] for module_id, module_cfg in self._module_cfgs.\n items() if module_id in module_group])), 'TX': tx, 'tx_channel':\n list(set([module_cfg['tx_channel'] for module_id, module_cfg in\n self._module_cfgs.items() if module_id in module_group])),\n 'TDC': list(set([module_cfg['TDC'] for module_id, module_cfg in\n self._module_cfgs.items() if module_id in module_group])),\n 'tdc_channel': list(set([module_cfg['tdc_channel'] for \n module_id, module_cfg in self._module_cfgs.items() if module_id in\n module_group])), 'TLU': list(set([module_cfg['TLU'] for \n module_id, module_cfg in self._module_cfgs.items() if module_id in\n module_group])), 'configuration': None, 'send_data': None}\n self._tx_module_groups['module_group_TX=' + tx] = module_group\n self._module_attr = {key: {} for key in self._module_cfgs}\n for module_id in self._module_cfgs:\n sc = namedtuple('run_configuration', field_names=self.\n _default_run_conf.keys())\n run_conf = sc(**self._run_conf)\n if (module_id in self._modules and self.__class__.__name__ in self.\n _conf['modules'][module_id] and self._conf['modules'][module_id\n ][self.__class__.__name__] is not None):\n self._module_run_conf[module_id] = run_conf._replace(**self.\n _conf['modules'][module_id][self.__class__.__name__])._asdict()\n else:\n self._module_run_conf[module_id] = run_conf._asdict()\n if module_id in self._tx_module_groups and self._tx_module_groups[\n module_id]:\n selected_module_id = 
self._tx_module_groups[module_id][0]\n if self.__class__.__name__ in self._conf['modules'][\n selected_module_id] and self._conf['modules'][\n selected_module_id][self.__class__.__name__] is not None:\n self._module_run_conf[module_id] = run_conf._replace(**\n self._conf['modules'][selected_module_id][self.\n __class__.__name__])._asdict()\n",
"role": "user"
},
{
"content": "set_default_cfg",
"role": "assistant"
}
] |
def x(self):
for module_id, module_cfg in self._module_cfgs.items():
if module_id in self._modules or module_id in self._tx_module_groups:
if module_id in self._modules:
module_id_str = 'module ' + module_id
else:
module_id_str = module_id.split('=', 1)
module_id_str[0] = module_id_str[0].replace('_', ' ')
module_id_str = '='.join(module_id_str)
logging.info('Initializing configuration for %s...' % module_id_str
)
if 'scan_parameters' in self._module_run_conf[module_id
] and self._module_run_conf[module_id]['scan_parameters'
] is not None:
if isinstance(self._module_run_conf[module_id][
'scan_parameters'], basestring):
self._module_run_conf[module_id]['scan_parameters'
] = ast.literal_eval(self._module_run_conf[
module_id]['scan_parameters'])
sp = namedtuple('scan_parameters', field_names=zip(*self.
_module_run_conf[module_id]['scan_parameters'])[0])
self._scan_parameters[module_id] = sp(*zip(*self.
_module_run_conf[module_id]['scan_parameters'])[1])
else:
sp = namedtuple_with_defaults('scan_parameters', field_names=[]
)
self._scan_parameters[module_id] = sp()
if module_id in self._modules:
last_configuration = self.get_configuration(module_id=module_id
)
else:
last_configuration = None
if ('configuration' not in module_cfg or module_cfg[
'configuration'] is None
) and last_configuration is None or isinstance(module_cfg[
'configuration'], (int, long)) and module_cfg['configuration'
] <= 0:
if 'chip_address' in module_cfg:
if module_cfg['chip_address'] is None:
chip_address = 0
broadcast = True
else:
chip_address = module_cfg['chip_address']
broadcast = False
else:
raise ValueError(
'Parameter "chip_address" not specified for module "%s".'
% module_id)
if 'flavor' in module_cfg and module_cfg['flavor']:
module_cfg['configuration'] = FEI4Register(fe_type=
module_cfg['flavor'], chip_address=chip_address,
broadcast=broadcast)
else:
raise ValueError(
'Parameter "flavor" not specified for module "%s".' %
module_id)
elif not module_cfg['configuration'] and last_configuration:
module_cfg['configuration'] = FEI4Register(configuration_file
=last_configuration)
elif isinstance(module_cfg['configuration'], basestring):
if os.path.isabs(module_cfg['configuration']):
module_cfg['configuration'] = FEI4Register(
configuration_file=module_cfg['configuration'])
else:
module_cfg['configuration'] = FEI4Register(
configuration_file=os.path.join(module_cfg[
'working_dir'], module_cfg['configuration']))
elif isinstance(module_cfg['configuration'], (int, long)
) and module_cfg['configuration'] > 0:
module_cfg['configuration'] = FEI4Register(configuration_file
=self.get_configuration(module_id=module_id, run_number
=module_cfg['configuration']))
elif not isinstance(module_cfg['configuration'], FEI4Register):
raise ValueError(
'Found no valid value for parameter "configuration" for module "%s".'
% module_id)
self._registers[module_id] = self._module_cfgs[module_id][
'configuration']
self._register_utils[module_id] = FEI4RegisterUtils(self.
_module_dut[module_id], self._module_cfgs[module_id][
'configuration'])
if module_id in self._modules:
module_path = self.get_module_path(module_id)
if not os.path.exists(module_path):
os.makedirs(module_path)
for module_id in self._tx_module_groups:
with self.access_module(module_id=module_id):
self.register_utils.set_conf_mode()
for module_id in self._modules:
logging.info('Configuring %s...' % module_id)
with self.access_module(module_id=module_id):
if self._run_conf['configure_fe']:
self.register_utils.global_reset()
self.register_utils.configure_all()
else:
self.register_utils.set_conf_mode()
if is_fe_ready(self):
fe_not_ready = False
else:
fe_not_ready = True
if self._run_conf['reset_fe']:
self.register_utils.reset_bunch_counter()
self.register_utils.reset_event_counter()
if fe_not_ready:
self.register_utils.reset_service_records()
if not is_fe_ready(self):
logging.warning('Module "%s" is not sending any data.' %
module_id)
self.register_utils.set_conf_mode()
|
init_modules
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for module_id, module_cfg in self._module_cfgs.items():\n if module_id in self._modules or module_id in self._tx_module_groups:\n if module_id in self._modules:\n module_id_str = 'module ' + module_id\n else:\n module_id_str = module_id.split('=', 1)\n module_id_str[0] = module_id_str[0].replace('_', ' ')\n module_id_str = '='.join(module_id_str)\n logging.info('Initializing configuration for %s...' % module_id_str\n )\n if 'scan_parameters' in self._module_run_conf[module_id\n ] and self._module_run_conf[module_id]['scan_parameters'\n ] is not None:\n if isinstance(self._module_run_conf[module_id][\n 'scan_parameters'], basestring):\n self._module_run_conf[module_id]['scan_parameters'\n ] = ast.literal_eval(self._module_run_conf[\n module_id]['scan_parameters'])\n sp = namedtuple('scan_parameters', field_names=zip(*self.\n _module_run_conf[module_id]['scan_parameters'])[0])\n self._scan_parameters[module_id] = sp(*zip(*self.\n _module_run_conf[module_id]['scan_parameters'])[1])\n else:\n sp = namedtuple_with_defaults('scan_parameters', field_names=[]\n )\n self._scan_parameters[module_id] = sp()\n if module_id in self._modules:\n last_configuration = self.get_configuration(module_id=module_id\n )\n else:\n last_configuration = None\n if ('configuration' not in module_cfg or module_cfg[\n 'configuration'] is None\n ) and last_configuration is None or isinstance(module_cfg[\n 'configuration'], (int, long)) and module_cfg['configuration'\n ] <= 0:\n if 'chip_address' in module_cfg:\n if module_cfg['chip_address'] is None:\n chip_address = 0\n broadcast = True\n else:\n chip_address = module_cfg['chip_address']\n broadcast = False\n else:\n raise ValueError(\n 'Parameter \"chip_address\" not specified for module \"%s\".'\n % module_id)\n if 'flavor' in module_cfg and module_cfg['flavor']:\n module_cfg['configuration'] = FEI4Register(fe_type=\n module_cfg['flavor'], chip_address=chip_address,\n broadcast=broadcast)\n else:\n raise ValueError(\n 'Parameter \"flavor\" not specified for module \"%s\".' 
%\n module_id)\n elif not module_cfg['configuration'] and last_configuration:\n module_cfg['configuration'] = FEI4Register(configuration_file\n =last_configuration)\n elif isinstance(module_cfg['configuration'], basestring):\n if os.path.isabs(module_cfg['configuration']):\n module_cfg['configuration'] = FEI4Register(\n configuration_file=module_cfg['configuration'])\n else:\n module_cfg['configuration'] = FEI4Register(\n configuration_file=os.path.join(module_cfg[\n 'working_dir'], module_cfg['configuration']))\n elif isinstance(module_cfg['configuration'], (int, long)\n ) and module_cfg['configuration'] > 0:\n module_cfg['configuration'] = FEI4Register(configuration_file\n =self.get_configuration(module_id=module_id, run_number\n =module_cfg['configuration']))\n elif not isinstance(module_cfg['configuration'], FEI4Register):\n raise ValueError(\n 'Found no valid value for parameter \"configuration\" for module \"%s\".'\n % module_id)\n self._registers[module_id] = self._module_cfgs[module_id][\n 'configuration']\n self._register_utils[module_id] = FEI4RegisterUtils(self.\n _module_dut[module_id], self._module_cfgs[module_id][\n 'configuration'])\n if module_id in self._modules:\n module_path = self.get_module_path(module_id)\n if not os.path.exists(module_path):\n os.makedirs(module_path)\n for module_id in self._tx_module_groups:\n with self.access_module(module_id=module_id):\n self.register_utils.set_conf_mode()\n for module_id in self._modules:\n logging.info('Configuring %s...' % module_id)\n with self.access_module(module_id=module_id):\n if self._run_conf['configure_fe']:\n self.register_utils.global_reset()\n self.register_utils.configure_all()\n else:\n self.register_utils.set_conf_mode()\n if is_fe_ready(self):\n fe_not_ready = False\n else:\n fe_not_ready = True\n if self._run_conf['reset_fe']:\n self.register_utils.reset_bunch_counter()\n self.register_utils.reset_event_counter()\n if fe_not_ready:\n self.register_utils.reset_service_records()\n if not is_fe_ready(self):\n logging.warning('Module \"%s\" is not sending any data.' %\n module_id)\n self.register_utils.set_conf_mode()\n",
"role": "user"
},
{
"content": "init_modules",
"role": "assistant"
}
] |
def x(self):
try:
self.dut.close()
except Exception:
logging.warning('Closing DUT was not successful')
else:
logging.debug('Closed DUT')
|
close
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n try:\n self.dut.close()\n except Exception:\n logging.warning('Closing DUT was not successful')\n else:\n logging.debug('Closed DUT')\n",
"role": "user"
},
{
"content": "close",
"role": "assistant"
}
] |
def x(self, data, new_file=False, flush=True):
for i, module_id in enumerate(self._selected_modules):
if data[i] is None:
continue
self._raw_data_files[module_id].append(data_iterable=data[i],
scan_parameters=self._scan_parameters[module_id]._asdict(),
new_file=new_file, flush=flush)
|
handle_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data, new_file=False, flush=True):\n for i, module_id in enumerate(self._selected_modules):\n if data[i] is None:\n continue\n self._raw_data_files[module_id].append(data_iterable=data[i],\n scan_parameters=self._scan_parameters[module_id]._asdict(),\n new_file=new_file, flush=flush)\n",
"role": "user"
},
{
"content": "handle_data",
"role": "assistant"
}
] |
def x(self, exc):
if self.reset_rx_on_error and isinstance(exc[1], (RxSyncError,
EightbTenbError)):
self.fifo_readout.print_readout_status()
self.fifo_readout.reset_rx()
else:
if not self.abort_run.is_set():
self.abort(msg=exc[1].__class__.__name__ + ': ' + str(exc[1]))
self.err_queue.put(exc)
|
handle_err
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, exc):\n if self.reset_rx_on_error and isinstance(exc[1], (RxSyncError,\n EightbTenbError)):\n self.fifo_readout.print_readout_status()\n self.fifo_readout.reset_rx()\n else:\n if not self.abort_run.is_set():\n self.abort(msg=exc[1].__class__.__name__ + ': ' + str(exc[1]))\n self.err_queue.put(exc)\n",
"role": "user"
},
{
"content": "handle_err",
"role": "assistant"
}
] |
def x(self, module_id, run_number=None):
def x(run_number):
module_path = self.get_module_path(module_id)
for root, _, files in os.walk(module_path):
for cfgfile in files:
cfg_root, cfg_ext = os.path.splitext(cfgfile)
if cfg_root.startswith(''.join([str(run_number), '_',
module_id])) and cfg_ext.endswith('.cfg'):
return os.path.join(root, cfgfile)
if not run_number:
run_numbers = sorted(self._get_run_numbers(status='FINISHED').keys(
), reverse=True)
found_fin_run_cfg = True
if not run_numbers:
return None
last_fin_run = run_numbers[0]
for run_number in run_numbers:
cfg_file = find_file(run_number)
if cfg_file:
if not found_fin_run_cfg:
logging.warning(
"Module '%s' has no configuration for run %d, use config of run %d"
, module_id, last_fin_run, run_number)
return cfg_file
else:
found_fin_run_cfg = False
else:
cfg_file = find_file(run_number)
if cfg_file:
return cfg_file
else:
raise ValueError('Found no configuration with run number %s' %
run_number)
|
get_configuration
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, module_id, run_number=None):\n\n def x(run_number):\n module_path = self.get_module_path(module_id)\n for root, _, files in os.walk(module_path):\n for cfgfile in files:\n cfg_root, cfg_ext = os.path.splitext(cfgfile)\n if cfg_root.startswith(''.join([str(run_number), '_',\n module_id])) and cfg_ext.endswith('.cfg'):\n return os.path.join(root, cfgfile)\n if not run_number:\n run_numbers = sorted(self._get_run_numbers(status='FINISHED').keys(\n ), reverse=True)\n found_fin_run_cfg = True\n if not run_numbers:\n return None\n last_fin_run = run_numbers[0]\n for run_number in run_numbers:\n cfg_file = find_file(run_number)\n if cfg_file:\n if not found_fin_run_cfg:\n logging.warning(\n \"Module '%s' has no configuration for run %d, use config of run %d\"\n , module_id, last_fin_run, run_number)\n return cfg_file\n else:\n found_fin_run_cfg = False\n else:\n cfg_file = find_file(run_number)\n if cfg_file:\n return cfg_file\n else:\n raise ValueError('Found no configuration with run number %s' %\n run_number)\n",
"role": "user"
},
{
"content": "get_configuration",
"role": "assistant"
}
] |
def x(self, module_id):
if not isinstance(module_id, basestring) and isinstance(module_id, Iterable
) and set(module_id) - set(self._modules):
        raise ValueError('Module IDs invalid: %s' % ', '.join(set(module_id) -
            set(self._modules)))
if isinstance(module_id, basestring
) and module_id not in self._module_cfgs:
raise ValueError('Module ID "%s" is not valid' % module_id)
if self._current_module_handle is not None:
raise RuntimeError(
'Module handle "%s" cannot be set because another module is active'
% module_id)
if module_id is None:
self._selected_modules = self._modules.keys()
elif not isinstance(module_id, basestring) and isinstance(module_id,
Iterable):
self._selected_modules = module_id
elif module_id in self._modules:
self._selected_modules = [module_id]
elif module_id in self._tx_module_groups:
self._selected_modules = self._tx_module_groups[module_id]
else:
        raise RuntimeError('Cannot open files. Module handle "%s" is not valid.' %
            self.current_module_handle)
self._selected_fifos = list(set([module_cfg['FIFO'] for name,
module_cfg in self._module_cfgs.items() if name in self.
_selected_modules]))
self._readout_fifos = []
self._filter = []
self._converter = []
for selected_module_id in self._selected_modules:
module_cfg = self._module_cfgs[selected_module_id]
self._readout_fifos.append(module_cfg['FIFO'])
if 'tdc_channel' not in module_cfg:
tdc_filter = false
self._converter.append(None)
elif module_cfg['tdc_channel'] is None:
tdc_filter = is_tdc_word
self._converter.append(convert_tdc_to_channel(channel=
module_cfg['tdc_channel']))
else:
tdc_filter = logical_and(is_tdc_word, is_tdc_from_channel(
module_cfg['tdc_channel']))
self._converter.append(convert_tdc_to_channel(channel=
module_cfg['tdc_channel']))
if 'rx_channel' not in module_cfg:
self._filter.append(logical_or(is_trigger_word, tdc_filter))
elif module_cfg['rx_channel'] is None:
self._filter.append(logical_or(is_trigger_word, logical_or(
tdc_filter, is_fe_word)))
else:
self._filter.append(logical_or(is_trigger_word, logical_or(
tdc_filter, logical_and(is_fe_word, is_data_from_channel(
module_cfg['rx_channel'])))))
self._enabled_fe_channels = list(set([config['RX'] for name, config in
self._module_cfgs.items() if name in self._selected_modules]))
tx_channels = list(set([(1 << config['tx_channel']) for name, config in
self._module_cfgs.items() if name in self._selected_modules]))
if tx_channels:
self.dut['TX']['OUTPUT_ENABLE'] = reduce(lambda x, y: x | y,
tx_channels)
else:
self.dut['TX']['OUTPUT_ENABLE'] = 0
if not isinstance(module_id, basestring) and isinstance(module_id, Iterable
):
self._current_module_handle = None
else:
self._current_module_handle = module_id
if module_id is not None and isinstance(module_id, basestring):
current_thread().name = module_id
|
select_module
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, module_id):\n if not isinstance(module_id, basestring) and isinstance(module_id, Iterable\n ) and set(module_id) - set(self._modules):\n raise ValueError('Module IDs invalid:' % ', '.join(set(module_id) -\n set(self._modules)))\n if isinstance(module_id, basestring\n ) and module_id not in self._module_cfgs:\n raise ValueError('Module ID \"%s\" is not valid' % module_id)\n if self._current_module_handle is not None:\n raise RuntimeError(\n 'Module handle \"%s\" cannot be set because another module is active'\n % module_id)\n if module_id is None:\n self._selected_modules = self._modules.keys()\n elif not isinstance(module_id, basestring) and isinstance(module_id,\n Iterable):\n self._selected_modules = module_id\n elif module_id in self._modules:\n self._selected_modules = [module_id]\n elif module_id in self._tx_module_groups:\n self._selected_modules = self._tx_module_groups[module_id]\n else:\n RuntimeError('Cannot open files. Module handle \"%s\" is not valid.' %\n self.current_module_handle)\n self._selected_fifos = list(set([module_cfg['FIFO'] for name,\n module_cfg in self._module_cfgs.items() if name in self.\n _selected_modules]))\n self._readout_fifos = []\n self._filter = []\n self._converter = []\n for selected_module_id in self._selected_modules:\n module_cfg = self._module_cfgs[selected_module_id]\n self._readout_fifos.append(module_cfg['FIFO'])\n if 'tdc_channel' not in module_cfg:\n tdc_filter = false\n self._converter.append(None)\n elif module_cfg['tdc_channel'] is None:\n tdc_filter = is_tdc_word\n self._converter.append(convert_tdc_to_channel(channel=\n module_cfg['tdc_channel']))\n else:\n tdc_filter = logical_and(is_tdc_word, is_tdc_from_channel(\n module_cfg['tdc_channel']))\n self._converter.append(convert_tdc_to_channel(channel=\n module_cfg['tdc_channel']))\n if 'rx_channel' not in module_cfg:\n self._filter.append(logical_or(is_trigger_word, tdc_filter))\n elif module_cfg['rx_channel'] is None:\n self._filter.append(logical_or(is_trigger_word, logical_or(\n tdc_filter, is_fe_word)))\n else:\n self._filter.append(logical_or(is_trigger_word, logical_or(\n tdc_filter, logical_and(is_fe_word, is_data_from_channel(\n module_cfg['rx_channel'])))))\n self._enabled_fe_channels = list(set([config['RX'] for name, config in\n self._module_cfgs.items() if name in self._selected_modules]))\n tx_channels = list(set([(1 << config['tx_channel']) for name, config in\n self._module_cfgs.items() if name in self._selected_modules]))\n if tx_channels:\n self.dut['TX']['OUTPUT_ENABLE'] = reduce(lambda x, y: x | y,\n tx_channels)\n else:\n self.dut['TX']['OUTPUT_ENABLE'] = 0\n if not isinstance(module_id, basestring) and isinstance(module_id, Iterable\n ):\n self._current_module_handle = None\n else:\n self._current_module_handle = module_id\n if module_id is not None and isinstance(module_id, basestring):\n current_thread().name = module_id\n",
"role": "user"
},
{
"content": "select_module",
"role": "assistant"
}
] |
def x(self):
self._enabled_fe_channels = []
self._readout_fifos = []
self._filter = []
self._converter = []
self.dut['TX']['OUTPUT_ENABLE'] = 0
self._current_module_handle = None
if isinstance(current_thread(), _MainThread):
current_thread().name = 'MainThread'
|
deselect_module
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self._enabled_fe_channels = []\n self._readout_fifos = []\n self._filter = []\n self._converter = []\n self.dut['TX']['OUTPUT_ENABLE'] = 0\n self._current_module_handle = None\n if isinstance(current_thread(), _MainThread):\n current_thread().name = 'MainThread'\n",
"role": "user"
},
{
"content": "deselect_module",
"role": "assistant"
}
] |
def x(self):
if self._scan_threads and self.current_module_handle not in [t.name for
t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' %
            self.current_module_handle)
if (self._scan_threads and self.current_module_handle in self.
_curr_sync_threads):
raise RuntimeError('Thread "%s" is already actively reading FIFO.')
with self._sync_lock:
self._curr_sync_threads.append(self.current_module_handle)
self._enter_sync_event.clear()
while not self._enter_sync_event.wait(0.01):
if self.abort_run.is_set():
break
with self._sync_lock:
if len(set(self._curr_sync_threads) & set([t.name for t in self
._scan_threads if t.is_alive()])) == len(set([t.name for t in
self._scan_threads if t.is_alive()])
) or not self._scan_threads:
self._enter_sync_event.set()
|
enter_sync
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self._scan_threads and self.current_module_handle not in [t.name for\n t in self._scan_threads]:\n raise RuntimeError('Thread name \"%s\" is not valid.')\n if (self._scan_threads and self.current_module_handle in self.\n _curr_sync_threads):\n raise RuntimeError('Thread \"%s\" is already actively reading FIFO.')\n with self._sync_lock:\n self._curr_sync_threads.append(self.current_module_handle)\n self._enter_sync_event.clear()\n while not self._enter_sync_event.wait(0.01):\n if self.abort_run.is_set():\n break\n with self._sync_lock:\n if len(set(self._curr_sync_threads) & set([t.name for t in self\n ._scan_threads if t.is_alive()])) == len(set([t.name for t in\n self._scan_threads if t.is_alive()])\n ) or not self._scan_threads:\n self._enter_sync_event.set()\n",
"role": "user"
},
{
"content": "enter_sync",
"role": "assistant"
}
] |
def x(self):
if self._scan_threads and self.current_module_handle not in [t.name for
t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' %
            self.current_module_handle)
if (self._scan_threads and self.current_module_handle not in self.
_curr_sync_threads):
raise RuntimeError('Thread "%s" is not reading FIFO.')
with self._sync_lock:
self._curr_sync_threads.remove(self.current_module_handle)
self._exit_sync_event.clear()
while not self._exit_sync_event.wait(0.01):
if self.abort_run.is_set():
break
with self._sync_lock:
if len(set(self._curr_sync_threads) & set([t.name for t in self
._scan_threads if t.is_alive()])
) == 0 or not self._scan_threads:
self._exit_sync_event.set()
|
exit_sync
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self._scan_threads and self.current_module_handle not in [t.name for\n t in self._scan_threads]:\n raise RuntimeError('Thread name \"%s\" is not valid.')\n if (self._scan_threads and self.current_module_handle not in self.\n _curr_sync_threads):\n raise RuntimeError('Thread \"%s\" is not reading FIFO.')\n with self._sync_lock:\n self._curr_sync_threads.remove(self.current_module_handle)\n self._exit_sync_event.clear()\n while not self._exit_sync_event.wait(0.01):\n if self.abort_run.is_set():\n break\n with self._sync_lock:\n if len(set(self._curr_sync_threads) & set([t.name for t in self\n ._scan_threads if t.is_alive()])\n ) == 0 or not self._scan_threads:\n self._exit_sync_event.set()\n",
"role": "user"
},
{
"content": "exit_sync",
"role": "assistant"
}
] |
def x(self, *args, **kwargs):
timeout = kwargs.pop('timeout', 10.0)
self.start_readout(*args, **kwargs)
try:
yield
finally:
try:
self.stop_readout(timeout=timeout)
except Exception:
if self.abort_run.is_set():
with self._readout_lock:
if self.fifo_readout.is_running:
self.fifo_readout.stop(timeout=0.0)
|
readout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n timeout = kwargs.pop('timeout', 10.0)\n self.start_readout(*args, **kwargs)\n try:\n yield\n finally:\n try:\n self.stop_readout(timeout=timeout)\n except Exception:\n if self.abort_run.is_set():\n with self._readout_lock:\n if self.fifo_readout.is_running:\n self.fifo_readout.stop(timeout=0.0)\n",
"role": "user"
},
{
"content": "readout",
"role": "assistant"
}
] |
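The generator above yields exactly once between start_readout and stop_readout, which is the shape contextlib.contextmanager expects; the try/finally guarantees the readout is stopped even if the scan body raises. A hedged usage sketch (the class and its methods are placeholders, not the actual pyBAR interface):

from contextlib import contextmanager

class Scan(object):
    def start_readout(self):
        print('readout started')

    def stop_readout(self, timeout):
        print('readout stopped (waited up to %.1f s)' % timeout)

    @contextmanager
    def readout(self, **kwargs):
        timeout = kwargs.pop('timeout', 10.0)
        self.start_readout(**kwargs)
        try:
            yield
        finally:
            self.stop_readout(timeout=timeout)

scan = Scan()
with scan.readout(timeout=5.0):
    pass  # take data here; stop_readout also runs if this block raises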
def x(self, *args, **kwargs):
callback = kwargs.pop('callback', self.handle_data)
errback = kwargs.pop('errback', self.handle_err)
reset_rx = kwargs.pop('reset_rx', True)
reset_fifo = kwargs.pop('reset_fifo', True)
fill_buffer = kwargs.pop('fill_buffer', False)
no_data_timeout = kwargs.pop('no_data_timeout', None)
enabled_fe_channels = kwargs.pop('enabled_fe_channels', self.
_enabled_fe_channels)
if args or kwargs:
self.set_scan_parameters(*args, **kwargs)
if self._scan_threads and self.current_module_handle not in [t.name for
t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' %
            self.current_module_handle)
if (self._scan_threads and self.current_module_handle in self.
_curr_readout_threads):
raise RuntimeError('Thread "%s" is already actively reading FIFO.')
with self._readout_lock:
self._curr_readout_threads.append(self.current_module_handle)
self._starting_readout_event.clear()
while not self._starting_readout_event.wait(0.01):
if self.abort_run.is_set():
break
with self._readout_lock:
if len(set(self._curr_readout_threads) & set([t.name for t in
self._scan_threads if t.is_alive()])) == len(set([t.name for
t in self._scan_threads if t.is_alive()])
) or not self._scan_threads:
if not self.fifo_readout.is_running:
self.fifo_readout.start(fifos=self._selected_fifos,
callback=callback, errback=errback, reset_rx=
reset_rx, reset_fifo=reset_fifo, fill_buffer=
fill_buffer, no_data_timeout=no_data_timeout,
filter_func=self._filter, converter_func=self.
_converter, fifo_select=self._readout_fifos,
enabled_fe_channels=enabled_fe_channels)
self._starting_readout_event.set()
|
start_readout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n callback = kwargs.pop('callback', self.handle_data)\n errback = kwargs.pop('errback', self.handle_err)\n reset_rx = kwargs.pop('reset_rx', True)\n reset_fifo = kwargs.pop('reset_fifo', True)\n fill_buffer = kwargs.pop('fill_buffer', False)\n no_data_timeout = kwargs.pop('no_data_timeout', None)\n enabled_fe_channels = kwargs.pop('enabled_fe_channels', self.\n _enabled_fe_channels)\n if args or kwargs:\n self.set_scan_parameters(*args, **kwargs)\n if self._scan_threads and self.current_module_handle not in [t.name for\n t in self._scan_threads]:\n raise RuntimeError('Thread name \"%s\" is not valid.' % t.name)\n if (self._scan_threads and self.current_module_handle in self.\n _curr_readout_threads):\n raise RuntimeError('Thread \"%s\" is already actively reading FIFO.')\n with self._readout_lock:\n self._curr_readout_threads.append(self.current_module_handle)\n self._starting_readout_event.clear()\n while not self._starting_readout_event.wait(0.01):\n if self.abort_run.is_set():\n break\n with self._readout_lock:\n if len(set(self._curr_readout_threads) & set([t.name for t in\n self._scan_threads if t.is_alive()])) == len(set([t.name for\n t in self._scan_threads if t.is_alive()])\n ) or not self._scan_threads:\n if not self.fifo_readout.is_running:\n self.fifo_readout.start(fifos=self._selected_fifos,\n callback=callback, errback=errback, reset_rx=\n reset_rx, reset_fifo=reset_fifo, fill_buffer=\n fill_buffer, no_data_timeout=no_data_timeout,\n filter_func=self._filter, converter_func=self.\n _converter, fifo_select=self._readout_fifos,\n enabled_fe_channels=enabled_fe_channels)\n self._starting_readout_event.set()\n",
"role": "user"
},
{
"content": "start_readout",
"role": "assistant"
}
] |
def x(self, timeout=10.0):
if self._scan_threads and self.current_module_handle not in [t.name for
t in self._scan_threads]:
        raise RuntimeError('Thread name "%s" is not valid.' %
            self.current_module_handle)
if (self._scan_threads and self.current_module_handle not in self.
_curr_readout_threads):
raise RuntimeError('Thread "%s" is not reading FIFO.')
with self._readout_lock:
self._curr_readout_threads.remove(self.current_module_handle)
self._stopping_readout_event.clear()
while not self._stopping_readout_event.wait(0.01):
with self._readout_lock:
if len(set(self._curr_readout_threads) & set([t.name for t in
self._scan_threads if t.is_alive()])
) == 0 or not self._scan_threads or self.abort_run.is_set():
if self.fifo_readout.is_running:
self.fifo_readout.stop(timeout=timeout)
self._stopping_readout_event.set()
|
stop_readout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, timeout=10.0):\n if self._scan_threads and self.current_module_handle not in [t.name for\n t in self._scan_threads]:\n raise RuntimeError('Thread name \"%s\" is not valid.')\n if (self._scan_threads and self.current_module_handle not in self.\n _curr_readout_threads):\n raise RuntimeError('Thread \"%s\" is not reading FIFO.')\n with self._readout_lock:\n self._curr_readout_threads.remove(self.current_module_handle)\n self._stopping_readout_event.clear()\n while not self._stopping_readout_event.wait(0.01):\n with self._readout_lock:\n if len(set(self._curr_readout_threads) & set([t.name for t in\n self._scan_threads if t.is_alive()])\n ) == 0 or not self._scan_threads or self.abort_run.is_set():\n if self.fifo_readout.is_running:\n self.fifo_readout.stop(timeout=timeout)\n self._stopping_readout_event.set()\n",
"role": "user"
},
{
"content": "stop_readout",
"role": "assistant"
}
] |
def x(max_tdc, tdc_calibration_values, tdc_pixel_calibration):
charge_calibration = np.zeros(shape=(80, 336, max_tdc))
for column in range(80):
for row in range(336):
actual_pixel_calibration = tdc_pixel_calibration[column, row, :]
if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite
(actual_pixel_calibration)):
selected_measurements = np.isfinite(actual_pixel_calibration)
selected_actual_pixel_calibration = actual_pixel_calibration[
selected_measurements]
selected_tdc_calibration_values = tdc_calibration_values[
selected_measurements]
interpolation = interp1d(x=
selected_actual_pixel_calibration, y=
selected_tdc_calibration_values, kind='slinear',
bounds_error=False, fill_value=0)
charge_calibration[column, row, :] = interpolation(np.
arange(max_tdc))
return charge_calibration
|
get_charge
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(max_tdc, tdc_calibration_values, tdc_pixel_calibration):\n charge_calibration = np.zeros(shape=(80, 336, max_tdc))\n for column in range(80):\n for row in range(336):\n actual_pixel_calibration = tdc_pixel_calibration[column, row, :]\n if np.any(actual_pixel_calibration != 0) and np.any(np.isfinite\n (actual_pixel_calibration)):\n selected_measurements = np.isfinite(actual_pixel_calibration)\n selected_actual_pixel_calibration = actual_pixel_calibration[\n selected_measurements]\n selected_tdc_calibration_values = tdc_calibration_values[\n selected_measurements]\n interpolation = interp1d(x=\n selected_actual_pixel_calibration, y=\n selected_tdc_calibration_values, kind='slinear',\n bounds_error=False, fill_value=0)\n charge_calibration[column, row, :] = interpolation(np.\n arange(max_tdc))\n return charge_calibration\n",
"role": "user"
},
{
"content": "get_charge",
"role": "assistant"
}
] |
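At the heart of the function above (labeled get_charge) is a per-pixel interp1d that inverts the calibration: the measured mean TDC values become the x axis and the injected charges the y axis, so evaluating the interpolation at every TDC bin yields a charge per bin. A self-contained single-pixel sketch with synthetic calibration points (not real FE-I4 data):

import numpy as np
from scipy.interpolate import interp1d

charges = np.array([1000., 2000., 4000., 8000.])  # injected charge per step
mean_tdc = np.array([40., 90., 180., 350.])       # measured mean TDC per step

# Swap the axes of the calibration curve to get charge as a function of TDC
interpolation = interp1d(x=mean_tdc, y=charges, kind='slinear',
                         bounds_error=False, fill_value=0)
max_tdc = 400
charge_per_tdc_bin = interpolation(np.arange(max_tdc))
print(charge_per_tdc_bin[200])  # about 4470, interpolated between the two nearest points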
def x(calibration_file, max_tdc):
    with tb.open_file(calibration_file, mode='r') as in_file_calibration_h5:
tdc_calibration = in_file_calibration_h5.root.HitOrCalibration[:, :,
:, 1]
tdc_calibration_values = (in_file_calibration_h5.root.
HitOrCalibration.attrs.scan_parameter_values[:])
return get_charge(max_tdc, tdc_calibration_values, tdc_calibration)
|
get_charge_calibration
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(calibation_file, max_tdc):\n with tb.open_file(calibation_file, mode='r') as in_file_calibration_h5:\n tdc_calibration = in_file_calibration_h5.root.HitOrCalibration[:, :,\n :, 1]\n tdc_calibration_values = (in_file_calibration_h5.root.\n HitOrCalibration.attrs.scan_parameter_values[:])\n return get_charge(max_tdc, tdc_calibration_values, tdc_calibration)\n",
"role": "user"
},
{
"content": "get_charge_calibration",
"role": "assistant"
}
] |
def x(self):
export_icon = QtGui.QIcon()
pixmap = QtGui.QPixmap(os.path.join(PLUGINSDIR,
'csv/icons/document-export.png'))
export_icon.addPixmap(pixmap, QtGui.QIcon.Normal, QtGui.QIcon.On)
self.plot_action = QtGui.QAction(translate('PlotpyBARdata',
'Plot data with pyBAR plugin', 'Plot data with pyBAR plugin'), self,
shortcut=QtGui.QKeySequence.UnknownKey, triggered=self.plot, icon=
export_icon, statusTip=translate('PlotpyBARdata',
'Plotting of selected data with pyBAR',
'Status bar text for the Dataset -> Plot pyBAR data... action'))
menu = self.vtgui.dataset_menu
menu.addSeparator()
menu.addAction(self.plot_action)
cmenu = self.vtgui.leaf_node_cm
cmenu.addSeparator()
cmenu.addAction(self.plot_action)
|
addEntry
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n export_icon = QtGui.QIcon()\n pixmap = QtGui.QPixmap(os.path.join(PLUGINSDIR,\n 'csv/icons/document-export.png'))\n export_icon.addPixmap(pixmap, QtGui.QIcon.Normal, QtGui.QIcon.On)\n self.plot_action = QtGui.QAction(translate('PlotpyBARdata',\n 'Plot data with pyBAR plugin', 'Plot data with pyBAR plugin'), self,\n shortcut=QtGui.QKeySequence.UnknownKey, triggered=self.plot, icon=\n export_icon, statusTip=translate('PlotpyBARdata',\n 'Plotting of selected data with pyBAR',\n 'Status bar text for the Dataset -> Plot pyBAR data... action'))\n menu = self.vtgui.dataset_menu\n menu.addSeparator()\n menu.addAction(self.plot_action)\n cmenu = self.vtgui.leaf_node_cm\n cmenu.addSeparator()\n cmenu.addAction(self.plot_action)\n",
"role": "user"
},
{
"content": "addEntry",
"role": "assistant"
}
] |
def x(self):
enabled = True
current = self.vtgui.dbs_tree_view.currentIndex()
if current:
leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current)
if leaf.node_kind in (u'group', u'root group'):
enabled = False
self.plot_action.setEnabled(enabled)
|
updateDatasetMenu
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n enabled = True\n current = self.vtgui.dbs_tree_view.currentIndex()\n if current:\n leaf = self.vtgui.dbs_tree_model.nodeFromIndex(current)\n if leaf.node_kind in (u'group', u'root group'):\n enabled = False\n self.plot_action.setEnabled(enabled)\n",
"role": "user"
},
{
"content": "updateDatasetMenu",
"role": "assistant"
}
] |
def x(self):
about_text = translate('pyBarPlugin',
"""<qt>
<p>Data plotting plug-in for pyBAR.
</qt>"""
, 'About')
descr = dict(module_name='pyBarPlugin', folder=PLUGINSDIR, version=
__version__, plugin_name='pyBarPlugin', author=
'David-Leon Pohl <david-leon.pohl@rub.de>, Jens Janssen <janssen@physik.uni-bonn.de>'
, descr=about_text)
return descr
|
helpAbout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n about_text = translate('pyBarPlugin',\n \"\"\"<qt>\n <p>Data plotting plug-in for pyBAR.\n </qt>\"\"\"\n , 'About')\n descr = dict(module_name='pyBarPlugin', folder=PLUGINSDIR, version=\n __version__, plugin_name='pyBarPlugin', author=\n 'David-Leon Pohl <david-leon.pohl@rub.de>, Jens Janssen <janssen@physik.uni-bonn.de>'\n , descr=about_text)\n return descr\n",
"role": "user"
},
{
"content": "helpAbout",
"role": "assistant"
}
] |
def x(socket, conf, name):
meta_data = dict(name=name, conf=conf)
try:
socket.send_json(meta_data, flags=zmq.NOBLOCK)
except zmq.Again:
pass
|
send_meta_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(socket, conf, name):\n meta_data = dict(name=name, conf=conf)\n try:\n socket.send_json(meta_data, flags=zmq.NOBLOCK)\n except zmq.Again:\n pass\n",
"role": "user"
},
{
"content": "send_meta_data",
"role": "assistant"
}
] |
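The function above assumes an already-configured non-blocking socket; with a PUB socket, zmq.NOBLOCK plus the except zmq.Again: pass simply drops the message when it cannot be sent, so a slow or absent subscriber never stalls the run. A minimal sender setup sketch (address and usage are illustrative):

import zmq

context = zmq.Context()
socket = context.socket(zmq.PUB)     # publish/subscribe: no receiver, no harm
socket.bind('tcp://127.0.0.1:5678')

# send_meta_data(socket, conf={'chip': 'SCC_99'}, name='RunConf')  # the function above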
def x(socket, data, scan_parameters={}, name='ReadoutData'):
if not scan_parameters:
scan_parameters = {}
data_meta_data = dict(name=name, dtype=str(data[0].dtype), shape=data[0
].shape, timestamp_start=data[1], timestamp_stop=data[2],
readout_error=data[3], scan_parameters=scan_parameters)
try:
socket.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK)
socket.send(data[0], flags=zmq.NOBLOCK)
except zmq.Again:
pass
|
send_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(socket, data, scan_parameters={}, name='ReadoutData'):\n if not scan_parameters:\n scan_parameters = {}\n data_meta_data = dict(name=name, dtype=str(data[0].dtype), shape=data[0\n ].shape, timestamp_start=data[1], timestamp_stop=data[2],\n readout_error=data[3], scan_parameters=scan_parameters)\n try:\n socket.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK)\n socket.send(data[0], flags=zmq.NOBLOCK)\n except zmq.Again:\n pass\n",
"role": "user"
},
{
"content": "send_data",
"role": "assistant"
}
] |
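send_data above ships a two-part message: a JSON header (dtype, shape, timestamps, scan parameters) followed by the raw buffer. The header is exactly what a receiver needs to rebuild the numpy array; a hedged receiver-side sketch, matching the illustrative address used above:

import numpy as np
import zmq

context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect('tcp://127.0.0.1:5678')
socket.setsockopt(zmq.SUBSCRIBE, b'')  # subscribe to all messages

meta_data = socket.recv_json()         # first frame: the JSON header (blocks until data arrives)
if meta_data.get('name') == 'ReadoutData':
    buf = socket.recv()                # second frame: the raw data words
    raw_data = np.frombuffer(buf, dtype=np.dtype(str(meta_data['dtype'])))
    raw_data = raw_data.reshape(meta_data['shape'])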
def x(filename, mode='w', title='', scan_parameters=None, socket_address=None):
return RawDataFile(filename=filename, mode=mode, title=title,
scan_parameters=scan_parameters, socket_address=socket_address)
|
open_raw_data_file
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(filename, mode='w', title='', scan_parameters=None, socket_address=None):\n return RawDataFile(filename=filename, mode=mode, title=title,\n scan_parameters=scan_parameters, socket_address=socket_address)\n",
"role": "user"
},
{
"content": "open_raw_data_file",
"role": "assistant"
}
] |
def x(data_queue, filename, mode='a', title='', scan_parameters=None):
if not scan_parameters:
scan_parameters = {}
    with open_raw_data_file(filename, mode=mode, title=title, scan_parameters=
        list(dict.iterkeys(scan_parameters))) as raw_data_file:
raw_data_file.append(data_queue, scan_parameters=scan_parameters)
|
save_raw_data_from_data_queue
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(data_queue, filename, mode='a', title='', scan_parameters=None):\n if not scan_parameters:\n scan_parameters = {}\n with open_raw_data_file(filename, mode='a', title='', scan_parameters=\n list(dict.iterkeys(scan_parameters))) as raw_data_file:\n raw_data_file.append(data_queue, scan_parameters=scan_parameters)\n",
"role": "user"
},
{
"content": "save_raw_data_from_data_queue",
"role": "assistant"
}
] |
def x(self):
for iteration in range(0, self.global_iterations):
if self.stop_run.is_set():
break
logging.info('Global tuning step %d / %d', iteration + 1, self.
global_iterations)
self.set_scan_parameters(global_step=self.scan_parameters.
global_step + 1)
GdacTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrRegister', name=[
'Vthin_AltCoarse', 'Vthin_AltFine']))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
if self.stop_run.is_set():
break
self.set_scan_parameters(global_step=self.scan_parameters.
global_step + 1)
FeedbackTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrRegister', name=[
'PrmpVbpf']))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
if self.global_iterations >= 0 and not self.stop_run.is_set():
self.set_scan_parameters(global_step=self.scan_parameters.
global_step + 1)
GdacTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrRegister', name=[
'Vthin_AltCoarse', 'Vthin_AltFine']))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
Vthin_AC = self.register.get_global_register_value('Vthin_AltCoarse')
Vthin_AF = self.register.get_global_register_value('Vthin_AltFine')
PrmpVbpf = self.register.get_global_register_value('PrmpVbpf')
logging.info(
'Results of global threshold tuning: Vthin_AltCoarse / Vthin_AltFine = %d / %d'
, Vthin_AC, Vthin_AF)
logging.info('Results of global feedback tuning: PrmpVbpf = %d',
PrmpVbpf)
for iteration in range(0, self.local_iterations):
if self.stop_run.is_set():
break
logging.info('Local tuning step %d / %d', iteration + 1, self.
local_iterations)
self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1
)
TdacTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrFrontEnd',
same_mask_for_all_dc=False, name='TDAC'))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
if self.stop_run.is_set():
break
self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1
)
FdacTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrFrontEnd',
same_mask_for_all_dc=False, name='FDAC'))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
if self.local_iterations >= 0 and not self.stop_run.is_set():
self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1
)
TdacTuning.scan(self)
commands = []
commands.extend(self.register.get_commands('ConfMode'))
commands.extend(self.register.get_commands('WrFrontEnd',
same_mask_for_all_dc=False, name='TDAC'))
commands.extend(self.register.get_commands('RunMode'))
self.register_utils.send_commands(commands)
|
scan
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n for iteration in range(0, self.global_iterations):\n if self.stop_run.is_set():\n break\n logging.info('Global tuning step %d / %d', iteration + 1, self.\n global_iterations)\n self.set_scan_parameters(global_step=self.scan_parameters.\n global_step + 1)\n GdacTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'Vthin_AltCoarse', 'Vthin_AltFine']))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n if self.stop_run.is_set():\n break\n self.set_scan_parameters(global_step=self.scan_parameters.\n global_step + 1)\n FeedbackTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'PrmpVbpf']))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n if self.global_iterations >= 0 and not self.stop_run.is_set():\n self.set_scan_parameters(global_step=self.scan_parameters.\n global_step + 1)\n GdacTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrRegister', name=[\n 'Vthin_AltCoarse', 'Vthin_AltFine']))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n Vthin_AC = self.register.get_global_register_value('Vthin_AltCoarse')\n Vthin_AF = self.register.get_global_register_value('Vthin_AltFine')\n PrmpVbpf = self.register.get_global_register_value('PrmpVbpf')\n logging.info(\n 'Results of global threshold tuning: Vthin_AltCoarse / Vthin_AltFine = %d / %d'\n , Vthin_AC, Vthin_AF)\n logging.info('Results of global feedback tuning: PrmpVbpf = %d',\n PrmpVbpf)\n for iteration in range(0, self.local_iterations):\n if self.stop_run.is_set():\n break\n logging.info('Local tuning step %d / %d', iteration + 1, self.\n local_iterations)\n self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1\n )\n TdacTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrFrontEnd',\n same_mask_for_all_dc=False, name='TDAC'))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n if self.stop_run.is_set():\n break\n self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1\n )\n FdacTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrFrontEnd',\n same_mask_for_all_dc=False, name='FDAC'))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n if self.local_iterations >= 0 and not self.stop_run.is_set():\n self.set_scan_parameters(local_step=self.scan_parameters.local_step + 1\n )\n TdacTuning.scan(self)\n commands = []\n commands.extend(self.register.get_commands('ConfMode'))\n commands.extend(self.register.get_commands('WrFrontEnd',\n same_mask_for_all_dc=False, name='TDAC'))\n commands.extend(self.register.get_commands('RunMode'))\n self.register_utils.send_commands(commands)\n",
"role": "user"
},
{
"content": "scan",
"role": "assistant"
}
] |
def x(x, y, x_err=None, y_err=None, title=None, point_label=None, legend=
None, plot_range=None, plot_range_y=None, x_label=None, y_label=None,
y_2_label=None, log_x=False, log_y=False, size=None, filename=None):
fig = Figure()
FigureCanvas(fig)
ax = fig.add_subplot(111)
if x_err is not None:
x_err = [x_err, x_err]
if y_err is not None:
y_err = [y_err, y_err]
ax.set_title(title)
if y_label is not None:
ax.set_ylabel(y_label)
if log_x:
ax.set_xscale('log')
if log_y:
ax.set_yscale('log')
if plot_range:
ax.set_xlim((min(plot_range), max(plot_range)))
if plot_range_y:
ax.set_ylim((min(plot_range_y), max(plot_range_y)))
if legend:
fig.legend(legend, 0)
ax.grid(True)
ax.errorbar(x, y, xerr=x_err, yerr=y_err, fmt='o', color='black')
if point_label is not None:
for X, Y, Z in zip(x, y, point_label):
ax.annotate('{}'.format(Z), xy=(X, Y), xytext=(-5, 5), ha=
'right', textcoords='offset points')
line_fit, _ = np.polyfit(x, y, 1, full=False, cov=True)
fit_fn = np.poly1d(line_fit)
ax.plot(x, fit_fn(x), '-', lw=2, color='gray')
setp(ax.get_xticklabels(), visible=False)
divider = make_axes_locatable(ax)
ax_bottom_plot = divider.append_axes('bottom', 2.0, pad=0.0, sharex=ax)
ax_bottom_plot.bar(x, y - fit_fn(x), align='center', width=np.amin(np.
diff(x)) / 2, color='gray')
ax_bottom_plot.grid(True)
    if x_label is not None:
        ax_bottom_plot.set_xlabel(x_label)
    if y_2_label is not None:
        ax_bottom_plot.set_ylabel(y_2_label)
    ax_bottom_plot.set_ylim(-np.amax(np.abs(y - fit_fn(x))), np.amax(np.abs(
        y - fit_fn(x))))
    ax_bottom_plot.plot(ax_bottom_plot.set_xlim(), [0, 0], '-', color='black')
setp(ax_bottom_plot.get_yticklabels()[-2:-1], visible=False)
if size is not None:
fig.set_size_inches(size)
if not filename:
fig.show()
elif isinstance(filename, PdfPages):
filename.savefig(fig)
elif filename:
fig.savefig(filename, bbox_inches='tight')
return fig
|
plot_linear_relation
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, y, x_err=None, y_err=None, title=None, point_label=None, legend=\n None, plot_range=None, plot_range_y=None, x_label=None, y_label=None,\n y_2_label=None, log_x=False, log_y=False, size=None, filename=None):\n fig = Figure()\n FigureCanvas(fig)\n ax = fig.add_subplot(111)\n if x_err is not None:\n x_err = [x_err, x_err]\n if y_err is not None:\n y_err = [y_err, y_err]\n ax.set_title(title)\n if y_label is not None:\n ax.set_ylabel(y_label)\n if log_x:\n ax.set_xscale('log')\n if log_y:\n ax.set_yscale('log')\n if plot_range:\n ax.set_xlim((min(plot_range), max(plot_range)))\n if plot_range_y:\n ax.set_ylim((min(plot_range_y), max(plot_range_y)))\n if legend:\n fig.legend(legend, 0)\n ax.grid(True)\n ax.errorbar(x, y, xerr=x_err, yerr=y_err, fmt='o', color='black')\n if point_label is not None:\n for X, Y, Z in zip(x, y, point_label):\n ax.annotate('{}'.format(Z), xy=(X, Y), xytext=(-5, 5), ha=\n 'right', textcoords='offset points')\n line_fit, _ = np.polyfit(x, y, 1, full=False, cov=True)\n fit_fn = np.poly1d(line_fit)\n ax.plot(x, fit_fn(x), '-', lw=2, color='gray')\n setp(ax.get_xticklabels(), visible=False)\n divider = make_axes_locatable(ax)\n ax_bottom_plot = divider.append_axes('bottom', 2.0, pad=0.0, sharex=ax)\n ax_bottom_plot.bar(x, y - fit_fn(x), align='center', width=np.amin(np.\n diff(x)) / 2, color='gray')\n ax_bottom_plot.grid(True)\n if x_label is not None:\n ax.set_xlabel(x_label)\n if y_2_label is not None:\n ax.set_ylabel(y_2_label)\n ax.set_ylim(-np.amax(np.abs(y - fit_fn(x))), np.amax(np.abs(y - fit_fn(x)))\n )\n ax.plot(ax.set_xlim(), [0, 0], '-', color='black')\n setp(ax_bottom_plot.get_yticklabels()[-2:-1], visible=False)\n if size is not None:\n fig.set_size_inches(size)\n if not filename:\n fig.show()\n elif isinstance(filename, PdfPages):\n filename.savefig(fig)\n elif filename:\n fig.savefig(filename, bbox_inches='tight')\n return fig\n",
"role": "user"
},
{
"content": "plot_linear_relation",
"role": "assistant"
}
] |
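A usage sketch for the function above (labeled plot_linear_relation), assuming it is importable; the import path is a placeholder. Note that x_err/y_err are duplicated into lower/upper errors internally, so per-point error arrays are the safe way to pass them:

import numpy as np
# from plotting import plot_linear_relation  # placeholder import

x = np.array([1., 2., 3., 4., 5.])
y = 2.0 * x + np.random.normal(0.0, 0.1, size=x.shape)
plot_linear_relation(x, y,
                     x_err=np.full_like(x, 0.05), y_err=np.full_like(y, 0.1),
                     title='Linearity check', x_label='Setting [DAC]',
                     y_label='Response', y_2_label='Residual',
                     filename='linearity.pdf')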
def x(x, y, n_bins=100, title=None, x_label=None, y_label=None, log_y=False,
filename=None):
if len(x) != len(y):
raise ValueError('x and y dimensions have to be the same')
n, bin_edges = np.histogram(x, bins=n_bins)
sy = np.histogram(x, bins=n_bins, weights=y)[0]
sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]
bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2
mean = sy / n
std = np.sqrt(sy2 / n - mean * mean)
std_mean = std / np.sqrt(n - 1)
mean[np.isnan(mean)] = 0.0
std_mean[np.isnan(std_mean)] = 0.0
fig = Figure()
FigureCanvas(fig)
ax = fig.add_subplot(111)
ax.errorbar(bin_centers, mean, yerr=std_mean, fmt='o')
ax.set_title(title)
if x_label is not None:
ax.set_xlabel(x_label)
if y_label is not None:
ax.set_ylabel(y_label)
if log_y:
        ax.set_yscale('log')
ax.grid(True)
if not filename:
fig.show()
elif isinstance(filename, PdfPages):
filename.savefig(fig)
else:
fig.savefig(filename)
|
plot_profile_histogram
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(x, y, n_bins=100, title=None, x_label=None, y_label=None, log_y=False,\n filename=None):\n if len(x) != len(y):\n raise ValueError('x and y dimensions have to be the same')\n n, bin_edges = np.histogram(x, bins=n_bins)\n sy = np.histogram(x, bins=n_bins, weights=y)[0]\n sy2 = np.histogram(x, bins=n_bins, weights=y * y)[0]\n bin_centers = (bin_edges[1:] + bin_edges[:-1]) / 2\n mean = sy / n\n std = np.sqrt(sy2 / n - mean * mean)\n std_mean = std / np.sqrt(n - 1)\n mean[np.isnan(mean)] = 0.0\n std_mean[np.isnan(std_mean)] = 0.0\n fig = Figure()\n FigureCanvas(fig)\n ax = fig.add_subplot(111)\n ax.errorbar(bin_centers, mean, yerr=std_mean, fmt='o')\n ax.set_title(title)\n if x_label is not None:\n ax.set_xlabel(x_label)\n if y_label is not None:\n ax.set_ylabel(y_label)\n if log_y:\n ax.yscale('log')\n ax.grid(True)\n if not filename:\n fig.show()\n elif isinstance(filename, PdfPages):\n filename.savefig(fig)\n else:\n fig.savefig(filename)\n",
"role": "user"
},
{
"content": "plot_profile_histogram",
"role": "assistant"
}
] |
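A quick usage sketch for the function above (labeled plot_profile_histogram), assuming it is in scope; with enough samples per bin, the error bars are the standard errors of the per-bin means:

import numpy as np
# assuming plot_profile_histogram from above is importable

x = np.random.uniform(0., 100., 100000)
y = 0.5 * x + np.random.normal(0., 5., x.shape[0])
plot_profile_histogram(x, y, n_bins=50, title='Profile of y vs. x',
                       x_label='x', y_label='mean y per bin',
                       filename='profile.pdf')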
def x(number, multiple):
multiple = int(multiple)
if multiple == 0:
multiple = 1
ceil_mod_number = number - number % -multiple
return int(ceil_mod_number)
|
round_to_multiple
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(number, multiple):\n multiple = int(multiple)\n if multiple == 0:\n multiple = 1\n ceil_mod_number = number - number % -multiple\n return int(ceil_mod_number)\n",
"role": "user"
},
{
"content": "round_to_multiple",
"role": "assistant"
}
] |
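The modulo trick above rounds up: for positive multiples, number % -multiple is zero or negative in Python, so subtracting it pushes number to the next boundary. A few checks, assuming the function above is in scope as round_to_multiple:

# 7 % -3 == -2 in Python, so 7 - (-2) == 9: the next multiple of 3
assert round_to_multiple(7, 3) == 9
assert round_to_multiple(9, 3) == 9    # exact multiples are unchanged
assert round_to_multiple(1, 16) == 16
assert round_to_multiple(5, 0) == 5    # multiple == 0 falls back to 1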
def x(hist, prob=(0.05, 0.95), return_indices=False, copy=True):
hist_t = np.array(hist)
cdf = np.cumsum(hist_t)
if cdf[-1] == 0:
normcdf = cdf.astype('float')
else:
normcdf = cdf.astype('float') / cdf[-1]
unormcdf, indices = np.unique(normcdf, return_index=True)
try:
hp = np.where(unormcdf > prob[1])[0][0]
lp = np.where(unormcdf >= prob[0])[0][0]
except IndexError:
hp_index = hist_t.shape[0]
lp_index = 0
else:
hp_index = indices[hp]
lp_index = indices[lp]
masked_hist = np.ma.array(hist, copy=copy, mask=True)
masked_hist.mask[lp_index:hp_index + 1] = False
if return_indices:
return masked_hist, (lp_index, hp_index)
else:
return masked_hist
|
hist_quantiles
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hist, prob=(0.05, 0.95), return_indices=False, copy=True):\n hist_t = np.array(hist)\n cdf = np.cumsum(hist_t)\n if cdf[-1] == 0:\n normcdf = cdf.astype('float')\n else:\n normcdf = cdf.astype('float') / cdf[-1]\n unormcdf, indices = np.unique(normcdf, return_index=True)\n try:\n hp = np.where(unormcdf > prob[1])[0][0]\n lp = np.where(unormcdf >= prob[0])[0][0]\n except IndexError:\n hp_index = hist_t.shape[0]\n lp_index = 0\n else:\n hp_index = indices[hp]\n lp_index = indices[lp]\n masked_hist = np.ma.array(hist, copy=copy, mask=True)\n masked_hist.mask[lp_index:hp_index + 1] = False\n if return_indices:\n return masked_hist, (lp_index, hp_index)\n else:\n return masked_hist\n",
"role": "user"
},
{
"content": "hist_quantiles",
"role": "assistant"
}
] |
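hist_quantiles above works on the normalized cumulative sum: the first bin whose CDF reaches prob[0] and the first that exceeds prob[1] bound the kept region, and everything outside is masked. A small usage sketch, assuming the function above is in scope:

import numpy as np
# assuming hist_quantiles from above is in scope

hist = np.array([1, 2, 10, 40, 30, 10, 5, 1, 1])
masked_hist, (lo, hi) = hist_quantiles(hist, prob=(0.05, 0.95),
                                       return_indices=True)
print(lo, hi)                    # bin indices bounding the central 90%
print(masked_hist.compressed())  # only the bins between the two quantiles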
def x(hist, return_index=False, copy=True):
hist_t = np.array(hist)
index = np.where(hist_t)[-1][-1] + 1 if np.sum(hist_t
) > 1 else hist_t.shape[0]
masked_hist = np.ma.array(hist, copy=copy, mask=True)
    masked_hist.mask[:index] = False
if return_index:
return masked_hist, index
else:
return masked_hist
|
hist_last_nonzero
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(hist, return_index=False, copy=True):\n hist_t = np.array(hist)\n index = np.where(hist_t)[-1][-1] + 1 if np.sum(hist_t\n ) > 1 else hist_t.shape[0]\n masked_hist = np.ma.array(hist, copy=copy, mask=True)\n masked_hist.mask[index:] = False\n if return_index:\n return masked_hist, index\n else:\n return masked_hist\n",
"role": "user"
},
{
"content": "hist_last_nonzero",
"role": "assistant"
}
] |
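A matching sketch for hist_last_nonzero above: the returned index is one past the last populated bin, and the mask hides the empty tail (assuming the function above is in scope):

import numpy as np
# assuming hist_last_nonzero from above is in scope

hist = np.array([0, 3, 5, 0, 2, 0, 0])
masked_hist, index = hist_last_nonzero(hist, return_index=True)
print(index)                     # 5: one past the last non-zero bin
print(masked_hist.compressed())  # the trailing empty bins are masked off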
def x(self, fifo, no_data_timeout=None):
logging.info('Starting readout thread for %s', fifo)
time_last_data = time()
time_wait = 0.0
empty_reads = 0
while not self.force_stop[fifo].wait(time_wait if time_wait >= 0.0 else 0.0
):
time_read = time()
try:
if (no_data_timeout and time_last_data + no_data_timeout <
get_float_time()):
raise NoDataTimeout(
'Received no data for %0.1f second(s) from %s' % (
no_data_timeout, fifo))
raw_data = self.read_raw_data_from_fifo(fifo)
except NoDataTimeout:
no_data_timeout = None
if self.errback:
self.errback(sys.exc_info())
else:
raise
except Exception:
if self.errback:
self.errback(sys.exc_info())
else:
raise
if self.stop_readout.is_set():
break
else:
n_data_words = raw_data.shape[0]
if n_data_words > 0:
time_last_data = time()
empty_reads = 0
time_start_read, time_stop_read = self.update_timestamp(fifo)
status = 0
self._fifo_data_deque[fifo].append((raw_data,
time_start_read, time_stop_read, status))
with self._fifo_conditions[fifo]:
self._fifo_conditions[fifo].notify_all()
elif self.stop_readout.is_set():
if empty_reads == self._n_empty_reads:
break
else:
empty_reads += 1
finally:
time_wait = self.readout_interval - (time() - time_read)
self._fifo_data_deque[fifo].append(None)
with self._fifo_conditions[fifo]:
self._fifo_conditions[fifo].notify_all()
logging.info('Stopping readout thread for %s', fifo)
|
readout
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, fifo, no_data_timeout=None):\n logging.info('Starting readout thread for %s', fifo)\n time_last_data = time()\n time_wait = 0.0\n empty_reads = 0\n while not self.force_stop[fifo].wait(time_wait if time_wait >= 0.0 else 0.0\n ):\n time_read = time()\n try:\n if (no_data_timeout and time_last_data + no_data_timeout <\n get_float_time()):\n raise NoDataTimeout(\n 'Received no data for %0.1f second(s) from %s' % (\n no_data_timeout, fifo))\n raw_data = self.read_raw_data_from_fifo(fifo)\n except NoDataTimeout:\n no_data_timeout = None\n if self.errback:\n self.errback(sys.exc_info())\n else:\n raise\n except Exception:\n if self.errback:\n self.errback(sys.exc_info())\n else:\n raise\n if self.stop_readout.is_set():\n break\n else:\n n_data_words = raw_data.shape[0]\n if n_data_words > 0:\n time_last_data = time()\n empty_reads = 0\n time_start_read, time_stop_read = self.update_timestamp(fifo)\n status = 0\n self._fifo_data_deque[fifo].append((raw_data,\n time_start_read, time_stop_read, status))\n with self._fifo_conditions[fifo]:\n self._fifo_conditions[fifo].notify_all()\n elif self.stop_readout.is_set():\n if empty_reads == self._n_empty_reads:\n break\n else:\n empty_reads += 1\n finally:\n time_wait = self.readout_interval - (time() - time_read)\n self._fifo_data_deque[fifo].append(None)\n with self._fifo_conditions[fifo]:\n self._fifo_conditions[fifo].notify_all()\n logging.info('Stopping readout thread for %s', fifo)\n",
"role": "user"
},
{
"content": "readout",
"role": "assistant"
}
] |
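The readout thread above hands data to consumers through a per-FIFO deque plus a condition variable, with None appended as an end-of-stream sentinel; the worker and writer threads below rely on the same handoff. A generic producer/consumer sketch of that pattern:

import threading
from collections import deque

data_deque = deque()
condition = threading.Condition()

def producer():
    for chunk in range(3):
        data_deque.append(chunk)
        with condition:
            condition.notify_all()
    data_deque.append(None)            # sentinel: no more data will come
    with condition:
        condition.notify_all()

def consumer():
    while True:
        try:
            item = data_deque.popleft()
        except IndexError:
            with condition:
                condition.wait(0.05)   # woken by notify_all or by timeout
        else:
            if item is None:
                break                  # sentinel seen: shut down cleanly
            print('received chunk %d' % item)

t = threading.Thread(target=consumer)
t.start()
producer()
t.join()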
def x(self, fifo):
logging.debug('Starting worker thread for %s', fifo)
self._fifo_conditions[fifo].acquire()
while True:
try:
data_tuple = self._fifo_data_deque[fifo].popleft()
except IndexError:
self._fifo_conditions[fifo].wait(self.readout_interval)
else:
if data_tuple is None:
break
else:
for index, (filter_func, converter_func, fifo_select
) in enumerate(izip(self.filter_func, self.
converter_func, self.fifo_select)):
if fifo_select is None or fifo_select == fifo:
converted_data_tuple = convert_data_iterable((
data_tuple,), filter_func=filter_func,
converter_func=converter_func)[0]
n_data_words = converted_data_tuple[0].shape[0]
with self.data_words_per_second_lock:
self._words_per_read[index].append((
n_data_words, converted_data_tuple[1],
converted_data_tuple[2]))
self._data_deque[index].append(converted_data_tuple)
with self._data_conditions[index]:
self._data_conditions[index].notify_all()
for index, fifo_select in enumerate(self.fifo_select):
if fifo_select is None or fifo_select == fifo:
self._data_deque[index].append(None)
with self._data_conditions[index]:
self._data_conditions[index].notify_all()
self._fifo_conditions[fifo].release()
logging.debug('Stopping worker thread for %s', fifo)
|
worker
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, fifo):\n logging.debug('Starting worker thread for %s', fifo)\n self._fifo_conditions[fifo].acquire()\n while True:\n try:\n data_tuple = self._fifo_data_deque[fifo].popleft()\n except IndexError:\n self._fifo_conditions[fifo].wait(self.readout_interval)\n else:\n if data_tuple is None:\n break\n else:\n for index, (filter_func, converter_func, fifo_select\n ) in enumerate(izip(self.filter_func, self.\n converter_func, self.fifo_select)):\n if fifo_select is None or fifo_select == fifo:\n converted_data_tuple = convert_data_iterable((\n data_tuple,), filter_func=filter_func,\n converter_func=converter_func)[0]\n n_data_words = converted_data_tuple[0].shape[0]\n with self.data_words_per_second_lock:\n self._words_per_read[index].append((\n n_data_words, converted_data_tuple[1],\n converted_data_tuple[2]))\n self._data_deque[index].append(converted_data_tuple)\n with self._data_conditions[index]:\n self._data_conditions[index].notify_all()\n for index, fifo_select in enumerate(self.fifo_select):\n if fifo_select is None or fifo_select == fifo:\n self._data_deque[index].append(None)\n with self._data_conditions[index]:\n self._data_conditions[index].notify_all()\n self._fifo_conditions[fifo].release()\n logging.debug('Stopping worker thread for %s', fifo)\n",
"role": "user"
},
{
"content": "worker",
"role": "assistant"
}
] |
def x(self, index, no_data_timeout=None):
is_fe_data_header = logical_and(is_fe_word, is_data_header)
logging.debug('Starting writer thread with index %d', index)
self._data_conditions[index].acquire()
time_last_data = time()
time_write = time()
converted_data_tuple_list = [None] * len(self.filter_func)
while True:
try:
if no_data_timeout and time_last_data + no_data_timeout < time():
raise NoDataTimeout(
'Received no data for %0.1f second(s) for writer thread with index %d'
% (no_data_timeout, index))
converted_data_tuple = self._data_deque[index].popleft()
except NoDataTimeout:
no_data_timeout = None
if self.errback:
self.errback(sys.exc_info())
else:
raise
except IndexError:
self._data_conditions[index].wait(self.readout_interval)
else:
if converted_data_tuple is None:
if self.callback and any(converted_data_tuple_list):
try:
self.callback(converted_data_tuple_list)
except Exception:
self.errback(sys.exc_info())
break
else:
if no_data_timeout and np.any(is_fe_data_header(
converted_data_tuple[0])):
time_last_data = time()
if converted_data_tuple_list[index]:
converted_data_tuple_list[index].append(
converted_data_tuple)
else:
converted_data_tuple_list[index] = [converted_data_tuple]
if self.fill_buffer:
self._data_buffer[index].append(converted_data_tuple)
if self.callback and any(converted_data_tuple_list) and (self.
write_interval and time() - time_write >= self.write_interval or
not self.write_interval):
try:
self.callback(converted_data_tuple_list)
except Exception:
self.errback(sys.exc_info())
else:
converted_data_tuple_list = [None] * len(self.filter_func)
time_write = time()
self._data_conditions[index].release()
logging.debug('Stopping writer thread with index %d', index)
|
writer
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, index, no_data_timeout=None):\n is_fe_data_header = logical_and(is_fe_word, is_data_header)\n logging.debug('Starting writer thread with index %d', index)\n self._data_conditions[index].acquire()\n time_last_data = time()\n time_write = time()\n converted_data_tuple_list = [None] * len(self.filter_func)\n while True:\n try:\n if no_data_timeout and time_last_data + no_data_timeout < time():\n raise NoDataTimeout(\n 'Received no data for %0.1f second(s) for writer thread with index %d'\n % (no_data_timeout, index))\n converted_data_tuple = self._data_deque[index].popleft()\n except NoDataTimeout:\n no_data_timeout = None\n if self.errback:\n self.errback(sys.exc_info())\n else:\n raise\n except IndexError:\n self._data_conditions[index].wait(self.readout_interval)\n else:\n if converted_data_tuple is None:\n if self.callback and any(converted_data_tuple_list):\n try:\n self.callback(converted_data_tuple_list)\n except Exception:\n self.errback(sys.exc_info())\n break\n else:\n if no_data_timeout and np.any(is_fe_data_header(\n converted_data_tuple[0])):\n time_last_data = time()\n if converted_data_tuple_list[index]:\n converted_data_tuple_list[index].append(\n converted_data_tuple)\n else:\n converted_data_tuple_list[index] = [converted_data_tuple]\n if self.fill_buffer:\n self._data_buffer[index].append(converted_data_tuple)\n if self.callback and any(converted_data_tuple_list) and (self.\n write_interval and time() - time_write >= self.write_interval or\n not self.write_interval):\n try:\n self.callback(converted_data_tuple_list)\n except Exception:\n self.errback(sys.exc_info())\n else:\n converted_data_tuple_list = [None] * len(self.filter_func)\n time_write = time()\n self._data_conditions[index].release()\n logging.debug('Stopping writer thread with index %d', index)\n",
"role": "user"
},
{
"content": "writer",
"role": "assistant"
}
] |
def x(self, filter_func=None, converter_func=None):
if self._is_running:
raise RuntimeError('Readout thread running')
if not self.fill_buffer:
logging.warning('Data buffer is not activated')
return [convert_data_iterable(data_iterable, filter_func=filter_func,
converter_func=converter_func) for data_iterable in self._data_buffer]
|
get_data_from_buffer
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, filter_func=None, converter_func=None):\n if self._is_running:\n raise RuntimeError('Readout thread running')\n if not self.fill_buffer:\n logging.warning('Data buffer is not activated')\n return [convert_data_iterable(data_iterable, filter_func=filter_func,\n converter_func=converter_func) for data_iterable in self._data_buffer]\n",
"role": "user"
},
{
"content": "get_data_from_buffer",
"role": "assistant"
}
] |
def x(self, filter_func=None, converter_func=None):
if self._is_running:
raise RuntimeError('Readout thread running')
if not self.fill_buffer:
logging.warning('Data buffer is not activated')
return [convert_data_array(data_array_from_data_iterable(data_iterable),
filter_func=filter_func, converter_func=converter_func) for
data_iterable in self._data_buffer]
|
get_raw_data_from_buffer
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, filter_func=None, converter_func=None):\n if self._is_running:\n raise RuntimeError('Readout thread running')\n if not self.fill_buffer:\n logging.warning('Data buffer is not activated')\n return [convert_data_array(data_array_from_data_iterable(data_iterable),\n filter_func=filter_func, converter_func=converter_func) for\n data_iterable in self._data_buffer]\n",
"role": "user"
},
{
"content": "get_raw_data_from_buffer",
"role": "assistant"
}
] |