code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars)
|---|---|---|
def plot_one_box(x, img, color=None, label=None, line_thickness=None, position='top'):
    tl = line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1
    color = color or [random.randint(0, 255) for _ in range(3)]
    c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3]))
    cv2.rectangle(img, c1, c2, color, thickness=tl)
    if label:
        tf = max(tl - 1, 1)
        t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
        if position == 'bottom':
            box_h = c2[1] - c1[1]
            c1 = c1[0], c1[1] + box_h + t_size[1] + 3
            c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
        else:
            c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
        cv2.rectangle(img, c1, c2, color, -1)
        cv2.putText(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA)
|
Adds one bounding box and label to the image.
:param x: List[int], bounding box boundaries containing four values,
e.g. (wmin, hmin, wmax, hmax) -> top-left corner and bottom-right corner; W=width, H=height
:param img: ndarray, the image to be annotated, shape (H, W, 3) or (H, W)
:param color: List[int], a list of integers that are [R, G, B] values
:param label: str, a string that will be put on top of the bounding box
:param line_thickness: int, an integer indicating the bounding box's line width
:param position: str, one of 'top' or 'bottom', specifying the label position
:return: None, the input image itself is updated in place.
|
625941bb925a0f43d2549d3f
|
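A minimal usage sketch for the row above, assuming OpenCV (`cv2`), NumPy, and `random` are importable; the image array, box coordinates, and label are invented for illustration:

    import random
    import cv2
    import numpy as np

    # Hypothetical example: draw a labeled box on a blank 480x640 image.
    img = np.zeros((480, 640, 3), dtype=np.uint8)
    plot_one_box([50, 60, 200, 220], img, color=[0, 255, 0], label='person')
    cv2.imwrite('annotated.png', img)
|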
def _predict(self, image_features, prediction_stage=2, **kwargs):
    if len(image_features) != 1:
        raise ValueError('length of `image_features` must be 1. Found {}'.format(
            len(image_features)))
    image_feature = image_features[0]
    predictions_dict = {}
    if prediction_stage == 2:
        predictions_dict[BOX_ENCODINGS] = self._box_prediction_head(image_feature)
        predictions_dict[CLASS_PREDICTIONS_WITH_BACKGROUND] = (
            self._class_prediction_head(image_feature))
    elif prediction_stage == 3:
        for prediction_head in self.get_third_stage_prediction_heads():
            head_object = self._third_stage_heads[prediction_head]
            predictions_dict[prediction_head] = head_object(image_feature)
    else:
        raise ValueError('prediction_stage should be either 2 or 3.')
    return predictions_dict
|
Optionally computes encoded object locations, confidences, and masks.
Predicts the heads belonging to the given prediction stage.
Args:
image_features: A list of float tensors of shape
[batch_size, height_i, width_i, channels_i] containing roi pooled
features for each image. The length of the list should be 1 otherwise
a ValueError will be raised.
prediction_stage: Prediction stage. Acceptable values are 2 and 3.
**kwargs: Unused Keyword args
Returns:
A dictionary containing the predicted tensors that are listed in
self._prediction_heads. A subset of the following keys will exist in the
dictionary:
BOX_ENCODINGS: A float tensor of shape
[batch_size, 1, num_classes, code_size] representing the
location of the objects.
CLASS_PREDICTIONS_WITH_BACKGROUND: A float tensor of shape
[batch_size, 1, num_classes + 1] representing the class
predictions for the proposals.
MASK_PREDICTIONS: A float tensor of shape
[batch_size, 1, num_classes, image_height, image_width]
Raises:
ValueError: If num_predictions_per_location is not 1 or if
len(image_features) is not 1.
ValueError: if prediction_stage is not 2 or 3.
|
625941bb91f36d47f21ac3ba
|
def preprocess(self, batch):
    assert self._batch_size == len(batch), "Invalid input batch size: {}".format(len(batch))
    return mx.nd.array(data_transformer.file_to_vec(
        batch[0].get('body'), file_vector_size=defs.file_chars_trunc_limit))
|
Transform raw input into model input data.
:param batch: list of raw requests, should match batch size
:return: list of preprocessed model input data
|
625941bbaad79263cf390907
|
def empty(self) -> bool:
    return len(self.mystack) == 0
|
Returns whether the queue is empty.
|
625941bbcdde0d52a9e52efa
|
def test_import_since_must_be_valid_iso(self):
    config = '{ "import_since": "2019-01-01" }'
    assert FisbrokerPlugin().validate_config(config)
    config = '{ "import_since": "2019.01.01" }'
    with assert_raises(ValueError):
        assert FisbrokerPlugin().validate_config(config)
|
Test that the `import_since` config must be a valid ISO8601 date.
|
625941bb1f037a2d8b9460c9
|
def split_chunk(l: list, n: int):
    for i in range(0, len(l), n):
        yield l[i:i + n]
|
Split a list into chunks of the given length.
:param l: the list to split
:param n: chunk length
:return: a generator yielding successive chunks
|
625941bb26068e7796caeba4
|
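A quick usage sketch of the generator above; the final chunk is shorter when the list length is not a multiple of n:

    print(list(split_chunk([1, 2, 3, 4, 5, 6, 7], 3)))  # [[1, 2, 3], [4, 5, 6], [7]]
|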
def test_func_remove_paths_for_good_case_ALWAYS_never_delete_input_file(self):
    cur_pipe = Pipeline.Pipeline(str_name="test_func_remove_paths_for_good_case_ALWAYS")
    str_env = os.path.join(self.str_test_directory, "test_func_remove_paths_for_good_case_ALWAYS")
    str_dependency_1 = os.path.join(str_env, "Dependency_1.txt")
    str_product_1 = os.path.join(str_env, "Product_1.txt")
    str_product_1_ok = cur_pipe.func_get_ok_file_path(str_product_1)
    str_product_2 = os.path.join(str_env, "Product_2.txt")
    str_product_2_ok = cur_pipe.func_get_ok_file_path(str_product_2)
    self.func_make_dummy_dir(str_env)
    self.func_make_dummy_files([str_dependency_1, str_product_1, str_product_1_ok,
                                str_product_2, str_product_2_ok])
    cur_cmd = Command.Command("Command 1", [str_dependency_1],
                              [str_product_1]).func_set_resource_clean_level(
                                  [str_dependency_1, str_product_1], Resource.CLEAN_ALWAYS)
    cur_cmd2 = Command.Command("Command 2", [str_product_1],
                               [str_product_2]).func_set_resource_clean_level(
                                   [str_product_1], Resource.CLEAN_ALWAYS)
    dt_cur = DependencyTree.DependencyTree([cur_cmd, cur_cmd2])
    dt_cur.func_complete_command(cur_cmd)
    dt_cur.func_complete_command(cur_cmd2)
    f_success = cur_pipe.func_remove_paths(cmd_command=cur_cmd2, str_output_directory=str_env,
                                           dt_dependency_tree=dt_cur, f_remove_products=False)
    f_removed_files = not os.path.exists(str_product_1)
    f_other_files_remain = f_removed_files and os.path.exists(str_product_1_ok)
    f_other_files_remain = f_other_files_remain and os.path.exists(str_dependency_1)
    f_other_files_remain = f_other_files_remain and os.path.exists(str_product_2)
    f_other_files_remain = f_other_files_remain and os.path.exists(str_product_2_ok)
    self.func_remove_files([str_dependency_1, str_product_1, str_product_1_ok,
                            str_product_2, str_product_2_ok])
    self.func_remove_dirs([str_env])
    self.func_test_true(f_success and f_removed_files and f_other_files_remain)
|
Good case: trying to remove one dependency at clean level ALWAYS; any ALWAYS file is always deleted.
Here the input file and the intermediate file are requested to be deleted, but only the intermediate file is deleted.
|
625941bb66673b3332b91f5c
|
def printTime(func):
    from helpers import info
    def decorated(*args, **kwargs):
        info('begin')
        func(*args, **kwargs)
        info('end')
    return decorated
|
@Decorator
print time info before and after the function
|
625941bb9b70327d1c4e0c9e
|
def create_request_json(method, *args, **kwargs):
    return pickle.dumps(create_request_dict(method, *args, **kwargs),
                        protocol=pickle.HIGHEST_PROTOCOL)
|
Returns a JSON-RPC-String for a method
:param method: Name of the method
:param args: Positional parameters
:param kwargs: Named parameters
|
625941bbc4546d3d9de728fc
|
def sigmasq(htilde, psd=None, low_frequency_cutoff=None, high_frequency_cutoff=None):
    htilde = make_frequency_series(htilde)
    N = (len(htilde) - 1) * 2
    norm = 4.0 * htilde.delta_f
    kmin, kmax = get_cutoff_indices(low_frequency_cutoff, high_frequency_cutoff,
                                    htilde.delta_f, N)
    ht = htilde[kmin:kmax]
    if psd:
        try:
            numpy.testing.assert_almost_equal(ht.delta_f, psd.delta_f)
        except:
            raise ValueError('Waveform does not have same delta_f as psd')
    if psd is None:
        sq = ht.inner(ht)
    else:
        sq = ht.weighted_inner(ht, psd[kmin:kmax])
    return sq.real * norm
|
Return the loudness of the waveform. This is defined (see Duncan
Brown's thesis) as the unnormalized matched-filter of the input waveform,
htilde, with itself. This quantity is usually referred to as (sigma)^2
and is then used to normalize matched-filters with the data.
Parameters
----------
htilde : TimeSeries or FrequencySeries
The input vector containing a waveform.
psd : {None, FrequencySeries}, optional
The psd used to weight the accumulated power.
low_frequency_cutoff : {None, float}, optional
The frequency to begin considering waveform power.
high_frequency_cutoff : {None, float}, optional
The frequency to stop considering waveform power.
Returns
-------
sigmasq: float
|
625941bb3c8af77a43ae3668
|
def get_first_import(node, context, name, base, level):
    fullname = '%s.%s' % (base, name) if base else name
    first = None
    found = False
    for first in context.body:
        if first is node:
            continue
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, astng.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astng.From):
            if level == first.level and any(
                    fullname == '%s.%s' % (first.modname, iname[0])
                    for iname in first.names):
                found = True
                break
    if found and not are_exclusive(first, node):
        return first
|
return the node where [base.]<name> is imported or None if not found
|
625941bb1b99ca400220a97b
|
def run_reacher():
    reacher = BraxEnv.create(env=envs.reacher.Reacher())
    reacher_state, obs = reacher.init()
    agent = DummyReacherAgent.create()
    agent_state = agent.init()
    def rewards_func(state, obs, action, env, counter):
        del state, env, counter
        reward_dist = -jnp.norm(obs[-3:])
        reward_ctrl = -jnp.square(action).sum()
        return reward_dist + reward_ctrl
    ppo.train(reacher, agent.create(), rewards_func, horizon=10, config=get_config())
    for _ in range(10):
        agent_state, action = agent(agent_state, obs)
        reacher_state, obs = reacher(reacher_state, action)
        print(obs[-3:], action)
|
Run reacher example.
|
625941bcd6c5a10208143f13
|
def set_axislabel(self, text, minpad=1, **kwargs):
    fontdict = kwargs.pop('fontdict', None)
    if minpad is None:
        minpad = 1
    self.axislabels.set_text(text)
    self.axislabels.set_minpad(minpad)
    self.axislabels.set(**kwargs)
    if fontdict is not None:
        self.axislabels.update(fontdict)
|
Set the text and optionally visual properties for the axis label.
Parameters
----------
text : str
The axis label text.
minpad : float, optional
The padding for the label in terms of axis label font size.
kwargs
Keywords are passed to :class:`matplotlib.text.Text`. These
can include keywords to set the ``color``, ``size``, ``weight``, and
other text properties.
|
625941bb4a966d76dd550ed7
|
def batchnorm_backward_alt(dout, cache):
    dx, dgamma, dbeta = None, None, None
    dgamma = np.sum(cache['x_norm'] * dout, axis=0)
    dbeta = np.sum(dout, axis=0)
    N, D = dout.shape
    divar = np.sum(dout * cache['gamma'] * cache['xu'], axis=0)
    dsqrvar = divar * (-1 / (cache['sqrvar'] ** 2))
    d_var = dsqrvar * 0.5 * (1 / (np.sqrt(cache['var'] + cache['eps'])))
    dsk = d_var * np.ones((N, D)) * (1 / N)
    dxu2 = dsk * 2 * cache['xu']
    du = -1 * np.sum((dxu2 + (dout * cache['gamma'] * cache['ivar'])), axis=0)
    dx1 = 1 * (dxu2 + (dout * cache['gamma'] * cache['ivar']))
    dx2 = (1 / N) * np.ones((N, D)) * du
    dx = dx1 + dx2
    return dx, dgamma, dbeta
|
Alternative backward pass for batch normalization.
For this implementation you should work out the derivatives for the batch
normalization backward pass on paper and simplify as much as possible. You
should be able to derive a simple expression for the backward pass.
See the jupyter notebook for more hints.
Note: This implementation should expect to receive the same cache variable
as batchnorm_backward, but might not use all of the values in the cache.
Inputs / outputs: Same as batchnorm_backward
|
625941bb507cdc57c6306b9f
|
def findNodeSentinel(self, delim, lines, n):
    offset = 0
    nodeSentinelLine = -1
    line = n - 1
    while len(lines) > line >= 0 and nodeSentinelLine == -1:
        progress = line
        s = lines[line]
        i = g.skip_ws(s, 0)
        if g.match(s, i, delim):
            line, nodeSentinelLine, offset = self.handleDelim(
                delim, s, i, line, lines, n, offset)
        else:
            line -= 1
        assert nodeSentinelLine > -1 or line < progress
    return nodeSentinelLine, offset
|
Scan backwards from the line n, looking for an @-body line. When found,
get the vnode's name from that line and set p to the indicated vnode. This
will fail if vnode names have been changed, and that can't be helped.
We compute the offset of the requested line **within the found node**.
|
625941bc30dc7b7665901834
|
def convert_query_params(query_params: ImmutableMultiDict, model: Type[BaseModel]) -> dict:
    return {
        **query_params.to_dict(),
        **{
            key: value
            for key, value in query_params.to_dict(flat=False).items()
            if key in model.__fields__ and model.__fields__[key].is_complex()
        },
    }
|
Group query parameters into lists if the model defines them.
:param query_params: Flask's request.args
:param model: query parameter's model
:return: resulting parameters
|
625941bc851cf427c661a3dd
|
def set_model(self, model):
    if model[0] == 'Completion':
        self.__init__(model[1])
|
Set data model
|
625941bcdc8b845886cb53ff
|
def test_successful_validation(capsys):
    class NewScript(Script):
        def get_scheme(self):
            return None
        def validate_input(self, definition):
            return
        def stream_events(self, inputs, ew):
            return
    script = NewScript()
    ew = EventWriter(sys.stdout, sys.stderr)
    args = [TEST_SCRIPT_PATH, "--validate-arguments"]
    return_value = script.run_script(args, ew, data_open("data/validation.xml"))
    output = capsys.readouterr()
    assert output.err == ""
    assert output.out == ""
    assert return_value == 0
|
Check that successful validation yields no text and a 0 exit value.
|
625941bcb57a9660fec3374b
|
@cli.command()
@click.argument('problem')
def timeit(problem):
    problem = problem.rjust(3, '0')
    timer = _timeit.Timer(
        'mod.solve()',
        setup=f'import importlib; mod = importlib.import_module(f"problems.{problem}")')
    loops, total_time = timer.autorange()
    click.echo(f'Time per Solve: {total_time / loops:.6f} seconds')
|
Time the solver for a problem.
|
625941bccad5886f8bd26ead
|
def find(self, alpha=5):
    self.alpha = alpha
    image_max = maximum_filter(self.image_conv, self.win_size)
    maxima = (self.image_conv == image_max)
    self.mean = np.mean(self.image_conv)
    self.std = np.sqrt(np.mean((self.image_conv - self.mean)**2))
    self.threshold = self.alpha * self.std + self.mean
    diff = (image_max > self.threshold)
    maxima[diff == 0] = 0
    labeled, num_objects = label(maxima)
    if num_objects > 0:
        self.positions = maximum_position(self.image, labeled, range(1, num_objects + 1))
        self.positions = np.array(self.positions).astype(int)
        self.drop_overlapping()
        self.drop_border()
    else:
        self.positions = np.zeros((0, 2), dtype=int)
|
Takes an image and detects the peaks using the local maximum filter.
Returns a boolean mask of the peaks (i.e. 1 when
the pixel's value is the neighborhood maximum, 0 otherwise). Taken from
http://stackoverflow.com/questions/9111711/
get-coordinates-of-local-maxima-in-2d-array-above-certain-value
|
625941bc44b2445a33931f6a
|
def create_window_covering_service(accessory):
    service = accessory.add_service(ServicesTypes.WINDOW_COVERING)
    cur_state = service.add_char(CharacteristicsTypes.POSITION_CURRENT)
    cur_state.value = 0
    targ_state = service.add_char(CharacteristicsTypes.POSITION_TARGET)
    targ_state.value = 0
    position_state = service.add_char(CharacteristicsTypes.POSITION_STATE)
    position_state.value = 0
    position_hold = service.add_char(CharacteristicsTypes.POSITION_HOLD)
    position_hold.value = 0
    obstruction = service.add_char(CharacteristicsTypes.OBSTRUCTION_DETECTED)
    obstruction.value = False
    name = service.add_char(CharacteristicsTypes.NAME)
    name.value = "testdevice"
    return service
|
Define window-covering characteristics as per page 219 of the HAP spec.
|
625941bcbe8e80087fb20b13
|
def normalize(self):
    total = sum(self.prob.values())
    if not isclose(total, 1.0):
        for val in self.prob:
            self.prob[val] = self.prob[val] * 1.0 / total
    return self
|
Make sure the probabilities of all values sum to 1.
Returns the normalized distribution.
Raises a ZeroDivisionError if the sum of the values is 0.
|
625941bc7047854f462a12d8
|
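A self-contained sketch of how this method behaves, using a hypothetical `Distribution` wrapper (not in the row) that stores `self.prob` as a dict of value -> unnormalized weight:

    from math import isclose

    class Distribution:
        def __init__(self, prob):
            self.prob = prob

        def normalize(self):
            total = sum(self.prob.values())
            if not isclose(total, 1.0):
                for val in self.prob:
                    self.prob[val] = self.prob[val] * 1.0 / total
            return self

    d = Distribution({'a': 2, 'b': 6}).normalize()
    print(d.prob)  # {'a': 0.25, 'b': 0.75}

A distribution whose weights sum to 0 raises ZeroDivisionError, as the docstring says.
|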
def construct_parser():
    parser = argparse.ArgumentParser(
        prog="slice-image",
        description="Slice an image into tiles.",
        epilog="Report bugs and make feature requests at"
               "https://github.com/samdobson/image_slicer/issues",
        add_help=False,
    )
    required = parser.add_argument_group("Required Arguments")
    required.add_argument("image", help="image file")
    optional = parser.add_argument_group("Optional Arguments")
    optional.add_argument(
        "-n", "--num-tiles", type=int, default=0,
        help="Number of tiles to make. Automatically decides the"
             "number of rows and columns.",
    )
    optional.add_argument("-d", "--dir", default="./", help="output directory")
    optional.add_argument(
        "-f", "--format", default="png",
        help="output image format (e.g JPEG, PNG, GIF)"
    )
    optional.add_argument(
        "-r", "--rows", type=int, default=1,
        help="Number of rows to divide the image. Used when num_tiles is 0.",
    )
    optional.add_argument(
        "-c", "--columns", type=int, default=1,
        help="Number of columns to divide the image. Used when num_tiles is 0.",
    )
    info = parser.add_argument_group("Info")
    info.add_argument("-h", "--help", action="help", help="display this screen")
    info.add_argument("-v", "--version", action="version", version="%(prog)s 0.2")
    return parser
|
Return an ArgumentParser.
|
625941bc004d5f362079a201
|
def mksls(src, dst=None):
    ps_opts = {}
    with salt.utils.fopen(src, 'r') as fh_:
        for line in fh_:
            if line.startswith('#'):
                continue
            if not line.strip():
                continue
            comps = shlex.split(line)
            if comps[0] not in ps_opts.keys():
                ps_opts[comps[0]] = {}
            cmds = comps[1].split('/')
            pointer = ps_opts[comps[0]]
            for cmd in cmds:
                pointer = pointer.setdefault(cmd, {})
            pointer['type'] = comps[2]
            if len(comps) > 3:
                pointer['argument'] = comps[3]
    sls = {}
    sls[ps_opts['d-i']['languagechooser']['language-name-fb']['argument']] = {
        'locale': ['system']}
    sls[ps_opts['d-i']['kbd-chooser']['method']['argument']] = {
        'keyboard': ['system']}
    timezone = ps_opts['d-i']['time']['zone']['argument']
    sls[timezone] = {'timezone': ['system']}
    if ps_opts['d-i']['tzconfig']['gmt']['argument'] == 'true':
        sls[timezone]['timezone'].append('utc')
    if 'netcfg' in ps_opts['d-i'].keys():
        iface = ps_opts['d-i']['netcfg']['choose_interface']['argument']
        sls[iface] = {}
        sls[iface]['enabled'] = True
        if ps_opts['d-i']['netcfg']['confirm_static'] == 'true':
            sls[iface]['proto'] = 'static'
        elif ps_opts['d-i']['netcfg']['disable_dhcp'] == 'false':
            sls[iface]['proto'] = 'dhcp'
        sls[iface]['netmask'] = ps_opts['d-i']['netcfg']['get_netmask']['argument']
        sls[iface]['domain'] = ps_opts['d-i']['netcfg']['get_domain']['argument']
        sls[iface]['gateway'] = ps_opts['d-i']['netcfg']['get_gateway']['argument']
        sls[iface]['hostname'] = ps_opts['d-i']['netcfg']['get_hostname']['argument']
        sls[iface]['ipaddress'] = ps_opts['d-i']['netcfg']['get_ipaddress']['argument']
        sls[iface]['nameservers'] = ps_opts['d-i']['netcfg']['get_nameservers']['argument']
    if dst is not None:
        with salt.utils.fopen(dst, 'w') as fh_:
            fh_.write(yaml.safe_dump(sls, default_flow_style=False))
    else:
        return yaml.safe_dump(sls, default_flow_style=False)
|
Convert a preseed file to an SLS file
|
625941bc711fe17d8254223c
|
def __setitem__(self, x, value):
    if isinstance(x, slice):
        item = x.start
        left = x.stop if x.stop is not None else 0
        right = x.step if x.step is not None else 5
    else:
        item = x
        left = 0
        right = 5
    old_value = self[item]
    if isinstance(item, int):
        self.memory[item][left:right] = value
        if self.mem_hook is not None and old_value.word_list != self.memory[item].word_list:
            self.mem_hook(item, old_value, self.memory[item])
    else:
        if item in TRIGGERS:
            self.__dict__[item] = value
            changed = old_value != self[item]
        else:
            self.reg(item)[left:right] = value
            changed = old_value.word_list != self[item].word_list
        if self.cpu_hook is not None and changed:
            self.cpu_hook(item, old_value, self[item])
|
Can raise exception
|
625941bc4e696a04525c9317
|
def reading(self, blocknum):
    self.max_read_block = max(blocknum, self.max_read_block)
    self.adjustCBlock(blocknum)
|
Set which block is currently being read
|
625941bcff9c53063f47c0c0
|
def get_result_detail(self):
    if self._get_result_idl() and hasattr(self._get_result_idl(), '__dict__'):
        return self._genetate_dict_from_result_idl(self._get_result_idl())
|
Gets the results from policy submit/update/delete operations
|
625941bcff9c53063f47c0c1
|
def __compileTerm(self):
    if self.tokenizer.tokenType() == "INT_CONST":
        self.vmWriter.writePush("constant", self.tokenizer.intVal())
        self.tokenizer.advance()
    elif self.tokenizer.tokenType() == "STRING_CONST":
        string = self.tokenizer.stringVal()
        self.vmWriter.writePush("constant", len(string))
        self.vmWriter.writeCall("String.new", 1)
        for letter in string:
            self.vmWriter.writePush("constant", ord(letter))
            self.vmWriter.writeCall("String.appendChar", 2)
        self.tokenizer.advance()
    elif self.tokenizer.tokenType() == "KEYWORD":
        k = self.tokenizer.keyWord()
        if k not in KeyWordConstant:
            print("Error: invalid KeyWordConstant" + k + " in term")
            sys.exit(1)
        if k == "null" or k == "false":
            self.vmWriter.writePush("constant", 0)
        elif k == "true":
            self.vmWriter.writePush("constant", 1)
            self.vmWriter.writeArithmetic("neg")
        elif k == "this":
            self.vmWriter.writePush("pointer", 0)
        self.tokenizer.advance()
    elif self.tokenizer.tokenType() == "SYMBOL":
        s = self.tokenizer.symbol()
        if s == "(":
            self.tokenizer.advance()
            self.__compileExpression()
            if self.tokenizer.tokenType() != "SYMBOL" or self.tokenizer.symbol() != ")":
                print("Error: missing ) after expression in term")
                sys.exit(1)
            self.tokenizer.advance()
        elif s in unaryOp:
            self.tokenizer.advance()
            self.__compileTerm()
            if s == "-":
                self.vmWriter.writeArithmetic("neg")
            else:
                self.vmWriter.writeArithmetic("not")
        else:
            print("Error: invalid symbol " + s + " in term")
            sys.exit(1)
    elif self.__checkIdentifier():
        self.tokenizer.advance()
        firstHalf = self.currentName
        if self.tokenizer.tokenType() == "SYMBOL":
            s = self.tokenizer.symbol()
            if s == "[":
                self.vmWriter.writePush(self.symbolTable.kindOf(firstHalf), self.symbolTable.indexOf(firstHalf))
                self.tokenizer.advance()
                self.__compileExpression()
                self.vmWriter.writeArithmetic("add")
                self.vmWriter.writePop("pointer", 1)
                self.vmWriter.writePush("that", 0)
                if self.tokenizer.tokenType() != "SYMBOL" or self.tokenizer.symbol() != "]":
                    print("Error: missing ] after varName[expression]")
                    sys.exit(1)
                self.tokenizer.advance()
            elif s == "(" or s == ".":
                self.__compileSubroutineCall(firstHalf)
            else:
                self.vmWriter.writePush(self.symbolTable.kindOf(self.currentName), self.symbolTable.indexOf(self.currentName))
        else:
            self.vmWriter.writePush(self.symbolTable.kindOf(self.currentName), self.symbolTable.indexOf(self.currentName))
    else:
        print("Error: invalid term")
        sys.exit(1)
|
Compiles a complete jack term grammar
|
625941bc7b25080760e39326
|
def embedFact(self, factIdx, debug):
    if factIdx == 0 and not self.isLoaded:
        seq, seqLens = self.captionEmbed, self.captionLens
        factEmbed, states = utils.dynamicRNN(
            self.factRNN, seq, seqLens, returnStates=True)
    else:
        idx = factIdx if self.isLoaded else factIdx - 1
        quesTokens, quesLens = self.questionTokens[idx], self.questionLens[idx]
        if debug:
            print("quesTokens", quesTokens)
        ansTokens, ansLens = self.answerTokens[idx], self.answerLengths[idx]
        qaTokens = utils.concatPaddedSequences(
            quesTokens, quesLens, ansTokens, ansLens, padding='right')
        qa = self.wordEmbed(qaTokens)
        qaLens = quesLens + ansLens
        qaEmbed, states = utils.dynamicRNN(
            self.factRNN, qa, qaLens, returnStates=True)
        factEmbed = qaEmbed
    factRNNstates = states
    if debug:
        print("Fact", factEmbed, factRNNstates)
    self.factEmbeds.append((factEmbed, factRNNstates))
|
Embed facts, i.e. the caption at round 0, or the question-answer pair otherwise.
|
625941bcfff4ab517eb2f305
|
def getRow(self, rowIndex):
    res = []
    for i in range(rowIndex + 1):
        tmp = []
        for j in range(i + 1):
            if i == 0 or j == 0 or i == j:
                tmp.append(1)
            else:
                tmp.append(res[i - 1][j] + res[i - 1][j - 1])
        res.append(tmp)
    return res[rowIndex]
|
:type rowIndex: int
:rtype: List[int]
|
625941bc3539df3088e2e216
|
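This builds Pascal's triangle row by row and returns the requested (0-indexed) row. A quick usage check, assuming the method lives on a `Solution` class as is conventional for this kind of snippet:

    print(Solution().getRow(4))  # [1, 4, 6, 4, 1]
|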
def testOrganisationPricingPlan(self):
    inst_req_only = self.make_instance(include_optional=False)
    inst_req_and_optional = self.make_instance(include_optional=True)
|
Test OrganisationPricingPlan
|
625941bc046cf37aa974cc15
|
def set_data(pwm_no, data):
    if pwm_no == 0:
        regs.PWMDAT0 = data
    elif pwm_no == 1:
        regs.PWMDAT1 = data
    else:
        raise Exception('Invalid PWM number')
|
Sets the data register for the given PWM peripheral.
@param pwm_no: the identifier of the PWM (0 or 1)
@param data: the value to set (an integer)
|
625941bc9b70327d1c4e0c9f
|
def update_predict_network(self):
    states, actions, rewards, new_states, is_terminals = self.memory.sample(self.batch_size)
    preprocessed_states, preprocessed_new_states = self.preprocessor.process_batch(states, new_states)
    actions = self.preprocessor.process_action(actions)
    q_values = self.calc_target_q_values(preprocessed_new_states)
    if self.enable_double_dqn:
        actions = np.argmax(self.calc_q_values(preprocessed_new_states), axis=1)
        max_q_values = q_values[range(self.batch_size), actions]
    else:
        max_q_values = np.max(q_values, axis=1)
    max_q_values[is_terminals] = 0.0
    targets = rewards + self.gamma * max_q_values
    targets = np.expand_dims(targets, axis=1)
    self.q_network.train_on_batch([preprocessed_states, actions], targets)
    if self.num_steps % self.target_update_freq == 0:
        print('Update the Target Network at %d steps' % self.num_steps)
        self.update_target_network()
|
Update your predict network.
Behavior may differ based on what stage of training you're in.
1. training mode: check if you should update your network parameters based on the current step and the value you set for train_freq.
1) sample a minibatch --> 2) calculate the target values
--> 3) update your network --> 4) then update your target values.
@return: the loss and other metrics as an output
|
625941bcf9cc0f698b1404ca
|
def load_hosts(self):
    if not isfile(self.hostfile):
        self.active_host = []
        self.available_hosts = []
    else:
        with open(self.hostfile, 'r') as f:
            hosts = json.load(f)
        self.active_host = hosts['active']
        self.available_hosts = hosts['available']
    if any(isinstance(i, list) for i in self.active_host):
        print(
            "Error: more than one active host. Please check config file ~/.config/loon/host.json and modify or remove it if necessary."
        )
    def RemoveDups(duplicate):
        final_list = []
        flag = False
        for num in duplicate:
            if num not in final_list:
                final_list.append(num)
            else:
                flag = True
        return final_list, flag
    self.available_hosts, flag = RemoveDups(self.available_hosts)
    if flag:
        self.save_hosts()
    return
|
Load hosts from file
|
625941bc8e7ae83300e4ae97
|
def GetHistoryNews():
    res = requests.get(url=history_url, headers=headers)
    return NewsFilter(res.json()['data']['data'], res.json()['data']['count'])
|
Get the history news from jintou.
:return:
|
625941bc7b180e01f3dc46d0
|
def __init__(self, context):
    QPushButton.__init__(self)
    self._context = context
    self._context.addLanguageEventListner(self.onTranslate)
    self._context.addCloseEventListner(self.onDestroy)
    self.setCheckable(True)
    self.setFocusPolicy(Qt.NoFocus)
    self.setStyleSheet(R.values.styles.transparent_background)
    self.setIcon(R.getIconById("icon_pause"))
    self.setIconSize(QSize(80, 80))
    self._button_state = EmergencyStopState.UNLOCKED
    self._keep_running = True
    self.connect(self, SIGNAL('clicked(bool)'), self._trigger_button)
    self._estop_pub = rospy.Publisher(self.EMERGENCY_STOP_TOPIC_NAME, Bool,
                                      latch=True, queue_size=1)
    self._preempt_move_base_pub = rospy.Publisher("/move_base/cancel", GoalID, queue_size=1)
    self._estop_pub_thread = threading.Thread(name='emergency_stop_publisher_loop',
                                              target=self._emergency_stop_publisher_loop)
    self._estop_pub_thread.start()
|
! The constructor.
|
625941bce5267d203edcdb6b
|
def SetInput2(self, *args):
    return _itkLabelOverlayImageFilterPython.itkLabelOverlayImageFilterIUS3IUL3IRGBUS3_Superclass_SetInput2(self, *args)
|
SetInput2(self, itkImageUL3 image2)
|
625941bc3d592f4c4ed1cf49
|
def validate_controller_credentials(old_conf, conf, args):
    controller_name = getattr(args, CONTROLLER_NAME, None)
    controller = conf.get(CONTROLLERS, {}).get(controller_name, {})
    old_controller = old_conf.get(CONTROLLERS, {}).get(controller_name, {})
    controller_class = controller.get('class', None)
    if controller_class == AWS:
        verify_AWS_credentials(conf, args, controller_name, controller, old_controller)
        if SUBCREDS in controller:
            for obj_name, cred_obj in controller[SUBCREDS].iteritems():
                try:
                    old_creds = old_conf[CONTROLLERS][controller_name][SUBCREDS][obj_name]
                except KeyError:
                    old_creds = {}
                verify_AWS_credentials(conf, args, obj_name, cred_obj, old_creds, sub=True)
    elif controller_class == AZURE:
        credentials = controller.get('credentials', {})
        spa = {'tenant', 'grant_type', 'client_id', 'client_secret'}
        upa = {'username', 'password'}
        is_adding_spa = getattr(args, 'Service Principal credentials tenant', None)
        if is_adding_spa:
            nested_set(controller, ['credentials', 'grant_type'], 'client_credentials')
        current = set(credentials.keys())
        if not current:
            sys.stderr.write(
                'Azure controller "%s" is missing credentials. '
                'Azure credentials must contain tenant, client ID '
                'and client secret or username and password. '
                'To change credentials, use set.\n' % controller_name)
            sys.exit(2)
        if 0 < len(current & spa) < len(spa):
            sys.stderr.write(
                'Azure controller "%s" is missing credentials. '
                'Azure credentials must contain tenant, client ID '
                'and client secret or username and password. '
                'To change credentials, use set.\n' % controller_name)
            sys.exit(2)
        if 0 < len(current & upa) < len(upa):
            sys.stderr.write(
                'Azure controller "%s" is missing credentials. '
                'Azure credentials must contain tenant, client ID '
                'and client secret or username and password. '
                'To change credentials, use set.\n' % controller_name)
            sys.exit(2)
        if current == spa | upa:
            if args.force or prompt('replace existing credentials?'):
                if is_adding_spa:
                    for key in ['Azure username', 'Azure password']:
                        nested_delete(conf, ARGUMENTS[key][1])
                else:
                    for key in ['tenant', 'grant_type', 'client_id', 'client_secret']:
                        nested_delete(conf, [CONTROLLERS, controller_name, 'credentials', key])
            else:
                sys.exit(0)
    elif controller_class == GCP:
        pass
|
Validate controller's key values and dependencies.
|
625941bce64d504609d7470b
|
def get_sitemap(self, user_agent):
    if user_agent in self.sitemaps.keys():
        return self.sitemaps.get(user_agent)
    raise RuntimeError("There is not a sitemap for the given user_agent.")
|
Getter method for the sitemap given a specific user_agent.
|
625941bc091ae35668666e30
|
def alienOrder(self, words):
    indegree, outdegree = collections.defaultdict(int), collections.defaultdict(set)
    for pair in [(words[i], words[i + 1]) for i in xrange(len(words) - 1)]:
        for a, b in zip(pair[0], pair[1]):
            if a != b:
                outdegree[a].add(b)
                indegree[b] += 1
                break
    chars = set(''.join(words))
    free = chars - set(indegree)
    order = ''
    while free:
        a = free.pop()
        order += a
        for b in outdegree[a]:
            indegree[b] -= 1
            if indegree[b] == 0:
                free.add(b)
    return order if set(order) == chars else ''
|
:type words: List[str]
:rtype: str
|
625941bc3eb6a72ae02ec3a0
|
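This is a topological sort over letter-ordering constraints derived from adjacent words. A usage sketch, assuming Python 2 (the snippet uses `xrange`) and the method on a `Solution` class; any valid topological order may come back:

    print(Solution().alienOrder(["wrt", "wrf", "er", "ett", "rftt"]))
    # one valid order is "wertf"
|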
def parse_filters_kwargs(request, client_keywords=None):
    filters = {}
    kwargs = {}
    client_keywords = client_keywords or {}
    for param in request.GET:
        if param in client_keywords:
            kwargs[param] = request.GET[param]
        else:
            filters[param] = request.GET[param]
    return filters, kwargs
|
Extract REST filter parameters from the request GET args.
Client processes some keywords separately from filters and takes
them as separate inputs. This will ignore those keys to avoid
potential conflicts.
|
625941bc8c0ade5d55d3e88b
|
def __init__(self, source, prefetch):
    if prefetch < 1:
        raise ValueError('`prefetch_num` must be at least 1')
    self._source = source
    self._prefetch_num = prefetch
    self._worker = None
    self._batch_queue = None
    self._epoch_counter = None
    self._stopping = None
    self._worker_alive = None
    self._worker_ready_sem = None
|
Construct a :class:`ThreadingFlow`.
Args:
source (DataFlow): The source data flow.
prefetch (int): Number of mini-batches to prefetch ahead.
It should be at least 1.
|
625941bcde87d2750b85fc5a
|
def loss_capsule(self, caps_output):
    with tf.name_scope("loss_capsule"):
        caps_output_norm = safe_norm(caps_output, axis=-1, keep_dims=False,
                                     name="caps_output_norm")
        T = tf.one_hot(self.labels_placeholder, depth=self.config.caps2_num_caps, name="T")
        present_error = tf.square(tf.maximum(0., self.config.m_plus - caps_output_norm),
                                  name="present_error")
        absent_error = tf.square(tf.maximum(0., caps_output_norm - self.config.m_minus),
                                 name="absent_error")
        loss_raw = tf.add(T * present_error,
                          self.config.lambda_caps_loss * (1.0 - T) * absent_error,
                          name="loss_raw")
        loss_margin = tf.reduce_mean(tf.reduce_sum(loss_raw, axis=1), name="loss_margin")
    return loss_margin
|
capsule loss
|
625941bc6fece00bbac2d607
|
def __init__(self, path: str, sent_tokenize: callable, sent_to_features: callable,
             balance: bool = False, seed: int = 0, max_files: int = -1):
    if not os.path.isdir(path):
        raise IOError('The path "%s" is not a directory.' % path)
    files = os.listdir(path)
    if max_files != -1:
        files = files[:max_files]
    self.dataset = []
    progressbar = tqdm(files)
    for file in progressbar:
        progressbar.set_description(file)
        file_path = os.path.join(path, file)
        with open(file_path, 'r') as input_file:
            file_data = json.load(input_file)
            text, abstract = file_data['text'], file_data['abstract']
            is_valid_esd_json(file_data, is_train_document=True)
            text_sentences = sent_tokenize(text)
            abstract_sentences = sent_tokenize(abstract)
            try:
                relevant_indices = self._compute_relevant_indices(text_sentences, abstract_sentences)
            except ValueError:
                continue
            for index in range(len(text_sentences)):
                example = {
                    'sentence': text_sentences[index],
                    'features': sent_to_features(text_sentences[index]),
                    'position': index / float(len(text_sentences)),
                    'is_relevant': index in relevant_indices
                }
                self.dataset.append(example)
    if balance:
        self.dataset = balance_dataset(self.dataset, 'is_relevant', seed)
|
The file loader for the dataset files as described in the README.
Parameters
----------
path : str
Path to the folder containing the JSON files.
sent_tokenize : callable
A method which takes a document (text) as input and produces a list of sentences found in the document as
output.
sent_to_features : callable
A method which takes a sentence as input and produces features (a vector of embedding indices) as output.
balance : bool, optional
Whether to balance the dataset or not (default: False).
seed : int, optional
Seed used for shuffling during balancing (only used when balance=True, default: 0).
max_files : int, optional
The maximum number of files used, -1 for no maximum (default: -1).
Raises
------
ValueError
When the path is not a valid directory.
|
625941bc796e427e537b048e
|
def getStringRepr(self):
    return _MEDCouplingCorba.MEDCouplingLinearTime_getStringRepr(self)
|
getStringRepr(self) -> string
|
625941bc15fb5d323cde09d6
|
def _encode_value_elem(self, var, value):
    if type(var) in (list, tuple, set):
        for item in var:
            subelem = ET.Element('value')
            value.append(subelem)
            self._encode_value_elem(item, subelem)
    elif type(var) is dict:
        for (key, item) in var.iteritems():
            subelem = ET.Element('value', {'name': key})
            value.append(subelem)
            self._encode_value_elem(item, subelem)
    elif type(var) in (str, unicode, int, float):
        value.text = unicode(var)
    else:
        raise ValueError("Unsupported data type: must be list, tuple," +
                         " set, dict, str, unicode, int or float")
    return value
|
The converse of _parse_value_elem. Take the var variable, encode
it and either insert it as a child of the 'option' Element
argument (list, dict) or set the option.text attribute (scalar).
|
625941bc8e71fb1e9831d679
|
def parse(self):
    parser = SafeConfigParser()
    parser.readfp(StringIO(self.obj.content))
    for section in parser.sections():
        try:
            content = parser.get(section=section, option="deps")
            for n, line in enumerate(content.splitlines()):
                if self.is_marked_line(line):
                    continue
                if line:
                    req = RequirementsTXTLineParser.parse(line)
                    if req:
                        req.dependency_type = self.obj.file_type
                        self.obj.dependencies.append(req)
        except NoOptionError:
            pass
|
:return:
|
625941bce64d504609d7470c
|
def build_fenwick_tree(self):
    for i in range(self.size):
        self.update(i, self.arr[i])
|
build fenwick tree from input array
|
625941bc046cf37aa974cc16
|
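The row's method assumes a surrounding class providing `self.size`, `self.arr`, and a point-update `update`. A minimal self-contained sketch, using a hypothetical `Fenwick` class with the standard 1-indexed tree layout:

    class Fenwick:
        def __init__(self, arr):
            self.arr = arr
            self.size = len(arr)
            self.tree = [0] * (self.size + 1)  # internal tree is 1-indexed
            self.build_fenwick_tree()

        def build_fenwick_tree(self):
            for i in range(self.size):
                self.update(i, self.arr[i])

        def update(self, i, delta):
            i += 1
            while i <= self.size:
                self.tree[i] += delta
                i += i & (-i)

        def prefix_sum(self, i):  # sum of arr[0..i]
            i += 1
            total = 0
            while i > 0:
                total += self.tree[i]
                i -= i & (-i)
            return total

    f = Fenwick([3, 2, -1, 6])
    print(f.prefix_sum(2))  # 4
|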
def _create_http_client():
    global _http_client
    defaults = {'user_agent': USER_AGENT}
    auth_username, auth_password = _credentials
    if auth_username and auth_password:
        defaults['auth_username'] = auth_username
        defaults['auth_password'] = auth_password
    _http_client = httpclient.AsyncHTTPClient(
        force_instance=True, defaults=defaults, io_loop=_io_loop,
        max_clients=_max_clients)
|
Create the HTTP client with authentication credentials if required.
|
625941bc15fb5d323cde09d7
|
def test_jwt_login_custom_response_json(self):
    client = APIClient(enforce_csrf_checks=True)
    response = client.post('/auth-token/', self.data, format='json')
    decoded_payload = utils.jwt_decode_handler(response.data['token'])
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(decoded_payload['username'], self.username)
    self.assertEqual(response.data['user'], self.username)
|
Ensure JWT login view using JSON POST works.
|
625941bc07d97122c4178750
|
def get_min_max(ints):
    result = (None, None)
    if len(ints) >= 1:
        result = (ints[0], ints[0])
        if len(ints) > 1:
            for item in ints[1::]:
                if result[1] < item:
                    result = (result[0], item)
                if result[0] > item:
                    result = (item, result[1])
    return result
|
Return a tuple(min, max) out of list of unsorted integers.
Args:
ints(list): list of integers containing one or more integers
|
625941bcf7d966606f6a9ecd
|
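A quick usage sketch of the function above:

    print(get_min_max([4, 9, 1, 7]))  # (1, 9)
    print(get_min_max([]))            # (None, None)
|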
def create_stuffed_animal(self, **kwargs):
    return EasterBunny(**kwargs)
|
Create an Easter Bunny.
:param kwargs: dict
:return: EasterBunny
|
625941bcbe383301e01b5358
|
def count_rec_2(self, first):
    if first is None:
        return 0
    return self.count_rec_2(first.next) + 1
|
Time complexity: O(N)
Space complexity: O(N)
calls: N+1 times
space: N+1 times
|
625941bc097d151d1a222d28
|
@print_analyze
def find_median(data: list):
    def get_element(_data: list, position=None):
        if len(_data) == 1:
            return _data[0]
        pivot = choice(_data)
        lesser = [elem for elem in _data if elem < pivot]
        greater = [elem for elem in _data if elem > pivot]
        pivots = [elem for elem in _data if elem == pivot]
        if position < len(lesser):
            return get_element(lesser, position)
        elif position < len(lesser) + len(pivots):
            return pivot
        else:
            position -= len(lesser) + len(pivots)
            return get_element(greater, position)
    length = len(data)
    pos = length // 2
    if length % 2 == 1:
        return get_element(data, pos)
    else:
        return (get_element(data, pos - 1) + get_element(data, pos)) / 2
|
Based on Hoare's algorithm but without sorting
google it: "hoare's selection" or "quickselect"
|
625941bc45492302aab5e18c
|
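A usage sketch for the quickselect-based median above, assuming `choice` comes from `random` and that the `print_analyze` decorator (not defined in the row) passes the return value through:

    from random import choice

    print(find_median([7, 1, 5, 3, 9]))  # 5
    print(find_median([7, 1, 5, 3]))     # 4.0
|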
def execute(self, method, data, action=''):
    self._resolve_method(method)
    self._build_post_body(data)
    if action != '':
        action += '/'
    else:
        action = '/'
    self._action = action
    self._connection = httplib.HTTPConnection(self._host)
    self._set_request_url()
    if self._verb == 'GET':
        self._execute_get()
    elif self._verb == 'POST':
        self._execute_post()
    elif self._verb == 'PUT':
        self._execute_put()
    elif self._verb == 'DELETE':
        self._execute_delete()
    response = self._connection.getresponse()
    self._response_code = response.status
    self._response_body = response.read()
    result = json.loads(self._response_body)
    return result
|
Execute a query against the MailerSoft server.
:param method: GET/POST/PUT/DELETE
:type method: str
:param data: Request information for the request body.
:type data: dict
:param action: Where query should be executed.
:type action: str
:returns: Execution result of the query.
:rtype: json
|
625941bc596a897236089996
|
def test_show_when_hide_case(self):
    name = 'GPS定位_隐藏时显示值'
    comp = GPSPage(self.driver, name)
    comp.from_scroll_to('300')
    self.assertTrue(comp.is_gps_elem_invisibility(), msg=name + '检验不通过')
    self.assertTrue(comp.show_when_hide('该控件已隐藏'), msg=name + '检验不通过')
|
Displayed value when hidden.
|
625941bc4527f215b584c326
|
def disable_rate_limit_forward(self):
    self.headers.pop('X-Forwarded-For', None)
    self.headers.pop('X-Forwarded-API-Key', None)
|
Disable IP rate limit.
|
625941bca8370b771705276c
|
def _addGenerator(self, doc):
    from svnmailer import version
    generator = doc.createElement('generator')
    self._addTextElements(generator,
        (u'name', u'svnmailer (cia_xmlrpc notifier)'),
        (u'version', version.string.decode('utf-8')),
        (u'url', u'http://opensource.perlig.de/svnmailer/'),
    )
    doc.documentElement.appendChild(generator)
|
Adds the generator info to the message
:param doc: The message document
:type doc: DOM object
|
625941bc56ac1b37e62640a1
|
def __init__(self, labels, classifiers):
    super(OneVsAllClassifier, self).__init__(labels, classifiers)
|
@param list string: List of labels
@param list (string, Classifier): tuple of (label, classifier); the classifier is the one-vs-all classifier
|
625941bca8370b771705276d
|
def digestAuth(realm, algorithm=MD5, nonce=None, qop=AUTH):
    assert algorithm in SUPPORTED_ALGORITHM
    assert qop in SUPPORTED_QOP
    if nonce is None:
        nonce = calculateNonce(realm, algorithm)
    return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
        realm, nonce, algorithm, qop)
|
Challenges the client for a Digest authentication.
|
625941bc3539df3088e2e217
|
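A usage sketch, assuming the module constants are the strings MD5 = 'MD5' and AUTH = 'auth' (plausible but not shown in the row); passing an explicit nonce keeps the call self-contained instead of relying on calculateNonce:

    print(digestAuth('example.org', nonce='deadbeef'))
    # Digest realm="example.org", nonce="deadbeef", algorithm="MD5", qop="auth"
|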
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    if created:
        Profile.objects.create(user=instance)
|
This function creates a Profile every time a User is created.
|
625941bcd99f1b3c44c67461
|
def serialize(root):
    return root.serialize()
|
Serialize the tree into a string
Example input:
Node("bob")
Node("sally") Node("jeff")
Example output:
{val: "bob", left: {val: "sally"}, right: {val: "jeff"}}
|
625941bca17c0f6771cbdf1f
|
def socket_send(self, prog):
    msg = "No message from robot"
    try:
        self.c.send(str.encode(prog))
        if prog[-3] == '0':
            msg = bytes.decode(self.c.recv(1024))
            if msg == "No message from robot" or msg == '':
                print(".......................Robot disconnected :O.......................")
                input("press enter to continue")
    except socket.error as socketerror:
        print(".......................Some kind of error :(.......................")
        input("press enter to continue")
    return msg
|
Low-level socket communication with the robot.
|
625941bc1f5feb6acb0c4a20
|
def predict(self, X_predict):
    assert self._X_train is not None and self._y_train is not None, \
        "must fit before predict!"
    assert X_predict.shape[1] == self._X_train.shape[1], \
        "the feature number of X_predict must be equal to X_train"
    y_predict = [self._predict(x) for x in X_predict]
    return y_predict
|
Given a dataset X_predict to be predicted, return a result vector of predictions for X_predict.
|
625941bca8ecb033257d2fa2
|
def isDirty(self):
    return self._dirty
|
Returns whether or not the scene is dirty and thus needs a rebuild.
:return <bool>
|
625941bc6fb2d068a760ef66
|
def get_entry_text(entry, conf):
    with open(conf) as fdesc:
        text = fdesc.read()
        return re.search(".*( +<entry name=\"%s\".*?</entry>)" % entry,
                         text, re.DOTALL).group(1)
|
Get an entry text from the xml configuration file
|
625941bc50485f2cf553cc65
|
def get_user_settings_from_username(username):
    user_model = user_models.UserSettingsModel.get_by_normalized_username(
        UserSettings.normalize_username(username))
    if user_model is None:
        return None
    else:
        return get_user_settings(user_model.id)
|
Gets the user settings for a given username.
Returns None if the user is not found.
|
625941bc3cc13d1c6d3c724e
|
def load_config(only_defaults=False, override=None):
    data = DEFAULTS | INTERNAL_DEFAULTS
    if not only_defaults:
        path = get_config_path()
        config = _sanitize_data(_parse_config(path))
        data.update(config)
        env_override = os.environ.get('INDICO_CONF_OVERRIDE')
        if env_override:
            data.update(_sanitize_data(ast.literal_eval(env_override)))
        resolved_path = resolve_link(path) if os.path.islink(path) else path
        resolved_path = None if resolved_path == os.devnull else resolved_path
        data['CONFIG_PATH'] = path
        data['CONFIG_PATH_RESOLVED'] = resolved_path
        if resolved_path is not None:
            data['LOGGING_CONFIG_PATH'] = os.path.join(os.path.dirname(resolved_path),
                                                       data['LOGGING_CONFIG_FILE'])
    if override:
        data.update(_sanitize_data(override, allow_internal=True))
    _postprocess_config(data)
    return ImmutableDict(data)
|
Load the configuration data.
:param only_defaults: Whether to load only the default options,
ignoring any user-specified config file
or environment-based overrides.
:param override: An optional dict with extra values to add to
the configuration. Any values provided here
will override values from the config file.
|
625941bcd10714528d5ffbac
|
def register_callback(self, callback=lambda *args, **kwargs: None):
    self._callbacks.append(callback)
|
Register a callback to notify updates to the hub state. The callback MUST be safe to call
from the event loop. The nobo instance is passed to the callback function. Limit callbacks
to read state.
:param callback: a callback method
|
625941bcbe7bc26dc91cd4d1
|
def try_exit(): <NEW_LINE> <INDENT> global IS_CLOSING <NEW_LINE> if IS_CLOSING: <NEW_LINE> <INDENT> tornado.ioloop.IOLoop.instance().stop() <NEW_LINE> logging.info('exit success')
|
Server exit function: stop the Tornado IOLoop once IS_CLOSING has been set
|
625941bc4c3428357757c1f6
|
def spawnve(*args,**kw): <NEW_LINE> <INDENT> pass
|
spawnve(mode, path, args, env)
Execute the program 'path' in a new process.
mode: mode of process creation
path: path of executable file
args: tuple or list of arguments
env: dictionary of strings mapping to strings
|
625941bcd99f1b3c44c67462
|
def solve(grid): <NEW_LINE> <INDENT> grid_val = grid_values(grid) <NEW_LINE> return reduce_puzzle(grid_val)
|
Find the solution to a Sudoku grid.
Args:
grid(string): a string representing a sudoku grid.
Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
Returns:
The dictionary representation of the final sudoku grid. False if no solution exists.
|
625941bc8a349b6b435e8040
|
def poly_from_zeros(z): <NEW_LINE> <INDENT> if len(z) == 0: <NEW_LINE> <INDENT> return [1] <NEW_LINE> <DEDENT> p = [1, -z[0]] <NEW_LINE> for k in range(1, len(z)): <NEW_LINE> <INDENT> p = _convolve(p, [1, -z[k]]) <NEW_LINE> <DEDENT> return p
|
Convert the zeros of a polynomial to the coefficients.
Coefficients are ordered from highest degree term to lowest.
The leading coefficient will be 1.
This is the same operation as performed by `numpy.poly`.
Examples
--------
Convert the zeros [2, 3] to the polynomial coefficients,
(x - 2)*(x - 3) = x**2 - 5*x + 6
>>> poly_from_zeros([2, 3])
[1, -5, 6]
|
625941bc0a366e3fb873e6e4
|
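poly_from_zeros calls a _convolve helper that is not shown. A minimal pure-Python sketch of polynomial coefficient convolution that is consistent with the doctest above (this body is an assumption, not the original helper):

def _convolve(a, b):
    # Multiply two polynomials given as coefficient lists, highest degree first.
    out = [0] * (len(a) + len(b) - 1)
    for i, ai in enumerate(a):
        for j, bj in enumerate(b):
            out[i + j] += ai * bj
    return out

# (x - 2)*(x - 3) = x**2 - 5*x + 6
print(_convolve([1, -2], [1, -3]))  # [1, -5, 6]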
def test_missing_input(self): <NEW_LINE> <INDENT> with self.assertRaises(PipelineRuntimeError): <NEW_LINE> <INDENT> self.pipeline.run_pipeline( self.bad_data_missing_input, print_summary=False)
|
Check that pipeline won't run with missing input
|
625941bc4428ac0f6e5ba6be
|
def get_lr(self): <NEW_LINE> <INDENT> cycle = math.floor(1 + self.last_epoch / self.total_size) <NEW_LINE> x = 1. + self.last_epoch / self.total_size - cycle <NEW_LINE> if x <= self.step_ratio: <NEW_LINE> <INDENT> scale_factor = x / self.step_ratio <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> scale_factor = (x - 1) / (self.step_ratio - 1) <NEW_LINE> <DEDENT> lrs = [] <NEW_LINE> for base_lr, max_lr in zip(self.base_lrs, self.max_lrs): <NEW_LINE> <INDENT> base_height = (max_lr - base_lr) * scale_factor <NEW_LINE> if self.scale_mode == 'cycle': <NEW_LINE> <INDENT> lr = base_lr + base_height * self.scale_fn(cycle) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lr = base_lr + base_height * self.scale_fn(self.last_epoch) <NEW_LINE> <DEDENT> lrs.append(lr) <NEW_LINE> <DEDENT> if self.cycle_momentum: <NEW_LINE> <INDENT> momentums = [] <NEW_LINE> for base_momentum, max_momentum in zip(self.base_momentums, self.max_momentums): <NEW_LINE> <INDENT> base_height = (max_momentum - base_momentum) * scale_factor <NEW_LINE> if self.scale_mode == 'cycle': <NEW_LINE> <INDENT> momentum = max_momentum - base_height * self.scale_fn(cycle) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> momentum = max_momentum - base_height * self.scale_fn(self.last_epoch) <NEW_LINE> <DEDENT> momentums.append(momentum) <NEW_LINE> <DEDENT> for param_group, momentum in zip(self.optimizer.param_groups, momentums): <NEW_LINE> <INDENT> param_group['momentum'] = momentum <NEW_LINE> <DEDENT> <DEDENT> return lrs
|
Calculates the learning rate at batch index. This function treats
`self.last_epoch` as the last batch index.
If `self.cycle_momentum` is ``True``, this function has a side effect of
updating the optimizer's momentum.
|
625941bc8da39b475bd64e41
|
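A worked example of the scale factor in get_lr, assuming total_size=10, step_ratio=0.5 (a symmetric triangular cycle), a single parameter group, and no scale_fn attenuation:

import math

total_size, step_ratio = 10, 0.5
base_lr, max_lr = 0.001, 0.01
for last_epoch in [0, 2, 5, 7, 10]:
    cycle = math.floor(1 + last_epoch / total_size)
    x = 1. + last_epoch / total_size - cycle
    scale = x / step_ratio if x <= step_ratio else (x - 1) / (step_ratio - 1)
    print(last_epoch, round(base_lr + (max_lr - base_lr) * scale, 4))
# 0 -> 0.001, 2 -> 0.0046, 5 -> 0.01, 7 -> 0.0064, 10 -> 0.001:
# the lr climbs linearly to max_lr over the first half of each cycle, then falls back.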
def main(): <NEW_LINE> <INDENT> data = input() <NEW_LINE> data = data.split() <NEW_LINE> print(iter_power(float(data[0]), int(data[1])))
|
Main function: read a base and an exponent from stdin and print iter_power(base, exponent)
|
625941bc91f36d47f21ac3bc
|
def __copy__(self): <NEW_LINE> <INDENT> return GradientBasedOptimiser(initial_lr=self._ini_lr)
|
Create a new optimiser with the same parameters, but without any internal state
|
625941bc21a7993f00bc7bb7
|
def run(self, results_dir: str, checker_name: str, **kwargs: Any) -> Optional[Dict[str, Any]]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> checker = cast(Type[BaseChecker], self.plugins[checker_name]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if checker.CATEGORY != kwargs.get('category'): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not checker.is_available(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if checker.CATEGORY == CheckerCategory.SOURCE: <NEW_LINE> <INDENT> logger.info("Running checks on source files using '%s'", checker_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info("Running checks on packages using '%s'", checker_name) <NEW_LINE> <DEDENT> return checker.run_check(results_dir, **kwargs)
|
Runs a particular checker and returns the results.
Args:
results_dir: Path to a directory in which the checker
should store the results.
checker_name: Name of the checker to be run.
Returns:
Results of the checker, or None if the checker doesn't exist, is not
available, or doesn't match the requested category.
|
625941bcbe7bc26dc91cd4d2
|
def _conditions(self, full_path, environ): <NEW_LINE> <INDENT> mtime = stat(full_path).st_mtime <NEW_LINE> return str(mtime), formatdate(mtime)
|
Return a tuple of etag, last_modified by mtime from stat.
|
625941bc8c3a87329515828b
|
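What _conditions produces for a file, assuming formatdate is email.utils.formatdate (the file here is a stand-in created for the demo):

from os import stat
from email.utils import formatdate

open('example.txt', 'w').close()  # create a stand-in file
mtime = stat('example.txt').st_mtime
etag, last_modified = str(mtime), formatdate(mtime)
print(etag)           # e.g. '1700000000.0'
print(last_modified)  # e.g. 'Tue, 14 Nov 2023 22:13:20 GMT'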
def addRandomConnections(self): <NEW_LINE> <INDENT> edges = self.edges <NEW_LINE> newNode = self.nodes[-1] <NEW_LINE> randomNodes = [] <NEW_LINE> count = 0 <NEW_LINE> while count < self.m: <NEW_LINE> <INDENT> oldNodes = self.nodes[:-1] <NEW_LINE> randomNode = random.choice(oldNodes) <NEW_LINE> if randomNode not in randomNodes: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> randomNodes.append(randomNode) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> for randomNode in randomNodes: <NEW_LINE> <INDENT> self.edges.append((newNode, randomNode)) <NEW_LINE> self.newEdges.append((newNode, randomNode))
|
Adds new edges via Phase 2
|
625941bc3eb6a72ae02ec3a1
|
def create_estimator(experiment_dir, hparams, decode_length=20): <NEW_LINE> <INDENT> if FLAGS.worker_gpu > 1: <NEW_LINE> <INDENT> strategy = tf.distribute.MirroredStrategy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> strategy = None <NEW_LINE> <DEDENT> config = tf.estimator.RunConfig( save_checkpoints_steps=1000, save_summary_steps=300, train_distribute=strategy) <NEW_LINE> model_fn = seq2act_estimator.create_model_fn( hparams, seq2act_estimator.compute_additional_loss if hparams.use_additional_loss else None, seq2act_estimator.compute_additional_metric if hparams.use_additional_loss else None, compute_seq_accuracy=True, decode_length=decode_length) <NEW_LINE> if FLAGS.reference_checkpoint: <NEW_LINE> <INDENT> latest_checkpoint = tf.train.latest_checkpoint( FLAGS.reference_checkpoint) <NEW_LINE> ws = tf.estimator.WarmStartSettings( ckpt_to_initialize_from=latest_checkpoint, vars_to_warm_start=["embed_tokens/task_embed_w", "encode_decode/.*", "output_layer/.*"]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ws = None <NEW_LINE> <DEDENT> estimator = tf.estimator.Estimator( model_fn=model_fn, model_dir=experiment_dir, config=config, warm_start_from=ws) <NEW_LINE> return estimator
|
Creates an estimator with given hyper parameters.
|
625941bc7d43ff24873a2b6a
|
def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkScalarImageToRunLengthMatrixFilterIUL2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj
|
New() -> itkScalarImageToRunLengthMatrixFilterIUL2
Create a new object of the class itkScalarImageToRunLengthMatrixFilterIUL2 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkScalarImageToRunLengthMatrixFilterIUL2.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkScalarImageToRunLengthMatrixFilterIUL2.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
|
625941bcc4546d3d9de728fe
|
def CheckDependecies(self, request, context): <NEW_LINE> <INDENT> if self.dut_check != 0: <NEW_LINE> <INDENT> return vsperf_pb2.StatusReply(message="DUT-Host is not Connected [!]" "\nMake sure to establish connection with" " DUT-Host.") <NEW_LINE> <DEDENT> packages = ['python34-tkinter', 'sysstat', 'bc'] <NEW_LINE> for pkg in packages: <NEW_LINE> <INDENT> pkg_check_cmd = "rpm -q {}".format(pkg) <NEW_LINE> pkg_cmd_response = self.client.execute(pkg_check_cmd)[0] <NEW_LINE> if pkg_cmd_response == 1: <NEW_LINE> <INDENT> install_pkg_cmd = "echo '{}' | sudo -S yum install -y {}".format( self.pwd, pkg) <NEW_LINE> self.client.run(install_pkg_cmd, pty=True) <NEW_LINE> <DEDENT> <DEDENT> return vsperf_pb2.StatusReply(message="Python34-tkinter, sysstat and bc Packages" "are now Installed")
|
Check for and install the required packages on the DUT
|
625941bc85dfad0860c3ad26
|
def get_albums(self): <NEW_LINE> <INDENT> return self._albums
|
Return albums
@return albums as [int]
|
625941bcd7e4931a7ee9dde9
|
def test_vr_3_single_invalid_address(app, db_session, client): <NEW_LINE> <INDENT> registrant = create_registrant(db_session) <NEW_LINE> with client.session_transaction() as http_session: <NEW_LINE> <INDENT> http_session['session_id'] = str(registrant.session_id) <NEW_LINE> <DEDENT> form_payload = { 'addr': "123 Fake St", 'city': "FakeTown", 'state': "NA", 'zip': '00000' } <NEW_LINE> response = client.post('/vr/address', data=form_payload, follow_redirects=False) <NEW_LINE> redirect_data = response.data.decode() <NEW_LINE> assert response.status_code == 302 <NEW_LINE> assert ('/vr/party' in redirect_data) == True <NEW_LINE> updated_registrant = db_session.query(Registrant).filter_by(session_id = registrant.session_id).first() <NEW_LINE> assert updated_registrant.registration_value.get('addr') == '123 Fake St' <NEW_LINE> assert 'validated_addresses' in updated_registrant.registration_value <NEW_LINE> assert updated_registrant.registration_value['validated_addresses'] == False
|
An existing user provides an invalid address, but no previous address or mailing address. Should still redirect.
|
625941bc656771135c3eb738
|
def b2d_conformal(bary, base=np.exp(2j/3*np.arange(3))): <NEW_LINE> <INDENT> return t2d_conformal(b2r(bary,base))
|
Wrapper for t2d_conformal to put it in terms of barycentric coordinates.
|
625941bc507cdc57c6306ba1
|
def __init__(self, gate_setting_tree): <NEW_LINE> <INDENT> self.GateSettingTree = gate_setting_tree <NEW_LINE> self.EntranceHost = self.GateSettingTree["EntranceHost"] <NEW_LINE> self.EntrancePort = self.GateSettingTree["EntrancePort"] <NEW_LINE> self.GateMapping = self.GateSettingTree["GateMapping"] <NEW_LINE> print(self.GateSettingTree) <NEW_LINE> self.__print_msg__()
|
Parameters
----------
gate_setting_tree : dict (non-SSL mode), of the form:
{
"EntranceHost": ip address or domain name,
"EntrancePort": port num,
"GateMapping": {
"Host": ip address or domain name ,
"Port": port num,
}
}
|
625941bc1f037a2d8b9460cc
|
def button_data_get(subject: str, subject_data: tuple) -> list: <NEW_LINE> <INDENT> return dh.button_data_get(subject_data, dh.columns_with_button_get(subject))
|
Returns the modal window data needed to handle client-side events.
|
625941bc23e79379d52ee434
|
def render(template_path, context, generate_file=True, begin_comment="#", end_comment=''): <NEW_LINE> <INDENT> path, filename = os.path.split(template_path) <NEW_LINE> content = jinja2.Environment( loader=jinja2.FileSystemLoader(path) ).get_template(filename).render(context) <NEW_LINE> conf_filename, _ = os.path.splitext(filename) <NEW_LINE> conf_path = os.path.join(path, conf_filename) <NEW_LINE> content = "{} {}{}\n{} {}{}\n\n{}".format( begin_comment, "Generated from Jinja template", end_comment, begin_comment, "DO NOT EDIT THIS FILE BY HAND -- YOUR CHANGES WILL BE OVERWRITTEN", end_comment, content) <NEW_LINE> if generate_file: <NEW_LINE> <INDENT> with open(conf_path, "w") as desc: <NEW_LINE> <INDENT> desc.write(content) <NEW_LINE> <DEDENT> <DEDENT> return content
|
From a Jinja template (with extension .template) and a context,
generate a configuration and, if requested, create a file (without the
.template extension). Add a header to this configuration to mark the
file as generated.
Parameters
----------
template_path : string
Path file to Jinja template (with extension .template).
context : dict
(key, value) parameters to feed to the Jinja template.
generate_file : bool (optional, by default True)
If true, persist the generated configuration to a file in the same
directory as the Jinja template (without the .template extension).
begin_comment : string (optional, by default #)
begin token for inline comment in generated configuration
end_comment : string (optional, by default empty)
end token for inline comment in generated configuration
Returns
-------
string
Content of generated configuration file
|
625941bc6e29344779a624e2
|
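A hypothetical end-to-end use of render (requires jinja2; the template path, its contents, and the context keys are all made up):

import os, tempfile

tmpdir = tempfile.mkdtemp()
path = os.path.join(tmpdir, "app.conf.template")
with open(path, "w") as f:
    f.write("server_name {{ server_name }};\nlisten {{ port }};\n")

content = render(path, {"server_name": "example.org", "port": 8080})
print(content)
# '# Generated from Jinja template\n# DO NOT EDIT THIS FILE BY HAND ...\n\n
#  server_name example.org;\nlisten 8080;\n'
# app.conf is also written next to the template because generate_file=True.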
def GetPoseF_GTF(cfg,dlc_cfg, sess, inputs, outputs,cap,nframes,batchsize): <NEW_LINE> <INDENT> PredictedData = np.zeros((nframes, 3 * len(dlc_cfg['all_joints_names']))) <NEW_LINE> batch_ind = 0 <NEW_LINE> batch_num = 0 <NEW_LINE> ny,nx=int(cap.get(4)),int(cap.get(3)) <NEW_LINE> if cfg['cropping']: <NEW_LINE> <INDENT> ny,nx=checkcropping(cfg,cap) <NEW_LINE> <DEDENT> pose_tensor = predict.extract_GPUprediction(outputs, dlc_cfg) <NEW_LINE> frames = np.empty((batchsize, ny, nx, 3), dtype='ubyte') <NEW_LINE> pbar=tqdm(total=nframes) <NEW_LINE> counter=0 <NEW_LINE> step=max(10,int(nframes/100)) <NEW_LINE> while(cap.isOpened()): <NEW_LINE> <INDENT> if counter%step==0: <NEW_LINE> <INDENT> pbar.update(step) <NEW_LINE> <DEDENT> ret, frame = cap.read() <NEW_LINE> if ret: <NEW_LINE> <INDENT> frame=cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) <NEW_LINE> if cfg['cropping']: <NEW_LINE> <INDENT> frames[batch_ind] = img_as_ubyte(frame[cfg['y1']:cfg['y2'],cfg['x1']:cfg['x2']]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> frames[batch_ind] = img_as_ubyte(frame) <NEW_LINE> <DEDENT> if batch_ind==batchsize-1: <NEW_LINE> <INDENT> pose = sess.run(pose_tensor, feed_dict={inputs: frames}) <NEW_LINE> pose[:, [0,1,2]] = pose[:, [1,0,2]] <NEW_LINE> pose=np.reshape(pose,(batchsize,-1)) <NEW_LINE> PredictedData[batch_num*batchsize:(batch_num+1)*batchsize, :] = pose <NEW_LINE> batch_ind = 0 <NEW_LINE> batch_num += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> batch_ind+=1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> nframes = counter <NEW_LINE> print("Detected frames: ", nframes) <NEW_LINE> if batch_ind>0: <NEW_LINE> <INDENT> pose = sess.run(pose_tensor, feed_dict={inputs: frames}) <NEW_LINE> pose[:, [0,1,2]] = pose[:, [1,0,2]] <NEW_LINE> pose=np.reshape(pose,(batchsize,-1)) <NEW_LINE> PredictedData[batch_num*batchsize:batch_num*batchsize+batch_ind, :] = pose[:batch_ind,:] <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> counter+=1 <NEW_LINE> <DEDENT> pbar.close() <NEW_LINE> return PredictedData,nframes
|
Batchwise prediction of pose
|
625941bc38b623060ff0acbb
|
def normalize(string): <NEW_LINE> <INDENT> return string.replace("_", "").replace("-", "").replace(".", "").upper()
|
Normalize string
:param string: string to normalize
:return: the string normalized
|
625941bccad5886f8bd26eaf
|
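Two illustrative calls to normalize:

print(normalize("foo_bar-1.0"))  # 'FOOBAR10'
print(normalize("a-b_c.d"))      # 'ABCD'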
def tcp_rst(self): <NEW_LINE> <INDENT> return self.tcp_flags & dpkt.tcp.TH_RST != 0
|
Does the current packet have the TCP RST flag set?
|
625941bc76d4e153a657e9fd
|
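A minimal sketch of the same flag test using dpkt directly (requires the dpkt package; the segment here is synthetic rather than parsed from a capture):

import dpkt

tcp = dpkt.tcp.TCP(flags=dpkt.tcp.TH_RST)  # bare TCP segment with RST set
print(tcp.flags & dpkt.tcp.TH_RST != 0)    # True, i.e. tcp_rst would return True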
def get_exception(test_meth_elem): <NEW_LINE> <INDENT> exception = {} <NEW_LINE> for exc in test_meth_elem.iter("exception"): <NEW_LINE> <INDENT> exception["classname"] = exc.attrib["class"] <NEW_LINE> for child in exc: <NEW_LINE> <INDENT> txt = stringify_arg(child.text) <NEW_LINE> if child.tag == "message": <NEW_LINE> <INDENT> exception["message"] = txt <NEW_LINE> <DEDENT> if child.tag == "full-stacktrace": <NEW_LINE> <INDENT> exception["stack_trace"] = txt <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return exception
|
Gets any exception information from a test_method element
:param test_meth_elem: XML element describing a single test method
:return: dict with the exception's class name, message and stack trace (when present)
|
625941bc91af0d3eaac9b8e2
|
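A toy TestNG-style XML fragment matching the tags iterated by get_exception, with the result it would extract (assuming stringify_arg is a plain text-cleanup helper):

import xml.etree.ElementTree as ET

xml = '''<test-method name="shouldAdd">
  <exception class="java.lang.AssertionError">
    <message>expected 1 but was 2</message>
    <full-stacktrace>at Foo.bar(Foo.java:42)</full-stacktrace>
  </exception>
</test-method>'''
elem = ET.fromstring(xml)
# get_exception(elem) would return roughly:
# {'classname': 'java.lang.AssertionError',
#  'message': 'expected 1 but was 2',
#  'stack_trace': 'at Foo.bar(Foo.java:42)'}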
def QLM_score(fetched_index, query_term_freq): <NEW_LINE> <INDENT> DOC_SCORE_QLM = {} <NEW_LINE> C = 0 <NEW_LINE> lambda_value = 0.35 <NEW_LINE> for doc in DOC_TOKEN_COUNT: <NEW_LINE> <INDENT> DOC_SCORE_QLM[doc] = 0 <NEW_LINE> C = C + DOC_TOKEN_COUNT[doc] <NEW_LINE> <DEDENT> for query_term in query_term_freq: <NEW_LINE> <INDENT> cq = 0 <NEW_LINE> for doc in fetched_index[query_term]: <NEW_LINE> <INDENT> cq = cq + fetched_index[query_term][doc] <NEW_LINE> <DEDENT> for doc in fetched_index[query_term]: <NEW_LINE> <INDENT> D = DOC_TOKEN_COUNT[doc] <NEW_LINE> fq = fetched_index[query_term][doc] <NEW_LINE> first_part = float(1-lambda_value) * (fq / D) <NEW_LINE> second_part = float(lambda_value) * (cq / C) <NEW_LINE> DOC_SCORE_QLM[doc] += math.log(first_part + second_part) <NEW_LINE> <DEDENT> <DEDENT> return DOC_SCORE_QLM
|
Computes QLM scores for all documents in the given index.
Returns a map of the document ids with their QLM scores.
|
625941bc3617ad0b5ed67dc5
|
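QLM_score implements Jelinek-Mercer smoothed query likelihood: each query term that matches a document adds log((1 - λ)·fq/D + λ·cq/C) to that document's score, with fq, D, cq, C as in the code. A worked per-term contribution with made-up counts:

import math

lam = 0.35
fq, D = 3, 100     # term frequency in the document, document length
cq, C = 50, 10000  # term frequency in the collection, collection length
contribution = math.log((1 - lam) * (fq / D) + lam * (cq / C))
print(round(contribution, 4))  # log(0.0195 + 0.00175) = log(0.02125) ≈ -3.8514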
def test_html_and_multiple_ranges_spanning_tags(self): <NEW_LINE> <INDENT> self.assertEqual( highlightregion('foo<span class="xy">abc</span>' '<span class="z">12</span>3', [(0, 6), (7, 9)]), '<span class="hl">foo</span><span class="xy">' '<span class="hl">abc</span></span><span class="z">1' '<span class="hl">2</span></span><span class="hl">3</span>')
|
Testing highlightregion with HTML string and multiple ranges
spanning tags
|
625941bccb5e8a47e48b797b
|
def reverseString(self, s: List[str]) -> None: <NEW_LINE> <INDENT> def reverse(start, end): <NEW_LINE> <INDENT> if start >= end: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> s[start], s[end] = s[end], s[start] <NEW_LINE> reverse(start + 1, end - 1) <NEW_LINE> <DEDENT> if len(s) < 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> start = 0 <NEW_LINE> end = len(s) - 1 <NEW_LINE> reverse(start, end)
|
Do not return anything, modify s in-place instead.
|
625941bc38b623060ff0acbc
|
def test_slice_zero(self): <NEW_LINE> <INDENT> rec = self.record <NEW_LINE> self.assertEqual(len(rec), 26) <NEW_LINE> self.assertEqual(len(rec[2:-2]), 22) <NEW_LINE> self.assertEqual(len(rec[5:2]), 0) <NEW_LINE> self.assertEqual(len(rec[5:2][2:-2]), 0)
|
Test zero-length slices, including reversed bounds such as rec[5:2]
|
625941bcbe8e80087fb20b15
|
def post_incident(self): <NEW_LINE> <INDENT> token = self.get_jwt_token_as_user() <NEW_LINE> response = self.client.post( "/api/v2/incident", data=json.dumps(self.create_incident), headers={'content-type': 'application/json', 'Authorization': f'Bearer {token}'} ) <NEW_LINE> return response
|
Create an incident via the API and return the response
|
625941bc287bf620b61d393a
|
def process(self, input_string): <NEW_LINE> <INDENT> result = self._process(input_string + self.done_code, self.done_string) <NEW_LINE> self.child.stdin.write('\n') <NEW_LINE> return result
|
Process the input string and return a dictionary with 'dvi', 'stdout', 'logfile' and
'stderr' entries.
|
625941bc0fa83653e4656e89
|
def reverse (ss): <NEW_LINE> <INDENT> ss_reverse = "" <NEW_LINE> l = len(ss) - 1 <NEW_LINE> while l >= 0: <NEW_LINE> <INDENT> ss_reverse += ss[l] <NEW_LINE> l -= 1 <NEW_LINE> <DEDENT> return ss_reverse
|
Reverses a string
|
625941bc167d2b6e31218a64
|