code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def between(value1, value2, n, p):
    """Plot a standard normal curve and annotate the probability mass lying
    between two raw values of a Binomial(n, p) sample proportion.

    :param value1: lower raw value (on the proportion scale)
    :param value2: upper raw value (on the proportion scale)
    :param n: sample size
    :param p: success probability; used as the mean of the sampling distribution
    """
    mean = p
    var = (1 / n) * p * (1 - p)  # variance of the sample proportion
    stddev = var ** 0.5
    # Display three standard deviations either side of the mean (in z units).
    minimum = standardize(mean - 3 * stddev, mean, stddev)
    maximum = standardize(mean + 3 * stddev, mean, stddev)
    value1 = round(standardize(value1, mean, stddev), 2)
    value2 = round(standardize(value2, mean, stddev), 2)
    x = np.linspace(minimum, maximum, num=100)
    constant = 1.0 / np.sqrt(2 * np.pi)
    pdf_normal_distribution = constant * np.exp((-x ** 2) / 2.0)
    fig, ax = plt.subplots(figsize=(10, 5))
    ax.plot(x, pdf_normal_distribution)
    ax.set_ylim(0)
    ax.set_title('Normal Distribution', size=20)
    ax.set_ylabel('Probability Density', size=20)
    plt.plot([value1, value1], [0, 0.35], color='red', linestyle='-')
    plt.plot([value2, value2], [0, 0.35], color='red', linestyle='-')
    # Bug fix: np.NINF was removed in NumPy 2.0; -np.inf is the supported
    # spelling and behaves identically on older versions.
    v1 = round(quad(normalProbabilityDensity, -np.inf, value1)[0], 4)
    v2 = round(quad(normalProbabilityDensity, -np.inf, value2)[0], 4)
    plt.text((value1 + value2) / 2, 0.2, "Area between = " + str((0.5 - v1) + (v2 - 0.5)))
    plt.show()
Plots a normal density curve and denotes the location of the value we are trying to find the probability of. Note: The probability is initially set to show the probability it is LESS THAN value and not include a continuity correction Code base: https://towardsdatascience.com/how-to-use-and-create-a-z-table-standard-normal-table-240e21f36e53
625941bc004d5f362079a215
def row1_invariant(self, target_col):
    """Check whether the puzzle satisfies the row-one invariant at the given
    column (col > 1).

    Returns a boolean.
    """
    # The zero tile must currently sit at (1, target_col).
    if self.current_position(0, 0) != (1, target_col):
        return False
    # Every tile to the right of target_col in rows 0 and 1 must be solved.
    for col in range(target_col + 1, self.get_width()):
        if (self.current_position(0, col) != (0, col)
                or self.current_position(1, col) != (1, col)):
            return False
    # All rows below row 1 must be completely solved.
    return all(
        self.current_position(row, col) == (row, col)
        for row in range(2, self.get_height())
        for col in range(self.get_width())
    )
Check whether the puzzle satisfies the row one invariant at the given column (col > 1) Returns a boolean
625941bcbe8e80087fb20b27
def CreateSubtransactionSetElements():
    """Create a <subtransaction_sets> element containing one child element
    per known subtransaction set.

    :return: the populated parent Element
    """
    subtransactionSetsElement = Element("subtransaction_sets")
    for subtransaction_set in SubtransactionSets.all():
        # Delegate per-set serialization to the singular helper.
        subtransactionSetElement = CreateSubtransactionSetElement(subtransaction_set)
        subtransactionSetsElement.append(subtransactionSetElement)
    return subtransactionSetsElement
Create Subtransaction Set Elements to the parent element
625941bc8e7ae83300e4aeab
def average(model, models):
    """Average the parameters of ``models`` into ``model``, in place.

    For each aligned parameter group, the parameters of the source models
    are averaged element-wise and copied into the corresponding parameter
    of ``model``.

    :param model: destination model; its parameters are overwritten
    :param models: list of source models whose parameters are averaged
    """
    for ps in zip(*[m.params() for m in [model] + models]):
        # Bug fix: the original referenced an undefined name ``p`` (NameError)
        # and called ``torch.sum(*ps[1:])``, which passes the second tensor as
        # the ``dim`` argument.  Stack the source parameters and average them.
        ps[0].copy_(torch.stack(ps[1:]).sum(dim=0) / len(ps[1:]))
Average the parameters of several source models into the target model, overwriting the target's parameters with their element-wise mean.
625941bc57b8e32f52483379
def transform(image, boxes, labels, difficulties, split):
    """Apply the detection pipeline's input transforms.

    :param image: PIL image
    :param boxes: bounding boxes for the image
    :param labels: object labels
    :param difficulties: difficulty flags
    :param split: 'TRAIN' or 'TEST'
    :return: (image tensor, boxes, labels, difficulties)
    """
    assert split in {'TRAIN', 'TEST'}
    # ImageNet channel statistics used by the pretrained base network.
    mean = [0.485, 0.456, 0.406]
    std = [0.229, 0.224, 0.225]
    new_image, new_boxes = image, boxes
    new_labels, new_difficulties = labels, difficulties
    # Photometric augmentation is applied to training data only.
    if split == 'TRAIN':
        new_image = photometric_distort(new_image)
    new_image, new_boxes = resize(new_image, new_boxes, dims=(300, 300))
    new_image = FT.to_tensor(new_image)
    new_image = FT.normalize(new_image, mean=mean, std=std)
    return new_image, new_boxes, new_labels, new_difficulties
:param image: :param boxes: :param labels: :param difficulties: :param split: :return:
625941bc498bea3a759b998f
def _create_roles(keystone, timeout=600, poll_interval=10):
    """Create initial roles in Keystone, retrying until it is reachable.

    :param keystone: keystone v2 client
    :param timeout: total seconds to wait for keystone
    :param poll_interval: seconds to wait between keystone checks
    """
    wait_cycles = int(timeout / poll_interval)
    for count in range(wait_cycles):
        try:
            LOG.debug('Creating admin role, try %d.' % count)
            _create_role(keystone, 'admin')
            break
        except (exceptions.ConnectionRefused, exceptions.ServiceUnavailable):
            # Keystone not up yet; back off and retry until the retry budget
            # (timeout / poll_interval cycles) is exhausted.
            LOG.debug('Unable to create, sleeping for %d seconds.' % poll_interval)
            time.sleep(poll_interval)
Create initial roles in Keystone. :param keystone: keystone v2 client :param timeout: total seconds to wait for keystone :param poll_interval: seconds to wait between keystone checks
625941bce5267d203edcdb7f
def __init__(self, sizes):
    """Build a fully-connected network with the given layer widths.

    ``sizes`` lists the number of neurons per layer, e.g. [784, 100, 10]
    for a three-layer network.  Biases and weights are drawn from a
    standard normal distribution; the input layer gets no biases, since
    biases only feed the outputs of later layers.

    :param sizes: list of layer widths, e.g. [784, 100, 10]
    """
    self.num_layers = len(sizes)
    self.sizes = sizes
    # One (n, 1) bias column per non-input layer.
    self.biases = [np.random.randn(width, 1) for width in sizes[1:]]
    # weights[i] has shape (sizes[i+1], sizes[i]) so that w @ x works.
    self.weights = [np.random.randn(n_out, n_in)
                    for n_in, n_out in zip(sizes[:-1], sizes[1:])]
The list ``sizes`` contains the number of neurons in the respective layers of the network. For example, if the list was [2, 3, 1] then it would be a three-layer network, with the first layer containing 2 neurons, the second layer 3 neurons, and the third layer 1 neuron. The biases and weights for the network are initialized randomly, using a Gaussian distribution with mean 0, and variance 1. Note that the first layer is assumed to be an input layer, and by convention we won't set any biases for those neurons, since biases are only ever used in computing the outputs from later layers. :param sizes: [784, 100, 10]
625941bc32920d7e50b280ac
def dup2rgb(single_channel_img):
    """Convert a single-channel image tensor to a 3-channel one by repeating
    the channel three times along dim 0.
    """
    triplicated = torch.cat(
        (single_channel_img, single_channel_img, single_channel_img)
    )
    return triplicated
Convert a black-white image to RGB image by duplicate channel 3 times.
625941bc30c21e258bdfa37b
def _on_act(self, packet: Packet, channel_info: ChannelInfo):
    """Handle an action packet sent from an agent.

    On a final submit, any files attached to the submission are first
    downloaded into the agent's data directory; the packet is then queued
    and the agent's has_action event is set to wake any waiting consumer.
    """
    agent = self.agents[packet.sender_id].agent
    if packet.data.get("MEPHISTO_is_submit") is True:
        data_files = packet.data.get("files")
        if data_files is not None:
            save_dir = agent.get_data_dir()
            architect = channel_info.job.architect
            for f_obj in data_files:
                architect.download_file(f_obj["filename"], save_dir)
    # Queue the action and signal consumers waiting on has_action.
    agent.pending_actions.append(packet)
    agent.has_action.set()
Handle an action as sent from an agent
625941bc4a966d76dd550eec
def getMenu(self):
    """Return the button's menu, lazily creating and attaching an empty
    qt.QMenu on first access.

    :rtype: qt.QMenu
    """
    existing = self.__button.menu()
    if existing is not None:
        return existing
    created = qt.QMenu(self.__button)
    self.__button.setMenu(created)
    return created
Returns the menu. :rtype: qt.QMenu
625941bc0383005118ecf4c4
def choose_cluster(clusters_list, cluster_id, cluster_name):
    """Choose the cluster matching the given identifier from clusters_list.

    If ``cluster_name`` is provided it is used as the identifier, otherwise
    ``cluster_id`` is.  Returns None implicitly when nothing matches.

    NOTE(review): the loop always compares ``cluster.name`` against the
    identifier, even when the identifier is a cluster_id, and it overwrites
    the matched cluster's ``cluster_id`` with the target id -- confirm both
    behaviors are intended.
    """
    LOGGER.debug("Name provided: {}".format(cluster_name))
    if not cluster_name:
        identifier = cluster_id
    else:
        identifier = cluster_name
        LOGGER.debug("Cluster identified by name: {}".format(identifier))
    LOGGER.debug("Target cluster id: {}".format(cluster_id))
    for cluster in clusters_list:
        LOGGER.debug("Current cluster id: {}".format(cluster.cluster_id))
        LOGGER.debug("Current cluster name: {}".format(cluster.name))
        LOGGER.debug("Identifier: {}".format(identifier))
        if cluster.name == identifier:
            LOGGER.debug("Found cluster {}".format(cluster_id))
            cluster.cluster_id = cluster_id
            return cluster
Choose cluster with the correct identifier from clusters_list If the cluster has a name, use it as identifier, else use cluster_id
625941bc0a50d4780f666d6f
def heidelberger_welch(self, pvalue=0.05, eps=0.1, fp=sys.stdout):
    """Run the Heidelberger-Welch stationarity and half-width tests via R's
    coda package and write the results as space-separated text to ``fp``.

    :param pvalue: significance level for the stationarity test (default 0.05)
    :param eps: tolerance level for the half-width test (default 0.1)
    :param fp: file-like object the table is written to (default stdout)
    :return: pandas DataFrame of the diagnostic output, indexed by variable
    """
    hw_output = self.coda.heidel_diag(self.codamcmc, eps=eps, pvalue=pvalue)
    sys.stdout.write("Computing Heidelberger Welch stationarity and the half width test, please be patient...\n")
    df = pandas.DataFrame(np.array(hw_output), columns=hw_output.colnames, index=self.labels)
    df.to_csv(fp, sep=" ", na_rep="NaN", index_label="Variable")
    return df
Performs the Heidelberger Welch test for stationarity and half width. :Parameters: - pvalue : Optional pvalue to be used to decide the success or failure of the stationarity test (default=0.05) - eps : Optional argument which sets a tolerance level to decide the success of the halfwidth test (default=0.1) :Returns: - Prints diagnostic text output, can optionally return the quantitative results of the test in an array as well :Examples: >>> from py_coda import mcmc >>> a = mcmc(label_array, mcmc_array, 1) >>> a.heidelberger_welch()
625941bc566aa707497f4457
def write(self, _data):
    """Send raw byte values to the device's USB OUT endpoint.

    Failures are logged rather than raised, so writes are best-effort.

    :param _data: iterable of byte values to send
    """
    LOGGER.debug("Writing data [%s] to device %s", ", ".join((hex(d) for d in _data)), self.__device_id)
    try:
        self.__ep_out.write(_data)
    except usb.core.USBError:
        # Deliberate best-effort: swallow the USB error after logging it.
        LOGGER.error("Could not write data to device %s", self.__device_id)
send data to the device
625941bc009cb60464c63293
def _check_symbol(self, addr, name, symbols):
    """Return True if the symbol is OK to add at ``addr``.

    The addition is skipped when the same name is already recorded at that
    address, or when ``name`` is an object-file name ('.o').  When an
    existing, different symbol would be replaced, a warning is emitted
    unless the old name was an object file or the two names are
    near-identical variants of each other.
    """
    if addr in symbols:
        if name == symbols[addr]:
            return False
        if name.endswith('.o'):
            return False
        oldname = symbols[addr]
        lendiff = abs(len(name) - len(oldname))
        minlen = min(len(name), len(oldname))
        # Suppress the warning for trivially similar names: lengths differ
        # by < 3, both are longer than 3 chars, and one name is a
        # prefix/suffix variant of the other.
        if not (oldname.endswith('.o') or (lendiff < 3 and minlen > 3 and (name.endswith(oldname) or oldname.endswith(name) or name.startswith(oldname) or oldname.startswith(name)))):
            self.warning("replacing '%s' at 0x%x with '%s'" % (oldname, addr, name))
    return True
return True if symbol is OK for addition
625941bc5510c4643540f2cc
def setSessionId(self, *args, **kwargs):
    """setSessionId(self, EString _sessionId)

    SWIG-style wrapper that delegates to the native _VISHNU binding.
    """
    return _VISHNU.Session_setSessionId(self, *args, **kwargs)
setSessionId(self, EString _sessionId)
625941bc97e22403b379ce78
def coco_to_conner_form(boxes: list):
    """Convert COCO-format boxes (x, y, w, h) to corner form (x1, y1, x2, y2).

    :param boxes: [n, 4] nested list, 4 = (x, y, w, h)
    :return: [n, 4] nested list, 4 = (x1, y1, x2, y2)
    """
    arr = np.asarray(boxes)
    top_left = arr[..., :2]
    bottom_right = top_left + arr[..., 2:]
    corners = np.concatenate([top_left, bottom_right], axis=-1)
    return corners.tolist()
:param boxes: [n, 4] shape >> 4=(x, y, w, h) :return: new_boxes : [n, 4] shape >> 4=(x1, y1, x2, y2)
625941bcb830903b967e97f3
def roots(self, discontinuity=True, extrapolate=None):
    """Find real roots of the piecewise polynomial.

    Thin wrapper around ``solve`` with a right-hand side of 0.

    :param discontinuity: whether to report sign changes across breakpoint
        discontinuities as roots
    :param extrapolate: whether to include roots from the polynomial
        extrapolated from the first and last intervals
    :return: ndarray of roots (object array when the object describes
        multiple polynomials)
    """
    return self.solve(0, discontinuity, extrapolate)
Find real roots of the the piecewise polynomial. Parameters ---------- discontinuity : bool, optional Whether to report sign changes across discontinuities at breakpoints as roots. extrapolate : bool, optional Whether to return roots from the polynomial extrapolated based on first and last intervals. Returns ------- roots : ndarray Roots of the polynomial(s). If the PPoly object describes multiple polynomials, the return value is an object array whose each element is an ndarray containing the roots. See Also -------- PPoly.solve
625941bc92d797404e304069
def main(connection, info, args):
    """Remove a word from the channel's bad-word list.

    :param connection: IRC connection (provides .host and .msg)
    :param info: event info dict; info["channel"] is the target channel
    :param args: command arguments; args[1] is the word to delete
    """
    badwords = shelve.open("badwords.db", writeback=True)
    # Bug fix: ``dict.has_key`` was removed in Python 3; the ``in`` operator
    # is the portable spelling and works on shelve objects as well.
    if connection.host not in badwords:
        badwords[connection.host] = {}
        badwords.sync()
    if info["channel"] not in badwords[connection.host]:
        badwords[connection.host][info["channel"]] = {"users": {}, "badwords": []}
        badwords.sync()
    if args[1] in badwords[connection.host][info["channel"]]["badwords"]:
        badwords[connection.host][info["channel"]]["badwords"].remove(args[1])
        badwords.sync()
        connection.msg(info["channel"], _("Bad word deleted successfully."))
    else:
        connection.msg(info["channel"], _("That word is not on the list"))
    badwords.close()
Removes a word from the bad word list
625941bc82261d6c526ab37e
def setEditable(self, state):
    """Set whether or not the user can edit the items in the list by typing.

    :param state | <bool>
    """
    self._editable = state
    triggers = self.AllEditTriggers if state else self.NoEditTriggers
    self.setEditTriggers(triggers)
Sets whether or not the user can edit the items in the list by typing. :param state | <bool>
625941bc99cbb53fe6792ac7
def get_authorization_filters(self, client_id):
    """Get the authorization filters for the given client application.

    Builds one JWT filter claim per organization related to the client;
    grants other than client-credentials additionally receive the
    "user is me" filter.
    """
    application = self.get_client(client_id=client_id)
    filters = [org_relation.to_jwt_filter_claim() for org_relation in application.organizations.all()]
    if application.authorization_grant_type != application.GRANT_CLIENT_CREDENTIALS:
        filters.append(self.FILTER_USER_ME)
    return filters
Get the authorization filters for the given client application.
625941bcaad79263cf39091c
def __len__(self):
    """Return the total number of samples in the dataset.

    NOTE(review): not implemented yet -- the implicit None return will make
    ``len()`` raise a TypeError; needs a real count.
    """
    pass
:return: the total number of data samples
625941bcf7d966606f6a9ee0
def p_func_wr_capi(subexpressions):
    """function_with_return : CAPITALIZAR LPAREN expression RPAREN

    Grammar action: type-checks that the argument is a string and emits the
    translated ``capitalizar(...)`` call as a string-typed expression.

    :raises SemanticException: when the argument expression is not a string.
    """
    expression = subexpressions[3]
    isTerminal = False
    if not(isString(expression, isTerminal)):
        raise SemanticException("Capitalizar recibe solo strings")
    subexpressions[0] = {"value": "capitalizar(" + expression["value"] + ")", "type": {"tipo": "string", "tipoInterno": None}, "line": subexpressions[1]["line"]}
function_with_return : CAPITALIZAR LPAREN expression RPAREN
625941bc3539df3088e2e22b
def cut(value, arg):
    """Return a copy of ``value`` with every occurrence of ``arg`` removed."""
    cleaned = value.replace(arg, '')
    return cleaned
This removes all occurrences of arg from value
625941bc851cf427c661a3f2
def update(self, **kwargs):
    """Update the Message instance.

    Thin delegate to ``update_instance``; see that method for the accepted
    keyword arguments.

    :return: the updated instance
    """
    return self.update_instance(**kwargs)
Updates the Message instance :param sid: Message instance identifier :param service_sid: Service instance identifier :param channel_sid: Channel instance identifier :param body: Message's body :return: the updated instance
625941bccc40096d61595832
def __delattr__(self, attribute):
    """Delete the given (possibly dotted) attribute.

    A name like "a.b.c" walks down to the owner of the final component
    and deletes it there.

    :param attribute: Attribute name, possibly namespaced with dots.
    :type attribute: unicode
    """
    namespaces = attribute.split(".")
    # Resolve every component but the last via object.__getattribute__, then
    # delete the leaf on that owner, bypassing any overridden __delattr__.
    object.__delattr__(reduce(object.__getattribute__, namespaces[:-1], self), namespaces[-1])
Deletes given attribute with. :param attribute: Attribute name. :type attribute: unicode
625941bc009cb60464c63294
def response_generator(intent, session, mod=""):
    """Build an Alexa SSML response by concatenating answer/question speech.

    :param mod: phase marker, one of "QUESTION", "HELP", "LAUNCH", "ANSWER".
    NOTE(review): any other ``mod`` value leaves ``speech_text`` and
    ``reprompt_text`` unbound, raising NameError -- confirm callers only
    pass the four values above.
    """
    speech_output = "<speak><prosody rate='90%'>"
    session_attributes = {}
    should_end_session = False
    card_title = "Horrible Movie Plots"
    if mod in ["QUESTION", "HELP", "LAUNCH"]:
        speech_text, reprompt_text, session_attributes = get_question_response(intent, session, mod)
    elif mod == "ANSWER":
        # Read the answer out first, then append the next question.
        speech_output += get_answer_response(intent, session)
        speech_text, reprompt_text, session_attributes = get_question_response(intent, session, mod)
    speech_output += speech_text + "</prosody></speak>"
    return build_response(session_attributes, build_speechlet_response( speech_output, reprompt_text, should_end_session, card_title, output_type="SSML"))
Concatenates multiple responses together.
625941bc4d74a7450ccd40a2
def get_total(self):
    """Compute the price for the whole melon order.

    Christmas melons cost 1.5x the base price, and international orders of
    fewer than 10 melons carry a flat $3 surcharge.
    """
    base_price = self.get_base_price()
    if self.species == "Christmas":
        base_price *= 1.5
    total = (1 + self.tax) * self.qty * base_price
    # Small international orders incur the flat fee.
    if self.order_type == "international" and self.qty < 10:
        total += 3
    return total
Gets the price for ALL melons!
625941bc099cdd3c635f0b3c
def remake_board_array(self, width, height):
    """Translate the board's graph representation back into a 2-D array for
    UI purposes.

    :param width: width of board.
    :param height: height of board.
    :return: ``height`` x ``width`` nested list of BoardContent values,
        EMPTY except where self._board has an entry.

    NOTE(review): keys are indexed as board_array[v[0]][v[1]], i.e. assumed
    (row, col) order -- confirm the key layout, since the outer list has
    ``height`` rows.
    """
    board_array = [[BoardContent.EMPTY for i in range(width)] for j in range(height)]
    for v in self._board.keys():
        board_array[v[0]][v[1]] = self._board[v][0]
    return board_array
Translates board graph representation back to multidimensional array representation for UI purposes. Args: width: width of board. height: height of board.
625941bc8a43f66fc4b53f48
def update(self, instance, validated_data):
    """Update and return an existing `Profile` instance, given the
    validated data.  Fields absent from the data keep their current value.
    """
    for field in ('badge', 'pw', 'desktop'):
        setattr(instance, field, validated_data.get(field, getattr(instance, field)))
    instance.save()
    return instance
Update and return an existing `Profile` instance, given the validated data.
625941bc462c4b4f79d1d5b0
def __ge__(self, other):
    """Greater-than-or-equal comparison against another quantity of the same
    type, or against its string representation.

    :raises TypeError: for any other operand type.
    """
    if type(other) is type(self):
        return self._default_unit().value() >= other._default_unit().value()
    if isinstance(other, str):
        return (self._default_unit().value()
                >= self._from_string(other)._default_unit().value())
    raise TypeError("unorderable types: '%s' >= '%s'" % (self.__class__.__qualname__, other.__class__.__qualname__))
Greater than or equal to operator.
625941bcd268445f265b4d4e
@task
def stop_process(name, venv_dir=None):
    """Stop the named supervisor-managed process.

    :param name: supervisor program name
    :param venv_dir: optional virtualenv directory forwarded to the
        supervisor command helper
    """
    _supervisor_command(' stop ' + name, venv_dir)
Stop the named supervisor-managed process
625941bc6fece00bbac2d61c
def member_function( self, name=None, function=None, return_type=None, arg_types=None, header_dir=None, header_file=None, recursive=None ):
    """Return a reference to the single member-function declaration matching
    the given criteria.

    Delegates to ``_find_single`` with the member-function matcher and
    declaration type; all other keyword arguments are pass-through filters.
    """
    return self._find_single( self._impl_matchers[ scopedef_t.member_function ] , name=name , function=function , decl_type=self._impl_decl_types[ scopedef_t.member_function ] , return_type=return_type , arg_types=arg_types , header_dir=header_dir , header_file=header_file , recursive=recursive )
returns reference to member declaration, that is matched defined criteria
625941bc26238365f5f0ed4a
def to_networkx_web(knots):
    """Convert one or more Knots (the web's outputs) into a NetworkX DiGraph
    whose nodes are the Knots themselves.

    :param knots: a Knot or tuple/list of Knots; all earlier Knots reachable
        via ``map_web`` are included in the graph.
    :return: a networkx.DiGraph representing the web.
    """
    import networkx as nx
    if not isinstance(knots, (tuple, list)):
        knots = [knots]
    graph = nx.DiGraph()
    def draw_fn(knot_, _, __):
        # One edge per input dependency, then label the node itself.
        for i in knot_.input_knots_list:
            graph.add_edge(i, knot_)
        graph.add_node(knot_, label=knot_.layer_name)
    for knot in knots:
        knot.map_web(draw_fn)
    return graph
Converts some Knots into a NetworkX DiGraph. The resulting DiGraph will have the Knots as its nodes. Note that there are good reasons not to just use NetworkX DiGraphs in the first place: NetworkX is based around using a Graph as the central object, for which its nodes may be anything. Here, however, we use Knots as the central object, which are assembled into a web. (Note the capitalisation in each case: the nodes of a Graph are not themselves NetworkX objects; nor is a web of Knots an object itself.) Arguments: knots: A Knot or tuple or list of Knots, specifing the 'outputs' of the web. All 'earlier' Knots, in the sense of Knot.map_web.__doc__, will feature as part of the result DiGraph. Returns: A NetworkX DiGraph representing the web, with Knots as nodes.
625941bc4527f215b584c33a
def getRawBody(self):
    """Body getter.

    :return: String - raw body content.
    """
    return self._body
Body getter. :return: String - Raw body content.
625941bce5267d203edcdb80
def _mkarg(self, kargs):
    """Normalize an API argument dict: UTF-8 encode string values, flatten
    iterables to comma-separated strings, and inject the API token.

    :param kargs: original argument mapping (not mutated; a copy is used)
    :return: the new argument dict
    """
    def enc(x):
        # Strings become UTF-8 bytes; everything else is stringified.
        return x.encode('utf-8') if isinstance(x, str) else str(x)
    kargs = kargs.copy()
    kargs['token'] = self._api.token
    for (k, v) in kargs.items():
        if isinstance(v, Iterable) and not isinstance(v, str):
            # Lists/tuples become "a,b,c".
            kargs[k] = ','.join([str(i) for i in v])
        else:
            kargs[k] = enc(v)
    return kargs
change the argument list (encode value, add api key/secret) :return: the new argument list
625941bc56b00c62f0f14537
def test_parameter_lose_units():
    """Setting a Quantity-initialized parameter to a plain number must raise
    UnitsError, because the intended unit would be ambiguous.
    """
    g = Gaussian1D(1 * u.Jy, 3, 0.1)
    with pytest.raises(UnitsError) as exc:
        g.amplitude = 2
    assert exc.value.args[0] == ("The 'amplitude' parameter should be given as " "a Quantity because it was originally " "initialized as a Quantity")
Check that parameters that have been set to a quantity that are then set to a value with no units raise an exception. We do this because setting a parameter to a value with no units is ambiguous if units were set before: if a parameter is 1 * u.Jy and the parameter is then set to 2, does this mean 2 without units, or 2 * u.Jy?
625941bc4f6381625f11491d
def _json_to_internal(json_scorecard, params):
    """Convert a JSON scorecard description to the internal representation.

    :param json_scorecard: parsed JSON dict with "model_name", "data_fields"
        and "characteristics" entries.
    :param params: parameter context forwarded to predicate parsing.
    :return: (model_name, data_fields, characteristics) tuple.
    """
    model_name = json_scorecard["model_name"]
    data_fields = [ DataField(df["name"], df["dataType"], df["optype"], df.get("values")) for df in json_scorecard["data_fields"] ]
    characteristics = [ Characteristic(c["name"], [ Attribute(a["reasonCode"], str(a["partialScore"]), _read_predicate(a["predicate"], params)) for a in c["attributes"] ] ) for c in json_scorecard["characteristics"] ]
    return model_name, data_fields, characteristics
Converts a JSON scorecard description to the internal representation
625941bc23849d37ff7b2f71
def partition(lists, left, right):
    """Quicksort partition step: take ``lists[left]`` as the pivot, move
    smaller elements before it and larger ones after it, and return the
    pivot's final index.  Mutates ``lists`` in place.
    """
    pivot = lists[left]
    while left < right:
        # Scan from the right for an element smaller than the pivot.
        while left < right and lists[right] >= pivot:
            right -= 1
        lists[left] = lists[right]
        # Scan from the left for an element larger than the pivot.
        while left < right and lists[left] <= pivot:
            left += 1
        lists[right] = lists[left]
    # left == right: drop the pivot into its final slot.
    lists[left] = pivot
    return left
Partition step for quicksort: split the list around a pivot and return the pivot's final index
625941bc627d3e7fe0d68d2e
def set_instance_name(self, instance):
    """Set the instance name of the plugin.

    Honored only when the plugin class allows multiple instances; otherwise
    a warning is logged and the name is left unchanged.
    """
    if self.ALLOW_MULTIINSTANCE:
        self.__instance = instance
    else:
        self.logger.warning("Plugin does not allow more then one instance")
set instance name of the plugin
625941bc94891a1f4081b988
def extractZonesAsList(self, supportedAreas):
    """Convert a bitwise vehicle-area mask into the list of individual zone
    flags contained in it.
    """
    zone_enum = self.vtypes.VehicleAreaZone
    candidate_zones = [
        zone_enum.ROW_1_LEFT, zone_enum.ROW_1_CENTER, zone_enum.ROW_1_RIGHT,
        zone_enum.ROW_2_LEFT, zone_enum.ROW_2_CENTER, zone_enum.ROW_2_RIGHT,
        zone_enum.ROW_3_LEFT, zone_enum.ROW_3_CENTER, zone_enum.ROW_3_RIGHT,
        zone_enum.ROW_4_LEFT, zone_enum.ROW_4_CENTER, zone_enum.ROW_4_RIGHT,
    ]
    # A zone is supported when all of its bits are present in the mask.
    return [zone for zone in candidate_zones if zone & supportedAreas == zone]
Converts bitwise area flags to list of zones
625941bc8e7ae83300e4aeac
def read_files(self):
    """Method to initialize recommender algorithm.

    Reads every training file (and the optional test file), accumulates the
    global user/item sets, and builds id <-> raw-value mappings.
    """
    self.n_files = len(self.train_files)
    self.users = []
    self.items = []
    for train_file in self.train_files:
        train_set = ReadFile(train_file, sep=self.sep, as_binary=self.as_binary).read()
        self.users += train_set['users']
        self.items += train_set['items']
        self.train_set_list.append(train_set)
        # dir_name ends up as the directory of the LAST training file.
        self.dir_name = os.path.dirname(train_file)
    self.users = set(self.users)
    self.items = set(self.items)
    if self.test_file is not None:
        self.test_set = ReadFile(self.test_file).read()
        # Include test users/items so the id maps cover the full vocabulary.
        self.users = sorted(set(list(self.users) + list(self.test_set['users'])))
        self.items = sorted(set(list(self.items) + list(self.test_set['items'])))
    for i, item in enumerate(self.items):
        self.item_to_item_id.update({item: i})
        self.item_id_to_item.update({i: item})
    for u, user in enumerate(self.users):
        self.user_to_user_id.update({user: u})
        self.user_id_to_user.update({u: user})
Method to initialize recommender algorithm.
625941bce1aae11d1e749b95
def remove(self, s):
    """Remove a set or single point from the MultiSet.

    A bare number is first wrapped in a single-point ``Set``.

    :raises NotImplementedError: removal is not implemented yet.
    """
    if isinstance(s, (float, int)):
        s = Set(s)
    # Bug fix: ``raise NotImplemented`` raises a TypeError at runtime,
    # because NotImplemented is a sentinel value, not an exception class.
    raise NotImplementedError
Remove a set or single point from the MultiSet.
625941bcd486a94d0b98e025
def conforms_to(obj, source):
    """Check whether ``obj`` conforms to ``source`` by invoking the predicate
    properties of ``source`` with the corresponding property values of
    ``obj``.

    >>> conforms_to({'b': 2}, {'b': lambda n: n > 1})
    True
    >>> conforms_to({'b': 0}, {'b': lambda n: n > 1})
    False
    """
    return conforms(source)(obj)
Checks if `obj` conforms to `source` by invoking the predicate properties of `source` with the corresponding property values of `obj`. Args: obj (dict|list): The object to inspect. source (dict|list): The object of property predicates to conform to. Example: >>> conforms_to({'b': 2}, {'b': lambda n: n > 1}) True >>> conforms_to({'b': 0}, {'b': lambda n: n > 1}) False >>> conforms_to([2, 0], [lambda n: n > 1, lambda n: n == 0]) True >>> conforms_to([0, 0], [lambda n: n > 1, lambda n: n == 0]) False .. versionadded:: 4.0.0
625941bcab23a570cc250060
def deserialize_block(serialized_block):
    """Decode a Block from msgpack-serialized bytes.

    NOTE(review): the ``encoding`` keyword was removed in msgpack >= 1.0;
    newer versions require ``raw=False`` instead -- confirm the pinned
    msgpack version.
    """
    as_tuple = msgpack.unpackb(serialized_block, encoding="utf-8")
    index, fingers, items, aux = as_tuple
    return Block(items, index, fingers, aux)
Decode a block from msgpack-serialized bytes.
625941bcd99f1b3c44c67476
def __init__(self, project_id, bucket_name):
    """Initialize client with project id and name of the storage bucket.

    :param project_id: cloud project id used for the storage client.
    :param bucket_name: bucket name; the bucket is fetched eagerly here.
    """
    self.project_id = project_id
    self.bucket_name = bucket_name
    self.client = storage.Client(project=project_id)
    self.bucket = self.client.get_bucket(bucket_name)
Initialize client with project id and name of the storage bucket.
625941bc507cdc57c6306bb4
def on_datamodel_recordUpdated(self, bRecord):
    """Preserve/restore the table selection around a record update.

    :param bRecord: True -> remember the currently selected row;
        False -> re-select the previously remembered row.
    """
    try:
        if bRecord:
            current = self.selTableView.currentIndex()
            if current and current.row() != -1:
                self.curSelection = current.row()
            else:
                self.curSelection = None
        else:
            if self.curSelection is not None:
                self.selTableView.selectRow(self.curSelection)
    except Exception as e:
        # Python 2 print-to-stderr statement; this module predates Python 3.
        print >> sys.stderr, str(e)
Deal with the selection changed problem.
625941bcb5575c28eb68dede
def _load_processed_file(file, **kwargs):
    """Load a preprocessed file, selecting the loader type from the filename.

    GPS/iridium tracks -> gps, emission logs -> source_rtsys, CTD casts ->
    ctd; anything else yields an explanatory string instead of raising.
    """
    if "_gps_" in file or "_iridium" in file:
        from .gps import gps
        return gps(file=file)
    elif "emission" in file:
        from .source import source_rtsys
        return source_rtsys(file=file)
    elif "ctd" in file:
        from .ctd import ctd
        return ctd(file=file)
    else:
        # Deliberately soft failure: callers get a message, not an exception.
        return file+" not loaded"
load preprocessed file, select object type based on filename
625941bc4c3428357757c20a
def _get_folder_path(csidl):
    """Get the path of a Windows special folder identified by a CSIDL value.

    Uses SHGetFolderPathW into a MAX_PATH-sized unicode buffer.

    :return: folder path as str.
    """
    buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
    SHGetFolderPathW(None, csidl, None, SHGFP_TYPE_CURRENT, buf)
    return str(buf.value)
Get the path of a folder identified by a CSIDL value.
625941bcbde94217f3682cdb
def swap_strategy(score, opponent_score, margin=8, num_rolls=4):
    """Roll 0 dice when doing so triggers a beneficial swap, or when rolling
    0 guarantees at least ``margin`` points; otherwise roll ``num_rolls``.
    """
    # Points guaranteed by rolling zero dice (free bacon, then prime bonus).
    guaranteed = primenize(free_bacon(opponent_score))
    # A swap is beneficial when the opponent would end at double our score.
    if opponent_score == 2 * (score + guaranteed) or guaranteed >= margin:
        return 0
    return num_rolls
This strategy rolls 0 dice when it triggers a beneficial swap. It also rolls 0 dice if it gives at least MARGIN points. Otherwise, it rolls NUM_ROLLS.
625941bc3c8af77a43ae367e
def test_h2_title(self):
    """Assert that the article's sect1 titlepage contains an <h2
    class="title"> with an anchor at the expected XHTML path.
    """
    h2 = self.result.xpath("/h:html/h:body/h:div[@class='article']/h:div[@class='sect1']/h:div[@class='titlepage']/h:div/h:div/h:h2[@class='title']/h:a", namespaces=self.ns)
    assert len(h2)
Checks, if /h:html/h:body/h:div[@class='article']/h:div[@class='sect1']/h:div[@class='titlepage']/h:div/h:div/h:h2[@class='title']/h:a is available
625941bc85dfad0860c3ad3a
def __find_complex_relations( self, **kwargs ):
    """Return complex relations matching the given search criteria.

    Normalizes the transport-related keyword arguments to their defaults
    before delegating to ``call_with_http_info``.
    """
    kwargs.setdefault('async_req', False)
    kwargs.setdefault('_return_http_data_only', True)
    kwargs.setdefault('_preload_content', True)
    kwargs.setdefault('_request_timeout', None)
    kwargs.setdefault('_check_input_type', True)
    kwargs.setdefault('_check_return_type', True)
    kwargs.setdefault('_host_index')
    return self.call_with_http_info(**kwargs)
Returns complex relations matching the given search criteria. # noqa: E501 Returns complex relations matching the given search criteria. Only parameters that are specified in this request and have not <code>null</code> values are used for filtering. All other parameters are ignored. The returned complex relations satisfy all constraints that are specified in this search criteria. By default a result containing 1000 complex relations is returned. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.find_complex_relations(async_req=True) >>> result = thread.get() Keyword Args: offset (int): The first result to retrieve. If not set (offset = <code>0</code>), results will be retrieved starting from row <code>0</code>.. [optional] if omitted the server will use the default value of 0 limit (int): The maximum number of results to retrieve. If not set (limit = <code>0</code>), the default limit will be used.. [optional] if omitted the server will use the default value of 0 asset_id (str): The ID of the asset for which complex relations should be found.. [optional] type_id (str): The ID of the type of complex relations to search for.. [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. 
_host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. async_req (bool): execute request asynchronously Returns: PagedResponseComplexRelation If the method is called asynchronously, returns the request thread.
625941bc4428ac0f6e5ba6d2
def __init__(self, prob=None):
    """Construct categorical distribution.

    Parameters
    ----------
    prob : (n_classes,) np.ndarray or Dirichlet
        probability of each class, or a Dirichlet prior over it
    """
    if isinstance(prob, Dirichlet):
        # Keep the Dirichlet around as the conjugate prior.
        self.prob_prior = prob
    self.prob = prob
construct categorical distribution Parameters ---------- prob : (n_classes,) np.ndarray or Dirichlet probability of each class
625941bc76e4537e8c351558
def form_total_price(forms, real: bool = False) -> float:
    """Compute the total price over multiple order forms.

    :param forms: iterable of order_form records exposing ``price`` and
        ``quantity`` / ``real_quantity`` fields
    :param real: False -> use ``form.quantity``; True -> ``form.real_quantity``
    :return: total price of all forms as a float
    """
    total = 0.0
    for form in forms:
        quantity = form.real_quantity if real else form.quantity
        total += float(quantity * form.price)
    return total
多条表单总价计算 计算多个order_form表单总价 :param forms: 多条order_form :param real: False:form.quantity True:form.real_quantity :return: 多条表单总价
625941bc460517430c39406d
def test_200_run_android_release_snapshot(self): <NEW_LINE> <INDENT> run_hello_world_js_ts(self.app_name, Platform.ANDROID, self.emu, default_andr_sdk='28', release=True, snapshot=True)
Run android, verify app is built with api28
625941bc0a50d4780f666d70
def assert_arcs3_data(covdata): <NEW_LINE> <INDENT> assert_line_counts(covdata, SUMMARY_3) <NEW_LINE> assert_measured_files(covdata, MEASURED_FILES_3) <NEW_LINE> assert_count_equal(covdata.lines("x.py"), X_PY_LINES_3) <NEW_LINE> assert_count_equal(covdata.arcs("x.py"), X_PY_ARCS_3) <NEW_LINE> assert_count_equal(covdata.lines("y.py"), Y_PY_LINES_3) <NEW_LINE> assert_count_equal(covdata.arcs("y.py"), Y_PY_ARCS_3) <NEW_LINE> assert covdata.has_arcs()
Check that `covdata` has the data from ARCS3.
625941bcd486a94d0b98e026
def start(self) -> "FilesystemCleanup": <NEW_LINE> <INDENT> self.clean_output_files() <NEW_LINE> return self
Starts the cleanup of everything unneeded.
625941bc091ae35668666e45
def post(self, request): <NEW_LINE> <INDENT> serializer = CreateJoinGroupRequestSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> data = serializer.data <NEW_LINE> try: <NEW_LINE> <INDENT> group = Group.objects.get(id=data["group_id"]) <NEW_LINE> <DEDENT> except Group.DoesNotExist: <NEW_LINE> <INDENT> return error_response(u"小组不存在") <NEW_LINE> <DEDENT> if group.join_group_setting == 0: <NEW_LINE> <INDENT> if join_group(request.user, group): <NEW_LINE> <INDENT> return success_response(u"你已经成功的加入该小组") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return error_response(u"你已经是小组成员了") <NEW_LINE> <DEDENT> <DEDENT> elif group.join_group_setting == 1: <NEW_LINE> <INDENT> if not data["message"]: <NEW_LINE> <INDENT> return error_response(u"申请信息是必填项。") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> JoinGroupRequest.objects.get(user=request.user, group=group, status=False) <NEW_LINE> return error_response(u"你已经提交过申请了,请等待审核") <NEW_LINE> <DEDENT> except JoinGroupRequest.DoesNotExist: <NEW_LINE> <INDENT> JoinGroupRequest.objects.create(user=request.user, group=group, message=data["message"]) <NEW_LINE> <DEDENT> return success_response(u"申请提交成功,请等待审核") <NEW_LINE> <DEDENT> elif group.join_group_setting == 2: <NEW_LINE> <INDENT> return error_response(u"该小组不允许任何人加入") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return serializer_invalid_response(serializer)
加入某个小组的api --- request_serializer: CreateJoinGroupRequestSerializer
625941bc9c8ee82313fbb655
def test_decode_jwt(self): <NEW_LINE> <INDENT> gcp_collector = gcp.GCPCloudProvider({}, audience_url="https://subscription.rhsm.redhat.com:443/subscription") <NEW_LINE> jose_header_str, metadata_str, encoded_signature = gcp_collector.decode_jwt(GCP_JWT_TOKEN) <NEW_LINE> self.assertIsNotNone(jose_header_str) <NEW_LINE> self.assertIsNotNone(metadata_str) <NEW_LINE> self.assertIsNotNone(encoded_signature) <NEW_LINE> self.assertEqual(jose_header_str, GCP_JOSE_HEADER) <NEW_LINE> self.assertEqual(metadata_str, GCP_METADATA) <NEW_LINE> self.assertEqual(encoded_signature, GCP_SIGNATURE) <NEW_LINE> jose_header = json.loads(jose_header_str) <NEW_LINE> self.assertIn("typ", jose_header) <NEW_LINE> self.assertEqual(jose_header["typ"], "JWT") <NEW_LINE> metadata = json.loads(metadata_str) <NEW_LINE> self.assertIn("google", metadata) <NEW_LINE> self.assertIn("compute_engine", metadata["google"]) <NEW_LINE> self.assertIn("instance_id", metadata["google"]["compute_engine"]) <NEW_LINE> self.assertEqual("2589221140676718026", metadata["google"]["compute_engine"]["instance_id"])
Test decoding of JWT token
625941bc4a966d76dd550eed
def create_local_cluster(num_workers, num_ps, protocol="grpc"): <NEW_LINE> <INDENT> worker_ports = [portpicker.pick_unused_port() for _ in range(num_workers)] <NEW_LINE> ps_ports = [portpicker.pick_unused_port() for _ in range(num_ps)] <NEW_LINE> cluster_dict = { "worker": ["localhost:%s" % port for port in worker_ports], "ps": ["localhost:%s" % port for port in ps_ports] } <NEW_LINE> cs = server_lib.ClusterSpec(cluster_dict) <NEW_LINE> workers = [ server_lib.Server( cs, job_name="worker", protocol=protocol, task_index=ix, start=True) for ix in range(num_workers) ] <NEW_LINE> ps_servers = [ server_lib.Server( cs, job_name="ps", protocol=protocol, task_index=ix, start=True) for ix in range(num_ps) ] <NEW_LINE> return workers, ps_servers
Create local GRPC servers and return their servers.
625941bccc0a2c11143dcd71
def perform_app_delete(self, rpc_app): <NEW_LINE> <INDENT> app = AppOperator.Application(rpc_app) <NEW_LINE> try: <NEW_LINE> <INDENT> self._dbapi.kube_app_destroy(app.name) <NEW_LINE> self._cleanup(app) <NEW_LINE> LOG.info("Application (%s) has been purged from the system." % app.name) <NEW_LINE> msg = None <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.exception(e) <NEW_LINE> msg = str(e) <NEW_LINE> <DEDENT> return msg
Process application remove request This method removes the application entry from the database and performs cleanup which entails removing node labels where applicable and purge all application files from the system. :param rpc_app: application object in the RPC request
625941bc29b78933be1e5596
def index_doc(self, fname): <NEW_LINE> <INDENT> meta = self.iterSent.get_metadata(fname) <NEW_LINE> self.add_meta_keywords(meta) <NEW_LINE> meta['n_words'] = self.numWords <NEW_LINE> meta['n_sents'] = self.numSents <NEW_LINE> if len(self.settings['languages']) > 1: <NEW_LINE> <INDENT> for i in range(len(self.languages)): <NEW_LINE> <INDENT> meta['n_words_' + self.languages[i]] = self.numWordsLang[i] <NEW_LINE> meta['n_sents_' + self.languages[i]] = self.numSentsLang[i] <NEW_LINE> <DEDENT> <DEDENT> self.numWords = 0 <NEW_LINE> self.numSents = 0 <NEW_LINE> self.numWordsLang = [0] * len(self.languages) <NEW_LINE> self.numSentsLang = [0] * len(self.languages) <NEW_LINE> try: <NEW_LINE> <INDENT> self.es.index(index=self.name + '.docs', doc_type='doc', id=self.dID, body=meta) <NEW_LINE> <DEDENT> except RequestError as err: <NEW_LINE> <INDENT> shortMeta = {} <NEW_LINE> if 'filename' in meta: <NEW_LINE> <INDENT> shortMeta['filename'] = meta['filename'] <NEW_LINE> <DEDENT> if 'title' in meta: <NEW_LINE> <INDENT> shortMeta['title'] = meta['title'] <NEW_LINE> shortMeta['title_kw'] = meta['title'] <NEW_LINE> self.es.index(index=self.name + '.docs', doc_type='doc', id=self.dID, body=shortMeta) <NEW_LINE> <DEDENT> <DEDENT> self.dID += 1
Store the metadata of the source file.
625941bc627d3e7fe0d68d2f
def is_full(self): <NEW_LINE> <INDENT> assert len(self.indexes) <= len(self.candidate) <NEW_LINE> return len(self.indexes) == len(self.candidate)
Return whether the builder is ready to create an MWEOccurrence.
625941bc91af0d3eaac9b8f6
def _enable_alpha_channel(self, pil_image): <NEW_LINE> <INDENT> if pil_image.mode != self._color_mode: <NEW_LINE> <INDENT> pil_image = pil_image.convert(self._color_mode) <NEW_LINE> <DEDENT> return pil_image
Enable alpha channel for PNG images by converting to RGBA.
625941bc38b623060ff0accf
def test_fill_value(): <NEW_LINE> <INDENT> r = list(range(5)) <NEW_LINE> assert partitions(r, 3) == [[0, 1, 2], [3, 4]] <NEW_LINE> assert partitions(r, 3, fill_value=None) == [[0, 1, 2], [3, 4, None]] <NEW_LINE> with cute_testing.RaiseAssertor(text='fill_value'): <NEW_LINE> <INDENT> partitions(r, 2, fill_value=None, allow_remainder=False) <NEW_LINE> <DEDENT> assert partitions([], 3, fill_value=None) == []
Test `fill_value` keyword arguemnt for `partitions`.
625941bc67a9b606de4a7d9d
def OnEnterScript(self,) -> 'Any': <NEW_LINE> <INDENT> pass
None Args: Returns: Any
625941bc44b2445a33931f80
def HaveTimeOfDayCondition(date_flags): <NEW_LINE> <INDENT> if GetBit(date_flags, 0): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Проверяет, есть ли среди временнЫх условий условие по времени дня
625941bc38b623060ff0acd0
def next_reasonable_head(ea, max_ea): <NEW_LINE> <INDENT> while ea < max_ea: <NEW_LINE> <INDENT> ea = idc.next_head(ea, max_ea) <NEW_LINE> flags = idc.get_full_flags(ea) <NEW_LINE> if not idaapi.is_align(flags): <NEW_LINE> <INDENT> return ea <NEW_LINE> <DEDENT> <DEDENT> return idc.BADADDR
Returns the next 'reasonable' head, skipping over alignments. One heuristic for matching strings is to see if there's an unmatched string between two matched ones. If the next logical head is a string, but the actual head is an alignment, then we really want to find the head of the string. TODO(pag): Investigate using `ida_bytes.next_not_tail(ea)`.
625941bcd58c6744b4257b42
def testtransport_start_event_handler(self): <NEW_LINE> <INDENT> sig_dict, signal = self.setup_vars_mocks() <NEW_LINE> transport = XmppTransport.factory(1, sig_dict["Signal"]["Overlays"]["A0FB389"], signal, None, None, None) <NEW_LINE> transport._sig.sig_log = MagicMock() <NEW_LINE> transport.add_event_handler = MagicMock() <NEW_LINE> transport.register_handler = MagicMock() <NEW_LINE> transport.get_roster = MagicMock() <NEW_LINE> transport.start_event_handler(event=None) <NEW_LINE> transport.add_event_handler.assert_called_once() <NEW_LINE> transport.get_roster.assert_called_once() <NEW_LINE> print("Passed : testtransport_start_event_handler")
Test to check the start of the event handler of the signal class.
625941bc7b25080760e3933b
def __init__(self, uri: str, verbose=False, test_environment=False): <NEW_LINE> <INDENT> self.test_environment = test_environment <NEW_LINE> if self.test_environment: <NEW_LINE> <INDENT> self.printer = utils.VerbosePrinter(verbose) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super().__init__(uri, verbose)
Constructor
625941bc4e696a04525c932d
def _check_dimensions_objects(R): <NEW_LINE> <INDENT> print(R) <NEW_LINE> dimensions = {} <NEW_LINE> for r in R: <NEW_LINE> <INDENT> t1, t2 = r <NEW_LINE> for l in range(len(R[r])): <NEW_LINE> <INDENT> if dimensions.get(t1) is not None: <NEW_LINE> <INDENT> if R[r][l].shape[0] != dimensions[t1]: <NEW_LINE> <INDENT> raise ValueError("Object type " + t1 + " has" "mismatching dimensions") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> dimensions[t1] = R[r][l].shape[0] <NEW_LINE> <DEDENT> if dimensions.get(t2) is not None: <NEW_LINE> <INDENT> if R[r][l].shape[1] != dimensions[t2]: <NEW_LINE> <INDENT> raise ValueError("Object type " + t2 + " has" "mismatching dimensions") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(R[r][l].shape) <NEW_LINE> dimensions[t2] = R[r][l].shape[1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dimensions
:param R: dictionary from (type, type) to relational matrix :return: dictionary string to int (dimension of that type)
625941bc15fb5d323cde09ec
def get_matplotlib_layout(widget, flag_toolbar=True): <NEW_LINE> <INDENT> fig = plt.figure() <NEW_LINE> canvas = FigureCanvas(fig) <NEW_LINE> layout = QVBoxLayout(widget) <NEW_LINE> if flag_toolbar: <NEW_LINE> <INDENT> toolbar = NavigationToolbar2QT(canvas, widget) <NEW_LINE> layout.addWidget(toolbar) <NEW_LINE> <DEDENT> layout.addWidget(canvas) <NEW_LINE> a99.set_margin(layout, 0) <NEW_LINE> return fig, canvas, layout
Creates figure, toolbar, layout, sets widget layout Returns figure, canvas, layout Reference: http://stackoverflow.com/questions/12459811
625941bc2eb69b55b151c78d
def getName(self): <NEW_LINE> <INDENT> return _fife.SharedResourcePointer_getName(self)
getName(SharedResourcePointer self) -> std::string const &
625941bc9f2886367277a771
def ping(self): <NEW_LINE> <INDENT> self._p__ping = 1
Send out a sonar/radar pulse
625941bc8e05c05ec3eea253
def test_basis(self): <NEW_LINE> <INDENT> sys1 = Atoms(symbols=["H"], positions=[[0, 0, 0]], cell=[2, 2, 2], pbc=True) <NEW_LINE> sys2 = Atoms(symbols=["O"], positions=[[0, 0, 0]], cell=[2, 2, 2], pbc=True) <NEW_LINE> sys3 = sys2*[2, 2, 2] <NEW_LINE> desc = MBTR( atomic_numbers=[1, 8], k=[1, 2, 3], periodic=True, grid={ "k1": { "min": 1, "max": 8, "sigma": 0.1, "n": 50, }, "k2": { "min": 0, "max": 1/0.7, "sigma": 0.1, "n": 50, }, "k3": { "min": -1, "max": 1, "sigma": 0.1, "n": 50, } }, weighting={ "k2": { "function": "exponential", "scale": 1, "cutoff": 1e-4 }, "k3": { "function": "exponential", "scale": 1, "cutoff": 1e-4 } }, flatten=True ) <NEW_LINE> vec1 = desc.create(sys1).toarray()[0, :] <NEW_LINE> vec1 /= np.linalg.norm(vec1) <NEW_LINE> vec2 = desc.create(sys2).toarray()[0, :] <NEW_LINE> vec2 /= np.linalg.norm(vec2) <NEW_LINE> vec3 = desc.create(sys3).toarray()[0, :] <NEW_LINE> vec3 /= np.linalg.norm(vec3) <NEW_LINE> dot = np.dot(vec1, vec2) <NEW_LINE> self.assertEqual(dot, 0) <NEW_LINE> dot = np.dot(vec2, vec3) <NEW_LINE> self.assertTrue(abs(dot-1) < 1e-3)
Tests that the output vectors behave correctly as a basis.
625941bc15baa723493c3e54
def is_valid_input(filepath): <NEW_LINE> <INDENT> with open(filepath, "r") as file: <NEW_LINE> <INDENT> for line in file: <NEW_LINE> <INDENT> if line=="THIS FILE IS CREATED BY SAMPLEINFO.PY, ALTERING THIS LINE MAY CAUSE PROBLEMS\n": <NEW_LINE> <INDENT> return filepath <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> raise SystemExit("Input does not appear to be a the location of a valid text file created by the sampleinfo.py script")
Check if command line input is a valid text file created by this script
625941bcad47b63b2c509e61
def dismiss(self): <NEW_LINE> <INDENT> self._dismissed.set() <NEW_LINE> self._logger.info('WorkerThread are requred to dismiss.')
设置一个标志,表示完成当前work之后,退出 :return:
625941bcc432627299f04b25
def get_blockchain_events_adaptive( web3: Web3, blockchain_state: BlockchainState, token_network_addresses: List[TokenNetworkAddress], latest_confirmed_block: BlockNumber, ) -> Optional[List[Event]]: <NEW_LINE> <INDENT> from_block = BlockNumber(blockchain_state.latest_committed_block + 1) <NEW_LINE> to_block = min( latest_confirmed_block, BlockNumber(from_block + blockchain_state.current_event_filter_interval - 1), ) <NEW_LINE> try: <NEW_LINE> <INDENT> before_query = time.monotonic() <NEW_LINE> events = get_blockchain_events( web3=web3, token_network_addresses=token_network_addresses, chain_state=blockchain_state, from_block=from_block, to_block=to_block, ) <NEW_LINE> after_query = time.monotonic() <NEW_LINE> filter_query_duration = after_query - before_query <NEW_LINE> if filter_query_duration < ETH_GET_LOGS_THRESHOLD_FAST: <NEW_LINE> <INDENT> blockchain_state.current_event_filter_interval = BlockTimeout( min(MAX_FILTER_INTERVAL, blockchain_state.current_event_filter_interval * 2) ) <NEW_LINE> <DEDENT> elif filter_query_duration > ETH_GET_LOGS_THRESHOLD_SLOW: <NEW_LINE> <INDENT> blockchain_state.current_event_filter_interval = BlockTimeout( max(MIN_FILTER_INTERVAL, blockchain_state.current_event_filter_interval // 2) ) <NEW_LINE> <DEDENT> return events <NEW_LINE> <DEDENT> except ReadTimeout: <NEW_LINE> <INDENT> old_interval = blockchain_state.current_event_filter_interval <NEW_LINE> blockchain_state.current_event_filter_interval = BlockTimeout( max(MIN_FILTER_INTERVAL, old_interval // 5) ) <NEW_LINE> log.debug( "Failed to query events in time, reducing interval", old_interval=old_interval, new_interval=blockchain_state.current_event_filter_interval, ) <NEW_LINE> <DEDENT> return None
Queries new events from the blockchain. Uses an adaptive interval, so that the ethereum nodes aren't overloaded. Args: web3: Web3 object blockchain_state: The blockchain state objected. This is mutated and should be reused. token_network_addresses: List of known token network addresses. This is mutated when a new token network is found. However, additionally a `ReceiveTokenNetworkCreatedEvent` is created as well and it is recommended to use that instead and to not reuse this list. latest_confirmed_block: The latest block to query to Returns: A list of events if successful, otherwise ``None``
625941bc99cbb53fe6792ac8
def __init__(self, R): <NEW_LINE> <INDENT> classical.SymmetricFunctionAlgebra_classical.__init__(self, R, "power", 'p')
TESTS:: sage: p = SFAPower(QQ) sage: p == loads(dumps(p)) True
625941bc460517430c39406e
def run(self, filename, file): <NEW_LINE> <INDENT> root = Index.create().parse( filename, options=TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD).cursor <NEW_LINE> self.print_node(root, filename)
This bear is meant for debugging purposes relating to clang. It just prints out the whole AST for a file to the DEBUG channel.
625941bcad47b63b2c509e62
def __init__(self, evar_defs): <NEW_LINE> <INDENT> self.evar_defs = evar_defs
:param dict evar_defs: Pass in a dict whose keys are config names and the values are :py:class:`EnvironmentVariable` instances.
625941bc0fa83653e4656e9e
def test_init_debug(app): <NEW_LINE> <INDENT> app.debug = True <NEW_LINE> InvenioAssets(app) <NEW_LINE> assert app.config['WEBPACKEXT_STORAGE_CLS'] != FileStorage
Test module initialization with debug enabled.
625941bc01c39578d7e74d1d
def addBinary(a, b): <NEW_LINE> <INDENT> res = '' <NEW_LINE> index = 0 <NEW_LINE> carry = '0' <NEW_LINE> while index < max(len(a), len(b)) or carry == '1': <NEW_LINE> <INDENT> num_a = a[-1-index] if index < len(a) else '0' <NEW_LINE> num_b = b[-1-index] if index < len(b) else '0' <NEW_LINE> val = int(num_a) + int(num_b) + int(carry) <NEW_LINE> res = str(val % 2) + res <NEW_LINE> carry = '1' if val > 1 else '0' <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> return res
:type a: str :type b: str :rtype: str
625941bcc432627299f04b26
def gen_url(*args): <NEW_LINE> <INDENT> join_args = [base_url] <NEW_LINE> join_args.extend(args) <NEW_LINE> return '/'.join(map(str, join_args))
Generate an "old"-style URL. As far as I know this is only needed for auth.
625941bce5267d203edcdb81
def test_offset_calc(self): <NEW_LINE> <INDENT> class MockImage(): <NEW_LINE> <INDENT> size = [100, 100] <NEW_LINE> <DEDENT> image = MockImage() <NEW_LINE> offset_tuple = self.thumbnailer.calculate_offset(image) <NEW_LINE> self.assertEqual(offset_tuple, (0, 0))
test the offset is calculated correctly
625941bcac7a0e7691ed3fba
def bright_green(text): <NEW_LINE> <INDENT> return codes['bright_green'] + text + codes['reset']
Style text bright-green for the Script Console and Beacon Console. :param text: Text to style :return: Styled text
625941bcbf627c535bc130b3
def event_loop(**kwargs): <NEW_LINE> <INDENT> host_name = kwargs["host_name"] <NEW_LINE> is_status_all = kwargs["status_all"] <NEW_LINE> triggers = kwargs["triggers"] <NEW_LINE> now = datetime.datetime.utcnow() <NEW_LINE> system_status = CHARGE_CONTROLLER.SystemStatus(host_name) <NEW_LINE> got_data = system_status.get(is_status_all) <NEW_LINE> rawdata = {} <NEW_LINE> rawdata["source"] = "solar" <NEW_LINE> rawdata["data"] = got_data <NEW_LINE> rawdata["at"] = now <NEW_LINE> for key, data in got_data.items(): <NEW_LINE> <INDENT> logger.info( "{date}: {group}, {elem}, {value}[{unit}]".format( date=now, group=data["group"], elem=key, value=str(data["value"]), unit=data["unit"])) <NEW_LINE> <DEDENT> put_to_triggers(triggers, rawdata)
Monitor charge controller and update database like xively or internal database. This method should be called with a timer. Args: kwargs: keyword argument object Returns: None Exceptions: queue.Full: If queue of event handler is full
625941bc711fe17d82542253
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ChangeNotificationRuleOrderPayload): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__
Returns true if both objects are equal
625941bce8904600ed9f1e0b
def precision_at_k(predictions, actuals, k): <NEW_LINE> <INDENT> N = len(actuals) <NEW_LINE> hits = len(set(predictions[-k:]).intersection(set(actuals))) <NEW_LINE> precision = hits / min(N, k) <NEW_LINE> return precision
Computes the precision at k :param predictions: array, predicted values :param actuals: array, actual values :param k: int, value to compute the metric at :returns precision: float, the precision score at k
625941bc31939e2706e4cd50
def test_state_default_list_table( basic_entities_text, basic_entities_table_text ) -> None: <NEW_LINE> <INDENT> output_formats( ["state", "list"], basic_entities_text, basic_entities_table_text )
Test table.
625941bc45492302aab5e1a2
def listify(p:OptListOrItem=None, q:OptListOrItem=None): <NEW_LINE> <INDENT> if p is None: p=[] <NEW_LINE> elif isinstance(p, str): p=[p] <NEW_LINE> elif not isinstance(p, Iterable): p=[p] <NEW_LINE> n = q if type(q)==int else len(p) if q is None else len(q) <NEW_LINE> if len(p)==1: p = p * n <NEW_LINE> assert len(p)==n, f'List len mismatch ({len(p)} vs {n})' <NEW_LINE> return list(p)
Make `p` same length as `q`
625941bc0a50d4780f666d71
def find_modules_by_type(pipeline, moduletypes): <NEW_LINE> <INDENT> moduletypes = tuple(moduletypes) <NEW_LINE> result = [] <NEW_LINE> for module in pipeline.module_list: <NEW_LINE> <INDENT> desc = module.module_descriptor <NEW_LINE> if issubclass(desc.module, moduletypes): <NEW_LINE> <INDENT> result.append(module) <NEW_LINE> <DEDENT> <DEDENT> return result
Finds all modules that subclass one of the given types in the pipeline.
625941bc4428ac0f6e5ba6d3
def _log(self, message, verbosity=0): <NEW_LINE> <INDENT> if int(verbosity) <= self.verbosity: <NEW_LINE> <INDENT> self._output.write(message) <NEW_LINE> self._output.write("\n")
verbosity: lower=more important, -1 = quiet 0 = default +1 verbose
625941bc99fddb7c1c9de274
def create(self, name, count): <NEW_LINE> <INDENT> self.InsertColumn(0, "#") <NEW_LINE> self.InsertColumn(1, "Title") <NEW_LINE> for i in range(count): <NEW_LINE> <INDENT> self.InsertStringItem(sys.maxsize, str(i)) <NEW_LINE> self.SetStringItem(i, 1, "%s-%d" % (name, i))
Set up some test data.
625941bc4d74a7450ccd40a4
def check_extra_tags(self, kwargs): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> msg = ( "Unknown tags have been detected in this proforma: {}.\nIf " "you wish to continue, the behavior of the object might be " "affected. Please contact the package developers or submit " "an issue.\n Do you wish to continue anyways?".format( ", ".join(kwargs.keys()) ) ) <NEW_LINE> shall = input("%s (y/N) " % msg).lower() == "y" <NEW_LINE> if not shall: <NEW_LINE> <INDENT> raise NotImplementedError()
Detect extra tags in the proforma and warn. Args: kwargs (dict): dictionary of extra keyword-arguments that would be passed to the constructor.
625941bc009cb60464c63295
def get_distribution_table(self): <NEW_LINE> <INDENT> return _pypl.plLearnDistributionTable_get_distribution_table(self)
get_distribution_table(plLearnDistributionTable self) -> plDistributionTable
625941bc8e71fb1e9831d68f
def list_tools_scanagents(self): <NEW_LINE> <INDENT> uri = 'tools/scanagents/' <NEW_LINE> result = self.phpipam.api_send_request(path=uri, method='get') <NEW_LINE> return result
get scanagents list
625941bc16aa5153ce36235a
def format(self): <NEW_LINE> <INDENT> return "(" + ",".join( self.nColumns() * ["%s"] ) + ")"
Returns a tuple to be passed to INSERT INTO VALUES command. Return: format string
625941bccc40096d61595834
def modeSlow(self, latestCount): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> avgCt = self.bufferAvg(latestCount, self.__c_ct_slow) <NEW_LINE> self.__liveCountPrint(latestCount, avg = avgCt) <NEW_LINE> if self.__stg is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__stg.storeDatapoint([datetime.datetime.utcnow(), round(avgCt * 60.0, 3)]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Failed to store data point: %s" %traceback.format_exc()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> return
Slow mode handler. Stores up to 22 seconds worth of data in a buffer and averages those samples.
625941bcdd821e528d63b08c
def __init__(self, inicial, ecuacion, err=0.001, treeView=None): <NEW_LINE> <INDENT> self.inicial = inicial <NEW_LINE> self.err = sp.sympify(sp.Float(err)) <NEW_LINE> self.ec = str(ecuacion).lower() <NEW_LINE> self.root = 0 <NEW_LINE> self.real = [] <NEW_LINE> self.complex = [] <NEW_LINE> self.tree = treeView
Constructor de la clase
625941bcdd821e528d63b08d
def save_vocabulary(vocab, vocabulary_path): <NEW_LINE> <INDENT> print("Creating vocabulary %s" % (vocabulary_path)) <NEW_LINE> with codecs.open(vocabulary_path, "w", "utf-8") as vocab_file: <NEW_LINE> <INDENT> for symbol in sorted(vocab, key=vocab.get): <NEW_LINE> <INDENT> vocab_file.write(symbol + '\n')
Save vocabulary file in vocabulary_path. We write vocabulary to vocabulary_path in a one-token-per-line format, so that later token in the first line gets id=0, second line gets id=1, and so on. Args: vocab: vocabulary dictionary. vocabulary_path: path where the vocabulary will be created.
625941bce5267d203edcdb82