code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
@remote_compatible <NEW_LINE> def test_nfc_p2p_tag_enable_disable(dev): <NEW_LINE> <INDENT> if "FAIL" in dev[0].request("WPS_NFC_TOKEN NDEF").rstrip(): <NEW_LINE> <INDENT> raise Exception("Failed to generate password token") <NEW_LINE> <DEDENT> if "OK" not in dev[0].request("P2P_SET nfc_tag 1"): <NEW_LINE> <INDENT> rai... | NFC tag enable/disable for P2P | 625941bdcb5e8a47e48b79a3 |
def OpenFeatureClass(self, strName: 'char const *') -> "GsSmarterPtr< GsFeatureClass >": <NEW_LINE> <INDENT> return _gskernel.GsDataRoomHouseDataRoom_OpenFeatureClass(self, strName) | 打开矢量地物类对象:type strName: string
:param strName: 矢量地物的名称:rtype: GsSmarterPtr< GsFeatureClass >
:return: 返回矢量地物类的指针或者NULL | 625941bd7b25080760e39350 |
def main(): <NEW_LINE> <INDENT> m_a = input() <NEW_LINE> s_a = "bob" <NEW_LINE> c_a = 0 <NEW_LINE> for i_a in range(0, len(m_a) - 2): <NEW_LINE> <INDENT> j_a = 0 <NEW_LINE> k_a = i_a <NEW_LINE> v_a = 0 <NEW_LINE> while(j_a < 3 and m_a[k_a] == s_a[j_a]): <NEW_LINE> <INDENT> v_a += 1 <NEW_LINE> k_a += 1 <NEW_LINE> j_a +=... | Write a program that prints the number of times the string 'bob' occurs in s.
For example, if s = 'azcbobobegghakl', then your program should print
Number of times bob occurs is: 2 | 625941bd3617ad0b5ed67dee |
def _init(): <NEW_LINE> <INDENT> return memcachewrapper.MemcacheWrapper(HOSTNAME, PORT, PREFIX, DEBUG) | memcache init | 625941bde8904600ed9f1e1f |
def __contains__(self, table_name): <NEW_LINE> <INDENT> return table_name in self.table_names | Whether or not the named table is in this db | 625941bdf7d966606f6a9ef6 |
def exitFilled(self): <NEW_LINE> <INDENT> return self.__exitOrder is not None and self.__exitOrder.is_filled | Returns True if the exit order was filled. | 625941bdcc40096d61595847 |
def _compile_and_test(self, fn, arg_tys, asserts=[], equivs=[]): <NEW_LINE> <INDENT> test_pipeline = ArrayAnalysisTester.mk_pipeline(arg_tys) <NEW_LINE> analysis = test_pipeline.compile_to_ir( fn, test_idempotence=self.compare_ir) <NEW_LINE> if equivs: <NEW_LINE> <INDENT> for func in equivs: <NEW_LINE> <INDENT> func(an... | Compile the given function and get its IR. | 625941bd8da39b475bd64e66 |
def __init__(self, mat): <NEW_LINE> <INDENT> self.mat = mat | Create a transformation from a rotation matrix (unsafe, but faster). | 625941bd6aa9bd52df036c98 |
def schedule_downtime(self, client, object_type, filters, comment, author, duration): <NEW_LINE> <INDENT> try_count = 0 <NEW_LINE> while try_count < 3: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> try_count = try_count + 1 <NEW_LINE> now = time.time() <NEW_LINE> end_time = now + duration <NEW_LINE> results = [] <NEW_LI... | Schedule downtime for the provided filter | 625941bd57b8e32f5248338f |
def test_gaussian_kernel(self): <NEW_LINE> <INDENT> crkr = CrKr(self.S_2x3, self.C_2x2, self.D_2x3, self.ridge_factor_05, self.sigma_05, self.a_1) <NEW_LINE> s1 = np.array([[1, 2, 3]]) <NEW_LINE> s2 = np.array([[4, 5, 6]]) <NEW_LINE> expected_gk = np.exp(-(self.a_1 * np.power(npla.norm(s1 - s2), 2) / (2 * (self.sigma_0... | Tests if the gaussian kernel is correctly computed.
| 625941bd5fdd1c0f98dc0127 |
def GetLabel(self): <NEW_LINE> <INDENT> return self._label | Returns the tool label. | 625941bd236d856c2ad446ce |
def upper_bound(min_length, max_length, floor, ceiling, min_slope, max_slope): <NEW_LINE> <INDENT> from sage.functions.all import floor as flr <NEW_LINE> if max_length < infinity: <NEW_LINE> <INDENT> return sum( [ ceiling(j) for j in range(max_length)] ) <NEW_LINE> <DEDENT> elif max_slope < 0 and ceiling(1) < infinity:... | Compute a coarse upper bound on the size of a vector satisfying the
constraints.
TESTS::
sage: import sage.combinat.integer_list as integer_list
sage: f = lambda x: lambda i: x
sage: integer_list.upper_bound(0,4,f(0), f(1),-infinity,infinity)
4
sage: integer_list.upper_bound(0, infinity, f(0), f(1... | 625941bd009cb60464c632a9 |
def get_winner(player1, player2, defeat_mapping=None): <NEW_LINE> <INDENT> defeat_mapping = defeat_mapping or _create_defeat_mapping() <NEW_LINE> if player1 not in rps or player2 not in rps: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> key = player1+"_"+player2 <NEW_LINE> if defeat_mapping[key] == 'win': <N... | Given player1 and player2 determine game output returning the
appropriate string:
Tie
Player1
Player2
(where Player1 and Player2 are the names passed in)
Raise a ValueError if invalid player strings are passed in. | 625941bd2ae34c7f2600d027 |
def multi(fn): <NEW_LINE> <INDENT> @wraps(fn) <NEW_LINE> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.target.startswith("file://"): <NEW_LINE> <INDENT> original_target = self.target <NEW_LINE> original_port = self.port <NEW_LINE> _, _, feed_path = self.target.partition("file://") <NEW_LINE> try: <NEW... | Decorator for exploit.Exploit class
Decorator that allows to feed exploit using text file containing
multiple targets definition. Decorated function will be executed
as many times as there is targets in the feed file.
WARNING:
Important thing to remember is fact that decorator will
supress values returned by decorate... | 625941bd3539df3088e2e241 |
def _cleanWord(self, word): <NEW_LINE> <INDENT> cleaned = sub(r'[\W]', '', word) <NEW_LINE> is_valid = cleaned.isalpha() and (len(cleaned) >= self.minLength) <NEW_LINE> return cleaned.upper() if is_valid else False | Returns word in uppercase with all non-alphabethic characters removed.
Args:
word: String of the word being added to the Trie.
Returns:
Uppercase string or False | 625941bd16aa5153ce36236e |
def hash_password(password): <NEW_LINE> <INDENT> return xsha1(password.lower().encode()).digest() | Returns the XSha1 hash of the given password. Used for account creation. | 625941bd63f4b57ef0001016 |
def generate_policy_string(attribute_master, n_attr): <NEW_LINE> <INDENT> policy_str = '' <NEW_LINE> OPS = ['and', 'or'] <NEW_LINE> attr_indices = np.random.choice(range(len(attribute_master)), n_attr, replace=False) <NEW_LINE> for attr_index in attr_indices: <NEW_LINE> <INDENT> attribute = attribute_master[attr_index]... | :param attribute_master: List of all attributes in the system
:param n_attr: Number of attributes to be contained in the generated policy
:return: A Generated policy string | 625941bdbf627c535bc130c4 |
def noam_decay(d_model, warmup_steps): <NEW_LINE> <INDENT> with default_main_program()._lr_schedule_guard(): <NEW_LINE> <INDENT> if imperative_base.enabled(): <NEW_LINE> <INDENT> decay = imperate_lr.NoamDecay(d_model, warmup_steps) <NEW_LINE> return decay <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> global_step = _dec... | Noam decay method. The numpy implementation of noam decay as follows.
.. code-block:: python
import padde.fluid as fluid
import numpy as np
# set hyper parameters
d_model = 2
current_steps = 20
warmup_steps = 200
# compute
lr_value = np.power(d_model, -0.5) * np.min([
np.po... | 625941bd596a8972360899b9 |
def calc_ages(self,dob_rows, recent_rows, statuses=True): <NEW_LINE> <INDENT> if len(dob_rows) != len(recent_rows): <NEW_LINE> <INDENT> raise RuntimeError('dob_rows(%d)'%len(dob_rows) + ' and recent rows(%d)'%len(recent_rows) +'do not have the same length.') <NEW_LINE> <DEDENT> ages = [] <... | Creates a set of rows matched to the provided rows
filled with the ages calculated between them.
returns ages and statuses if statuses is true
else just the list of ages.
statuses is a string describing the problem if any while
calculating the ages.
possibilities are :
okay, recent_row/dob_row is missing/not in the ... | 625941bd4e4d5625662d42d1 |
def get_label_ids_by_category(crop: Dict[str, Any], category: str) -> List[int]: <NEW_LINE> <INDENT> return [ll[0] for ll in crop['labels'][category]] | Get all label ids from a crop that belong to a certain category
Args:
crop: Instance of an entry in the crop database.
category: one of "present_annotated", "present_unannotated", "absent_annotated", "present_partial_annotation"
Returns:
All label ids that belong to the `category` for that crop. | 625941bdd4950a0f3b08c247 |
def moveLeft(self): <NEW_LINE> <INDENT> newXPos = self.positionA.x <NEW_LINE> newYPos = self.positionA.y - 1 <NEW_LINE> moveCoordinate = Coordinate([newXPos, newYPos]) <NEW_LINE> return self.__move(moveCoordinate) | Move the player to the left | 625941bdc432627299f04b39 |
def _delete_asteroid(self, asteroid): <NEW_LINE> <INDENT> self._screen.unregister_asteroid(asteroid) <NEW_LINE> self.__asteroids.remove(asteroid) | the function unregisters the asteroid and deletes it from the
asteroids list.
:param asteroid: the asteroid that will be deleted (type asteroid).
:return: None. | 625941bd97e22403b379ce8e |
def validate(self): <NEW_LINE> <INDENT> if not self.help and not self.version and not self.diagnostics: <NEW_LINE> <INDENT> if self.actions is None or len(self.actions) == 0: <NEW_LINE> <INDENT> raise ValueError("At least one action must be specified.") <NEW_LINE> <DEDENT> <DEDENT> if self.managed and self.managedOnly:... | Validates command-line options represented by the object.
Unless ``--help`` or ``--version`` are supplied, at least one action must
be specified. Other validations (as for allowed values for particular
options) will be taken care of at assignment time by the properties
functionality.
*Note:* The command line format ... | 625941bd56ac1b37e62640ca |
def delete_lb(self, lb_id): <NEW_LINE> <INDENT> url = "%s/loadbalancers/%s" % (self.api_user_url, lb_id) <NEW_LINE> request_result = requests.delete(url, headers=self.api_headers, verify=False) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> self.logging.info(request_result.status_code) <NEW_LINE> self.logging.info(req... | Delete the loadbalancer identified by 'lb_id' | 625941bd8a349b6b435e8069 |
def findCycle(self) -> List[Tuple[QuantumNode, QuantumNode]]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return nx.find_cycle(self._connectedQuanta) <NEW_LINE> <DEDENT> except nx.NetworkXNoCycle: <NEW_LINE> <INDENT> return [] | Check a graph for the presense of cycles and returns the edges of
any cycles found, or an empty list if there is no cycle.
Returns
-------
result : list of tuple of `QuantumNode`, `QuantumNode`
A list of any graph edges that form a cycle, or an empty list if
there is no cycle. Empty list to so support if graph... | 625941bd046cf37aa974cc40 |
@login_manager.user_loader <NEW_LINE> def login_loader(userid): <NEW_LINE> <INDENT> return User.query.get(int(userid)) | Pull a user object from the database.
This is used for loading users from existing sessions. | 625941bdf7d966606f6a9ef7 |
def unget_service(self, bundle, reference, service=None): <NEW_LINE> <INDENT> with self.__svc_lock: <NEW_LINE> <INDENT> if reference.is_prototype(): <NEW_LINE> <INDENT> return self.__unget_service_from_factory( bundle, reference, service ) <NEW_LINE> <DEDENT> elif reference.is_factory(): <NEW_LINE> <INDENT> return self... | Removes the usage of a service by a bundle
:param bundle: The bundle that used the service
:param reference: A service reference
:param service: Service instance (for Prototype Service Factories)
:return: True if the bundle usage has been removed | 625941bd4428ac0f6e5ba6e7 |
def check_out_sz(self) -> int: <NEW_LINE> <INDENT> x = torch.rand((1, self.n_fpv,self.n_v)) <NEW_LINE> training = self.training <NEW_LINE> self.eval() <NEW_LINE> x = self.forward(x) <NEW_LINE> if training: self.train() <NEW_LINE> return x.size(-1) | Automatically computes the output size of the head by passing through random data of the expected shape
Returns:
x.size(-1) where x is the outgoing tensor from the head | 625941bd2c8b7c6e89b356b8 |
def arctopoints(center,start,angle,arcerror = 0.1,arcunits=2500): <NEW_LINE> <INDENT> r = wxPointUtil.distance(center,start) <NEW_LINE> maxerror = max(arcerror/r,arcunits) <NEW_LINE> if _user_stacks['Params'][-1].get('debug',None): <NEW_LINE> <INDENT> output('maxerror = max(arcerror/r,arcunits) = {}/{},{} = {}'.format(... | maxerror as a percentage (0.0-1.0) of radius | 625941bda8370b7717052797 |
@superState('spinAtHome') <NEW_LINE> @stay <NEW_LINE> def doSecondHalfSpin(player): <NEW_LINE> <INDENT> if player.firstFrame(): <NEW_LINE> <INDENT> player.brain.tracker.repeatFixedPitchLookAhead() <NEW_LINE> if player.brain.playerNumber == 3: <NEW_LINE> <INDENT> player.setWalk(0, 0, speeds.SPEED_SIX) <NEW_LINE> <DEDENT... | Keep spinning in the same direction. | 625941bdbde94217f3682cea |
def pp_random_noise(self, sample): <NEW_LINE> <INDENT> xy_ = sample["ink"][:,:,0:2] <NEW_LINE> std_ = tf.math.reduce_std(xy_, axis=1) <NEW_LINE> noise_ = tf.transpose(tf.random.normal([tf.shape(xy_)[1], 1, 1], 0, std_/self.random_noise_factor), [1,0,2]) <NEW_LINE> if self.gt_targets: <NEW_LINE> <INDENT> if "target_ink"... | Applies random gaussian noise. | 625941bd4527f215b584c350 |
def find_location(self, pos: xr.Dataset): <NEW_LINE> <INDENT> p = pos.interp(time=self.time) <NEW_LINE> self.lon = p.lon.data <NEW_LINE> self.lat = p.lat.data | Find ship location for sounding time in GPS track.
Parameters
----------
pos : xr.Dataset
GPS track in xarray.Dataset with data variables lon and lat and
coordinate time. | 625941bdd10714528d5ffbd6 |
def on_property_update(self, name, value): <NEW_LINE> <INDENT> with self._properties_lock: <NEW_LINE> <INDENT> self.Logger.debug("updating %s (%s)", name, value) <NEW_LINE> self._updated_properties.add(name) <NEW_LINE> <DEDENT> self.try_update() | Handler for when a watched property is updated.
We only store the name, since the value may change a lot.
The value is also of unknown type, not the json-compatible
types needed. | 625941bd0a366e3fb873e70d |
def CreateVendorData(self,numberVendors): <NEW_LINE> <INDENT> indices = range(numberVendors) <NEW_LINE> entityCount = 0 <NEW_LINE> vendorDic = {} <NEW_LINE> for index in indices: <NEW_LINE> <INDENT> vendorIndex = self.InitialVendorIndex + index <NEW_LINE> indexString = str(vendorIndex) <NEW_LINE> vendorKeyName = 'Ven' ... | Create random Vendor data and upload | 625941bd5510c4643540f2e2 |
def stop(universal_id: str) -> dict: <NEW_LINE> <INDENT> info = current(universal_id) <NEW_LINE> info['remaining'] = pool[universal_id]['app'].get_remaining_solutions() <NEW_LINE> info['addscore'] = pool[universal_id]['addscore'] <NEW_LINE> pool[universal_id]['app'].stop() <NEW_LINE> return info | Stop the game. | 625941bd91f36d47f21ac3e5 |
def _test_pools_4(self): <NEW_LINE> <INDENT> self.session_login() | raid5 tests | 625941bd3eb6a72ae02ec3cb |
def __single_turn(self): <NEW_LINE> <INDENT> pass | Note - this function is here to guide you and it is *not mandatory*
to implement it. The logic defined by this function must be implemented
but if you wish to do so in another function (or some other functions)
it is ok.
The function runs one round of the game :
1. Print board to the screen
2. Get user... | 625941bd3c8af77a43ae3694 |
def rational_quadratic(x, y, c=0): <NEW_LINE> <INDENT> d = dist.sqeuclidean(x, y) <NEW_LINE> return 1 - d / (d + c) | Compute a rational quadratic kernel.
The Rational Quadratic kernel is less computationally intensive than the
Gaussian kernel and can be used as an alternative when using the Gaussian
becomes too expensive:
K(x, y) = 1 - (||x - y||^2 / (||x - y||^2 + c))
where `x` and `y` are vectors in the input space... | 625941bd0a366e3fb873e70e |
def power(self, p, n): <NEW_LINE> <INDENT> if n == 0 or self.is_null(p): <NEW_LINE> <INDENT> return NULL_POINT <NEW_LINE> <DEDENT> res = NULL_POINT <NEW_LINE> while n: <NEW_LINE> <INDENT> if n & 1: <NEW_LINE> <INDENT> res = self.add(res, p) <NEW_LINE> <DEDENT> p = self.add(p, p) <NEW_LINE> n >>= 1 <NEW_LINE> <DEDENT> r... | n✕P or (P + P + ... + P) n times | 625941bd3d592f4c4ed1cf6c |
def get_total_buy_order_value(self): <NEW_LINE> <INDENT> a = self.get_open_orders_pd() <NEW_LINE> a = a[a.action == 'BUY'] <NEW_LINE> return sum(a.lmtPrice * a.totalQuantity) | Retrieves current buy orders and calculates how much in value they add up to
Return: float | 625941bd85dfad0860c3ad4f |
def p_expression_uminus(p): <NEW_LINE> <INDENT> p[0] = UnaryOpUminus("NEGATE " + p[1], p[2]) <NEW_LINE> p[0].lineno = p.lineno(1) <NEW_LINE> p[0].lexpos = p.lexpos(1) | expression : MINUS expression %prec UMINUS | 625941bdcc40096d61595848 |
def extract_entities(result): <NEW_LINE> <INDENT> return result.get('entities', []) | Extracts entities from a parsing result. | 625941bd63b5f9789fde6fdc |
def column_from_classes(models): <NEW_LINE> <INDENT> if not isinstance(models, list): <NEW_LINE> <INDENT> models = [models] <NEW_LINE> <DEDENT> items = list() <NEW_LINE> for model in models: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for col in model.__table__.c: <NEW_LINE> <INDENT> items.extend([(str(col).replace('.... | Create Graphene List to select the columns from SQLAlchemy classes | 625941bd56b00c62f0f1454e |
def polygonize(raster_path, labels_path, shapefile_path, layer_name='thematic',class_name ='class',id ='id'): <NEW_LINE> <INDENT> open_labeles = open(labels_path) <NEW_LINE> reader = csv.reader(open_labeles, delimiter='\n') <NEW_LINE> lables_lst = [] <NEW_LINE> for line in reader: <NEW_LINE> <INDENT> lables_lst += line... | This function converts Raster to shapefile and assigns a class to each polygon according to its pixel value
Args:
raster_path(str):the path from which the raster to be converted will be imported
labels_path(str):the path from which the txt file that contains the labels will be imported
shapefile_path(str): the path for... | 625941bd9b70327d1c4e0cca |
def getCurrentPortsInformations(self): <NEW_LINE> <INDENT> status = {} <NEW_LINE> for port in self.getPorts(): <NEW_LINE> <INDENT> status[port.getID()] = port.getCurrentInformations() <NEW_LINE> <DEDENT> return status | Gibt alle aktuellen Informationen zu allen Ports als dict zurück. Dies ist z.B. für die Benutzeroberfläche relevant.
:return: eine Sammlung aller Informationen zu den aktuellen Ports.
:rtype: dict | 625941bd5fcc89381b1e15b3 |
def __init__(self, num_partitions: int): <NEW_LINE> <INDENT> if num_partitions < 1: <NEW_LINE> <INDENT> raise ValueError('num_partitions must be >= 1.') <NEW_LINE> <DEDENT> if num_partitions != 1: <NEW_LINE> <INDENT> self._hasher = ColumnHasher(num_partitions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._hasher ... | Initializes KeyAndSplitByFeatureFn.
Args:
num_partitions: The number of partitions to divide features/cross-features
into. Must be >= 1. | 625941bd8e05c05ec3eea268 |
def need_to_analyze(self, event): <NEW_LINE> <INDENT> chk_rec_id = event.rec_id <NEW_LINE> if chk_rec_id > self.pool_rec_id: <NEW_LINE> <INDENT> if len(self.rec_ids) != 0: <NEW_LINE> <INDENT> self.msg_target.warning('Not all priming events were available. Missing: {0}'.format(str(self.rec_ids))) <NEW_LINE> <DEDENT> se... | If before my checkpointed rec_id then don't need to process | 625941bd7d43ff24873a2b94 |
def add_turbine(self, turbine): <NEW_LINE> <INDENT> self.turbines.append(turbine) | Adds a turbine to the windpark.
Parameters
----------
turbine : Turbine
Turbine to add. | 625941bdd99f1b3c44c6748b |
def headers(self): <NEW_LINE> <INDENT> headers = { "Authorization": "Bearer " + str(self.auth.Bearer(self.user_Id)) } <NEW_LINE> save(self.auth.prefs, self.file_prefs) <NEW_LINE> return headers | docstring for headers | 625941bd435de62698dfdb49 |
def test_html_esc(self): <NEW_LINE> <INDENT> self.assertEqual("&", General.htmlesc("&")) <NEW_LINE> self.assertEqual("<", General.htmlesc("<")) <NEW_LINE> self.assertEqual(">", General.htmlesc(">")) | Check that our HTML escaping works ok. ( & -> & etc)
| 625941bd8a349b6b435e806a |
def relevant_parameters(df, ctrls_std_rel_min=0.001, ctrls_std_rel_max=0.1, group_by="Plate"): <NEW_LINE> <INDENT> assert is_pandas(df), "df has to be a Pandas DataFrame." <NEW_LINE> relevant_table = FINAL_PARAMETERS.copy() <NEW_LINE> ctrl_set = set(df.keys()) <NEW_LINE> plates = sorted(set(df[group_by])) <NEW_LINE> fo... | ...std_rel...: mad relative to the median value
df is a PANDAS DataFrame. | 625941bdbe7bc26dc91cd4fb |
def app_SN_animated_gradient_plot(self): <NEW_LINE> <INDENT> print('this option is yet to be implemented') | Plots animated temperature and moisture gradient. | 625941bd2eb69b55b151c7a2 |
def remote_api_shell(servername, appid, path, secure, rpc_server_factory): <NEW_LINE> <INDENT> os.environ['AUTH_DOMAIN'] = "appscale" <NEW_LINE> remote_api_stub.ConfigureRemoteApi(appid, path, auth_func, servername=servername, save_cookies=True, secure=secure, rpc_server_factory=rpc_server_factory) <NEW_LINE> remote_ap... | Actually run the remote_api_shell. | 625941bd4c3428357757c220 |
def register_pvs(self, suite=None, pv_list=None): <NEW_LINE> <INDENT> if suite is None and pv_list is None: <NEW_LINE> <INDENT> raise NotImplementedError("Expecting pv_list or suite to be provided") <NEW_LINE> <DEDENT> for pv_name in set(pv_list if pv_list is not None else []): <NEW_LINE> <INDENT> self.pvs_refs[pv_name... | Check connection of all the PVs declared in suite | 625941bd507cdc57c6306bcb |
def test_list_nodes(self): <NEW_LINE> <INDENT> nodes = self.driver.list_nodes() <NEW_LINE> self.assertEqual(len(nodes), 2) <NEW_LINE> node = nodes[0] <NEW_LINE> self.assertEqual(node.id, '5') <NEW_LINE> self.assertEqual(node.name, 'Compute 5') <NEW_LINE> self.assertEqual(node.state, OpenNebulaNodeDriver.NODE_STATE_MAP[... | Test list_nodes functionality. | 625941bdbf627c535bc130c5 |
def number_to_word(number): <NEW_LINE> <INDENT> if number < 20: <NEW_LINE> <INDENT> number = first_numbers[number] <NEW_LINE> <DEDENT> elif number < 100 and number % 10 == 0: <NEW_LINE> <INDENT> number = tens[int(number/10 -1)] <NEW_LINE> <DEDENT> elif number < 1000 and number % 10 == 0: <NEW_LINE> <INDENT> number = fi... | Converts number into words, when the number is one thousand or less. | 625941bd21bff66bcd68484b |
def test_resource_None_scope_view_not_authorized(self): <NEW_LINE> <INDENT> self.scope = Scope.objects.create(name="some_new_scope") <NEW_LINE> self._request_token(scope=self.scope.name) <NEW_LINE> self._authorize_and_access_token_using_form() <NEW_LINE> response = self._oauth_signed_get("/oauth/some/") <NEW_LINE> self... | Tests that view created with @oauth_required decorator won't give access
when requested using token with scope!="all" | 625941bd4a966d76dd550f03 |
def objects(self): <NEW_LINE> <INDENT> return [node['item'].object for node in self.topLevel()] | returns a list of all the objects
:return: list of objects | 625941bd24f1403a92600a60 |
def __abs__(self): <NEW_LINE> <INDENT> val = UVal(self) <NEW_LINE> val._value = abs(val._value) <NEW_LINE> return val | >>> print(abs(UVal(-1.1, {})))
1.100 {}
>>> print(abs(UVal(1.1, {})))
1.100 {} | 625941bd94891a1f4081b99f |
def remove_schema_node(self, node_id): <NEW_LINE> <INDENT> data_typing = self.get_data_typing() <NEW_LINE> instances = keys_by_value(data_typing, node_id) <NEW_LINE> if len(instances) > 0: <NEW_LINE> <INDENT> raise RewritingError( "Cannot remove '{}' from the schema: ".format( node_id) + "'{}' has instances in the data... | Remove a schema node. | 625941bdcdde0d52a9e52f26 |
def test__TIME_to_python(self): <NEW_LINE> <INDENT> cases = [ (b'45:34:10', datetime.timedelta(hours=45, minutes=34, seconds=10)), (b'-45:34:10', datetime.timedelta(-2, 8750)), (b'45:34:10.010101', datetime.timedelta(hours=45, minutes=34, seconds=10, microseconds=10101)), (b'-45:34:10.010101', datetime.timedelta(-2, 87... | Convert a MySQL TIME to a Python datetime.time type | 625941bd91af0d3eaac9b90c |
def p_iteration_stmt(p): <NEW_LINE> <INDENT> p[0] = iast.IterationStmt(p[2], p[4]) | iterationStmt : WHILE simpleExpr COL suite
| 625941bd29b78933be1e55a8 |
def report(self, report_format: str=None, **kwargs) -> bool: <NEW_LINE> <INDENT> if report_format is None: <NEW_LINE> <INDENT> if self.default_format is None: <NEW_LINE> <INDENT> raise ValueError("Either a default format needs to be set for " "this {} or the name of the report format " "needs to be supplied to the .rep... | Create a report with the given format; if none is given, the default
format is used.
Args:
report_format (str, optional): The report format to use
**kwargs: Passed on to the ReportFormat.report() call
Returns:
bool: Whether there was a report
Raises:
ValueError: If no default format was set and no re... | 625941bdde87d2750b85fc86 |
def build_network(self) -> None: <NEW_LINE> <INDENT> self.model = Sequential() <NEW_LINE> self.model.add(Dense(self.n_classes, activation='sigmoid', input_dim=self.x_train.shape[1])) <NEW_LINE> self.model.add(Dropout(0.7)) <NEW_LINE> self.model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accurac... | Builds the network architecture using Keras utilities and compiles it.
One hidden layer implementing logistic regression. | 625941bd0fa83653e4656eb3 |
def test_es_source(self): <NEW_LINE> <INDENT> return self._get_es_client().ping() | Returns True if the handler can ping the Elasticsearch servers
Can be used to confirm the setup of a handler has been properly done and confirm
that things like the authentication is working properly
:return: A boolean, True if the connection against elasticserach host was successful | 625941bdcdde0d52a9e52f27 |
def test_invalid_source_type(self): <NEW_LINE> <INDENT> request_json = {'report_type': 'details', 'sources': [{'server_id': self.server_id, 'report_version': create_report_version(), 'source_name': self.net_source.name, 'source_type': 'abc'}]} <NEW_LINE> response_json = self.merge_details_from_source_expect_400( reques... | Test source_type has invalid_value. | 625941bdd7e4931a7ee9de13 |
def nearest_neighbors(self, word, k=1, exclude=[], metric="cosine"): <NEW_LINE> <INDENT> if isinstance(word, string_types): <NEW_LINE> <INDENT> assert word in self, "Word not found in the vocabulary" <NEW_LINE> v = self[word] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = word <NEW_LINE> <DEDENT> D = pairwise_distan... | Find nearest neighbor of given word
Parameters
----------
word: string or vector
Query word or vector.
k: int, default: 1
Number of nearest neighbours to return.
metric: string, default: 'cosine'
Metric to use.
exclude: list, default: []
Words to omit in answer
Returns
-------
n: list
... | 625941bd3617ad0b5ed67def |
def complement_bayes_prob(self, item, cat): <NEW_LINE> <INDENT> cat_prob = float(self.get_cat_count(cat)) / self.total_count() <NEW_LINE> doc_prob = self.complement_doc_prob(item, cat) <NEW_LINE> return math.log(cat_prob) - math.log(doc_prob) | アイテム item が cat 以外のカテゴリに属する確率を求める | 625941bde64d504609d74737 |
def compile_source(source: str, preprocessor: Optional[PreprocessorFunc], parser: Grammar, transformer: TransformerCallable, compiler: CompilerCallable, *, preserve_AST: bool = False) -> CompilationResult: <NEW_LINE> <INDENT> ast = None <NEW_LINE> original_text = load_if_file(source) <NEW_LINE> source_name = source if ... | Compiles a source in four stages:
1. Pre-Processing (if needed)
2. Parsing
3. AST-transformation
4. Compiling.
The later stages AST-transformation, compilation will only be invoked if
no fatal errors occurred in any of the earlier stages of the processing
pipeline.
:param source: The input text for compilation or a ... | 625941bd38b623060ff0ace5 |
def _get_height_weight(self, jitter=0.0): <NEW_LINE> <INDENT> heights = [] <NEW_LINE> weights = [] <NEW_LINE> for r in self.records: <NEW_LINE> <INDENT> if r.wtkg2 == 'NA' or r.htm3 == 'NA': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> height = r.htm3 + random.uniform(-jitter, jitter) <NEW_LINE> heights.append(heig... | Get sequences of height and weight.
Args:
jitter: float magnitude of random noise added to heights
Returns:
tuple of sequences (heights, weights) | 625941bd07d97122c417877c |
def get_nodes_degree(canonical_graph, predicates_cache_manager, predicates_blacklist, undirected=False): <NEW_LINE> <INDENT> nodes_degree = dict() <NEW_LINE> for n_canonical_index in range(0, len(canonical_graph["canonical_to_rdf_index"])): <NEW_LINE> <INDENT> nodes_degree[n_canonical_index] = 0 <NEW_LINE> <DEDENT> for... | Compute degree for each node in canonical graph (only considering predicates that are not blacklisted)
:param canonical_graph: canonical graph
:param predicates_cache_manager: cache manager for predicates
:param predicates_blacklist: blacklist for predicates
:param undirected: if the graph should be considered directed... | 625941bd2eb69b55b151c7a3 |
def distinct(self, columns: List[str], checkpoint_before: bool = False, keep: str = "first"): <NEW_LINE> <INDENT> assert keep in ("first", "last"), ValueError( "Must choose to keep 'first' or 'last' unique record.") <NEW_LINE> if checkpoint_before: <NEW_LINE> <INDENT> self.make_checkpoint() <NEW_LINE> self.data.drop_du... | Drop duplicate rows in the data (based on a specified
subset of columns).
Parameters
----------
columns : list
A list of columns used to check for duplicate
rows.
checkpoint_before : bool, optional
Whether or not to make a checkpoint
of the current data before downsizing.
Default = False. | 625941bd45492302aab5e1b7 |
def test_catalog_plot_local(self): <NEW_LINE> <INDENT> cat = read_events() <NEW_LINE> with ImageComparison(self.image_dir, 'catalog-cartopy3.png') as ic: <NEW_LINE> <INDENT> rcParams['savefig.dpi'] = 72 <NEW_LINE> cat.plot(method='cartopy', outfile=ic.name, projection='local', resolution='50m', continent_fill_color='0.... | Tests the catalog preview plot, local projection, some more non-default
parameters, using Cartopy. | 625941bd7b25080760e39351 |
def loadImage(self, data, scaled=True, fromfile=True): <NEW_LINE> <INDENT> if data is None or not data: <NEW_LINE> <INDENT> self.removeImage() <NEW_LINE> return <NEW_LINE> <DEDENT> if fromfile: <NEW_LINE> <INDENT> pix = QPixmap(data) <NEW_LINE> <DEDENT> elif isinstance(data, QPixmap): <NEW_LINE> <INDENT> pix = data <NE... | Load the image into the widget using a bytearray
An empty picture will result in the default placeholder
image. | 625941bd23e79379d52ee45e |
def can_play(self, col): <NEW_LINE> <INDENT> return self.game_won == 0 and self.num_turns < 42 and self.get_val(col-1, 0) == 0 | Check that game and column can be played on.
:param col: The column being accessed. 1-indexed.
:type col: int
:return: Whether user can play on this game in column.
:rtype: Boolean | 625941bd287bf620b61d395d |
def insert(self, index, data): <NEW_LINE> <INDENT> raise BranchDedicatedMethodError() | Insert new data to target DictBranch.
Args:
nodepath_string(str): XPath format search string.
data(any json data): Json format data under current branch.
dict, list, str, int, float, bool
Returns:
LocationPath or None: LocationPath of new branch.
Return None if target nodename already ... | 625941bd30dc7b7665901860 |
def genericValue(self, key, default): <NEW_LINE> <INDENT> return self.value(key, default) | Returns an arbitrary settings value, corrosponding to `key`.
The `default` value is used should the `key` contain no value.
:param key: the name of the key to get the value from.
:type key: string
:param default: the value to be used as fallback if `key`
contains no value. | 625941bdf548e778e58cd474 |
def get_bias(a, b, c, d): <NEW_LINE> <INDENT> if b + d > a + c: <NEW_LINE> <INDENT> (a, b, c, d) = (b, a, d, c) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return abs(b/(a+b) - d/(c+d)) / ((b+d) / (a+b+c+d)) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return None | Bare equation from Guo et al., 2012 (SB, strand bias method 1).
One modification: a/b and c/d will be swapped if necessary to make sure b and
d are the minor allele.. | 625941bd956e5f7376d70d66 |
def _greedy_predict(self, xx): <NEW_LINE> <INDENT> yyhat = [] <NEW_LINE> phiphi = [] <NEW_LINE> for phi in self.efeats_fnc(xx): <NEW_LINE> <INDENT> phi = phi + self.tfeats_fnc(yyhat[-self.order:]) <NEW_LINE> (yhat, _) = max(self.scores(phi).items(), key=itemgetter(1)) <NEW_LINE> yyhat.append(yhat) <NEW_LINE> phiphi.app... | Sequence classification with a greedy approximation of a Markov
model, also returning feature vectors `phiphi` | 625941bd167d2b6e31218a8e |
def list_posts(request): <NEW_LINE> <INDENT> return render(request, "feed.html", {"posts": posts}) | List existing posts | 625941bdcad5886f8bd26ed9 |
def connect_first_once(self, callback, *args, **kwargs): <NEW_LINE> <INDENT> return self._connect(callback, args, kwargs, once=True, pos=0) | Variant of :meth:`~Signal.connect_once` in which the given callback is
inserted to the front of the callback list. | 625941bdf7d966606f6a9ef8 |
def process_get_hw_modules_from_lineid(**kwargs): <NEW_LINE> <INDENT> return f.process.get_hw_modules_from_lineid(**kwargs) | :menu: (enable=True, name=LINEID GET HW MODULES, section=Config, num=8.2, args={'menu': True}) | 625941bd097d151d1a222d53 |
def set_project(self, project: 'Project'): <NEW_LINE> <INDENT> self.project = project | Set Project for Note instance
@param project: Project Instance | 625941bdcc40096d61595849 |
def workspace_from_dir(directory, recurse=True): <NEW_LINE> <INDENT> directory = os.path.abspath(directory) <NEW_LINE> pickle_path = os.path.join(directory, 'workspace.pkl') <NEW_LINE> if not os.path.exists(pickle_path): <NEW_LINE> <INDENT> if recurse: <NEW_LINE> <INDENT> parent_dir = os.path.dirname(directory) <NEW_LI... | Construct a workspace object from a directory name. If recurse=True, this
function will search down the directory tree and return the first workspace
it finds. If recurse=False, an exception will be raised if the given
directory is not a workspace. Workspace identification requires a file
called 'workspace.pkl' to b... | 625941bd0383005118ecf4dc |
def parse_charge_section(self, line, inputfile, chargestype): <NEW_LINE> <INDENT> has_spins = 'AND SPIN POPULATIONS' in line <NEW_LINE> if not hasattr(self, "atomcharges"): <NEW_LINE> <INDENT> self.atomcharges = {} <NEW_LINE> <DEDENT> if has_spins and not hasattr(self, "atomspins"): <NEW_LINE> <INDENT> self.atomspins =... | Parse a charge section, modifies class in place
Parameters
----------
line : str
the line which triggered entry here
inputfile : file
handle to file object
chargestype : str
what type of charge we're dealing with, must be one of
'mulliken', 'lowdin' or 'chelpg' | 625941bd8da39b475bd64e68 |
def test_minimizer_comparison_mode_invalid(self): <NEW_LINE> <INDENT> config_str = "[PLOTTING]\ncomparison_mode: absolute_values" <NEW_LINE> self.shared_invalid('comparison_mode', config_str) | Checks user set comparison_mode is invalid | 625941bd66673b3332b91f88 |
def test_03__get_from_manifest(self): <NEW_LINE> <INDENT> imgtag = '123' <NEW_LINE> struct = {'manifest': {'123': {'json': {'layers': [{'digest': 'd1'}, {'digest': 'd2'}], 'config': {'digest': 'dgt'}}}}} <NEW_LINE> lay_out = ['d2', 'd1'] <NEW_LINE> conf_out = 'dgt' <NEW_LINE> status = OciLocalFileAPI(self.local)._get_f... | Test03 OciLocalFileAPI()._get_from_manifest. | 625941bd30c21e258bdfa393 |
def _init_cuda_setting(self): <NEW_LINE> <INDENT> if not self.config.cuda: <NEW_LINE> <INDENT> self.config.device = -1 <NEW_LINE> return <NEW_LINE> <DEDENT> self.config.device = self.config.cuda if self.config.cuda is not True else 0 <NEW_LINE> self.use_cuda = True <NEW_LINE> if self.distributed: <NEW_LINE> <INDENT> to... | Init CUDA setting. | 625941bd442bda511e8be314 |
def _defaultsettings( self ): <NEW_LINE> <INDENT> from pluggdapps.plugin import PluginMeta <NEW_LINE> default = dict( DEFAULT().items() ) <NEW_LINE> defaultsett = { 'DEFAULT' : deepcopy(default) } <NEW_LINE> defaultsett['pluggdapps'] = deepcopy(default) <NEW_LINE> defaultsett['pluggdapps'].update( dict( pluggdapps_d... | By now it is expected that all interface specs and plugin
definitions would have been loaded by loading packages implementing
them and pluggdapps' plugin meta-classing. This function will collect
their default settings and return them as settings dictionary,::
{ "plugin:<pkgname>.<pluginname>" : default_settings,
... | 625941bd57b8e32f52483391 |
def _updateInstanceCache(self, autostacks): <NEW_LINE> <INDENT> if ((time.time() - self._lastInstanceCacheGCTimestamp) > self._INSTANCE_CACHE_GC_INTERVAL_SEC): <NEW_LINE> <INDENT> self._garbageCollectInstanceCache(self._instanceCache) <NEW_LINE> self._lastInstanceCacheGCTimestamp = time.time() <NEW_LINE> <DEDENT> autos... | Garbage-collect Autostacks instance cache if it's time. Update
Autostacks instance cache for the given Autostacks, if missing;
refresh the entire Autostacks instance cache if it's time for refreshing
Also refresh last-used timestamps of instance cache entries corresponding to
the given Autostacks.
:param autostacks: A... | 625941bd8e7ae83300e4aec3 |
def cancel_shutdown(self): <NEW_LINE> <INDENT> os.system('shutdown -a') <NEW_LINE> send_msg = '[远控信息] 此次关机已取消' <NEW_LINE> self.bot.file_helper.send(send_msg) <NEW_LINE> self._signal_3.emit(send_msg) | 取消关机 | 625941bd99cbb53fe6792adf |
def max_noutput_items(self): <NEW_LINE> <INDENT> return _howto_swig.user_device_core_sptr_max_noutput_items(self) | max_noutput_items(user_device_core_sptr self) -> int | 625941bde5267d203edcdb97 |
def CreateAccount(Name,Qfunds): <NEW_LINE> <INDENT> Users.update( {Name : {'Cash': Qfunds}}) | Creates an account filled with money.
Obviously, this is a crucial step which will require (!) verification of Bitcoin payments, an X-confirmation delay, etc. For testing we allow unconstrained (free/infinite) cash.
These accounts have simple toy names, actual accounts will probably be the bitcoin addresses themselves. | 625941bd7d43ff24873a2b95 |
def __pick_word(self, probabilities): <NEW_LINE> <INDENT> probabilities = np.log(probabilities) / self.config.temperature <NEW_LINE> exp_probs = np.exp(probabilities) <NEW_LINE> probabilities = exp_probs / np.sum(exp_probs) <NEW_LINE> pick = np.random.choice(len(probabilities), p=probabilities) <NEW_LINE> return self.i... | Pick the next word in the generated text
:param probabilities: Probabilities of the next word
:return: String of the predicted word | 625941bd16aa5153ce362370 |
def path_len_sets(self, idx, jdx): <NEW_LINE> <INDENT> min_path = self.min_path_sets(idx,jdx) <NEW_LINE> return self._path_len(min_path) if min_path else None | Return the path length (length of minimum path) between the closest
members of two sets of indexes | 625941bd283ffb24f3c55801 |
def make_json(image_id, cnt, json_path, data_path, save_path): <NEW_LINE> <INDENT> with open(json_path, 'r') as f: <NEW_LINE> <INDENT> d = json.load(f) <NEW_LINE> save_dict = {} <NEW_LINE> info_dict = {} <NEW_LINE> info_dict['version'] = 1.0 <NEW_LINE> save_dict['info'] = info_dict <NEW_LINE> save_dict['licenses'] = 'n... | 制作 COCO 数据的 json | 625941bdbe383301e01b5383 |
def get_job_names_to_comprehend(self): <NEW_LINE> <INDENT> jobs_to_comprehend_response = requests.get( self.transcribe_jobs_endpoint, headers=self.api_auth) <NEW_LINE> jobs_to_comprehend_list = jobs_to_comprehend_response.json() <NEW_LINE> job_names_to_comprehend = [] <NEW_LINE> for job_object in jobs_to_comprehend_lis... | Gets the names of all jobs which are currently being Transcribed or waiting to be passed to AWS Comprehend.
Retrieves the names of jobs which are currently waiting to be passed to AWS Comprehend
from Django framework API endpoint. For each of these jobs, we check the AWS console to see if they
have finished Transcribi... | 625941bdf8510a7c17cf95f2 |
def alive(self): <NEW_LINE> <INDENT> return self.fuzzer.alive | bool: Is the fuzzer alive and running? | 625941bd851cf427c661a40a |
def values(self): <NEW_LINE> <INDENT> return self.inverse.keys() | All values in range | 625941bd56ac1b37e62640cc |
def parse_filename(filename): <NEW_LINE> <INDENT> _indx = filename.find('[') <NEW_LINE> if _indx > 0: <NEW_LINE> <INDENT> _fname = filename[:_indx] <NEW_LINE> _extn = filename[_indx + 1:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _fname = filename <NEW_LINE> _extn = '' <NEW_LINE> <DEDENT> return _fname, _extn | Parse out filename from any specified extensions.
Returns rootname and string version of extension name.
Parameters
----------
filename : str
The filename to be parsed
Returns
-------
A tuple with the filename root and extension | 625941bd30bbd722463cbcbb |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.