| code (string, length 4–4.48k) | docstring (string, length 1–6.45k) | _id (string, length 24) |
|---|---|---|
def test_itemmode(self): <NEW_LINE> <INDENT> for mode in MODES: <NEW_LINE> <INDENT> for root in ROOT1, ROOT2: <NEW_LINE> <INDENT> _c.FSQ_ROOT = root <NEW_LINE> self._cycle(item_mode=mode) <NEW_LINE> queue = normalize() <NEW_LINE> _c.FSQ_ITEM_MODE = mode <NEW_LINE> self._cycle(queue) <NEW_LINE> for down in True, False: <NEW_LINE> <INDENT> for is_t in True, False: <NEW_LINE> <INDENT> for uid, gid in ((UID, GID,), (UNAME, GNAME,)): <NEW_LINE> <INDENT> for quid, qgid in ((UID, GID,), (UNAME, GNAME,)): <NEW_LINE> <INDENT> for qm in MODES: <NEW_LINE> <INDENT> self._cycle(item_mode=mode, mode=qm, is_down=down, user=quid, is_triggered=is_t, group=qgid, item_user=uid, item_group=gid) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> queue = normalize() <NEW_LINE> mode = ILLEGAL_MODE <NEW_LINE> self.assertRaises(TypeError, install, queue, item_mode=mode, is_down=True) <NEW_LINE> self.assertRaises(OSError, _valid_install, queue, item_mode=mode, is_down=True) <NEW_LINE> queue = self._install(item_mode=mode) <NEW_LINE> self.assertRaises(TypeError, uninstall, queue, item_mode=mode) <NEW_LINE> self.assertFalse(_valid_uninstall(queue))
|
Test installing with item modes for down/trigger-s
|
625941bbd6c5a10208143f06
|
def __init__(self, conf): <NEW_LINE> <INDENT> super(QNetwork, self).__init__(conf) <NEW_LINE> with tf.name_scope(self.name): <NEW_LINE> <INDENT> self.target_ph = tf.placeholder( "float32", [None], name = 'target') <NEW_LINE> if self.arch == "NIPS": <NEW_LINE> <INDENT> self.w4, self.b4, self.output_layer = self._fc('fc4', self.o3, self.num_actions, activation = "linear") <NEW_LINE> self.params = [self.w1, self.b1, self.w2, self.b2, self.w3, self.b3, self.w4, self.b4] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.w5, self.b5, self.output_layer = self._fc('fc5', self.o4, self.num_actions, activation = "linear") <NEW_LINE> self.params = [self.w1, self.b1, self.w2, self.b2, self.w3, self.b3, self.w4, self.b4, self.w5, self.b5] <NEW_LINE> <DEDENT> if "target" not in self.name: <NEW_LINE> <INDENT> output_selected_action = tf.reduce_sum(tf.mul(self.output_layer, self.selected_action_ph), reduction_indices = 1) <NEW_LINE> diff = tf.sub(self.target_ph, output_selected_action) <NEW_LINE> if self.clip_loss_delta > 0: <NEW_LINE> <INDENT> quadratic_part = tf.minimum(tf.abs(diff), tf.constant(self.clip_loss_delta)) <NEW_LINE> linear_part = tf.sub(tf.abs(diff), quadratic_part) <NEW_LINE> self.loss = tf.add(tf.nn.l2_loss(quadratic_part), tf.mul(tf.constant(self.clip_loss_delta), linear_part)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.loss = tf.nn.l2_loss(diff) <NEW_LINE> <DEDENT> with tf.control_dependencies(None): <NEW_LINE> <INDENT> grads = tf.gradients(self.loss, self.params) <NEW_LINE> self.clipped_grad_hist_op = None <NEW_LINE> if self.clip_norm_type == 'ignore': <NEW_LINE> <INDENT> self.get_gradients = grads <NEW_LINE> <DEDENT> elif self.clip_norm_type == 'global': <NEW_LINE> <INDENT> self.get_gradients = tf.clip_by_global_norm( grads, self.clip_norm)[0] <NEW_LINE> <DEDENT> elif self.clip_norm_type == 'local': <NEW_LINE> <INDENT> self.get_gradients = [tf.clip_by_norm( g, self.clip_norm) for g in grads] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.params_ph = [] <NEW_LINE> for p in self.params: <NEW_LINE> <INDENT> self.params_ph.append(tf.placeholder(tf.float32, shape=p.get_shape(), name="shared_memory_for_{}".format( (p.name.split("/", 1)[1]).replace(":", "_")))) <NEW_LINE> <DEDENT> self.sync_with_shared_memory = [] <NEW_LINE> for i in xrange(len(self.params)): <NEW_LINE> <INDENT> self.sync_with_shared_memory.append( self.params[i].assign(self.params_ph[i]))
|
Set up remaining layers, loss function, gradient compute and apply
ops, network parameter synchronization ops, and summary ops.
|
625941bb8da39b475bd64e36
|
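When clip_loss_delta > 0, the branch above builds a Huber-style loss from a quadratic and a linear part. A minimal NumPy sketch of the same computation, as a standalone illustration rather than the class's API:

```python
import numpy as np

def clipped_td_loss(target, prediction, delta):
    # Quadratic inside [-delta, delta], linear outside; mirrors the
    # quadratic_part / linear_part split in the TF graph above.
    diff = np.abs(target - prediction)
    quadratic = np.minimum(diff, delta)
    linear = diff - quadratic
    return np.sum(0.5 * quadratic ** 2 + delta * linear)

print(clipped_td_loss(np.array([1.0, 5.0]), np.array([0.5, 1.0]), delta=1.0))
# 0.5*0.25 + (0.5*1.0 + 1.0*3.0) = 3.625
```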
def imu(self): <NEW_LINE> <INDENT> url = self._build_url( self.IMU_METHOD, [] ) <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.get(url, timeout=1.0) <NEW_LINE> return json.loads(response.content) <NEW_LINE> <DEDENT> except requests.exceptions.ConnectTimeout: <NEW_LINE> <INDENT> return None
|
Reads the values from the IMU (MPU-6050)
Returns x, y and z accelerations, angular velocities and temperature
with values from -32768 to 32767 and degrees C * 10
|
625941bb925a0f43d2549d33
|
def __init__(self, width, height, x=0, y=0, id=None): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> super().__init__(id)
|
Init method
|
625941bb0a366e3fb873e6d6
|
def is_prime(num): <NEW_LINE> <INDENT> from math import sqrt <NEW_LINE> if num < 2: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif num == 2: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> elif num % 2 == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> for i in range(3, int(sqrt(num)) + 1, 2): <NEW_LINE> <INDENT> if num % i == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> return 1
|
Determine whether num is a prime number.
|
625941bb5fc7496912cc3844
|
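A few doctest-style calls illustrating the 0/1 return convention, assuming is_prime is importable from this module:

```python
>>> is_prime(97)
1
>>> is_prime(91)   # 7 * 13
0
>>> is_prime(1)
0
```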
def restarize_events(events, durations, dt, t_max): <NEW_LINE> <INDENT> smpl_events = np.array(np.round_(np.divide(events, dt)), dtype=int) <NEW_LINE> smpl_durations = np.array(np.round_(np.divide(durations, dt)), dtype=int) <NEW_LINE> smpl_events = extend_sampled_events(smpl_events, smpl_durations) <NEW_LINE> if np.allclose(t_max % dt, 0): <NEW_LINE> <INDENT> bin_seq = np.zeros(int(t_max / dt) + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bin_seq = np.zeros(int(np.round((t_max + dt) / dt))) <NEW_LINE> <DEDENT> bin_seq[smpl_events] = 1 <NEW_LINE> return bin_seq
|
build a binary sequence of events. Each event start is approximated
to the nearest time point on the time grid defined by dt and t_max.
|
625941bbec188e330fd5a664
|
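A minimal illustration of the binarisation step above (the extend_sampled_events helper handles durations and is defined elsewhere; this sketch assumes zero-length events):

```python
import numpy as np

# Events at t = 0.24 s and 1.1 s on a grid with dt = 0.5 s, t_max = 2.0 s.
events, dt, t_max = np.array([0.24, 1.1]), 0.5, 2.0
smpl_events = np.array(np.round(events / dt), dtype=int)  # -> [0, 2]
bin_seq = np.zeros(int(t_max / dt) + 1)                   # t_max is a multiple of dt
bin_seq[smpl_events] = 1
print(bin_seq)                                            # [1. 0. 1. 0. 0.]
```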
def read_parameters(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> output = {} <NEW_LINE> par_file = open(DIR_MCSRED+PAR_FILENAME, 'r') <NEW_LINE> line = par_file.readline().strip() <NEW_LINE> while line != "": <NEW_LINE> <INDENT> idx = line.index(',') <NEW_LINE> output[line[:idx]] = line[idx+1:] <NEW_LINE> line = par_file.readline().strip() <NEW_LINE> <DEDENT> return output <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> pass
|
Get the last parameters that were used for mesoffset
@returns:
A dictionary mapping parameter names to their values,
or None if the file could not be found or was in the wrong format.
|
625941bb851cf427c661a3d1
|
def __colr__(self): <NEW_LINE> <INDENT> typename = C(type(self).__name__, 'blue') <NEW_LINE> filepath = C(self.filepath, 'cyan') <NEW_LINE> if not self.parts: <NEW_LINE> <INDENT> return C('{typ}(filepath={filepath!r}, parts={parts!r})'.format( typ=typename, filepath=filepath, parts=C(self.parts, 'cyan'), )) <NEW_LINE> <DEDENT> return C('\n '.join(( '{typ}(', 'filepath={fname!r},', 'parts=[', '{parts}', ']\n)' )).format( typ=typename, filepath=filepath, parts='\n '.join(C(p) for p in self.parts), ))
|
Format this TigerFile as a Colr when passed directly to Colr().
|
625941bb442bda511e8be2e3
|
def collect_file_system_types(device): <NEW_LINE> <INDENT> supported_types = set(get_supported_filesystems()) <NEW_LINE> if device.format.type: <NEW_LINE> <INDENT> supported_types.add(device.format.type) <NEW_LINE> <DEDENT> if device.exists and device.original_format.type: <NEW_LINE> <INDENT> supported_types.add(device.original_format.type) <NEW_LINE> <DEDENT> return sorted(supported_types)
|
Collect supported file system types for the given device.
:param device: a device
:return: a list of file system types
|
625941bb7d847024c06be178
|
def exist(self, board, word): <NEW_LINE> <INDENT> length = len(word) <NEW_LINE> rows, cols = len(board), len(board[0]) <NEW_LINE> def is_legal_coordinate(x, y): <NEW_LINE> <INDENT> if x < 0 or x > rows - 1: return False <NEW_LINE> if y < 0 or y > cols - 1: return False <NEW_LINE> return True <NEW_LINE> <DEDENT> def backtrace_from(coor, currArr, pos): <NEW_LINE> <INDENT> if coor in currArr: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> x, y = coor[0], coor[1] <NEW_LINE> if not is_legal_coordinate(x,y) or board[x][y] != word[pos]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if pos == length - 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> neighbors = [[x-1,y], [x+1,y], [x,y-1], [x,y+1]] <NEW_LINE> res = False <NEW_LINE> for nextCoor in neighbors: <NEW_LINE> <INDENT> currArr.append(coor) <NEW_LINE> res = res or backtrace_from(nextCoor, currArr, pos+1) <NEW_LINE> currArr.pop() <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> for i in range(rows): <NEW_LINE> <INDENT> for j in range(cols): <NEW_LINE> <INDENT> if backtrace_from([i,j], [], 0): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
|
:type board: List[List[str]]
:type word: str
:rtype: bool
|
625941bb1b99ca400220a970
|
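A hypothetical usage sketch, assuming exist is defined on a LeetCode-style Solution class as the :type hints suggest:

```python
board = [["A", "B", "C", "E"],
         ["S", "F", "C", "S"],
         ["A", "D", "E", "E"]]
# Solution().exist(board, "ABCCED")  -> True   (path A-B-C-C-E-D through adjacent cells)
# Solution().exist(board, "ABCB")    -> False  (the same cell may not be reused)
```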
def lasonce(s: str = '3333') -> str: <NEW_LINE> <INDENT> return str(len(s)) + s[0]
|
:param s: A repeating string of the same number.
:return: The number of repetitions followed by the repeated digit.
Example:
f('33333') -> '53'
Invalid usage:
f('33311')
|
625941bb167d2b6e31218a55
|
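Doctest-style calls matching the docstring (the input must be a run of a single repeated digit):

```python
>>> lasonce('33333')
'53'
>>> lasonce('7777')
'47'
```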
def move(self, uncoupled_groups=None, force=False): <NEW_LINE> <INDENT> uncoupled_groups = [GroupQuery._object(self.client, g) for g in uncoupled_groups or []] <NEW_LINE> return self.client.request('move_group', [self.id, {'uncoupled_groups': [g.id for g in uncoupled_groups]}, force])
|
Create group move job.
Job will move group's node backend to uncoupled group's node backend.
The uncoupled group will be replaced, and the source group's node backend will be disabled.
Args:
uncoupled_groups: list of uncoupled groups that should be merged together
and replaced by the source group.
force: cancel all pending jobs of low priority (e.g. recover-dc and defragmentation).
Returns:
A json of created job (or a dict with a single error key and value).
|
625941bb8e71fb1e9831d66c
|
def make_public(self, ttl=1440): <NEW_LINE> <INDENT> headers = {'x-container-read': '.r:*', 'x-cdn-ttl': str(ttl)} <NEW_LINE> return self.make_request('POST', headers=headers)
|
Make container public
@param ttl: time in seconds to set as the TTL
@raises ResponseError
|
625941bb7047854f462a12cb
|
def _consensus_assignments( cmd, ref_taxa, min_consensus=0.51, output_no_hits=False, exp_seq_ids=None, unassignable_label=_get_default_unassignable_label()): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile() as output: <NEW_LINE> <INDENT> cmd = cmd + [output.name] <NEW_LINE> _run_command(cmd) <NEW_LINE> obs_taxa = _import_blast_format_assignments( output.name, ref_taxa, unassignable_label=unassignable_label) <NEW_LINE> consensus = _compute_consensus_annotations( obs_taxa, min_consensus=min_consensus, unassignable_label=unassignable_label) <NEW_LINE> if output_no_hits is True: <NEW_LINE> <INDENT> consensus = _output_no_hits( consensus, exp_seq_ids, unassignable_label) <NEW_LINE> <DEDENT> if not consensus: <NEW_LINE> <INDENT> consensus = {'': ('', '')} <NEW_LINE> <DEDENT> result = pd.DataFrame.from_dict(consensus, 'index') <NEW_LINE> result.index.name = 'Feature ID' <NEW_LINE> result.columns = ['Taxon', 'Consensus'] <NEW_LINE> return result
|
Run command line subprocess and find consensus taxonomy.
|
625941bb99fddb7c1c9de251
|
def dump_reassemble_stream(self, client_ads, server_ads): <NEW_LINE> <INDENT> tcp_stream = self.dump_tcp_content() <NEW_LINE> reassemble_stream = TcpData( tcp_stream, client_ads, server_ads).reassemble_tcp() <NEW_LINE> return reassemble_stream
|
Takes a TCP stream, filters it, and returns a TCP stream without retransmissions or duplicate streams.
:param client_ads: list of (ip, port) pairs for the client side
:param server_ads: list of (ip, port) pairs for the server side
:return: the reassembled TCP stream list, free of retransmissions and duplicate streams
|
625941bb76e4537e8c351536
|
def encrypt(self, text_decrypted): <NEW_LINE> <INDENT> if len(text_decrypted) % self.num != 0: <NEW_LINE> <INDENT> raise RuntimeError("待加密的长度需要是栏数的倍数") <NEW_LINE> <DEDENT> text_encrypted = str() <NEW_LINE> groups = ZhaLan.__divide_group(text_decrypted, self.num) <NEW_LINE> for order in range(self.num): <NEW_LINE> <INDENT> for each_group in groups: <NEW_LINE> <INDENT> text_encrypted += each_group[order] <NEW_LINE> <DEDENT> <DEDENT> return text_encrypted
|
Encryption operation of the rail fence cipher.
:param text_decrypted: "WoShiZhongWenBuShiYingWen"
:return: the encrypted result when the number of rails is 5, "WZWSnohehgSoniWhnBYeiguin"
|
625941bb6e29344779a624d4
|
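The encryption above is a column transposition: the text is split into blocks of `num` characters and read column by column. A standalone sketch of the same operation (a hypothetical re-implementation for illustration, not the ZhaLan class itself):

```python
def rail_fence_encrypt(text, num):
    # Split into blocks of `num` characters, then read column by column.
    groups = [text[i:i + num] for i in range(0, len(text), num)]
    return "".join(group[order] for order in range(num) for group in groups)

print(rail_fence_encrypt("WoShiZhongWenBuShiYingWen", 5))
# -> WZWSnohehgSoniWhnBYeiguin
```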
def is_set_max_noutput_items(self): <NEW_LINE> <INDENT> return _filter_swig.fractional_resampler_cc_sptr_is_set_max_noutput_items(self)
|
is_set_max_noutput_items(fractional_resampler_cc_sptr self) -> bool
|
625941bb1f5feb6acb0c4a13
|
def generate_examples_from_timelines(timelines, movies_df, min_timeline_len=3, max_context_len=100, max_context_movie_genre_len=320, train_data_fraction=0.9, random_seed=None, shuffle=True): <NEW_LINE> <INDENT> examples = [] <NEW_LINE> movies_dict = generate_movies_dict(movies_df) <NEW_LINE> progress_bar = tf.keras.utils.Progbar(len(timelines)) <NEW_LINE> for timeline in timelines.values(): <NEW_LINE> <INDENT> if len(timeline) < min_timeline_len: <NEW_LINE> <INDENT> progress_bar.add(1) <NEW_LINE> continue <NEW_LINE> <DEDENT> single_timeline_examples = generate_examples_from_single_timeline( timeline=timeline, movies_dict=movies_dict, max_context_len=max_context_len, max_context_movie_genre_len=max_context_movie_genre_len) <NEW_LINE> examples.extend(single_timeline_examples) <NEW_LINE> progress_bar.add(1) <NEW_LINE> <DEDENT> if shuffle: <NEW_LINE> <INDENT> random.seed(random_seed) <NEW_LINE> random.shuffle(examples) <NEW_LINE> <DEDENT> last_train_index = round(len(examples) * train_data_fraction) <NEW_LINE> train_examples = examples[:last_train_index] <NEW_LINE> test_examples = examples[last_train_index:] <NEW_LINE> return train_examples, test_examples
|
Convert user timelines to tf examples.
Convert user timelines to tf examples by adding all possible context-label
pairs in the examples pool.
Args:
timelines: The user timelines to process.
movies_df: The dataframe of all movies.
min_timeline_len: The minimum length of timeline. If the timeline length is
less than min_timeline_len, empty examples list will be returned.
max_context_len: The maximum length of the context. If the context history
length is less than max_context_length, features will be padded with
default values.
max_context_movie_genre_len: The length of movie genre feature.
train_data_fraction: Fraction of training data.
random_seed: Seed for randomization.
shuffle: Whether to shuffle the examples before splitting train and test
data.
Returns:
train_examples: TF example list for training.
test_examples: TF example list for testing.
|
625941bb63f4b57ef0000fe0
|
def remove_phantom_bytes(data): <NEW_LINE> <INDENT> data_stripped = [] <NEW_LINE> for index in range(0, len(data), 2): <NEW_LINE> <INDENT> data_stripped.append(data[index]) <NEW_LINE> <DEDENT> data_stripped_binarray = array('B') <NEW_LINE> data_stripped_binarray.fromlist(data_stripped) <NEW_LINE> return data_stripped_binarray
|
Remove every 2nd byte from the data
|
625941bb67a9b606de4a7d7b
|
def md5( str ): <NEW_LINE> <INDENT> m = hashlib.md5() <NEW_LINE> m.update( str ) <NEW_LINE> return m.hexdigest()
|
Function that hashes a string with MD5.
|
625941bbd164cc6175782c0c
|
def __init__(self, serviceId=None, destRegion=None): <NEW_LINE> <INDENT> self.serviceId = serviceId <NEW_LINE> self.destRegion = destRegion
|
:param serviceId: (Optional) ID of the cross-region backup synchronization service
:param destRegion: (Optional) target region of the backup synchronization
|
625941bb44b2445a33931f5f
|
def get_encryption_names(email): <NEW_LINE> <INDENT> encryption_names = [] <NEW_LINE> address = get_email(email) <NEW_LINE> if address and len(address) > 0: <NEW_LINE> <INDENT> query_results = get_contacts_crypto(address) <NEW_LINE> if query_results: <NEW_LINE> <INDENT> log_message("{} has {} address(es)".format(address, len(query_results))) <NEW_LINE> for contacts_encryption in query_results: <NEW_LINE> <INDENT> encryption_name = contacts_encryption.encryption_software.name <NEW_LINE> encryption_names.append(encryption_name) <NEW_LINE> log_message("{} encryption software: {}".format(email, encryption_name)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log_message("no encryption software for {}".format(email)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log_message("unable to get address from {}".format(email)) <NEW_LINE> <DEDENT> return encryption_names
|
Get a list of all the active encryption program names for this email.
# Test extreme case. See unittests to see how to use this function.
>>> get_encryption_names(None)
[]
|
625941bbb57a9660fec3373f
|
def get_token(self, username, password): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.password = password <NEW_LINE> url_auth = f"https://{self.ip}:{self.port}/mgmt/shared/authn/login" <NEW_LINE> post_data = { 'username': username, 'password': password, 'loginProviderName': 'tmos' } <NEW_LINE> r = requests.post( url_auth, json.dumps(post_data), auth=(username, password), headers=self.headers, verify=False ) <NEW_LINE> try: <NEW_LINE> <INDENT> if r.status_code == 200: <NEW_LINE> <INDENT> self.f5token = r.json()['token']['token'] <NEW_LINE> self.headers['X-F5-Auth-Token'] = self.f5token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SystemExit(f'was not possible to generate a token, status code = {r.status_code}') <NEW_LINE> <DEDENT> <DEDENT> except requests.exceptions.HTTPError as err: <NEW_LINE> <INDENT> raise RuntimeError(err)
|
:param username: F5 username
:param password: F5 password
:return:
|
625941bb23849d37ff7b2f51
|
def graph_roc_curve(response, prediction, filename, verbose=False): <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print('Generating ROC curve...') <NEW_LINE> <DEDENT> (fpr, tpr, thresholds) = sklearn.metrics.roc_curve(response, prediction) <NEW_LINE> roc_auc = sklearn.metrics.auc(fpr, tpr) <NEW_LINE> fig = plt.figure() <NEW_LINE> ax = fig.add_subplot(1,1,1) <NEW_LINE> ax.plot(fpr, tpr) <NEW_LINE> ax.plot([0,1], [0,1], 'k--') <NEW_LINE> ax.set_xlabel('False positive rate') <NEW_LINE> ax.set_ylabel('True positive rate') <NEW_LINE> ax.set_title('ROC curve for Bechdel test') <NEW_LINE> ax.legend(['AUC = %6.4f' % roc_auc], loc='lower right') <NEW_LINE> fig.savefig(filename) <NEW_LINE> plt.close(fig) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('AUC of ROC: %6.4f' % roc_auc) <NEW_LINE> print('ROC graph output to %s.' % filename)
|
Generates an ROC graph.
|
625941bbb545ff76a8913cdd
|
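A hedged usage sketch with the toy labels and scores from the scikit-learn documentation, assuming this module's graph_roc_curve is importable and matplotlib/sklearn are installed:

```python
import numpy as np

y_true = np.array([0, 0, 1, 1])
y_score = np.array([0.1, 0.4, 0.35, 0.8])
graph_roc_curve(y_true, y_score, 'roc.png', verbose=True)
# prints "AUC of ROC: 0.7500" and writes the figure to roc.png
```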
def changeUri(self, gid, fileIndex, delUris, addUris, position=None): <NEW_LINE> <INDENT> if self.useSecret: <NEW_LINE> <INDENT> return self.server.aria2.changeUri("token:"+self.fixedSecret, gid, fileIndex, delUris, addUris, position) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.server.aria2.changeUri(gid, fileIndex, delUris, addUris, position)
|
This method removes the URIs in delUris from, and appends the URIs in addUris to, the download denoted by gid.
gid: string, GID.
fileIndex: integer, file to affect (1-based)
delUris: list, URIs to be removed
addUris: list, URIs to be added
position: integer, where URIs are inserted, after URIs have been removed
return: This method returns a list which contains 2 integers. The first integer is the number of URIs deleted. The second integer is the number of URIs added.
|
625941bbe8904600ed9f1de8
|
def test_2_pence_known_case(self): <NEW_LINE> <INDENT> self.assertEqual(coin_sum_combos(2), 2)
|
test the known case for 2 pence
2 pence only has a limited combination of coin combos:
2p
1p + 1p
|
625941bb9f2886367277a74f
|
def getShutterOutputMode(self): <NEW_LINE> <INDENT> index = self._getSomethingSimple( self.at.getEnumerated, Features.ShutterOutputMode ) <NEW_LINE> return self._getSomethingWithIndex( self.at.getEnumStringByIndex, Features.ShutterOutputMode, index )
|
Gets the current value of ShutterOutputMode.
Returns
-------
str
The current value of ShutterOutputMode.
|
625941bb31939e2706e4cd2e
|
def getWriterPlugIn(self): <NEW_LINE> <INDENT> return ProductWriterPlugIn(ProductWriter_getWriterPlugIn(self._obj))
|
Returns the plug-in which created this product writer.
@return the product writer plug-in, should never be <code>null</code>
|
625941bb3617ad0b5ed67db7
|
def test_add_and_remove_child_node(): <NEW_LINE> <INDENT> document = Document() <NEW_LINE> root = Node('root') <NEW_LINE> node = Node('node') <NEW_LINE> document.add(root) <NEW_LINE> document.add(node) <NEW_LINE> assert node in document.roots <NEW_LINE> assert node not in root.children <NEW_LINE> root.children.append(node) <NEW_LINE> assert node not in document.roots <NEW_LINE> assert node in root.children <NEW_LINE> del root.children[0] <NEW_LINE> assert node in document.roots <NEW_LINE> assert node not in root.children <NEW_LINE> document.remove(node) <NEW_LINE> assert node not in document.roots <NEW_LINE> assert node not in root.children
|
Add and remove a child node.
|
625941bb7b25080760e3931a
|
def search(self, data): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> found = False <NEW_LINE> while current and not found: <NEW_LINE> <INDENT> if current.data == data: <NEW_LINE> <INDENT> found = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current = current.next <NEW_LINE> <DEDENT> <DEDENT> if not current: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return found
|
Searches the list for a node with payload data. Returns True if such a node is found, or None otherwise. Time complexity is O(n) in the worst case.
|
625941bbadb09d7d5db6c651
|
def currentYearPRs(userID, minDistance, jsonData): <NEW_LINE> <INDENT> import pandas <NEW_LINE> from datetime import timedelta <NEW_LINE> df = pandas.DataFrame(jsonData, columns=["user_id", "start", "distance", "pace"]) <NEW_LINE> df["date"] = pandas.to_datetime(df["start"]) <NEW_LINE> df["month"] = df["date"].dt.month <NEW_LINE> df = df.loc[df["user_id"] == userID] <NEW_LINE> df = df.loc[df["distance"] > minDistance] <NEW_LINE> df = df.sort_values(by="date", ascending=True) <NEW_LINE> prevYearDf = df.loc[df["date"].dt.year == (pandas.to_datetime("today") - timedelta(days=365)).year] <NEW_LINE> prevYearMaxPace = round(prevYearDf["pace"].max(), 2) <NEW_LINE> if pandas.isnull(prevYearMaxPace): <NEW_LINE> <INDENT> prevYearMaxPace = 0 <NEW_LINE> <DEDENT> df = df.loc[df["date"].dt.year == pandas.to_datetime("today").year] <NEW_LINE> temp = df.groupby("month").max().add_prefix("max_") <NEW_LINE> paceArr = temp["max_pace"].tolist() <NEW_LINE> paceArr = [round(num, 2) for num in paceArr] <NEW_LINE> currentMax = prevYearMaxPace <NEW_LINE> timesPRd = 0 <NEW_LINE> for pace in paceArr: <NEW_LINE> <INDENT> if pace > currentMax: <NEW_LINE> <INDENT> timesPRd = timesPRd + 1 <NEW_LINE> <DEDENT> <DEDENT> return timesPRd
|
Inputs:
userID: string
minDistance: integer, the minimum number of kilometers to count a run toward bettering their pace
jsonData: json data. If data is stored in a directory, open and load the data before passing it to this function.
Outputs:
An integer representing the number of times the user broke their record for fastest pace for minDistance kilometer runs
|
625941bbad47b63b2c509e45
|
def get_posts_num(group_id): <NEW_LINE> <INDENT> return int(vk.api_method("wall.get", owner_id="-"+group_id)["response"]["count"])
|
Returns the number of posts of the specified public page or group.
|
625941bb167d2b6e31218a56
|
def send_link(link, excludeid=None): <NEW_LINE> <INDENT> reg_ids = [] <NEW_LINE> query = GCMRegIdModel.query(GCMRegIdModel.userid == link.userid) <NEW_LINE> for reg_model in query: <NEW_LINE> <INDENT> reg_ids.append(reg_model.regid) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reg_ids.remove(excludeid) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if len(reg_ids) < 1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> _send(link.userid, reg_ids, to_dict(link))
|
Transmits the link specified by the sha to the user's devices.
Does not run in a separate thread because App-Engine did not
seem to support that.
|
625941bb5510c4643540f2ac
|
def collapse_unweighted_edges(self, threshold=0.0000001, update_splits=False): <NEW_LINE> <INDENT> for e in self.postorder_edge_iter(): <NEW_LINE> <INDENT> if e.length <= threshold: <NEW_LINE> <INDENT> e.collapse() <NEW_LINE> <DEDENT> <DEDENT> if update_splits: <NEW_LINE> <INDENT> self.update_splits()
|
Collapse all edges with edge lengths less than or equal to
``threshold``.
|
625941bb4e696a04525c930b
|
def __init__(self, lattice_sizes, monotonicities, output_min, output_max, unimodalities=None): <NEW_LINE> <INDENT> lattice_lib.verify_hyperparameters( lattice_sizes=lattice_sizes, monotonicities=monotonicities, unimodalities=unimodalities, output_min=output_min, output_max=output_max) <NEW_LINE> self.lattice_sizes = lattice_sizes <NEW_LINE> self.monotonicities = monotonicities <NEW_LINE> self.output_min = output_min <NEW_LINE> self.output_max = output_max <NEW_LINE> self.unimodalities = unimodalities
|
Initializes an instance of `LinearInitializer`.
Args:
lattice_sizes: Lattice sizes of `tfl.layers.Lattice` to initialize.
monotonicities: Monotonic dimensions for initialization. Does not need to
match `monotonicities` of `tfl.layers.Lattice`.
output_min: Minimum layer output after initialization.
output_max: Maximum layer output after initialization.
unimodalities: None or unimodal dimensions after initialization. Does not
need to match `unimodalities` of `tfl.layers.Lattice`.
Raises:
ValueError: If there is a mismatch between `monotonicities` and
`lattice_sizes`.
|
625941bb8e05c05ec3eea231
|
def make_original_jobs_from_counts(jobs_arr_arr, eg_array, fur_array, num_levels): <NEW_LINE> <INDENT> result_jobs_arr = np.zeros(eg_array.size) <NEW_LINE> eg = 0 <NEW_LINE> for job_arr in jobs_arr_arr: <NEW_LINE> <INDENT> eg += 1 <NEW_LINE> this_job_list = np.repeat((np.arange(len(job_arr)) + 1), job_arr) <NEW_LINE> np.put(result_jobs_arr, np.where((eg_array == eg) & (fur_array == 0))[0][:sum(job_arr)], this_job_list) <NEW_LINE> np.put(result_jobs_arr, np.where(result_jobs_arr == 0)[0], num_levels + 1) <NEW_LINE> <DEDENT> return result_jobs_arr.astype(int)
|
Short_Form
This function grabs jobs from standalone job count arrays (normally stovepiped)
for each employee group and inserts those jobs into
a proposed integrated list, or a standalone list.
Each eg (employee group) is assigned jobs from their standalone list in order
top to bottom.
Result is a combined list of jobs with each eg maintaining
ordered independent stovepipe jobs within the combined list of jobs
jobs_arr_arr is an array of arrays, likely output[0] from
make_array_of_job_lists function.
Order of job count arrays within jobs_arr_arr input
must match emp group codes order (1, 2, 3, etc.).
If total group counts of job(s) is less than slots available to that group,
remaining slots will be assigned (remain) a zero job number (0).
eg_array is list (order sequence) of employee group codes from proposed list
with length equal to length of proposed list.
Result of this function is ultimately merged into long form
for no bump no flush routine.
employees who are originally marked as furloughed are assigned the furlough
level number which is 1 greater than the number of job levels.
|
625941bb30dc7b7665901829
|
def test_bind_new_file(self): <NEW_LINE> <INDENT> url_list = ['http://pulpserver'] <NEW_LINE> repolib.bind(self.TEST_REPO_FILENAME, self.TEST_MIRROR_LIST_FILENAME, self.TEST_KEYS_DIR, self.TEST_CERT_DIR, REPO_ID, REPO_NAME, url_list, {}, CLIENTCERT, ENABLED, self.LOCK) <NEW_LINE> self.assertTrue(os.path.exists(self.TEST_REPO_FILENAME)) <NEW_LINE> self.assertTrue(not os.path.exists(self.TEST_MIRROR_LIST_FILENAME)) <NEW_LINE> repo_file = RepoFile(self.TEST_REPO_FILENAME) <NEW_LINE> repo_file.load() <NEW_LINE> self.assertEqual(1, len(repo_file.all_repos())) <NEW_LINE> loaded = repo_file.get_repo(REPO_ID) <NEW_LINE> self.assertTrue(loaded is not None) <NEW_LINE> self.assertEqual(loaded['name'], REPO_NAME) <NEW_LINE> self.assertTrue(loaded['enabled']) <NEW_LINE> self.assertEqual(loaded['gpgcheck'], '0') <NEW_LINE> self.assertEqual(loaded['gpgkey'], None) <NEW_LINE> self.assertEqual(loaded['baseurl'], url_list[0]) <NEW_LINE> self.assertTrue('mirrorlist' not in loaded) <NEW_LINE> path = loaded['sslclientcert'] <NEW_LINE> f = open(path) <NEW_LINE> content = f.read() <NEW_LINE> f.close() <NEW_LINE> self.assertEqual(CLIENTCERT, content) <NEW_LINE> self.assertTrue(loaded['sslverify'], '1') <NEW_LINE> self.assertEqual(loaded['sslcacert'], DEFAULT_CA_PATH)
|
Tests binding a repo when the underlying .repo file does not exist.
|
625941bb6fece00bbac2d5fb
|
def swapPairs(self, head): <NEW_LINE> <INDENT> if not head or not head.next: return head <NEW_LINE> dummy = ListNode(0) <NEW_LINE> dummy.next = head <NEW_LINE> l1 = dummy <NEW_LINE> l2 = head <NEW_LINE> while l2 and l2.next: <NEW_LINE> <INDENT> node = l2.next.next <NEW_LINE> l1.next = l2.next <NEW_LINE> l2.next.next = l2 <NEW_LINE> l2.next = node <NEW_LINE> l1 = l2 <NEW_LINE> l2 = node <NEW_LINE> <DEDENT> return dummy.next
|
:type head: ListNode
:rtype: ListNode
|
625941bb046cf37aa974cc0a
|
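A hypothetical trace of the pair swap, assuming the method lives on a LeetCode-style Solution class:

```python
#   input : 1 -> 2 -> 3 -> 4      output: 2 -> 1 -> 4 -> 3
#   input : 1 -> 2 -> 3           output: 2 -> 1 -> 3   (odd trailing node stays put)
# Each loop iteration rewires three pointers (l1.next, l2.next.next, l2.next),
# then advances l1 and l2 past the swapped pair.
```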
def test_add_cart(self): <NEW_LINE> <INDENT> pass
|
Test case for add_cart
Create a cart # noqa: E501
|
625941bb8e71fb1e9831d66d
|
def _get_schema(self, endpoint): <NEW_LINE> <INDENT> data = None <NEW_LINE> if os.path.exists(self._context.schema_loc): <NEW_LINE> <INDENT> with io.open(self._context.schema_loc, 'rb') as f: <NEW_LINE> <INDENT> fdata = f.read() <NEW_LINE> data = {} <NEW_LINE> if fdata: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = json.loads(fdata) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> data = json.loads(fdata.decode('utf-8')) <NEW_LINE> <DEDENT> <DEDENT> if self._version in data: <NEW_LINE> <INDENT> return data[self._version] <NEW_LINE> <DEDENT> data[self._version] = self.read_endpoint(endpoint, sensitive=True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> data = {self._version: self.read_endpoint(endpoint, sensitive=True)} <NEW_LINE> <DEDENT> with io.open(self._context.schema_loc, 'wb+') as f: <NEW_LINE> <INDENT> jdata = json.dumps(data) <NEW_LINE> try: <NEW_LINE> <INDENT> f.write(jdata) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> f.write(jdata.encode('utf-8')) <NEW_LINE> <DEDENT> <DEDENT> return data[self._version]
|
Tries to access the cached schema; if it is not available, pulls a new
schema from the remote box.
|
625941bb9f2886367277a750
|
def solve( A, x0, b, nb_iter_max, epsilon, Prec = None ) : <NEW_LINE> <INDENT> rk = b - A.dot(x0) <NEW_LINE> error = rk.dot(rk) <NEW_LINE> error_init = b.dot(b) <NEW_LINE> if (Prec is None): <NEW_LINE> <INDENT> pk = rk <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pk = Prec.solve(rk) <NEW_LINE> <DEDENT> zk = pk <NEW_LINE> xk = x0.copy() <NEW_LINE> iter = 0 <NEW_LINE> while ( iter < nb_iter_max ) and ( error > epsilon*epsilon*error_init): <NEW_LINE> <INDENT> Apk = A.dot(pk) <NEW_LINE> rkdzk = rk.dot(zk) <NEW_LINE> alpha = rkdzk/Apk.dot(pk) <NEW_LINE> xk = xk + alpha * pk <NEW_LINE> rk = rk - alpha * Apk <NEW_LINE> error = rk.dot(rk) <NEW_LINE> if (Prec is None) : <NEW_LINE> <INDENT> zk = rk <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> zk = Prec.solve(rk) <NEW_LINE> <DEDENT> betak = rk.dot(zk)/rkdzk <NEW_LINE> pk = zk + betak*pk <NEW_LINE> iter += 1 <NEW_LINE> <DEDENT> return xk, iter, error
|
Solves a linear system with the conjugate gradient method.
The conjugate gradient can be preconditioned by passing
a preconditioner via Prec. In that case the preconditioner
must provide a solve method.
The input arguments are:
A : the LHS matrix
x0: the initial solution
b : the right-hand side
nb_iter_max : the maximum number of iterations before the CG stops.
epsilon : the relative error on the solution -> ||rk||/||b|| with rk = b-A.xk
Prec (optional): the preconditioner
|
625941bb15fb5d323cde09cb
|
def get_paginated_response(self, data): <NEW_LINE> <INDENT> return Response( { "meta": {"count": self.count}, "links": { "first": self.get_first_link(), "previous": self.get_previous_link(), "next": self.get_next_link(), "last": self.get_last_link(), }, "data": data, } )
|
Returns paginated response.
|
625941bbfb3f5b602dac3550
|
def start(self): <NEW_LINE> <INDENT> if self.is_runnning: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> debug(DEBUG, '%s start', self) <NEW_LINE> self.is_runnning = True
|
Starts the media engine.
|
625941bb50485f2cf553cc58
|
def print_first_word(words): <NEW_LINE> <INDENT> first_word = words.pop(0) <NEW_LINE> print(first_word)
|
Print first word after popping it off
|
625941bb498bea3a759b9970
|
def get_all_attributes(self): <NEW_LINE> <INDENT> return self.entries[0].attributes_names
|
Return all the attributes.
:return: All the attributes.
|
625941bbd99f1b3c44c67455
|
def test_return_job_status(self): <NEW_LINE> <INDENT> with mock.patch('data.views.export_csv.AsyncResult', spec=celery.result.AsyncResult) as mock_status: <NEW_LINE> <INDENT> mock_status.return_value.state = celery.states.PENDING <NEW_LINE> mock_status.return_value.info = dict() <NEW_LINE> response = self.admin_client.get(self.base_url + 'fake-id/') <NEW_LINE> self.assertEqual({'status': celery.states.PENDING, 'info': {}}, response.data) <NEW_LINE> mock_status.return_value.state = celery.states.STARTED <NEW_LINE> response = self.admin_client.get(self.base_url + 'fake-id/') <NEW_LINE> self.assertEqual({'status': celery.states.STARTED, 'info': {}}, response.data) <NEW_LINE> mock_status.return_value.state = celery.states.SUCCESS <NEW_LINE> mock_status.return_value.get.return_value = 'filepath-here.tar.gz' <NEW_LINE> response = self.admin_client.get(self.base_url + 'fake-id/') <NEW_LINE> self.assertEqual({'status': celery.states.SUCCESS, 'result': 'http://testserver/download/filepath-here.tar.gz'}, response.data) <NEW_LINE> mock_status.return_value.state = celery.states.FAILURE <NEW_LINE> mock_status.return_value.get.return_value = 'error msg here' <NEW_LINE> response = self.admin_client.get(self.base_url + 'fake-id/') <NEW_LINE> self.assertEqual({'status': celery.states.FAILURE, 'error': 'error msg here'}, response.data)
|
Test that GETs return job status
|
625941bb92d797404e304049
|
def show_editor_buffer(self, editor_buffer): <NEW_LINE> <INDENT> self.active_tab.show_editor_buffer(editor_buffer)
|
Show this EditorBuffer in the current window.
|
625941bb462c4b4f79d1d590
|
def create_new_coordinates(data, new_tweet): <NEW_LINE> <INDENT> coordinates = data.get('coordinates') <NEW_LINE> if coordinates: <NEW_LINE> <INDENT> for value in coordinates: <NEW_LINE> <INDENT> longitude, latitude = value.get('coordinates') <NEW_LINE> new_coordinate = Coordinate(longitude=longitude, latitude=latitude, tweet_id_str=new_tweet.tweet_id_str, tweet=new_tweet ) <NEW_LINE> session.add(new_coordinate) <NEW_LINE> session.commit()
|
Insert new coordinates into the db.
|
625941bba8370b7717052760
|
def init_platform(): <NEW_LINE> <INDENT> initializer = None <NEW_LINE> try: <NEW_LINE> <INDENT> initializer = globals()[_system + "Initializer"] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if initializer: <NEW_LINE> <INDENT> logger.debug("Running initializer for %s" % (platform.system(),)) <NEW_LINE> initializer() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug("Initializer not found for %s" % (platform.system(),))
|
Finds the right initializer for the platform we are running on and executes it,
or logs a debug message if no proper initializer is found.
|
625941bb5e10d32532c5eded
|
def dict_deep_overlay(*data, list_replace=False): <NEW_LINE> <INDENT> if len(data) == 1: <NEW_LINE> <INDENT> return data[0] <NEW_LINE> <DEDENT> elif len(data) != 2: <NEW_LINE> <INDENT> head = dict_deep_overlay(data[0], data[1], list_replace=list_replace) <NEW_LINE> return dict_deep_overlay(head, *data[2:], list_replace=list_replace) <NEW_LINE> <DEDENT> original, overlay = data <NEW_LINE> if isinstance(original, (list, tuple)) and isinstance(overlay, dict): <NEW_LINE> <INDENT> for key, item in overlay.items(): <NEW_LINE> <INDENT> assert isinstance(key, int) <NEW_LINE> original[key] = dict_deep_overlay(original[key], item) <NEW_LINE> <DEDENT> <DEDENT> elif not isinstance(original, type(overlay)): <NEW_LINE> <INDENT> return overlay <NEW_LINE> <DEDENT> elif isinstance(overlay, dict): <NEW_LINE> <INDENT> for key, item in overlay.items(): <NEW_LINE> <INDENT> _dict_deep_overlay_item(original, key, item, list_replace) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(overlay, list) and not list_replace: <NEW_LINE> <INDENT> raise ValueError("Cannot implicitly merge two lists, use key* or key+ " + "when inheriting: (list1: %s, list2: %s)" % (str(original), str(overlay))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return overlay <NEW_LINE> <DEDENT> return original
|
Overlay dictionaries deeply
|
625941bba17c0f6771cbdf13
|
def _step(self, challenge=''): <NEW_LINE> <INDENT> log.debug('GSSAPI step challenge="{0}"'.format(challenge)) <NEW_LINE> return deferToThread(kerberos.authGSSClientStep, self._context, challenge)
|
Processes a single GSSAPI client-side step using the supplied server
data.
@param challenge: a string containing the base64-encoded server data
(which may be empty for the first step).
@return: a result code
|
625941bbc432627299f04b04
|
def addAtIndex(self, index, val): <NEW_LINE> <INDENT> if index == self.length: <NEW_LINE> <INDENT> self.addAtTail(val) <NEW_LINE> <DEDENT> elif index > self.length: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> index_before = index -1 <NEW_LINE> start_index =0 <NEW_LINE> starting_node = self.head <NEW_LINE> while starting_node.next != None and start_index != index_before: <NEW_LINE> <INDENT> starting_node = starting_node.next <NEW_LINE> start_index += 1 <NEW_LINE> <DEDENT> before = starting_node <NEW_LINE> new_node = Node(val) <NEW_LINE> after = before.next <NEW_LINE> before.next = new_node <NEW_LINE> new_node.next = after <NEW_LINE> self.length += 1
|
Add a node of value val before the index-th node in the linked list. If index equals to the length of linked list, the node will be appended to the end of linked list. If index is greater than the length, the node will not be inserted.
:type index: int
:type val: int
:rtype: void
|
625941bb63b5f9789fde6fa6
|
def numbers(x: int) -> int: <NEW_LINE> <INDENT> for x in range(1, connectfour.BOARD_COLUMNS+1): <NEW_LINE> <INDENT> print(x, end = " ") <NEW_LINE> <DEDENT> print("")
|
Prints numbers on top of the columns of the game board.
|
625941bbc4546d3d9de728f1
|
def get_builds(self, id_user): <NEW_LINE> <INDENT> raise NotImplementedError('%s.get_builds' % class_name(self))
|
Returns the list of buildings.
<id_user> - the user's ID
|
625941bb4c3428357757c1ea
|
def system_node_reboot(self, node): <NEW_LINE> <INDENT> return self.request( "system-node-reboot", { 'node': [ node, 'node', [ basestring, 'None' ], False ], }, { } )
|
Reboot the specified node. Only an admin can reboot the node.
If attempted by a user with insufficient privileges EAPIPRIVILEGE
is returned.
:param node: The textual name of a node.
|
625941bb45492302aab5e180
|
def _browser_url(service: ServiceCall) -> None: <NEW_LINE> <INDENT> webbrowser.open(service.data[ATTR_URL])
|
Browse to URL.
|
625941bb187af65679ca4fde
|
def lnprob(c, xs, ys, sigma): <NEW_LINE> <INDENT> guess = series(c, xs) <NEW_LINE> return -sum((guess-ys)**2)/(2* sigma**2)
|
log-likelihood for coefficients c given data, assuming gaussian errors
|
625941bb6fb2d068a760ef5a
|
def where(self , condition , *arguments): <NEW_LINE> <INDENT> condition = str(condition).strip() <NEW_LINE> conditions = condition.split(' ') <NEW_LINE> where = '' <NEW_LINE> for v in conditions: <NEW_LINE> <INDENT> where += self.filterColumn(v) + ' ' <NEW_LINE> <DEDENT> where = len(self._where) > 0 and ' AND ( ' + where + ') ' or ' ( ' +where + ') ' <NEW_LINE> self._where.append( where ) <NEW_LINE> for v in arguments: <NEW_LINE> <INDENT> self._param['where'].append(v) <NEW_LINE> <DEDENT> return self
|
AND query condition (joined to the existing WHERE clauses with AND).
|
625941bbbe7bc26dc91cd4c5
|
def _update_scores(key_id, key_info, known_keys_list): <NEW_LINE> <INDENT> key_info["score"] = sum([score for source, (score, reason) in key_info.get('scores', {}).iteritems() if source != 'Known encryption keys']) <NEW_LINE> if key_id in known_keys_list: <NEW_LINE> <INDENT> score, reason = _score_validity(known_keys_list[key_id]["validity"], local=True) <NEW_LINE> if score == 0: <NEW_LINE> <INDENT> score += 9 <NEW_LINE> reason = _('Encryption key has been imported') <NEW_LINE> <DEDENT> key_info["on_keychain"] = True <NEW_LINE> key_info['score'] += score <NEW_LINE> key_info['scores']['Known encryption keys'] = [score, reason] <NEW_LINE> <DEDENT> if "keysize" in key_info: <NEW_LINE> <INDENT> bits = int(key_info["keysize"]) <NEW_LINE> score = bits // 1024 <NEW_LINE> key_info['score'] += score <NEW_LINE> if bits >= 4096: <NEW_LINE> <INDENT> key_strength = _('Encryption key is very strong') <NEW_LINE> <DEDENT> elif bits >= 3072: <NEW_LINE> <INDENT> key_strength = _('Encryption key is strong') <NEW_LINE> <DEDENT> elif bits >= 2048: <NEW_LINE> <INDENT> key_strength = _('Encryption key is average') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key_strength = _('Encryption key is weak') <NEW_LINE> <DEDENT> key_info['scores']['Encryption key strength'] = [score, key_strength] <NEW_LINE> <DEDENT> sc, reason = max([(abs(score), reason) for score, reason in key_info['scores'].values()]) <NEW_LINE> key_info['score_reason'] = '%s' % reason <NEW_LINE> log_score = math.log(3 * abs(key_info['score']), 3) <NEW_LINE> key_info['score_stars'] = (max(1, min(int(round(log_score)), 5)) * (-1 if (key_info['score'] < 0) else 1))
|
Update scores and score explanations
|
625941bba8ecb033257d2f96
|
def exists_in_cf(self, current_cf_stacks): <NEW_LINE> <INDENT> for stack in current_cf_stacks: <NEW_LINE> <INDENT> if str(stack.stack_name) == self.cf_stack_name: <NEW_LINE> <INDENT> return stack <NEW_LINE> <DEDENT> <DEDENT> return False
|
Check if this stack exists in CloudFormation
|
625941bbec188e330fd5a665
|
def get_client(api_version, **kwargs): <NEW_LINE> <INDENT> cli_kwargs = { 'username': kwargs.get('os_username'), 'password': kwargs.get('os_password'), 'tenant_name': kwargs.get('os_tenant_name'), 'token': kwargs.get('os_auth_token'), 'auth_url': kwargs.get('os_auth_url'), 'endpoint': kwargs.get('tuskar_url'), } <NEW_LINE> client = Client(api_version, **cli_kwargs) <NEW_LINE> if client: <NEW_LINE> <INDENT> return client <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Need correct set of parameters")
|
Get an authenticated client, based on the credentials
in the keyword args.
:param api_version: the API version to use (only '2' is valid)
:param kwargs: keyword args containing credentials, either:
* os_auth_token: pre-existing token to re-use
* tuskar_url: tuskar API endpoint
or:
* os_username: name of user
* os_password: user's password
* os_auth_url: endpoint to authenticate against
* os_tenant_{name|id}: name or ID of tenant
|
625941bb8a43f66fc4b53f29
|
def prep(): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description='How to run plunder.') <NEW_LINE> parser.add_argument('-q', '--query', help='Your query to search', dest="query", default=".php?id=1") <NEW_LINE> parser.add_argument('-v', '--version', action='version', version=__version__) <NEW_LINE> parser.add_argument('-j','--json', help='Output in JSON file, otherwise output to screen only.', dest="json_out", action='store_true', default=False) <NEW_LINE> args = parser.parse_args() <NEW_LINE> return args
|
Get the args and set them.
args
----
q or query for the search parameters you want to send
v or version for the current version
j or json to output to a json file
|
625941bb67a9b606de4a7d7c
|
def fsl_reverse_xfm(self, in_file=None, out_file=None): <NEW_LINE> <INDENT> invmat = [self.fsl_laucher, 'convert_xfm', '-omat', out_file, '-inverse', in_file, ] <NEW_LINE> logging.debug(' '.join(invmat)) <NEW_LINE> subprocess.call(invmat)
|
Runs FSL convert_xfm with the invert option
:param in_file: matrix transform
:param out_file: inverse matrix transform
:return:
convert_xfm -omat <outmat> -inverse <inmat>
|
625941bb4d74a7450ccd4083
|
def fit(self,Y,X,weights = None): <NEW_LINE> <INDENT> n,m = np.shape(X) <NEW_LINE> if weights is not None: <NEW_LINE> <INDENT> w = np.sqrt(weights) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> w = np.ones(n) <NEW_LINE> weights = w <NEW_LINE> <DEDENT> X_w = (X.T*w).T <NEW_LINE> Y_w = Y*w <NEW_LINE> if self.theta is None: <NEW_LINE> <INDENT> self.init_params(m) <NEW_LINE> <DEDENT> theta_recovery = self.theta <NEW_LINE> var_recovery = self.var <NEW_LINE> log_like_before = self.log_likelihood(X,Y,weights) <NEW_LINE> if self.solver == "cholesky": <NEW_LINE> <INDENT> part_one = np.dot(X_w.T,X_w) <NEW_LINE> part_two = np.dot(X_w.T,Y_w) <NEW_LINE> self.theta = cholesky_solver_least_squares(part_one, part_two) <NEW_LINE> <DEDENT> elif self.solver == "qr": <NEW_LINE> <INDENT> Q,R = np.linalg.qr(X_w) <NEW_LINE> self.theta = qr_solver(Q,R,Y_w) <NEW_LINE> <DEDENT> elif self.solver == "lapack_solver": <NEW_LINE> <INDENT> self.theta = lstsq_wrapper(Y_w,X_w) <NEW_LINE> <DEDENT> vec_1 = (Y_w - np.dot(X_w,self.theta)) <NEW_LINE> self.var = np.dot(vec_1,vec_1)/np.sum(weights) <NEW_LINE> log_like_after = self.log_likelihood(X,Y,weights) <NEW_LINE> delta_log_like = ( log_like_after - log_like_before)/n <NEW_LINE> if delta_log_like < self.stop_learning: <NEW_LINE> <INDENT> self.theta = theta_recovery <NEW_LINE> self.var = var_recovery <NEW_LINE> delta_log_like = 0 <NEW_LINE> <DEDENT> delta = self.theta - theta_recovery <NEW_LINE> self.delta_param_norm = np.sum(np.dot(delta.T,delta)) <NEW_LINE> self.delta_log_like = delta_log_like
|
Fits weighted regression, updates coefficients and variance
Parameters:
-----------
X: numpy array of size 'n x m'
Explanatory variables
Y: numpy array of size 'n x 1'
Target variable can take only values 0 or 1
weights: numpy array of size 'n x 1'
Weights for observations
|
625941bb7d847024c06be179
|
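The "cholesky" branch above solves the weighted normal equations. A standalone NumPy sketch of that step under unit weights (an illustration of the math, not the class itself):

```python
import numpy as np

rng = np.random.default_rng(0)
X = np.column_stack([np.ones(50), rng.normal(size=50)])
Y = 2.0 + 3.0 * X[:, 1] + rng.normal(scale=0.1, size=50)
w = np.sqrt(np.ones(50))                    # unit observation weights
X_w, Y_w = (X.T * w).T, Y * w
theta = np.linalg.solve(X_w.T @ X_w, X_w.T @ Y_w)   # (Xw'Xw) theta = Xw'Yw
print(theta)                                # close to [2.0, 3.0]
```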
def message_post(self, cr, uid, thread_id, context=None, **kwargs): <NEW_LINE> <INDENT> partner_id = self._message_post_get_pid(cr, uid, thread_id, context=context) <NEW_LINE> return self.pool.get('res.partner').message_post(cr, uid, partner_id, context=context, **kwargs)
|
Redirect the posting of message on res.users to the related partner.
This is done because when giving the context of Chatter on the
various mailboxes, we do not have access to the current partner_id.
|
625941bbd10714528d5ffba0
|
@click.group(name='product') <NEW_LINE> def product_cli(): <NEW_LINE> <INDENT> pass
|
Product related commands.
|
625941bb1d351010ab8559dd
|
def init(t_nrow, t_ncol, t_nAction=4): <NEW_LINE> <INDENT> nState = t_nrow * t_ncol <NEW_LINE> Qtable = np.zeros((nState, t_nAction), dtype=np.float32) <NEW_LINE> States = np.empty((nState,2), np.int16) <NEW_LINE> for i in range(t_nrow): <NEW_LINE> <INDENT> for j in range(t_ncol): <NEW_LINE> <INDENT> index = i * t_ncol + j <NEW_LINE> States[index] = i, j <NEW_LINE> <DEDENT> <DEDENT> movements = np.array([[+1,0], [0,+1], [0,-1], [-1,0]], np.int16) <NEW_LINE> rewards = np.array([-100,-1000,+1000,+1], np.int16) <NEW_LINE> return Qtable, movements, States, rewards
|
| DOWN Right LEFT UP
--------|----------------------------------------
(0, 0) |
(0, 1) |
(., .) |
|
625941bb3eb6a72ae02ec395
|
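A small usage sketch of the flat state indexing (index = row * ncol + col), assuming init is importable from this module:

```python
Qtable, movements, States, rewards = init(2, 3)
print(Qtable.shape)   # (6, 4): one row per grid cell, one column per action
print(States[4])      # [1 1]: flat state 4 is row 1, column 1 (4 = 1*3 + 1)
```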
def get_search_index(self, index): <NEW_LINE> <INDENT> raise NotImplementedError
|
Returns a yokozuna search index or None.
|
625941bb3346ee7daa2b2c2a
|
def launch_slave(self): <NEW_LINE> <INDENT> pass
|
For multi-node services, the master node is responsible for starting/stopping
all slave nodes. Launch slave is the method called when the master monitor
needs to start up a new slave.
|
625941bbab23a570cc250040
|
def load_value(self, review_request_details): <NEW_LINE> <INDENT> return review_request_details.extra_data.get(self.field_id)
|
Loads a value from the review request or draft.
By default, this loads the value as-is from the extra_data field.
This can be overridden if you need to deserialize the value in some
way.
This must use ``review_request_details`` instead of
``self.review_request_details``.
|
625941bbcdde0d52a9e52ef0
|
@pytest.fixture(scope='session') <NEW_LINE> def headers(): <NEW_LINE> <INDENT> return {'AUTHORIZATION': http_auth(username, password)}
|
Return headers valid for a test request.
|
625941bb5fc7496912cc3846
|
def test_package_is_uploaded_with_releases_using_cache(default_repo): <NEW_LINE> <INDENT> default_repo._releases_json_data = {"fake": {"0.1": [{"filename": "fake.whl"}]}} <NEW_LINE> package = pretend.stub( safe_name="fake", basefilename="fake.whl", metadata=pretend.stub(version="0.1"), ) <NEW_LINE> assert default_repo.package_is_uploaded(package) is True
|
Return True when the package is in the releases cache.
|
625941bb4e4d5625662d429d
|
def circle(x, y, center_x, center_y, radius): <NEW_LINE> <INDENT> r = np.sqrt((x - center_x)**2 + (y - center_y)**2) <NEW_LINE> circle_draw = np.zeros_like(r) <NEW_LINE> circle_draw[r < radius] = 1 <NEW_LINE> return circle_draw
|
uniform density circle
:param x: x-coordinates
:param y: y-coordinates
:param center_x: center of x-coordinates
:param center_y: center of y-coordinates
:param radius: radius of circle
:return:
|
625941bb01c39578d7e74d03
|
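A quick check of the mask on a 5x5 grid, assuming circle is importable from this module:

```python
import numpy as np

x, y = np.meshgrid(np.arange(5), np.arange(5))
mask = circle(x, y, center_x=2, center_y=2, radius=1.2)
print(int(mask.sum()))   # 5: the centre pixel plus its four direct neighbours
```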
def startOfSequence(self): <NEW_LINE> <INDENT> pass
|
QXmlSerializer.startOfSequence()
|
625941bbec188e330fd5a666
|
def put(self, key, value): <NEW_LINE> <INDENT> if key in self.cache: <NEW_LINE> <INDENT> item = self.cache[key] <NEW_LINE> item.value = value <NEW_LINE> self.queue.remove(item) <NEW_LINE> self.queue.add_front(item) <NEW_LINE> return <NEW_LINE> <DEDENT> if len(self.cache) == self.capacity: <NEW_LINE> <INDENT> item_to_remove = self.queue.remove_last() <NEW_LINE> self.cache.pop(item_to_remove.key) <NEW_LINE> <DEDENT> item = Node(key, value) <NEW_LINE> self.cache[key] = item <NEW_LINE> self.queue.add_front(item)
|
:type key: int
:type value: int
:rtype: None
|
625941bb090684286d50eba2
|
def __init__(self, jobject=None, options=None): <NEW_LINE> <INDENT> if jobject is None: <NEW_LINE> <INDENT> jobject = DatabaseUtils.new_instance("weka.experiment.DatabaseUtils") <NEW_LINE> <DEDENT> self.enforce_type(jobject, "weka.experiment.DatabaseUtils") <NEW_LINE> super(DatabaseUtils, self).__init__(jobject=jobject, options=options)
|
Initializes a DatabaseUtils object from scratch or uses the provided JB_Object.
:param jobject: the JB_Object to use
:type jobject: JB_Object
:param options: the list of commandline options to use
:type options: list
|
625941bbd10714528d5ffba1
|
def reset_weights(self, W): <NEW_LINE> <INDENT> logger.fatal("not implemented yet") <NEW_LINE> return
|
reset the layer framework weight tensor
|
625941bb23849d37ff7b2f52
|
def last_event(self): <NEW_LINE> <INDENT> for attr_name in ('challenge2', 'response', 'challenge1', 'action'): <NEW_LINE> <INDENT> event = getattr(self, attr_name, None) <NEW_LINE> if event is not None: <NEW_LINE> <INDENT> return event <NEW_LINE> <DEDENT> <DEDENT> return None
|
Gets the last event that was done in this game
Return: Action or Response object that was done last, or None if N/A
|
625941bb1f037a2d8b9460c0
|
def __str__(self): <NEW_LINE> <INDENT> return yaml.dump(self.config, default_flow_style=False)
|
returns the config object as a string.
|
625941bb167d2b6e31218a57
|
@subscriber(InternalServerError) <NEW_LINE> def log_internal_server_error(event: InternalServerError): <NEW_LINE> <INDENT> request = event.request <NEW_LINE> user_context = get_logging_user_context(event.request) <NEW_LINE> request.raven.user_context(user_context) <NEW_LINE> request.raven.captureException()
|
Catch 500 errors and send them to Sentry with additional details.
|
625941bb099cdd3c635f0b1d
|
def __init__(self, data_dir, receptive_fields, sample_size=0, sample_rate=8000, in_channels=256, batch_size=1, shuffle=True): <NEW_LINE> <INDENT> dataset = Dataset(data_dir, sample_rate, in_channels) <NEW_LINE> super(DataLoader, self).__init__(dataset, batch_size, shuffle) <NEW_LINE> if sample_size <= receptive_fields: <NEW_LINE> <INDENT> raise Exception("sample_size has to be bigger than receptive_fields") <NEW_LINE> <DEDENT> self.sample_size = sample_size <NEW_LINE> self.receptive_fields = receptive_fields <NEW_LINE> self.collate_fn = self._collate_fn
|
DataLoader for WaveNet
:param data_dir:
:param receptive_fields: integer. size(length) of receptive fields
:param sample_size: integer. number of timesteps to train at once.
sample size has to be bigger than receptive fields.
|-- receptive field --|---------------------|
|------- samples -------------------|
|---------------------|-- outputs --|
:param sample_rate: sound sampling rates
:param in_channels: number of input channels
:param batch_size:
:param shuffle:
|
625941bb7b25080760e3931b
|
def test_edit_categories_no_disclaimer(self): <NEW_LINE> <INDENT> response = self.client.get(self.describe_edit_url) <NEW_LINE> doc = pq(response.content) <NEW_LINE> assert doc('#addon-categories-edit div.addon-app-cats').length == 1 <NEW_LINE> assert doc('#addon-categories-edit > p').length == 0
|
Ensure that there is no disclaimer for non-creatured add-ons.
|
625941bbc4546d3d9de728f2
|
def linear_search(test_list,search_value): <NEW_LINE> <INDENT> test_len=len(test_list) <NEW_LINE> for index in range(0,test_len): <NEW_LINE> <INDENT> if(test_list[index]==search_value): <NEW_LINE> <INDENT> return(index) <NEW_LINE> <DEDENT> <DEDENT> print("List doesn't contain the value")
|
Performs a simple linear search
|
625941bbd58c6744b4257b22
|
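Doctest-style calls (the function returns the index of the first match, or prints a message and returns None):

```python
>>> linear_search([4, 8, 15, 16, 23, 42], 15)
2
>>> linear_search([4, 8, 15], 99)
List doesn't contain the value
```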
def _element_constructor_(self, *args, **kwds): <NEW_LINE> <INDENT> from train_track.free_group import FreeGroup_class <NEW_LINE> if len(args)!=1: <NEW_LINE> <INDENT> return self.element_class(self, *args, **kwds) <NEW_LINE> <DEDENT> x = args[0] <NEW_LINE> if x==1 or x == [] or x == (): <NEW_LINE> <INDENT> return self.one() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> P = x.parent() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return self.element_class(self, x, **kwds) <NEW_LINE> <DEDENT> if isinstance(P, FreeGroup_class): <NEW_LINE> <INDENT> names = set(P._names[abs(i)-1] for i in x.Tietze()) <NEW_LINE> if names.issubset(self._names): <NEW_LINE> <INDENT> return self([i.sign()*(self._names.index(P._names[abs(i)-1])+1) for i in x.Tietze()]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('generators of %s not in the group'%x) <NEW_LINE> <DEDENT> <DEDENT> return self.element_class(self, x, **kwds)
|
TESTS::
sage: from train_track import *
sage: G.<a,b> = FreeGroup()
sage: G([1, 2, 1]) # indirect doctest
a*b*a
sage: G([1, 2, -2, 1, 1, -2]) # indirect doctest
a^3*b^-1
sage: G( G.gap().GeneratorsOfGroup()[0] )
a
sage: type(_)
<class 'train_track.free_group.FreeGroup_class_with_category.element_class'>
Check that conversion between free groups follow the convention that
names are preserved::
sage: F = FreeGroup('a,b')
sage: G = FreeGroup('b,a')
sage: G(F.gen(0))
a
sage: F(G.gen(0))
b
sage: a,b = F.gens()
sage: G(a^2*b^-3*a^-1)
a^2*b^-3*a^-1
Check that :trac:`17246` is fixed::
sage: F = FreeGroup(0)
sage: F([])
1
Check that 0 isn't considered the identity::
sage: F = FreeGroup('x')
sage: F(0)
Traceback (most recent call last):
...
TypeError: 'sage.rings.integer.Integer' object is not iterable
|
625941bbd18da76e23532393
|
def __init__(self): <NEW_LINE> <INDENT> simulation_manager.SimulationManager.__init__(self)
|
Constructor
|
625941bb6e29344779a624d6
|
def verifyPassword_async(self, _cb, name, pw, current=None): <NEW_LINE> <INDENT> pass
|
Verify the password of a user. You can use this to verify a user's credentials.
Arguments:
name User name. See RegisteredUser.name.
pw User password.
Returns:
User ID of registered user (See RegisteredUser.userid), -1 for failed authentication or -2 for unknown usernames.
|
625941bb85dfad0860c3ad1a
|
def squareAt(self, point, orientation=None): <NEW_LINE> <INDENT> sqSize = self.squareSize <NEW_LINE> x = point.x() // sqSize.width() <NEW_LINE> y = point.y() // sqSize.height() <NEW_LINE> if (x < 0) or (x > 13) or (y < 0) or (y > 13): <NEW_LINE> <INDENT> return QPoint() <NEW_LINE> <DEDENT> elif orientation == 'b': <NEW_LINE> <INDENT> return QPoint(self.board.files - (y + 1), self.board.ranks - (x + 1)) <NEW_LINE> <DEDENT> elif orientation == 'y': <NEW_LINE> <INDENT> return QPoint(self.board.files - (x + 1), y) <NEW_LINE> <DEDENT> elif orientation == 'g': <NEW_LINE> <INDENT> return QPoint(y, x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return QPoint(x, self.board.ranks - (y + 1))
|
Returns square (file, rank) of type QPoint that contains point.
|
625941bba05bb46b383ec6e5
|
def __init__(self, data_pin, clock_pin, latch_pin, L0_pin, L1_pin, L2_pin, L3_pin, debug=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.data = data_pin <NEW_LINE> self.clock = clock_pin <NEW_LINE> self.latch = latch_pin <NEW_LINE> self.Lv = [L0_pin, L1_pin, L2_pin, L3_pin] <NEW_LINE> self.da = None <NEW_LINE> self.cl = None <NEW_LINE> self.la = None <NEW_LINE> self.lv = [None, None, None, None] <NEW_LINE> self.debug = debug <NEW_LINE> <DEDENT> except Exception as E: <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> print("4x4x4 LED Cube __init__ error: ",E)
|
Constructor for the 4x4x4 LED cube.
|
625941bb4f88993c3716bf2d
|
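A hypothetical instantiation: the class name and the BCM pin numbers below are placeholders for whatever pins the shift register and the four layer transistors are actually wired to.

    # Illustrative only; class name and pin assignment are assumptions.
    cube = Cube4x4x4(data_pin=17, clock_pin=27, latch_pin=22,
                     L0_pin=5, L1_pin=6, L2_pin=13, L3_pin=19,
                     debug=True)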
def _convert_dataset(image_list, layer_list, bboxes_list, tfrecord_dir): <NEW_LINE> <INDENT> with tf.Graph().as_default(): <NEW_LINE> <INDENT> with tf.Session() as sess: <NEW_LINE> <INDENT> if not os.path.exists(tfrecord_dir): <NEW_LINE> <INDENT> os.makedirs(tfrecord_dir) <NEW_LINE> <DEDENT> output_filename = os.path.join(tfrecord_dir, "train.tfrecord") <NEW_LINE> tfrecord_writer = tf.python_io.TFRecordWriter(output_filename) <NEW_LINE> length = len(image_list) <NEW_LINE> for i in range(length): <NEW_LINE> <INDENT> image_data = Image.open(image_list[i], 'r') <NEW_LINE> image_data = image_data.tobytes() <NEW_LINE> label = layer_list[i] <NEW_LINE> bboxes = bboxes_list[i] <NEW_LINE> example = image_to_tfexample(image_data, label, bboxes) <NEW_LINE> tfrecord_writer.write(example.SerializeToString()) <NEW_LINE> sys.stdout.write('\r>> Converting image %d/%d' % (i + 1, length)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> sys.stdout.write('\n') <NEW_LINE> sys.stdout.flush()
|
Convert data to TFRecord format.
|
625941bbcad5886f8bd26ea3
|
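A hedged usage sketch: the file paths, labels, and bounding boxes are invented, and image_to_tfexample is assumed to be the helper already defined in the same module.

    # Hypothetical call; paths, labels and bboxes are placeholders.
    image_list  = ["data/img_000.png", "data/img_001.png"]
    layer_list  = [0, 1]                                    # one label per image
    bboxes_list = [[[0.1, 0.2, 0.5, 0.6]],                  # one list of boxes per image
                   [[0.3, 0.3, 0.9, 0.8]]]

    _convert_dataset(image_list, layer_list, bboxes_list, tfrecord_dir="tfrecords/")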
def GetComment(self): <NEW_LINE> <INDENT> return _ITKCommonBasePython.itkSimpleFilterWatcher_GetComment(self)
|
GetComment(self) -> string
|
625941bb711fe17d82542232
|
def transform_ner_from_standoff(file): <NEW_LINE> <INDENT> command_line = ''.join(['./' + dirs['standoff2other_path']['path'] + 'standoff2conll.py ', dirs['annotated']['path'], ' ', ' > ' + dirs['transformed']['path'] + 'documents.tsv']) <NEW_LINE> os.system(command_line)
|
Entry point for data transformation: converts annotated Brat files (standoff format)
into Stanford's NER training format.
The conversion script is run externally because of python3 vs. python2 compatibility
issues; the library version of the call is left commented in the code for future reference.
Args:
file: A string with the directory to be transformed.
|
625941bb9c8ee82313fbb635
|
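The function relies on a module-level dirs configuration; the sketch below shows the shape that configuration would need for the command line to be assembled. The concrete paths are assumptions.

    # Hypothetical configuration the function expects to find at module level.
    dirs = {
        "standoff2other_path": {"path": "tools/standoff2conll/"},
        "annotated":           {"path": "data/annotated/"},
        "transformed":         {"path": "data/transformed/"},
    }
    # Shell command assembled by the function with this configuration:
    # ./tools/standoff2conll/standoff2conll.py data/annotated/  > data/transformed/documents.tsv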
def Write( self, Channel, MessageBuffer): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.__m_dllBasic.CAN_Write(Channel, byref(MessageBuffer)) <NEW_LINE> return TPCANStatus(res) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Exception on PCANBasic.Write") <NEW_LINE> raise
|
Transmits a CAN message
Parameters:
Channel : A TPCANHandle representing a PCAN Channel
MessageBuffer: A TPCANMsg representing the CAN message to be sent
Returns:
A TPCANStatus error code
|
625941bb004d5f362079a1f7
|
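A hedged example of transmitting a single frame with the PCANBasic Python wrapper. The channel, bitrate, and payload are illustrative, and the constants and TPCANMsg field names (ID, MSGTYPE, LEN, DATA) follow the usual PCANBasic module layout; treat them as assumptions if your wrapper version differs.

    # Illustrative only; channel and payload are placeholders.
    from PCANBasic import *

    pcan = PCANBasic()
    pcan.Initialize(PCAN_USBBUS1, PCAN_BAUD_500K)

    msg = TPCANMsg()
    msg.ID = 0x123
    msg.MSGTYPE = PCAN_MESSAGE_STANDARD
    msg.LEN = 2
    msg.DATA[0] = 0x01
    msg.DATA[1] = 0xFF

    status = pcan.Write(PCAN_USBBUS1, msg)
    print("Write status:", status)     # expect the OK status on success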
def state_to_cookie(state, name, path, encryption_key): <NEW_LINE> <INDENT> cookie_data = "" if state.delete else state.urlstate(encryption_key) <NEW_LINE> max_age = 0 if state.delete else STATE_COOKIE_MAX_AGE <NEW_LINE> satosa_logging(logger, logging.DEBUG, "Saving state as cookie, secure: %s, max-age: %s, path: %s" % (STATE_COOKIE_SECURE, STATE_COOKIE_MAX_AGE, path), state) <NEW_LINE> cookie = SimpleCookie() <NEW_LINE> cookie[name] = cookie_data <NEW_LINE> cookie[name]["secure"] = STATE_COOKIE_SECURE <NEW_LINE> cookie[name]["path"] = path <NEW_LINE> cookie[name]["max-age"] = max_age <NEW_LINE> return cookie
|
Saves a state to a cookie
:type state: satosa.state.State
:type name: str
:type path: str
:type encryption_key: str
:rtype: http.cookies.SimpleCookie
:param state: The state to save
:param name: Name identifier of the cookie
:param path: Endpoint path the cookie will be associated to
:param encryption_key: Key to encrypt the state information
:return: A cookie
|
625941bbbe8e80087fb20b09
|
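A hedged sketch of how the helper would typically be used when building a response; the State construction, its dict-like assignment, and the cookie rendering are assumptions about the surrounding SATOSA code, not part of the snippet.

    # Illustrative only; key, cookie name and payload are placeholders.
    from satosa.state import State

    state = State()
    state["SESSION_ID"] = "abc123"      # arbitrary state payload

    cookie = state_to_cookie(state, name="SATOSA_STATE", path="/",
                             encryption_key="0123456789abcdef")
    # Render the header value to attach to the HTTP response:
    header_value = cookie.output(header="")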
def get_debug_info_bytes(self): <NEW_LINE> <INDENT> return self.debug_info.file.getvalue()
|
Get the .debug_info bytes as a python string.
|
625941bb5510c4643540f2ad
|
def index(request): <NEW_LINE> <INDENT> if request.user.is_authenticated: <NEW_LINE> <INDENT> return redirect('post:list') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect('member:signup')
|
If the user is authenticated, redirect to post:list.
If the user is not authenticated, redirect to member:signup.
Tests to write:
1. When the index URL is requested by an anonymous user, check that it redirects to member:signup.
2. Same as above, but with an authenticated user: check that it redirects to post:list.
|
625941bb5166f23b2e1a501a
|
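The docstring lists two tests to write; a hedged sketch of those tests follows. The URL name 'index' and the use of the default user model are assumptions.

    # Hypothetical tests for the two cases described in the docstring.
    from django.contrib.auth import get_user_model
    from django.test import TestCase
    from django.urls import reverse

    class IndexRedirectTest(TestCase):
        def test_anonymous_user_is_sent_to_signup(self):
            response = self.client.get(reverse('index'))
            self.assertRedirects(response, reverse('member:signup'))

        def test_logged_in_user_is_sent_to_post_list(self):
            user = get_user_model().objects.create_user(username='tester', password='pw')
            self.client.force_login(user)
            response = self.client.get(reverse('index'))
            self.assertRedirects(response, reverse('post:list'))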
def generate_list_embeddings(self): <NEW_LINE> <INDENT> with open(self.path_model + self.name_dataset + "_model.lf", "rb") as file_model: <NEW_LINE> <INDENT> dict_model = pickle.load(file_model) <NEW_LINE> <DEDENT> self.w2v = dict_model["word2vec"] <NEW_LINE> self.dict_issue = {} <NEW_LINE> try: <NEW_LINE> <INDENT> self.list_chunck <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> nb_files_per_chunck = int(len(self.list_classes) / (os.cpu_count() * 2)) <NEW_LINE> if nb_files_per_chunck < 1: <NEW_LINE> <INDENT> nb_files_per_chunck = 2 <NEW_LINE> <DEDENT> self.list_chunck = [self.list_classes[i:i + nb_files_per_chunck] for i in range(0, len(self.list_classes), nb_files_per_chunck)] <NEW_LINE> <DEDENT> self.list_vocab = [] <NEW_LINE> for word in self.w2v.vocab: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> int(word) <NEW_LINE> self.list_vocab.append(int(word)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> new_list_classes = [] <NEW_LINE> with Pool(os.cpu_count()) as mp: <NEW_LINE> <INDENT> for res in tqdm(mp.imap(Embedding.clear_list, zip(self.list_chunck, repeat(self.list_vocab))), total=len(self.list_chunck)): <NEW_LINE> <INDENT> new_list_classes += res.tolist() <NEW_LINE> <DEDENT> <DEDENT> logger.info(str(len(self.list_classes) - len(new_list_classes)) + " elements are removed due to not enought examples") <NEW_LINE> self.list_classes = new_list_classes <NEW_LINE> f = h5py.File(self.path_data + self.name_dataset + "_embedding.lf", "w") <NEW_LINE> f.create_dataset('list_classes', data=self.list_classes) <NEW_LINE> f.close()
|
Filter the list of patterns according to the learned embeddings. The word2vec model requires a minimum number of examples per word to be learned, so words that were excluded from word2vec training are removed.
|
625941bb55399d3f05588574
|
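The idea behind the filtering step, keeping only patterns whose tokens survived the word2vec vocabulary cut, can be illustrated in isolation. The toy vocabulary and pattern list are invented, and the actual Embedding.clear_list implementation is not shown in the snippet, so this is only a sketch of the concept.

    # Toy illustration of the vocabulary filter; data is invented.
    vocab = {101, 102, 205}                      # token ids kept by word2vec
    patterns = [[101, 102], [101, 999], [205]]   # candidate patterns

    kept = [p for p in patterns if all(tok in vocab for tok in p)]
    print(kept)   # [[101, 102], [205]] - patterns with unseen tokens are dropped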
def get_safe_name(string): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for c in string: <NEW_LINE> <INDENT> if c.isalnum(): <NEW_LINE> <INDENT> result.append(c) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append("_") <NEW_LINE> <DEDENT> <DEDENT> return "".join(result)
|
Creates a C++ identifier from a string by replacing every non-alphanumeric character with an underscore.
|
625941bb23849d37ff7b2f53
|
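A quick usage example (the input string is arbitrary):

    print(get_safe_name("my-header.h"))   # my_header_h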
def setup_database(): <NEW_LINE> <INDENT> with sqlite3.connect(DB_STRING) as c: <NEW_LINE> <INDENT> c.execute("CREATE TABLE user_string (session_id, value)")
|
Create the `user_string` table in the database
on server startup
|
625941bb31939e2706e4cd30
|
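Once the table exists, the rest of the application would read and write it along these lines; the connection string and the session-id argument are assumptions.

    # Hypothetical read/write against the table created above.
    import sqlite3

    DB_STRING = "my.db"   # assumed connection string

    def save_string(session_id, value):
        with sqlite3.connect(DB_STRING) as c:
            c.execute("INSERT INTO user_string VALUES (?, ?)", (session_id, value))

    def load_string(session_id):
        with sqlite3.connect(DB_STRING) as c:
            row = c.execute("SELECT value FROM user_string WHERE session_id=?",
                            (session_id,)).fetchone()
        return row[0] if row else None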
def sim_distance(self, p1, p2): <NEW_LINE> <INDENT> si = {} <NEW_LINE> for item in self.train[p1]: <NEW_LINE> <INDENT> if item in self.train[p2]: <NEW_LINE> <INDENT> si[item] = 1 <NEW_LINE> <DEDENT> <DEDENT> if len(si) == 0: return 0 <NEW_LINE> sum_of_squares = sum([pow(self.train[p1][item] - self.train[p2][item], 2) for item in self.train[p1] if item in self.train[p2]]) <NEW_LINE> return 1 / (1 + sqrt(sum_of_squares))
|
Compute similarity between two users using Euclidean distance.
|
625941bb3617ad0b5ed67db9
|
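A worked example of the Euclidean-distance similarity. The two rating dictionaries are invented, and since the recommender class holding self.train is only hinted at by the snippet, the sketch computes the value directly.

    from math import sqrt

    # Invented ratings for two users; only shared items contribute.
    ratings_a = {"item1": 4.0, "item2": 1.0, "item3": 5.0}
    ratings_b = {"item1": 5.0, "item2": 2.0}

    common = [i for i in ratings_a if i in ratings_b]
    sum_of_squares = sum((ratings_a[i] - ratings_b[i]) ** 2 for i in common)
    similarity = 1 / (1 + sqrt(sum_of_squares))
    print(similarity)   # 1 / (1 + sqrt(1 + 1)) ~= 0.414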
def forward(self, input: torch.Tensor, target: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: <NEW_LINE> <INDENT> if mask is not None: <NEW_LINE> <INDENT> if input.dim() != mask.dim(): <NEW_LINE> <INDENT> raise AssertionError(f"dim of input ({input.shape}) is different from mask ({mask.shape})") <NEW_LINE> <DEDENT> if not (input.shape[0] == mask.shape[0] or mask.shape[0] == 1): <NEW_LINE> <INDENT> raise AssertionError(f" batch size of mask ({mask.shape}) must be 1 or equal to input ({input.shape})") <NEW_LINE> <DEDENT> if target.dim() > 1: <NEW_LINE> <INDENT> if mask.shape[1] != 1: <NEW_LINE> <INDENT> raise AssertionError(f"mask ({mask.shape}) must have only 1 channel") <NEW_LINE> <DEDENT> if input.shape[2:] != mask.shape[2:]: <NEW_LINE> <INDENT> raise AssertionError(f"spatial size of input ({input.shape}) is different from mask ({mask.shape})") <NEW_LINE> <DEDENT> <DEDENT> input = input * mask <NEW_LINE> target = target * mask <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warnings.warn("no mask value specified for the MaskedDiceLoss.") <NEW_LINE> <DEDENT> return super().forward(input=input, target=target)
|
Args:
input: the shape should be BNH[WD].
target: the shape should be BNH[WD].
mask: the shape should B1H[WD] or 11H[WD].
|
625941bb377c676e9127206b
|
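A hedged usage sketch with MONAI's MaskedDiceLoss. The tensor shapes follow the docstring (input/target BNHW, mask B1HW); the random data and the loss constructor arguments are assumptions.

    import torch
    from monai.losses import MaskedDiceLoss

    # B=2, N=3 channels, 32x32 spatial size; values are random placeholders.
    pred   = torch.rand(2, 3, 32, 32)
    target = torch.randint(0, 2, (2, 3, 32, 32)).float()
    mask   = torch.ones(2, 1, 32, 32)   # 1 = voxel counts toward the loss
    mask[:, :, :, 16:] = 0              # ignore the right half of each image

    loss_fn = MaskedDiceLoss(sigmoid=True)
    loss = loss_fn(pred, target, mask=mask)
    print(loss.item())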