code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
def create_files(dir_path, m): <NEW_LINE> <INDENT> has_tests = False <NEW_LINE> for fn in m.get_value('test/files'): <NEW_LINE> <INDENT> path = join(m.path, fn) <NEW_LINE> if isdir(path): <NEW_LINE> <INDENT> shutil.copytree(path, join(dir_path, fn)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shutil.copy(path, dir_path) <NEW_LINE> <DEDENT> <DEDENT> with open(join(dir_path, 'run_test.py'), 'w') as fo: <NEW_LINE> <INDENT> fo.write("# tests for %s (this is a generated file)\n" % m.dist()) <NEW_LINE> fo.write("print('===== testing package: %s =====')\n" % m.dist()) <NEW_LINE> with open(join(dirname(__file__), 'header_test.py')) as fi: <NEW_LINE> <INDENT> fo.write(fi.read() + '\n') <NEW_LINE> <DEDENT> for cmd in m.get_value('test/commands'): <NEW_LINE> <INDENT> fo.write('print(%r)\n'% ("command: %r" % cmd)) <NEW_LINE> fo.write('call_args(%r)\n\n' % cmd) <NEW_LINE> has_tests = True <NEW_LINE> <DEDENT> for name in m.get_value('test/imports'): <NEW_LINE> <INDENT> fo.write('print("import: %r")\n' % name) <NEW_LINE> fo.write('import %s\n' % name) <NEW_LINE> fo.write('\n') <NEW_LINE> has_tests = True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(join(m.path, 'run_test.py')) as fi: <NEW_LINE> <INDENT> fo.write("# --- run_test.py (begin) ---\n") <NEW_LINE> fo.write(fi.read()) <NEW_LINE> fo.write("# --- run_test.py (end) ---\n") <NEW_LINE> <DEDENT> has_tests = True <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> fo.write("# no run_test.py exists for this package\n") <NEW_LINE> <DEDENT> fo.write("\nprint('===== %s OK =====')\n" % m.dist()) <NEW_LINE> <DEDENT> return has_tests
|
Create the test files for pkg in the directory given. The resulting
test files are configuration (i.e. platform, architecture, Python and
numpy version, CE/Pro) independent.
Return False, if the package has no tests (for any configuration), and
True if it has.
|
625941bc7b180e01f3dc46dd
|
def gen_logger(logger_name, fName, logger_obj, disable_formatting=False): <NEW_LINE> <INDENT> if not isinstance(logger_obj, list): <NEW_LINE> <INDENT> logger_obj = [logger_obj] <NEW_LINE> <DEDENT> if logger_obj[0]: <NEW_LINE> <INDENT> handlers = logger_obj[0].handlers <NEW_LINE> if handlers and isinstance(handlers, (list, tuple)): <NEW_LINE> <INDENT> handler = handlers[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> handler = handlers <NEW_LINE> <DEDENT> if datetime.datetime.today().strftime("%d%m%Y") in handler.baseFilename: <NEW_LINE> <INDENT> return logger_obj[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger_obj[0].handlers = [] <NEW_LINE> <DEDENT> <DEDENT> logger_obj[0] = getLogger(logger_name, fName, logging.DEBUG, disable_formatting) <NEW_LINE> return logger_obj[0]
|
Method to create log file with today's date in file name.
|
625941bc7cff6e4e8111785f
|
def synthesize_text(self, text, audioFile,): <NEW_LINE> <INDENT> input_text = texttospeech.SynthesisInput(text=text) <NEW_LINE> response = self.client.synthesize_speech( request={"input": input_text, "voice": self.voice, "audio_config": self.audio_config} ) <NEW_LINE> with open(audioFile, "wb") as out: <NEW_LINE> <INDENT> out.write(response.audio_content) <NEW_LINE> print('text %s Audio content written to file %s' % (text, audioFile))
|
Synthesizes speech from the input string of text.
|
625941bc5510c4643540f2c6
|
def strictly_decreasing(values): <NEW_LINE> <INDENT> return all(x > y for x, y in zip(values, values[1:]))
|
True if values are stricly decreasing.
|
625941bccc40096d6159582b
|
def group_blocks(self, stream): <NEW_LINE> <INDENT> return stream
|
Default for files that do not have multiple groups.
|
625941bc3d592f4c4ed1cf55
|
def test_init_valid(self): <NEW_LINE> <INDENT> lang = Lang() <NEW_LINE> assert lang.language == 'en' <NEW_LINE> assert lang.config_intro
|
Tests init with a valid language.
Success conditions :
- self.language == en
- self.config_intro exists and is not empty
|
625941bc82261d6c526ab37b
|
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, APICounterV2Out): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__
|
Returns true if both objects are equal
|
625941bcaad79263cf390916
|
def update_test_execution(self, test_exec_key, add=None, remove=None): <NEW_LINE> <INDENT> if add is None: <NEW_LINE> <INDENT> add = [] <NEW_LINE> <DEDENT> if remove is None: <NEW_LINE> <INDENT> remove = [] <NEW_LINE> <DEDENT> update = {"add": add, "remove": remove} <NEW_LINE> url = "rest/raven/1.0/api/testexec/{0}/test".format(test_exec_key) <NEW_LINE> return self.post(url, update)
|
Associate tests with the given test execution.
:param test_exec_key: Test execution key (eg. 'EXEC-001').
:param add: OPTIONAL: List of Test Keys to associate with the test execution (eg. ['TEST-2', 'TEST-3'])
:param remove: OPTIONAL:
List of Test Keys no longer associate with the test execution (eg. ['TEST-4', 'TEST-5'])
:return:
|
625941bc851cf427c661a3ec
|
def quat_from_matrix(matrix): <NEW_LINE> <INDENT> fourXSquaredMinus1 = matrix.matrix[0][0] - matrix.matrix[1][1] - matrix.matrix[2][2] <NEW_LINE> fourYSquaredMinus1 = matrix.matrix[1][1] - matrix.matrix[0][0] - matrix.matrix[2][2] <NEW_LINE> fourZSquaredMinus1 = matrix.matrix[2][2] - matrix.matrix[0][0] - matrix.matrix[1][1] <NEW_LINE> fourWSquaredMinus1 = matrix.matrix[0][0] + matrix.matrix[1][1] + matrix.matrix[2][2] <NEW_LINE> biggestIndex = 0 <NEW_LINE> fourBiggestSquaredMinus1 = fourWSquaredMinus1 <NEW_LINE> if (fourXSquaredMinus1 > fourBiggestSquaredMinus1): <NEW_LINE> <INDENT> biggestIndex = 1 <NEW_LINE> <DEDENT> elif(fourYSquaredMinus1 > fourBiggestSquaredMinus1): <NEW_LINE> <INDENT> biggestIndex = 2 <NEW_LINE> <DEDENT> elif(fourZSquaredMinus1 > fourBiggestSquaredMinus1): <NEW_LINE> <INDENT> biggestIndex = 3 <NEW_LINE> <DEDENT> biggestVal = math.sqrt(fourBiggestSquaredMinus1 + 1) * 0.5 <NEW_LINE> mult = 0.25 / biggestVal <NEW_LINE> rquat = Quaternion() <NEW_LINE> if biggestIndex is 0: <NEW_LINE> <INDENT> rquat.data[0] = biggestVal <NEW_LINE> rquat.data[1] = (matrix.matrix[1][2] - matrix.matrix[2][1]) * mult <NEW_LINE> rquat.data[2] = (matrix.matrix[2][0] - matrix.matrix[0][2]) * mult <NEW_LINE> rquat.data[3] = (matrix.matrix[0][1] - matrix.matrix[1][0]) * mult <NEW_LINE> return rquat <NEW_LINE> <DEDENT> if biggestIndex is 1: <NEW_LINE> <INDENT> rquat.data[0] = (matrix.matrix[1][2] - matrix.matrix[2][1]) * mult <NEW_LINE> rquat.data[1] = biggestVal <NEW_LINE> rquat.data[2] = (matrix.matrix[0][1] + matrix.matrix[1][0]) * mult <NEW_LINE> rquat.data[3] = (matrix.matrix[2][0] + matrix.matrix[0][2]) * mult <NEW_LINE> return rquat <NEW_LINE> <DEDENT> if biggestIndex is 2: <NEW_LINE> <INDENT> rquat.data[0] = (matrix.matrix[2][0] - matrix.matrix[0][2]) * mult <NEW_LINE> rquat.data[1] = (matrix.matrix[0][1] + matrix.matrix[1][0]) * mult <NEW_LINE> rquat.data[2] = biggestVal <NEW_LINE> rquat.data[3] = (matrix.matrix[1][2] + matrix.matrix[2][1]) * mult <NEW_LINE> 
return rquat <NEW_LINE> <DEDENT> if biggestIndex is 3: <NEW_LINE> <INDENT> rquat.data[0] = (matrix.matrix[0][1] - matrix.matrix[1][0]) * mult <NEW_LINE> rquat.data[1] = (matrix.matrix[2][0] + matrix.matrix[0][2]) * mult <NEW_LINE> rquat.data[2] = (matrix.matrix[1][2] + matrix.matrix[2][1]) * mult <NEW_LINE> rquat.data[3] = biggestVal <NEW_LINE> return rquat
|
Converts a 4x4 rotational matrix to quaternion.
|
625941bcf8510a7c17cf95d4
|
def test_add_to_src(): <NEW_LINE> <INDENT> ipf = ilf.Ip4Filter() <NEW_LINE> ipf.add(0, ['10/8'], ['11/8'], ['80-81/tcp'], 'deny', 'NoGo', None) <NEW_LINE> ipf.add(0, ['9/8'], [], []) <NEW_LINE> assert len(ipf) == 1 <NEW_LINE> dct = ipf.as_dict <NEW_LINE> assert '9.0.0.0/8' in dct[0]['src'] <NEW_LINE> assert '10.0.0.0/8' in dct[0]['src']
|
add to src only
|
625941bc60cbc95b062c6423
|
def get_paths(root_dir, allowed_exts={'.png', '.jpg', '.gif'}): <NEW_LINE> <INDENT> return_paths = [] <NEW_LINE> for root, dirs, files in os.walk(root_dir): <NEW_LINE> <INDENT> valid_files = [ f for f in files if os.path.splitext(f)[1].lower() in allowed_exts ] <NEW_LINE> return_paths.extend([os.path.join(root, f) for f in valid_files]) <NEW_LINE> <DEDENT> return return_paths
|
Recursively walk root_dir to find paths to all files with an allowed
extension (e.g. to find all images in a directory).
|
625941bc596a8972360899a3
|
def test_format_pair(): <NEW_LINE> <INDENT> assert bitmex.format_pair("xbt-usd") == "XBTUSD"
|
test string formating to match API expectations
|
625941bc73bcbd0ca4b2bf57
|
def get(self, entity_id): <NEW_LINE> <INDENT> post = PostModel.get(entity_id) <NEW_LINE> if not post: <NEW_LINE> <INDENT> return redirect(url_for('Main:index')) <NEW_LINE> <DEDENT> comment_form = None <NEW_LINE> if current_user.is_authenticated: <NEW_LINE> <INDENT> comment_form = CommentForm() <NEW_LINE> <DEDENT> return render_template("post/post.html", post=post, comment_form=comment_form)
|
Retrieve a post, if not exits redirect to home,
and if authenticated render a comment form.
|
625941bc6aa9bd52df036c7c
|
def ends_bs(string): <NEW_LINE> <INDENT> return string.endswith('\\')
|
Return True if 'string' ends with a backslash, and False otherwise.
Define this as a named function for no other reason than that pep8
now forbids binding of a lambda expression to a name:
'E731 do not assign a lambda expression, use a def'
|
625941bce1aae11d1e749b8e
|
def generate_redis_protocal(*args): <NEW_LINE> <INDENT> proto = '' <NEW_LINE> proto += '*{0}\r\n'.format(str(len(args))) <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> arg = str(arg) <NEW_LINE> proto += '${0}\r\t'.format(str(len(arg))) <NEW_LINE> proto += '{0}\r\n'.format(str(arg)) <NEW_LINE> <DEDENT> return proto
|
Helper function generates Redis Protocal
|
625941bcc4546d3d9de7290b
|
def checkAwarded(self): <NEW_LINE> <INDENT> pass
|
Check if the Rating should be awarded
|
625941bca8370b771705277a
|
def test_rebuild_server_no_wait(self): <NEW_LINE> <INDENT> with patch("shade.OpenStackCloud"): <NEW_LINE> <INDENT> mock_server = Mock(status="ACTIVE") <NEW_LINE> config = { "servers.rebuild.return_value": mock_server } <NEW_LINE> OpenStackCloud.nova_client = Mock(**config) <NEW_LINE> self.assertEqual( self.client.rebuild_server("a", "b"), mock_server)
|
Test that rebuild_server with no wait and no exception in the
novaclient rebuild call returns the server instance.
|
625941bc55399d3f0558858d
|
def summary(self): <NEW_LINE> <INDENT> import warnings <NEW_LINE> start = None <NEW_LINE> stop = None <NEW_LINE> i = 0 <NEW_LINE> it_pair = zip(self.seq_a, self.seq_b) <NEW_LINE> while True: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> try: <NEW_LINE> <INDENT> p = next(it_pair) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if p is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if start is None: <NEW_LINE> <INDENT> if '-' not in p: <NEW_LINE> <INDENT> start = i <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if '-' not in p: <NEW_LINE> <INDENT> stop = i <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.start = start <NEW_LINE> self.stop = stop <NEW_LINE> if start is None and stop is None: <NEW_LINE> <INDENT> warnings.warn('The two sequences do not align') <NEW_LINE> return <NEW_LINE> <DEDENT> self.insertions = 0 <NEW_LINE> self.deletions = 0 <NEW_LINE> self.ident = 0 <NEW_LINE> self.mismatches = 0 <NEW_LINE> it_pair = zip(self.seq_a[start - 1:stop], self.seq_b[start - 1:stop]) <NEW_LINE> both = self.seq_a + self.seq_b <NEW_LINE> both = both.replace('-', '').replace('*', '') <NEW_LINE> nt_count = sum([both.upper().count(nt) for nt in ['A', 'C', 'G', 'T']]) <NEW_LINE> if nt_count > 0.75 * len(both): <NEW_LINE> <INDENT> seq_type = 'dna' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seq_type = 'aa' <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> try: <NEW_LINE> <INDENT> p = next(it_pair) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if p is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if p == ('-', '-'): <NEW_LINE> <INDENT> print(' double gap in %s %s' % (self.id_a, self.id_b), file=sys.stderr) <NEW_LINE> <DEDENT> if p[0] == '-': <NEW_LINE> <INDENT> self.insertions += 1 <NEW_LINE> <DEDENT> elif p[1] == '-': <NEW_LINE> <INDENT> self.deletions += 1 <NEW_LINE> <DEDENT> if p[0].upper() == p[1].upper(): <NEW_LINE> <INDENT> self.ident += 1 <NEW_LINE> <DEDENT> if 
seq_type == 'dna' and (dna_code[p[0].upper()] & dna_code[p[1].upper()] == set([])): <NEW_LINE> <INDENT> self.mismatches += 1 <NEW_LINE> <DEDENT> elif seq_type == 'aa' and (p[0].upper() != p[1].upper()): <NEW_LINE> <INDENT> self.mismatches += 1 <NEW_LINE> <DEDENT> <DEDENT> return
|
Summary must be called to calculate start, stop, internal gaps, and identity.
|
625941bc187af65679ca4ff8
|
def get(self, param, default=None): <NEW_LINE> <INDENT> return default
|
Returns a property of this instance
|
625941bc5e10d32532c5ee01
|
def add_new(self, block_type, **kwargs): <NEW_LINE> <INDENT> if isinstance(block_type, type) and issubclass(block_type, Block) and hasattr(block_type, "_type"): <NEW_LINE> <INDENT> block_type = block_type._type <NEW_LINE> <DEDENT> elif not isinstance(block_type, str): <NEW_LINE> <INDENT> raise Exception("block_type must be a string or a Block subclass with a _type attribute") <NEW_LINE> <DEDENT> block_id = self._client.create_record(table="block", parent=self._parent, type=block_type) <NEW_LINE> block = self._get_block(block_id) <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> with self._client.as_atomic_transaction(): <NEW_LINE> <INDENT> for key, val in kwargs.items(): <NEW_LINE> <INDENT> if hasattr(block, key): <NEW_LINE> <INDENT> setattr(block, key, val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.warning("{} does not have attribute '{}' to be set; skipping.".format(block, key)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return block
|
Create a new block, add it as the last child of this parent block, and return the corresponding Block instance.
`block_type` can be either a type string, or a Block subclass.
|
625941bc24f1403a92600a43
|
def BoundCaller(function, *params): <NEW_LINE> <INDENT> def wrapped(*args): <NEW_LINE> <INDENT> function( *(params+args) ) <NEW_LINE> <DEDENT> return wrapped
|
Wrap a function with its initial arguments.
|
625941bc76d4e153a657ea0a
|
def _return_retry_timer(self): <NEW_LINE> <INDENT> msg = 'Minion return retry timer set to %s seconds' <NEW_LINE> if self.opts.get('return_retry_timer_max'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> random_retry = randint(self.opts['return_retry_timer'], self.opts['return_retry_timer_max']) <NEW_LINE> retry_msg = msg % random_retry <NEW_LINE> log.debug('%s (randomized)', msg % random_retry) <NEW_LINE> return random_retry <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log.error( 'Invalid value (return_retry_timer: %s or ' 'return_retry_timer_max: %s). Both must be positive ' 'integers.', self.opts['return_retry_timer'], self.opts['return_retry_timer_max'], ) <NEW_LINE> log.debug(msg, DEFAULT_MINION_OPTS['return_retry_timer']) <NEW_LINE> return DEFAULT_MINION_OPTS['return_retry_timer'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.debug(msg, self.opts.get('return_retry_timer')) <NEW_LINE> return self.opts.get('return_retry_timer')
|
Based on the minion configuration, either return a randomized timer or
just return the value of the return_retry_timer.
|
625941bcd99f1b3c44c6746f
|
def get_n_keys(self, timestamp): <NEW_LINE> <INDENT> if timestamp in self._buffer: <NEW_LINE> <INDENT> return len(self._buffer[timestamp]) <NEW_LINE> <DEDENT> return 0
|
Get the number of keys for a given timestamp.
:param timestamp:
the time stamp to check if there's still keys to transmit
|
625941bcd10714528d5ffbba
|
def delete_whitespace(self, word): <NEW_LINE> <INDENT> new_word = "" <NEW_LINE> for w in word: <NEW_LINE> <INDENT> if w != " ": <NEW_LINE> <INDENT> new_word += w <NEW_LINE> <DEDENT> <DEDENT> return new_word
|
deletes whitespaces in strings
:param word:
:return:
|
625941bc4527f215b584c335
|
def comment(self, what, content, parent=None): <NEW_LINE> <INDENT> from . import Answer, Article, Question, Collection, Comment, Pin <NEW_LINE> data = {'content': content} <NEW_LINE> if parent is not None: <NEW_LINE> <INDENT> if not isinstance(parent, Comment): <NEW_LINE> <INDENT> raise TypeError( 'parent comment must be Comment object, {0} given.'.format( parent.__class__.__name__)) <NEW_LINE> <DEDENT> data.update(comment_id=parent.id) <NEW_LINE> <DEDENT> if isinstance(what, (Answer, Article, Collection, Question, Pin)): <NEW_LINE> <INDENT> data.update({'type': what.__class__.__name__.lower(), 'resource_id': what.id}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Can\'t add comment to a {0}.'.format( what.__class__.__name__)) <NEW_LINE> <DEDENT> res = self._session.post(SEND_COMMENT_URL, data=data) <NEW_LINE> return get_result_or_error(SEND_COMMENT_URL, res)
|
向答案,文章,问题,收藏夹,Pin 发送评论
.. seealso::
返回值和可能的异常同 :any:`vote` 方法
.. warning:: 奇怪
让我很诧异的是,就算「想要回复的评论」不属于「想要评论的主体」,
知乎的 API 也会返回执行成功。而且经过测试,这条回复真的有效,
会出现在评论主体的评论列表里。暂时不知道被评论用户的会不会收到消息。
另外,莫名其妙的还可以回复自己的评论……
:param what: 向哪里发送评论,可以是 :any:`Answer`, :any:`Article`
:any:`Question`, :any:`Collection`, :any:`Pin`
:param str|unicode content: 评论内容
:param Comment parent: 想要回复的评论,默认值为 None,则为正常的添加评论
|
625941bc7cff6e4e81117860
|
def __init__(self, driver): <NEW_LINE> <INDENT> self.driver = driver
|
Base Page Class. Superclass that all pages should inherit from. Holds
a number of helper methods that will simplify finding page elements
|
625941bce1aae11d1e749b8f
|
def __init__(self, notes, notes_on_page, page_number, previous_element=None): <NEW_LINE> <INDENT> self.notes = notes <NEW_LINE> self.note_count = notes_on_page <NEW_LINE> self.page_number = page_number <NEW_LINE> self.previous_element = previous_element <NEW_LINE> self.next_element = None
|
:param notes:
:param notes_on_page:
:param page_number:
:param previous_element:
|
625941bcbde94217f3682cd6
|
def lua_to_dict(lua_file): <NEW_LINE> <INDENT> md5_info = {} <NEW_LINE> with open(lua_file) as f: <NEW_LINE> <INDENT> lines = f.readlines() <NEW_LINE> <DEDENT> for each_line in lines: <NEW_LINE> <INDENT> if 'md5' in each_line: <NEW_LINE> <INDENT> parts = each_line.split('"') <NEW_LINE> filename = parts[1] <NEW_LINE> md5_value = parts[3] <NEW_LINE> if filename in md5_info: <NEW_LINE> <INDENT> print('[ERROR] {} found more than once in {}'.format(filename, lua_file)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> md5_info[filename] = md5_value <NEW_LINE> <DEDENT> <DEDENT> return md5_info
|
将lua_file所包含的文件以及md5值导入到一个字典里
|
625941bc4f88993c3716bf46
|
def read_from_stderr(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> line = stderr.readline() <NEW_LINE> while len(line) > 0: <NEW_LINE> <INDENT> line = line.rstrip() <NEW_LINE> print(line, file=sys.stderr) <NEW_LINE> line = stderr.readline() <NEW_LINE> <DEDENT> <DEDENT> except (PipeTimeout, socket.timeout): <NEW_LINE> <INDENT> pass
|
Read stderr stream, time out if necessary.
|
625941bcbd1bec0571d90512
|
def _headers_replace(self, message, escape_regex=False): <NEW_LINE> <INDENT> return re.sub(self._replacer_regex('HEADERS'), self._regex_replacer(self._header_replacer, escape_regex), message)
|
Replace a header indicator in a message with that headers value from
the prior request.
|
625941bc5fc7496912cc3858
|
def get_tensor_size(ts, bs=None): <NEW_LINE> <INDENT> d, s = 1, 1 <NEW_LINE> ndims = ts.shape.ndims <NEW_LINE> if ndims is None: <NEW_LINE> <INDENT> return d <NEW_LINE> <DEDENT> for i in range(0, ndims): <NEW_LINE> <INDENT> v = ts.shape[i].value <NEW_LINE> if v is None: <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> v = bs if bs is not None else d <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = d <NEW_LINE> <DEDENT> <DEDENT> s *= v <NEW_LINE> <DEDENT> return s*(ts.dtype.size)
|
Return the size of tensor in bytes.
The first unknown dimension of a tensor will be filled by `bs`.
The other unknown dimenstions of a tensor will be filled by a default
value.
|
625941bcd6c5a10208143f22
|
def setup(self): <NEW_LINE> <INDENT> pass
|
Build this group.
This method should be overidden by your Group's method.
You may call 'add_subsystem' to add systems to this group. You may also issue connections,
and set the linear and nonlinear solvers for this group level. You cannot safely change
anything on children systems; use the 'configure' method instead.
Available attributes:
name
pathname
comm
metadata
|
625941bc57b8e32f52483374
|
def send_message(self, message="", **kwargs): <NEW_LINE> <INDENT> emoji = ":house:" <NEW_LINE> if kwargs.get("data") and kwargs["data"].get("emoji"): <NEW_LINE> <INDENT> emoji = kwargs["data"]["emoji"] <NEW_LINE> <DEDENT> profile = dict() <NEW_LINE> profile['status_text'] = message <NEW_LINE> profile['status_emoji'] = emoji <NEW_LINE> url = '%s?token=%s&profile=%s' % ( self._resource, self._token, urllib.parse.quote_plus(json.dumps(profile))) <NEW_LINE> response = requests.post(url, timeout=10) <NEW_LINE> if response.status_code not in (200, 201): <NEW_LINE> <INDENT> _LOGGER.exception( "Error sending message. Response %d: %s:", response.status_code, response.reason) <NEW_LINE> <DEDENT> elif not response.json().get("ok"): <NEW_LINE> <INDENT> _LOGGER.exception( "Error sending message: %s:", response.json().get("error"))
|
Send a message to a user.
|
625941bc71ff763f4b549562
|
def test_add_supplement_audit(self): <NEW_LINE> <INDENT> pass
|
Test case for add_supplement_audit
Add new audit for a supplement # noqa: E501
|
625941bcbe7bc26dc91cd4e0
|
def extractall(self, *args, **kwargs): <NEW_LINE> <INDENT> self.zipfile.extractall(*args, **kwargs)
|
Extract all files from the archive.
|
625941bcdc8b845886cb540e
|
def calc_n_pickups(self, filename='pickups.txt'): <NEW_LINE> <INDENT> n_pickups = [] <NEW_LINE> for folder in self.folders: <NEW_LINE> <INDENT> n_pickups.append(0) <NEW_LINE> path = self.path + folder + '/' <NEW_LINE> with open(path + filename) as file: <NEW_LINE> <INDENT> for line in file: <NEW_LINE> <INDENT> data = line.split() <NEW_LINE> if data[0] != '#': <NEW_LINE> <INDENT> n_pickups[-1] += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> print('Average:', np.mean(n_pickups)) <NEW_LINE> print('Standard deviation:', np.std(n_pickups)) <NEW_LINE> with open(os.path.join(self.path, filename), 'w') as file: <NEW_LINE> <INDENT> file.write(str(np.mean(n_pickups)) + '\t' + str(np.std(n_pickups)))
|
Calculate the average number of pickups and its standard deviation
and save them in <filename>.
|
625941bc656771135c3eb746
|
def import_PV_profile_5_min(): <NEW_LINE> <INDENT> print('Importing PV profile at a 5-minutes time step.') <NEW_LINE> PV_prod = pd.read_csv('data/PV_production.csv', sep=';') <NEW_LINE> PV_prod_profile_5min = (1000 * PV_prod['Power(kW)']).tolist() <NEW_LINE> return PV_prod_profile_5min
|
Import an example of PV production profile with a time step of 5 minutes
:return: PV production profile for a 5-minutes time step [W]
|
625941bca934411ee3751575
|
def findIdenticalMessage(self, target_potmsgset, target_potemplate): <NEW_LINE> <INDENT> store = Store.of(self) <NEW_LINE> forms_match = (TranslationMessage.msgstr0ID == self.msgstr0ID) <NEW_LINE> for form in xrange(1, TranslationConstants.MAX_PLURAL_FORMS): <NEW_LINE> <INDENT> form_name = 'msgstr%d' % form <NEW_LINE> form_value = getattr(self, 'msgstr%dID' % form) <NEW_LINE> forms_match = And( forms_match, getattr(TranslationMessage, form_name) == form_value) <NEW_LINE> <DEDENT> twins = store.find(TranslationMessage, And( TranslationMessage.potmsgset == target_potmsgset, TranslationMessage.potemplate == target_potemplate, TranslationMessage.language == self.language, TranslationMessage.id != self.id, forms_match)) <NEW_LINE> return twins.order_by(TranslationMessage.id).first()
|
See `ITranslationMessage`.
|
625941bc4c3428357757c205
|
def test_init(self): <NEW_LINE> <INDENT> arg1 = 1 <NEW_LINE> arg2 = 2 <NEW_LINE> arg3 = 3 <NEW_LINE> self.assertRaises(TypeError, ThreeDimensionalPoint, 'a', arg2, arg3) <NEW_LINE> self.assertRaises(TypeError, ThreeDimensionalPoint, arg1, 'b', arg3) <NEW_LINE> self.assertRaises(TypeError, ThreeDimensionalPoint, arg1, arg2, 'c')
|
Test errors for __init__ method
|
625941bcd6c5a10208143f23
|
def linux_distribution(distname='', version='', id='', supported_dists=_supported_dists, full_distribution_name=1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> etc = os.listdir(_UNIXCONFDIR) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return distname, version, id <NEW_LINE> <DEDENT> etc.sort() <NEW_LINE> for file in etc: <NEW_LINE> <INDENT> m = _release_filename.match(file) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> _distname, dummy = m.groups() <NEW_LINE> if _distname in supported_dists: <NEW_LINE> <INDENT> distname = _distname <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return _dist_try_harder(distname, version, id) <NEW_LINE> <DEDENT> with open(os.path.join(_UNIXCONFDIR, file), 'r', encoding='utf-8', errors='surrogateescape') as f: <NEW_LINE> <INDENT> firstline = f.readline() <NEW_LINE> <DEDENT> _distname, _version, _id = _parse_release_file(firstline) <NEW_LINE> if _distname and full_distribution_name: <NEW_LINE> <INDENT> distname = _distname <NEW_LINE> <DEDENT> if _version: <NEW_LINE> <INDENT> version = _version <NEW_LINE> <DEDENT> if _id: <NEW_LINE> <INDENT> id = _id <NEW_LINE> <DEDENT> return distname, version, id
|
Tries to determine the name of the Linux OS distribution name.
The function first looks for a distribution release file in
/etc and then reverts to _dist_try_harder() in case no
suitable files are found.
supported_dists may be given to define the set of Linux
distributions to look for. It defaults to a list of currently
supported Linux distributions identified by their release file
name.
If full_distribution_name is true (default), the full
distribution read from the OS is returned. Otherwise the short
name taken from supported_dists is used.
Returns a tuple (distname, version, id) which default to the
args given as parameters.
|
625941bc167d2b6e31218a71
|
def binaryTreePaths(self, root): <NEW_LINE> <INDENT> if root: <NEW_LINE> <INDENT> self.path.append(root.val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if not root.left and not root.right: <NEW_LINE> <INDENT> self.res.append('->'.join([str(x) for x in self.path])) <NEW_LINE> <DEDENT> if root.left: <NEW_LINE> <INDENT> self.binaryTreePaths(root.left) <NEW_LINE> <DEDENT> if root.right: <NEW_LINE> <INDENT> self.binaryTreePaths(root.right) <NEW_LINE> <DEDENT> self.path.pop(-1) <NEW_LINE> return self.res
|
:type root: TreeNode
:rtype: List[str]
|
625941bc7d847024c06be194
|
def search(self, query, download_dir, count): <NEW_LINE> <INDENT> self.i_img = 0 <NEW_LINE> url = "https://www.google.com/search?q=" + query + "&source=lnms&tbm=isch" <NEW_LINE> driver = webdriver.Firefox() <NEW_LINE> driver.get(url) <NEW_LINE> while self.i_img < count: <NEW_LINE> <INDENT> for scroll in range(10): <NEW_LINE> <INDENT> driver.execute_script("window.scrollBy(0,1000000)") <NEW_LINE> time.sleep(0.2) <NEW_LINE> <DEDENT> time.sleep(0.5) <NEW_LINE> images = driver.find_elements_by_xpath("//a[@class='rg_l']") <NEW_LINE> for image in images: <NEW_LINE> <INDENT> if self.i_img >= count: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> google_url = image.get_attribute("href") <NEW_LINE> google_url, complex_url = google_url.split("imgurl=") <NEW_LINE> org_url, other_url = complex_url.split("&imgrefurl=") <NEW_LINE> org_url = parse.unquote(org_url) <NEW_LINE> self.download_link(org_url, download_dir) <NEW_LINE> <DEDENT> button_smb = driver.find_element_by_xpath("//input[@id='smb']") <NEW_LINE> if button_smb is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> button_smb.click() <NEW_LINE> <DEDENT> except ElementNotInteractableException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> driver.quit()
|
Uses Firefox to get image links, and download them
:param query: search to query using google images
:param download_dir: path to directory to download search results
:param count: number of results to download
|
625941bcd7e4931a7ee9ddf7
|
def get_test_files(path, mask = None, recursive=True): <NEW_LINE> <INDENT> current = [] <NEW_LINE> directory_items = os.listdir(path) <NEW_LINE> directory_items.sort() <NEW_LINE> for item in directory_items: <NEW_LINE> <INDENT> add_to_pythonpath = False <NEW_LINE> item_path = os.path.join(path, item) <NEW_LINE> if os.path.isfile(item_path): <NEW_LINE> <INDENT> if fnmatch.fnmatch(item, mask): <NEW_LINE> <INDENT> add_to_pythonpath = True <NEW_LINE> current.append(item) <NEW_LINE> <DEDENT> <DEDENT> elif os.path.isdir(item_path): <NEW_LINE> <INDENT> if recursive: <NEW_LINE> <INDENT> current.extend(get_test_files(item_path, mask = mask)) <NEW_LINE> <DEDENT> <DEDENT> if add_to_pythonpath: <NEW_LINE> <INDENT> sys.path.append(path) <NEW_LINE> <DEDENT> <DEDENT> return current
|
Returns test files for path recursively
|
625941bc23e79379d52ee442
|
def get_result(self): <NEW_LINE> <INDENT> return self._result
|
Get the process result when deploying a single machine or
releasing a single machine
|
625941bcd164cc6175782c28
|
def bond_cash_flows(maturity, principal=100, coupon_rate=0.03, coupons_per_year=12): <NEW_LINE> <INDENT> n_coupons = round(maturity * coupons_per_year) <NEW_LINE> coupon_amt = principal * coupon_rate / coupons_per_year <NEW_LINE> coupon_times = np.arange(1, n_coupons + 1) <NEW_LINE> cash_flows = pd.Series(data=coupon_amt, index=coupon_times) <NEW_LINE> cash_flows.iloc[-1] += principal <NEW_LINE> return cash_flows
|
Returnd a series of cash flows from bond,
indexed by a coupon number
|
625941bc24f1403a92600a44
|
def main(): <NEW_LINE> <INDENT> data = input() <NEW_LINE> data = data.split() <NEW_LINE> print(recur_power(float(data[0]), int(data[1])))
|
This is main function
|
625941bccc0a2c11143dcd6b
|
def merge_places (self, target, sources) :
    # Merge the places named in `sources` into a brand-new place `target`:
    # markings are added, type checkers are or'ed, and parallel arcs are
    # combined into MultiArcs.  The source places are NOT removed.
    srclist = [self.place(p) for p in iterate(sources)]
    # Build the new place from the first source so it keeps that class.
    new = srclist[0].__class__(target, srclist[0].tokens, srclist[0]._check)
    self.add_place(new)
    for place in srclist[1:] :
        # 'or' the type checkers and sum the markings.
        new._check |= place._check
        new.tokens.add(place.tokens)
    # Collect all post-arcs (place -> transition) grouped by transition.
    post = {}
    for place in srclist :
        for trans, label in place.post.items() :
            if trans not in post :
                post[trans] = []
            # Flatten MultiArcs so parallel arcs can be re-grouped below.
            if label.__class__ is MultiArc :
                post[trans].extend([x.copy() for x in label])
            else :
                post[trans].append(label.copy())
    for trans, labels in post.items() :
        # A single label stays a plain arc; several become one MultiArc.
        if len(labels) == 1 :
            self.add_input(target, trans, labels[0])
        else :
            self.add_input(target, trans, MultiArc(labels))
    # Same treatment for pre-arcs (transition -> place).
    pre = {}
    for place in srclist :
        for trans, label in place.pre.items() :
            if trans not in pre :
                pre[trans] = []
            if label.__class__ is MultiArc :
                pre[trans].extend([x.copy() for x in label])
            else :
                pre[trans].append(label.copy())
    for trans, labels in pre.items() :
        if len(labels) == 1 :
            self.add_output(target, trans, labels[0])
        else :
            self.add_output(target, trans, MultiArc(labels))
|
Create a new place by merging those in `sources`. Markings
are added, place types are 'or'ed and arcs labels are joinded
into multi-arcs, the sources places are not removed. Use
places names.
>>> n = PetriNet('n')
>>> n.add_place(Place('p1', [1], tInteger))
>>> n.add_place(Place('p2', [2.0], tFloat))
>>> n.add_transition(Transition('t1'))
>>> n.add_transition(Transition('t2'))
>>> n.add_output('p1', 't1', Value(1))
>>> n.add_output('p2', 't2', Value(2.0))
>>> n.add_output('p2', 't1', Value(2.0))
>>> n.merge_places('p', ['p1', 'p2'])
>>> (n.pre('p'), n.post('t1')) == (set(['t2', 't1']), set(['p2', 'p', 'p1']))
True
>>> list(sorted(n.node('p').pre.items()))
[('t1', MultiArc((Value(1), Value(2.0)))),
('t2', Value(2.0))]
>>> n.node('p').tokens == MultiSet([1, 2.0])
True
>>> n.node('p').checker()
(Instance(int) | Instance(float))
@param target: the name of the created place
@type target: `str`
@param sources: the list of places names to be merged (or a
single place name)
@type sources: `list`
|
625941bc29b78933be1e5592
|
def update_contexts(self):
    """Populate the '$%new' and '$%old' evaluation contexts.

    '$%new' falls back to the 'master' entry when this node has no
    expected state; '$%old' falls back to an empty dict.
    """
    expected = self.expected_state
    if self.node_id in expected:
        self.context['$%new'] = expected[self.node_id]
    else:
        self.context['$%new'] = expected['master']
    self.context['$%old'] = self.current_state.get(self.node_id, {})
|
Set old and new contexts for further evaluation
|
625941bcb5575c28eb68ded9
|
def get_trade(self, symbol):
    """Fetch the latest trade data for *symbol* from the market API.

    :param symbol: trading pair symbol, e.g. ``'btcusdt'``
    :return: decoded HTTP response from the /market/trade endpoint
    """
    endpoint = MARKET_URL + '/market/trade'
    return self.http_get_request(endpoint, {'symbol': symbol})
|
:param symbol
:return:
|
625941bc1b99ca400220a98c
|
def _file_logger(self, logger):
    """Attach a file handler to *logger*, with path, format and level
    taken from this object's configuration."""
    handler = logging.FileHandler('%s' % self.logfilename)
    logger.addHandler(handler)
    handler.setFormatter(
        logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s'))
    level_name = self.config.get("Logging", 'file_logs_level').lower()
    handler.setLevel(logging_levels.get(level_name, logging.NOTSET))
|
Configure File Logger
|
625941bc63d6d428bbe443ca
|
def update(
    self,
    analysis_id: str,
    commands: List[Command],
    labware: List[LoadedLabware],
    pipettes: List[LoadedPipette],
    errors: List[ErrorOccurrence],
) -> None:
    """Store a completed analysis, marked NOT_OK when any errors occurred."""
    result = AnalysisResult.NOT_OK if errors else AnalysisResult.OK
    self._analyses_by_id[analysis_id] = CompletedAnalysis.construct(
        id=analysis_id,
        result=result,
        commands=commands,
        labware=labware,
        pipettes=pipettes,
        errors=errors,
    )
|
Update analysis results in the store.
|
625941bcd4950a0f3b08c22c
|
def path_dict_to_path(start: int, end: int, path_dict: Dict[int, WeightedEdge]) -> WeightedPath:
    """Rebuild the edge path from *start* to *end* out of a predecessor map."""
    if not path_dict:
        return []
    path: WeightedPath = []
    edge: WeightedEdge = path_dict[end]
    path.append(edge)
    # Walk predecessor edges back to the start, then flip into forward order.
    while edge.u != start:
        edge = path_dict[edge.u]
        path.append(edge)
    path.reverse()
    return path
|
返回从某个起点到某个终点的路径
|
625941bcadb09d7d5db6c66d
|
def test_create():
    """Check that a basic create yields the expected entry fields."""
    pdic, _ = _create_dummy(TEST_CREATE_INPUT)
    entry = pdic["nick"]
    assert entry.special_char
    assert entry.base == 32
    assert entry.iteration == 1
    assert entry.start == 0
    assert entry.finish == 20
|
Test a basic create.
|
625941bc5fc7496912cc3859
|
def del_role_by_team_name_role_name_role_id(self, role_id, tenant_name):
    """Delete the role identified by *role_id* within the named tenant (team)."""
    team = self.get_tenant(tenant_name=tenant_name)
    role_repo.del_role_by_team_pk_role_name_role_id(tenant_pk=team.pk,
                                                    role_id=role_id)
|
删除一个角色
|
625941bc23849d37ff7b2f6c
|
def isValid(self, s):
    """Return True when every bracket in *s* is properly matched and nested.

    :type s: str
    :rtype: bool
    """
    if len(s) % 2:
        return False
    # `closers` avoids shadowing the builtin `map` used by the original.
    closers = {'(': ')', '[': ']', '{': '}'}
    pending = []
    for ch in s:
        if pending and closers.get(pending[-1]) == ch:
            pending.pop()
        else:
            pending.append(ch)
    return not pending
|
:type s: str
:rtype: bool
|
625941bcf548e778e58cd457
|
def expand_kids_by_index(self, *indices: int) -> None:
    """Inline the children at the given indices, replacing each child
    with its own children."""
    # Process highest index first so earlier positions stay valid.
    for idx in sorted(indices, reverse=True):
        child = self.children[idx]
        self.children[idx:idx + 1] = child.children
|
Expand (inline) children at the given indices
|
625941bc851cf427c661a3ed
|
def nullify(value):
    """Return a SQL-ready literal for *value*.

    'null' for None / 'None' / 'none' / empty strings, double-quoted text
    for non-empty strings, the value itself otherwise.
    """
    if value is None or value == 'None' or value == 'none':
        return 'null'
    if isinstance(value, str):
        # Bug fix: the original duplicated this entire str branch verbatim;
        # the second copy was unreachable dead code.
        if value == '':
            return 'null'
        return '"%s"' % value
    return value
|
Returns proper SQL value, with null for None and
quotes if needed.
|
625941bc63f4b57ef0000ffb
|
@command
def unregsaycmd(argv):
    """Remove the say command that refers to a particular block."""
    name = argv[1]
    try:
        proxy = say_command_proxies.pop(name)
    except KeyError:
        dbgmsg(0, 'unregsaycmd: Did not find command: {}'.format(name))
    else:
        # Detach the proxy from the underlying say command.
        get_say_command(name).remove_callback(proxy)
|
Removes a say command that refers to a particular block.
|
625941bc66673b3332b91f6c
|
def test_ip_address_validation_3():
    """ip_address_validation should classify ip_address_3 as 'Neither'."""
    outcome = ra.ip_address_validation(tv.ip_address_3)
    assert outcome == "Neither"
|
testing ip_address_username with ip_address_3
|
625941bc462c4b4f79d1d5ab
|
def set_ConactID(self, value):
    # Required integer contact ID; used to construct the request URL.
    InputSet._set_input(self, 'ConactID', value)
|
Set the value of the ConactID input for this Choreo. ((required, integer) The ID of the contact you want to retrieve. This is used to construct the URL for the request.)
|
625941bc5fcc89381b1e1598
|
def get_roic(soup_roic):
    # Extract the "Return on Invested Capital" value from a BeautifulSoup
    # document: locate the labelled <td> and read its sibling cell.
    pattern = re.compile("Return on Invested Capital")
    # NOTE(review): bs4 deprecated the text= keyword in favour of string=.
    roic_p_tag = soup_roic.find("td", text=pattern)
    try:
        roic_data = roic_p_tag.find_next_sibling( "td" )
    except AttributeError as e:
        # roic_p_tag is None when the label row is missing.
        print("No Roic data found")
        roic = False  # NOTE(review): callers get False here but a str below.
        return roic
    else:
        roic = roic_data.string
        # NOTE(review): `stock` is not defined in this function -- presumably
        # a module-level global; verify it exists or pass it in explicitly.
        print(stock, "had", roic, "ROIC")
        return roic
|
ROIC
|
625941bc4e696a04525c9327
|
def glMapGrid1d(*argv):
    """Placeholder stub for the OpenGL glMapGrid1d binding; does nothing."""
    return None
|
no string
|
625941bc7b25080760e39336
|
def router():
    # Register an API route for the enclosing class (internal helper).
    if not config.API_ROUTES_ENABLED:
        # Routing disabled: hand the class back untouched.
        return cls
    else:
        # NOTE(review): this branch returns None after registering; if the
        # result is used as a decorator return value, it should probably be
        # `return _create_route_from_class(cls)` or `return cls` -- confirm.
        _create_route_from_class(cls)
|
adds a route to the given class, internal
|
625941bcd53ae8145f87a150
|
def check_field_attributes(self, decimal_places, max_digits):
    """Fail fast when required Money field attributes are missing.

    Django < 1.7 has no system-checks framework, so raise a clear
    ValueError rather than letting a hard-to-debug error surface later.
    """
    for attr_name, attr_value in (('max_digits', max_digits),
                                  ('decimal_places', decimal_places)):
        if attr_value is None:
            raise ValueError('You have to provide a %s attribute to '
                             'Money fields.' % attr_name)
|
Django < 1.7 has no system checks framework.
Avoid giving the user hard-to-debug errors if they miss required attributes.
|
625941bcbe8e80087fb20b22
|
def read(self, n=1):
    """Consume and return the next *n* characters from the fake device."""
    head, self._RX_buf = self._RX_buf[:n], self._RX_buf[n:]
    return head
|
reads n characters from the fake device.
|
625941bc2eb69b55b151c787
|
def min(self, comparer=None):
    """Return a sequence holding the single minimum element, according to
    *comparer* when given, else the default ordering."""
    minima = self.min_by(identity, comparer)
    return minima.select(first_only)
|
Returns the minimum element in an observable sequence according to
the optional comparer else a default greater than less than check.
Example
res = source.min()
res = source.min(lambda x, y: x.value - y.value)
comparer -- {Function} [Optional] Comparer used to compare elements.
Returns an observable sequence {Observable} containing a single element
with the minimum element in the source sequence.
|
625941bcd8ef3951e3243418
|
def serve_forever(self):
    # Run the ZeroMQ request/reply loop forever: receive a request, drive
    # it through the context pipeline, and send back the serialized reply.
    while True:
        error = None
        initial_ctx = ZmqMethodContext(self)
        # Block until the next request arrives on the socket.
        initial_ctx.in_string = [self.zmq_socket.recv()]
        contexts = self.generate_contexts(initial_ctx)
        p_ctx, others = contexts[0], contexts[1:]
        # Each stage short-circuits: the first error becomes the reply.
        if p_ctx.in_error:
            p_ctx.out_object = p_ctx.in_error
            error = p_ctx.in_error
        else:
            self.get_in_object(p_ctx)
            if p_ctx.in_error:
                p_ctx.out_object = p_ctx.in_error
                error = p_ctx.in_error
            else:
                self.get_out_object(p_ctx)
                if p_ctx.out_error:
                    p_ctx.out_object = p_ctx.out_error
                    error = p_ctx.out_error
        self.get_out_string(p_ctx)
        # Auxiliary contexts are processed after the primary one.
        process_contexts(self, others, error)
        self.zmq_socket.send(''.join(p_ctx.out_string))
        p_ctx.close()
|
Runs the ZeroMQ server.
|
625941bcde87d2750b85fc6a
|
def test_004(self):
    """Admin page: search study materials by title and assert results exist."""
    driver = self.driver
    self.login()
    driver.find_element_by_link_text("学习资料管理").click()
    # Fix: switch_to_frame() was removed from Selenium; use switch_to.frame().
    driver.switch_to.frame("manage")
    # NOTE(review): find_element_by_* is also removed in Selenium 4; migrate
    # to driver.find_element(By.XPATH, ...) when upgrading.
    driver.find_element_by_xpath("html/body/div[1]/div/form/input[4]").send_keys("认")
    driver.find_element_by_xpath("html/body/div[1]/div/form/input[5]").click()
    xtr1 = driver.find_element_by_xpath(
        "html/body/div[1]/table/tbody[2]/tr[1]/td[3]").get_attribute("textContent")
    self.assertNotEqual(xtr1, "查无学习资料!")
|
管理员主页-学习资料管理功能按标题搜索调用测试
|
625941bc3617ad0b5ed67dd4
|
def get_name_logfile(files):
    """Return the first file name containing '_ref.log', or None if absent."""
    for name in files:
        if "_ref.log" in name:
            return name
    return None
|
Return the first "_ref.log" log file from the given files, if any
|
625941bc15baa723493c3e4e
|
def local_parameter(self, mu):
    # Map the global |Parameter| components onto their local names.
    # NOTE: Python 2 code (dict.iteritems); use .items() under Python 3.
    assert mu.__class__ is Parameter
    return (None if self.parameter_local_type is None
            else {k: mu[v] for k, v in self._parameter_global_names.iteritems()})
|
Extract the local parameter components with their local names from a given |Parameter|.
See :meth:`build_parameter_type` for details.
|
625941bcad47b63b2c509e5b
|
def __init__(self, data, version, mode, error):
    # Validate mode / error level / version against the lookup tables,
    # then build the QR code.  See pyqrcode.QRCode for parameter docs.
    self.data = data
    if mode in tables.modes:
        self.mode = tables.modes[mode]
    else:
        raise ValueError('{0} is not a valid mode.'.format(mode))
    if error in tables.error_level:
        self.error = tables.error_level[error]
    else:
        raise ValueError('{0} is not a valid error '
                         'level.'.format(error))
    if 1 <= version <= 40:
        self.version = version
    else:
        raise ValueError("Illegal version {0}, version must be between "
                         "1 and 40.".format(version))
    # Number of error-correction code words for this version/level pair.
    self.error_code_words = tables.eccwbi[version][self.error]
    self.buffer = io.StringIO()
    self.add_data()
    self.make_code()
|
See :py:class:`pyqrcode.QRCode` for information on the parameters.
|
625941bc8e7ae83300e4aea7
|
def _adjustTimers(self, start=(), stop=()):
    """Start then stop the named timers in ``self._timers``.

    :param start: iterable of timer names to start
    :param stop: iterable of timer names to stop
    """
    # Bug fix: the defaults were mutable lists; immutable tuples are safe
    # and behave identically since the arguments are only iterated.
    for name in start:
        self._timers[name].start()
    for name in stop:
        self._timers[name].stop()
|
Helper method for starting/stoping timers.
|
625941bc01c39578d7e74d17
|
@app.route('/category_edit/<category_name>', methods=['GET', 'POST'])
def category_edit(category_name):
    # Edit a recipe category's name for the logged-in owner; on POST, map
    # each outcome string from new_cat.category_edit onto a page message.
    if g.owner:
        if request.method == "POST":
            cat_name = category_name
            new_cat_name = request.form['cat_name']
            category_result = new_cat.category_edit(
                cat_name, new_cat_name, g.owner)
            data = new_cat.view_recipe_category(g.owner)
            if category_result == "successfully updated category name":
                message = "Successfully edited category"
                return render_template("recipe-categories.html",
                                       success=message, data=data)
            elif category_result == "Category exists":
                message = "Can't edit category! Category Name Exists"
                return render_template("recipe-categories.html",
                                       msg=message, data=data)
            elif category_result == "Null category name":
                message = "Category name is NOT provided"
                return render_template("recipe-categories.html",
                                       msg=message, data=data)
            elif category_result == "category name exists":
                message = "Category Name exists"
                return render_template("recipe-categories.html",
                                       msg=message, data=data)
            elif category_result == "category name has special characters":
                message = "Category name should only have letters"
                return render_template("recipe-categories.html",
                                       msg=message, data=data)
            else:
                message = "unable to edit recipe category"
                return render_template("recipe-categories.html",
                                       msg=message, data=data)
        # GET request: just render the categories page.
        return render_template("recipe-categories.html")
    # Not logged in: send the user to the login page.
    return render_template("login.html")
|
method to edit a recipe category
|
625941bca4f1c619b28aff1b
|
def GetPointer(self):
    # SWIG-generated delegate into the ITK wrapper module.
    # GetPointer(self) -> itkRecursiveMultiResolutionPyramidImageFilterID3ID3
    return _itkRecursiveMultiResolutionPyramidImageFilterPython.itkRecursiveMultiResolutionPyramidImageFilterID3ID3_GetPointer(self)
|
GetPointer(self) -> itkRecursiveMultiResolutionPyramidImageFilterID3ID3
|
625941bc498bea3a759b998b
|
def get_code(self):
    # Return the source code of the BinarySearchTree class (for display).
    import inspect
    return inspect.getsource(BinarySearchTree)
|
returns the code of the current class
|
625941bce8904600ed9f1e05
|
def getInfoDict(self):
    """Return (question-info list, answer-dict list) for this question.

    The answer list may hold zero, one, or many entries -- one per answer.
    """
    # Keep the original call order: contents first, then question info.
    contents = self.getAnswerContentList()
    question_dicts = [self.getQuestionInfoDict()]
    answer_dicts = [self.getAnswerDict(content) for content in contents]
    return question_dicts, answer_dicts
|
列表长度有可能为0(没有回答),1(1个回答),2(2个回答)...,需要分情况处理
|
625941bc7b180e01f3dc46df
|
def consumption_results(m, parameters, parts, Resources):
    """Extract heat and power consumption time series from model *m*.

    :return: (heat consumers DataFrame, power consumers DataFrame),
        one column per consuming part, indexed by time.
    """
    times = m.times_between(parameters['t_start'], parameters['t_end'])

    def _is_consumer(part, resource):
        # FlowNetwork nodes never count as consumers.
        if not isinstance(part, fs.FlowNetwork):
            return resource in part.consumption

    def _frame(resource):
        series = {p.name: fs.get_series(p.consumption[resource], times)
                  for p in m.descendants if _is_consumer(p, resource)}
        return pd.DataFrame.from_dict(series)

    return _frame(Resources.heat), _frame(Resources.power)
|
Takes a model object, extracts and returns the consumption information.
|
625941bc4a966d76dd550ee8
|
def getTask(self):
    """Send an HTTP getTask request for the partition id in the entry field.

    Registers displayResponse() as the callback for the deferred result
    and shows a warning dialog on empty or invalid input.
    """
    try:
        self.stTask.config(state=NORMAL)
        _input = self.eParId.get()
        if not _input:
            tkMessageBox.showwarning('Warning', 'Bitte partition id eingeben')
        elif '-' in _input:
            # Negative / ranged ids are rejected via the handler below.
            raise ValueError(_input)
        else:
            d = taskProcessor.getTask(int(_input))
            d.addCallback(self.displayResponse)
    except Exception:
        # Bug fix: the original bare `except:` also swallowed SystemExit
        # and KeyboardInterrupt; catch Exception instead.
        tkMessageBox.showwarning('Warning', 'Partition id nicht vorhanden')
|
Call function getTask() to send a http request.
Register a callback to function displayResponse().
|
625941bc4428ac0f6e5ba6cd
|
def check_dead_nodes(self):
    """Remove nodes whose last_seen is older than the allowed max_time."""
    # Improvement: evaluate the clock once instead of per node, so every
    # node is judged against the same instant.
    now = int(time.time())
    dead = [node for node in self.nodes.values()
            if now > node.last_seen + self.max_time]
    for node in dead:
        logger.info("Removing inactive node {}".format(node.uid))
        self.node_mapping.pop(node.resources['ip'])
        self.remove_node(node)
|
Check and remove nodes that are not alive anymore.
|
625941bc45492302aab5e19c
|
def response_false(message="请求失败"):
    """Return a JSON failure response.

    :param message: failure description included in the payload
    :return: JsonResponse with success="false" and the message
    """
    payload = {"success": "false", "message": message}
    return JsonResponse(payload)
|
返回失败的结果
:param message:
:return: json结果
|
625941bc97e22403b379ce74
|
def make_agents(self):
    # Create 50 boids at random positions, 4 large + 4 small (co-located)
    # obstacles, and optionally one predator; add each to space + schedule.
    for i in range(50):
        x = random.random() * self.space.x_max
        y = random.random() * self.space.y_max
        center_x = self.space.x_max/2
        center_y = self.space.y_max/2
        pos = np.array((x, y))
        center = np.array((center_x,center_y))
        # NOTE(review): this random velocity is immediately overwritten by
        # the next assignment -- dead code or a leftover experiment.
        velocity = np.random.random(2) * 2 - 1
        velocity = np.zeros(2) + self.space.get_distance(pos, center)
        velocity[0] *= self.target[0]
        velocity[1] *= self.target[1]
        boid = Boid(i, self, pos, self.speed, velocity, self.vision,
                    self.separation, self.collision_separation,
                    "boid.png", 10, **self.factors)
        self.space.place_agent(boid, pos)
        self.schedule.add(boid)
    for i in range(4):
        x = random.random() * self.space.x_max
        y = random.random() * self.space.y_max
        pos = np.array((x, y))
        # A large obstacle (radius 30) with a small one (4) at the same spot.
        obstacle = Obstacle(i + self.population, self, pos, 30)
        obstacle2 = Obstacle(i + self.population + 5, self, pos, 4)
        self.space.place_agent(obstacle, pos)
        self.space.place_agent(obstacle2,pos)
        self.schedule.add(obstacle)
        self.schedule.add(obstacle2)
    if self.predators:
        x = random.random() * self.space.x_max
        y = random.random() * self.space.y_max
        pos = np.array((x, y))
        velocity = np.random.random(2) * 2 - 1
        # Predator is slightly faster and sees slightly farther than boids.
        predator = Predator(2003,self, pos, self.speed + 0.1, velocity,
                            self.vision + 5, 12, self.collision_separation,
                            "predator.png")
        self.space.place_agent(predator,pos)
        self.schedule.add(predator)
|
Create self.population agents, with random positions and starting headings.
|
625941bccc40096d6159582d
|
def start():
    """Boot the whole program: load the config, update the room, start the
    button-polling thread, then run the web server."""
    initConfig()
    updateRoom()
    print("Starting button thread...")
    button_thread = threading.Thread(target=checkBtn)
    button_thread.start()
    print("Starting server...")
    run(host='0.0.0.0', port=8080, server="cherrypy")
    print("All started")
|
This method starts the whole program. It creates the global config variable, updates the room,
starts the button/buzzer thread and starts the server.
|
625941bc82261d6c526ab37c
|
def breadthFirstSearch(problem):
    """Search the shallowest nodes in the search tree first (BFS).

    :return: list of actions from the start state to a goal state
    :raises Exception: when no path to a goal exists
    """
    curState = problem.getStartState()
    visited = set()
    toDo = Queue()
    result = []
    if problem.isGoalState(curState):
        return result
    while True:
        visited.add(curState)
        for successor in problem.getSuccessors(curState):
            if successor[0] not in visited:
                newResult = result + [successor[1]]
                if problem.isGoalState(successor[0]):
                    return newResult
                toDo.push((successor[0], newResult))
        # Bug fix: the original `continue`d when the popped state was already
        # visited, re-expanding that state with the previous (stale) `result`
        # path.  Instead, keep popping until an unexpanded state is found.
        nextEntry = None
        while not toDo.isEmpty():
            candidate = toDo.pop()
            if candidate[0] not in visited:
                nextEntry = candidate
                break
        if nextEntry is None:
            break
        curState, result = nextEntry
    raise Exception("No valid path found")
|
Search the shallowest nodes in the search tree first.
[2nd Edition: p 73, 3rd Edition: p 82]
|
625941bc99cbb53fe6792ac3
|
def OnRemove(self, *args):
    """Hook run before an element is removed from the DictionaryBase
    instance (key and value arrive via *args); default does nothing."""
    return None
|
OnRemove(self: DictionaryBase,key: object,value: object)
Performs additional custom processes before removing an element from the
System.Collections.DictionaryBase instance.
key: The key of the element to remove.
value: The value of the element to remove.
|
625941bcfb3f5b602dac356c
|
def __init__(self, BaseMask = None, OwnerMask = None, GroupMask = None, EveryoneMask = None, NextOwnerMask = None):
    """Store the permission masks for each target audience."""
    masks = dict(BaseMask=BaseMask, OwnerMask=OwnerMask,
                 GroupMask=GroupMask, EveryoneMask=EveryoneMask,
                 NextOwnerMask=NextOwnerMask)
    for mask_name, mask_value in masks.items():
        setattr(self, mask_name, mask_value)
|
store the values of the various targets permissions
|
625941bcbaa26c4b54cb0ffe
|
def parseBoolean2(self, text, match):
    """Return True when *text* is an affirmative answer.

    Upstream matches ^(sim|Sim|SIM|n.o|N.o|N.O)$, so inspecting the
    first letter is sufficient.
    """
    first = text[0]
    return first == 's' or first == 'S'
|
^(sim|Sim|SIM|n.o|N.o|N.O)$
|
625941bcbe383301e01b5368
|
def edit_dist(seqA, seqB, normalized=False, restriction=''):
    """Return the (optionally normalized) edit distance between two sequences.

    With restriction='cv' (or 'consonant-vowel'), vowels may not be matched
    with consonants and normalization uses the alignment length; otherwise
    normalization divides by the longer sequence's length.
    """
    seqA, seqB = _as_lists(seqA, seqB)
    if restriction not in ('cv', 'consonant-vowel'):
        return malign.edit_dist(seqA, seqB, normalized)
    resA = prosodic_string(seqA, 'cv')
    resB = prosodic_string(seqB, 'cv')
    return malign.restricted_edit_dist(seqA, seqB, resA, resB, normalized)
|
Return the edit distance between two strings.
Parameters
----------
seqA,seqB : str
The strings that shall be compared.
normalized : bool (default=False)
Specify whether the normalized edit distance shall be returned. If no
restrictions are chosen, the edit distance is normalized by dividing by
the length of the longer string. If *restriction* is set to *cv*
(consonant-vowel), the edit distance is normalized by the length of the
alignment.
restriction : {"cv"} (default="")
Specify the restrictions to be used. Currently, only ``cv`` is
supported. This prohibits matches of vowels with consonants.
Notes
-----
The edit distance was first formally defined by V. I. Levenshtein
(:evobib:`Levenshtein1965`). The first algorithm to compute the edit
distance was proposed by Wagner and Fisher (:evobib:`Wagner1974`).
Returns
-------
dist : {int float}
The edit distance, which is a float if normalized is set to c{True},
and an integer otherwise.
Examples
--------
Align two sequences::
>>> seqA = 'fat cat'
>>> seqB = 'catfat'
>>> edit_dist(seqA, seqB)
3
|
625941bc009cb60464c63290
|
@cli.command()
@click.argument("fds_url")
def show_ttl(fds_url):
    # Show the lifecycle (TTL) configuration for a bucket or an object.
    url = FDSURL(fds_url)
    bucket_name = url.bucket_name()
    ttl = fds_client.get_lifecycle_config(bucket_name)
    if url.is_bucket_url():
        CLIPrinter.print_lifecycle(ttl)
    elif url.is_object_url():
        if not fds_client.does_object_exists(bucket_name, url.object_name()):
            raise Exception("object {} is not exist".format(url.object_name()))
        if url.is_object_dir():
            prefix = url.object_dir()
        else:
            prefix = url.object_name()
        # NOTE(review): `rule["prefix"] in prefix` is a substring test; if
        # the intent is "rule applies to this object", the usual check is
        # prefix.startswith(rule["prefix"]) -- confirm.
        rules = [rule for rule in ttl["rules"] if rule["prefix"] in prefix]
        CLIPrinter.print_lifecycle({"rules": rules})
    else:
        CLIPrinter.wrong_format()
|
ttl command shows the lifecycle information of a bucket or a object
|
625941bc6aa9bd52df036c7e
|
def send_entries(self, channel):
    """Send (word, filename) pairs over *channel*, one per unique word
    per file, then sleep self.delay seconds per file when configured.
    """
    for filename in self.filenames:
        tokens = self._get_file_content(filename).split()
        # Fixes: dropped the unused `index = {}`; replaced the list used for
        # membership checks (O(n) per token) with a set (O(1)).
        seen = set()
        for token in tokens:
            token = self._strip(token)
            if token not in seen:
                channel.send((token, filename))
                seen.add(token)
        if self.delay:
            time.sleep(self.delay)
|
Send word entries from the file.
|
625941bc6aa9bd52df036c7f
|
def __str__(self):
    # Delegate "pretty" printing of Attrs to the module-level helper.
    return _print_attrs(self)
|
This enables the "pretty" printing of Attrs.
|
625941bc8a43f66fc4b53f44
|
def remove_host(config, sync_name, name):
    """Remove the named host from the given sync's host list.

    :return: True on success
    """
    # NOTE(review): get_sync is called with `name`, not `sync_name` -- it
    # matches the original code, but confirm the arguments are not swapped.
    sync_entry = get_sync(config, name)
    host = get_host(config, sync_name, name)
    sync_entry['hosts'].remove(host)
    return True
|
remove sync
|
625941bcdd821e528d63b086
|
def test_edit_invalidly_tagged_build(self):
    # Editing an update whose build carries none of the expected koji tags
    # must be rejected with a 400 and a descriptive per-field error.
    update = self.db.query(Update).one()
    update_json = self.get_update(update.title)
    update_json['csrf_token'] = self.get_csrf_token()
    update_json['notes'] = 'testing!!!'
    update_json['edited'] = update.alias
    del update_json['requirements']
    # Pretend koji reports only an unexpected tag for the build.
    with mock.patch('bodhi.server.buildsys.DevBuildsys.listTags',
                    return_value=[{'name': 'f17-updates'}]) as listTags:
        res = self.app.post_json('/updates/', update_json, status=400)
    expected_json = {
        'status': 'error',
        'errors': [
            {'description': (
                "Invalid tag: bodhi-2.0-1.fc17 not tagged with any of the following tags "
                "{}".format(['f17-updates-candidate', 'f17-updates-testing'])),
             'location': 'body',
             'name': 'builds'}]}
    assert res.json == expected_json
    listTags.assert_called_once_with('bodhi-2.0-1.fc17')
|
Editing an update that references invalidly tagged builds should raise an error.
|
625941bcd268445f265b4d4a
|
def decode_packet_30(self, low, high):
    """Decode Packet 30 (cliff front right signal).

    :param low: low byte of the value (specified first to make pop() easier)
    :param high: high byte of the value
    :return: unsigned 16-bit short, strength of the signal from 0-4095
    """
    value = self.decode_unsigned_short(low, high)
    return value
|
Decode Packet 30 (cliff front right signal) and return its value
Arguments:
low: Low byte of the 2's complement. Low is specified first to make pop() easier
high: High byte of the 2's complement
Returns: unsigned 16bit short. Strength of cliff front right signal from 0-4095
|
625941bcc4546d3d9de7290d
|
def plugin_loaded():
    """Called by Sublime when the API is ready: load config, then register
    every open view with the mediator and refresh its coverage."""
    config.load()
    for window in sublime.windows():
        debug_message("[plugin_loaded] Found window %d" % window.id())
        for view in window.views():
            debug_message("[plugin_loaded] Found view %d" % view.id())
            mediator.add(view)
            # Bug fix: bind `view` as a default argument -- the original
            # late-binding lambda could fire against the loop's last view.
            set_timeout_async(
                lambda v=view: v.run_command('phpcoverage_update'), 1
            )
    debug_message("[plugin_loaded] Finished.")
|
Called automatically by Sublime when the API is ready to be used.
Updates coverage for any open views, and adds them to the mediator.
|
625941bc4d74a7450ccd409f
|
def check_table_and_update_flags(table_name, num_events, summary):
    """Compare *num_events* against the stored count for *table_name* and,
    when new data is present, set the summary's per-table update flags."""
    if table_name not in SUPPORTED_TABLES:
        if table_name not in IGNORE_TABLES:
            logger.warning('Unsupported table type: %s', table_name)
        return
    number_of_events_attr = 'num_%s' % table_name
    update_flag_attr = 'needs_update_%s' % table_name
    if getattr(summary, number_of_events_attr) == num_events:
        # Count unchanged -- nothing new to record.
        return
    logger.info("New data (%s) on %s for station %d",
                table_name, summary.date.strftime("%a %b %d %Y"),
                summary.station.number)
    if table_name in RECORD_EARLY_NUM_EVENTS:
        setattr(summary, number_of_events_attr, num_events)
    setattr(summary, update_flag_attr, True)
    summary.needs_update = True
    summary.save()
|
Check a single table and update flags if new data
|
625941bc63b5f9789fde6fc1
|
def __eq__(self, other):
    """Two SplunkIntegration objects are equal when all attributes match."""
    if isinstance(other, SplunkIntegration):
        return self.__dict__ == other.__dict__
    return False
|
Returns true if both objects are equal
|
625941bc627d3e7fe0d68d2a
|
def end(self):
    # SWIG delegate: end(Station self) -> Time
    return _DataModel.Station_end(self)
|
end(Station self) -> Time
|
625941bc4f6381625f11491a
|
def unpack_nested_exception(error):
    """Return the innermost exception found by walking exception args.

    :param error: a Python exception possibly wrapping other exceptions
    :return: a Python exception with no exception embedded within
    """
    idx = 0
    while idx < len(error.args):
        arg = error.args[idx]
        if isinstance(arg, Exception):
            # Descend into the nested exception and rescan its args.
            error = arg
            idx = 0
        else:
            idx += 1
    return error
|
If exceptions are stacked, return the innermost one
:param error: A Python exception with possible exceptions embedded within
:return: A Python exception with no exception embedded within
|
625941bc97e22403b379ce75
|
@app.route("/register", methods=["GET", "POST"]) <NEW_LINE> def register(): <NEW_LINE> <INDENT> if request.method == "POST": <NEW_LINE> <INDENT> if request.form.get("username")== "" or request.form.get("password") == "" or request.form.get("confirm_password") == "": <NEW_LINE> <INDENT> return apology("Fill in all the fields") <NEW_LINE> <DEDENT> if not request.form.get("password") == request.form.get("confirm_password"): <NEW_LINE> <INDENT> return render_template("apology2.html") <NEW_LINE> <DEDENT> result = db.execute("INSERT INTO users (username, hash) VALUES(:username, :hash)", username=request.form.get("username"), hash=pwd_context.hash(request.form.get("password"))) <NEW_LINE> if not result: <NEW_LINE> <INDENT> return apology("Username already exist") <NEW_LINE> <DEDENT> session["user_id"]= result <NEW_LINE> return redirect(url_for("index")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render_template("register.html")
|
Register user.
|
625941bcd486a94d0b98e021
|
def azimuth_difference(azimuth_a, azimuth_b, directional=True):
    """Find the difference between two azimuths specified in degrees.

    If ``directional=True`` (default), produce a difference between 0 and
    180 degrees that ignores sign but accounts for inverted orientation.
    If ``directional=False`` or ``directional='inverse'``, also compare
    against one azimuth rotated 180 degrees and return the smaller
    difference, ignoring inverted rotation.
    If ``directional='polar'``, produce a difference between 0 and 360
    degrees, accounting for differences past 180 degrees.
    If ``directional='signed'``, produce a difference between -180 and
    180, accounting for the sign of the difference.

    :raises ValueError: for an unrecognized ``directional`` value
        (previously the function silently returned None).
    """
    def unsigned_difference(a, b):
        # Wrap the raw difference into (-180, 180] and take its magnitude.
        difference = a - b
        if difference > 180:
            difference -= 360
        if difference < -180:
            difference += 360
        return abs(difference)

    if directional is True:
        azimuth_a = normalize_azimuth(azimuth_a, zero_center=True)
        azimuth_b = normalize_azimuth(azimuth_b, zero_center=True)
        return unsigned_difference(azimuth_a, azimuth_b)
    elif (directional is False) or (directional == 'inverse'):
        azimuth_a = normalize_azimuth(azimuth_a, zero_center=True)
        azimuth_b = normalize_azimuth(azimuth_b, zero_center=True)
        # Compare both as-is and with one azimuth flipped 180 degrees.
        return min([unsigned_difference(azimuth_a, azimuth_b),
                    unsigned_difference(azimuth_a + 180, azimuth_b)])
    elif directional == 'polar':
        return normalize_azimuth(azimuth_b - azimuth_a)
    elif directional == 'signed':
        azimuth_a = normalize_azimuth(azimuth_a)
        azimuth_b = normalize_azimuth(azimuth_b)
        return azimuth_b - azimuth_a
    # BUGFIX: previously fell through and returned None for bad input.
    raise ValueError("invalid 'directional' value: %r" % (directional,))
|
Find the difference between two azimuths specified in degrees.
If ``directional=True`` (default), will produce a difference
between 0 and 180 degrees that ignores sign but accounts for
inverted differences in orientation.
If ``directional=False`` or ``directional='inverse'``, will ignore
inverted differences in rotation by also calculating the difference
if one azimuth is rotated 180 degrees and returning the smaller of
the two differences.
If ``directional='polar'``, will produce a difference between
0 and 360 degrees, accounting for differences past 180 degrees.
If ``directional='signed'``, will produce a difference between -180
and 180, accounting for the sign of the difference.
|
625941bc7cff6e4e81117862
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.