Dataset columns:
code - string (length 4 to 4.48k)
docstring - string (length 1 to 6.45k)
_id - string (length 24)
def do_rolls(self, arg): <NEW_LINE> <INDENT> if not arg: <NEW_LINE> <INDENT> print("Missing argument on how many times to roll the dice.") <NEW_LINE> return <NEW_LINE> <DEDENT> times = int(arg) <NEW_LINE> for _ in range(times): <NEW_LINE> <INDENT> print("Rolling a... {}".format(self.dice.roll()))
Roll one die several times; supply an argument for how many times to roll.
625941bbd18da76e2353237e
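A minimal sketch of how a command like do_rolls plugs into a cmd.Cmd shell. The Dice class, its six-sided roll(), and the DiceShell wiring are assumptions for illustration; the record only shows the command handler itself.

import cmd
import random

class Dice:
    # Hypothetical die; the record does not show this class.
    def roll(self):
        return random.randint(1, 6)

class DiceShell(cmd.Cmd):
    prompt = '(dice) '

    def __init__(self):
        super().__init__()
        self.dice = Dice()

    def do_rolls(self, arg):
        # Same logic as the record: validate, then roll arg times.
        if not arg:
            print("Missing argument on how many times to roll the dice.")
            return
        for _ in range(int(arg)):
            print("Rolling a... {}".format(self.dice.roll()))

DiceShell().onecmd('rolls 3')  # prints three rolls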
def __init__(self, coefficients, get_feedback_value): <NEW_LINE> <INDENT> self.coefficients = coefficients <NEW_LINE> self.get_feedback_value = get_feedback_value
Initialises the polynomial function controller from the polynomial coefficients and the feedback value.
625941bb91af0d3eaac9b8c0
def multiply(self, num1, num2): <NEW_LINE> <INDENT> if num1=='0' or num2=='0': <NEW_LINE> <INDENT> return '0' <NEW_LINE> <DEDENT> num1 = num1[::-1] <NEW_LINE> num2 = num2[::-1] <NEW_LINE> ans = [0]*(len(num1)+len(num2)+1) <NEW_LINE> for i in range(len(num1)): <NEW_LINE> <INDENT> for j in range(len(num2)): <NEW_LINE> <INDENT> ans[i+j] += int(num1[i])*int(num2[j]) <NEW_LINE> <DEDENT> <DEDENT> t=0 <NEW_LINE> add=0 <NEW_LINE> while t<len(ans)-1: <NEW_LINE> <INDENT> add=ans[t]//10 <NEW_LINE> ans[t]=ans[t]%10 <NEW_LINE> t+=1 <NEW_LINE> ans[t]+=add <NEW_LINE> <DEDENT> while ans[-1]==0: <NEW_LINE> <INDENT> ans.pop() <NEW_LINE> <DEDENT> s='' <NEW_LINE> for i in ans: <NEW_LINE> <INDENT> s=str(i)+s <NEW_LINE> <DEDENT> return s
:type num1: str :type num2: str :rtype: str
625941bb1b99ca400220a95c
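To see the carry-free digit convolution at work, here is the same algorithm traced on a small input as a standalone sketch (not tied to whatever class the record's method belongs to):

# '12' * '34' = '408'. Reversed digits: num1 = '21', num2 = '43';
# ans[i+j] accumulates int(num1[i]) * int(num2[j]) before the carry
# pass normalises each cell to a single digit.
num1, num2 = '12'[::-1], '34'[::-1]
ans = [0] * (len(num1) + len(num2) + 1)
for i in range(len(num1)):
    for j in range(len(num2)):
        ans[i + j] += int(num1[i]) * int(num2[j])
# ans is now [8, 10, 3, 0, 0]; carrying turns it into [8, 0, 4, 0, 0],
# which reads least-significant-digit-first as 408.
t = 0
while t < len(ans) - 1:
    add = ans[t] // 10
    ans[t] = ans[t] % 10
    t += 1
    ans[t] += add
while ans[-1] == 0:
    ans.pop()
print(''.join(str(d) for d in reversed(ans)))  # -> 408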
def find_temperature_sensors(self): <NEW_LINE> <INDENT> sensor_tree = {} <NEW_LINE> duplicate_chip_ids = set() <NEW_LINE> for chip, (chip_name, chip_sensors) in LinuxPlatform._collect_temperature_sensor_globs().iteritems(): <NEW_LINE> <INDENT> if chip_name is None: <NEW_LINE> <INDENT> debug("Un-named temperature sensor chip found.") <NEW_LINE> debug("Naming the chip: '%s'" % LinuxPlatform.UNKNOWN_SENSOR_CHIP_NAME) <NEW_LINE> chip_name = LinuxPlatform.UNKNOWN_SENSOR_CHIP_NAME <NEW_LINE> <DEDENT> chip_id = (chip_name, len(chip_sensors)) <NEW_LINE> if chip_id in sensor_tree: <NEW_LINE> <INDENT> info("Found duplicate chips named '%s' with %s temperature sensor(s)" % chip_id) <NEW_LINE> duplicate_chip_ids.add(chip_id) <NEW_LINE> del sensor_tree[chip_id] <NEW_LINE> continue <NEW_LINE> <DEDENT> elif chip_id in duplicate_chip_ids: <NEW_LINE> <INDENT> info("Found another chip named '%s' with %s temperature sensor(s)" % chip_id) <NEW_LINE> continue <NEW_LINE> <DEDENT> sensor_tree[chip_id] = {} <NEW_LINE> for sysfs_path in chip_sensors: <NEW_LINE> <INDENT> sensor_id = (chip_id[0], str(chip_id[1]), os.path.basename(sysfs_path)) <NEW_LINE> assert sensor_id not in sensor_tree[chip_id] <NEW_LINE> sensor_tree[chip_id][sensor_id] = sysfs_path <NEW_LINE> <DEDENT> <DEDENT> sensor_map = {} <NEW_LINE> for chip_id, sensors in sensor_tree.iteritems(): <NEW_LINE> <INDENT> for sensor_id, sysfs_path in sensors.iteritems(): <NEW_LINE> <INDENT> assert sensor_id not in sensor_map <NEW_LINE> sensor_map[":".join(sensor_id)] = sysfs_path <NEW_LINE> <DEDENT> <DEDENT> debug("Detected temperature sensors: %s" % sensor_map) <NEW_LINE> self.temp_sensors = sensor_map.keys() <NEW_LINE> self.temp_sensor_map = sensor_map
Detect sensors using the hwmon sysfs framework. It's unclear whether the sensors may move between reboots, so we have to roll our own sensor identifiers. We are not fussy, and use as many sensors as we can find.
625941bb56b00c62f0f14502
def notify(self, event_type, *args, **kwargs): <NEW_LINE> <INDENT> if debug_events: <NEW_LINE> <INDENT> print('[Event] {}: args={} kwargs={}'.format(event_type, repr(args), repr(kwargs))) <NEW_LINE> <DEDENT> if self._supported_event_types and not(event_type in self._supported_event_types): <NEW_LINE> <INDENT> raise KeyError('Event type {} is not supported by this Observable'.format(repr(event_type))) <NEW_LINE> <DEDENT> elif event_type in self._observer_handles: <NEW_LINE> <INDENT> observers = self._observer_handles[event_type] <NEW_LINE> for listener in list(observers): <NEW_LINE> <INDENT> listener._callback(*args, **kwargs) <NEW_LINE> listener._limit -= 1 <NEW_LINE> <DEDENT> observers[:] = [ listener for listener in observers if listener._limit != 0 ] <NEW_LINE> if len(observers) == 0: <NEW_LINE> <INDENT> del self._observer_handles[event_type]
Calls all observers with a corresponding event_type in the order they were registered :param event_type: Matches add():event_type :param args: Ordered arguments to pass to the callbacks :param kwargs: Named arguments to pass to the callbacks :return: None
625941bb1d351010ab8559c8
def comment_inter(self,productId): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> url = 'http://product.dangdang.com/index.php?r=comment%2Flist&productId=' + productId + '&categoryPath=01.41.26.15.00.00&mainProductId=' + productId + '&mediumId=0&pageIndex=1&sortType=1&filterType=1&isSystem=1&tagId=0&tagFilterCount=0&template=publish' <NEW_LINE> response = requests.get(url) <NEW_LINE> resp_dict = json.loads(response.text) <NEW_LINE> html = resp_dict['data']['list']['html'] <NEW_LINE> html_etree = etree.HTML(html) <NEW_LINE> comment_list = html_etree.xpath("//div[@class='item_wrap']/div") <NEW_LINE> commentcount = resp_dict['data']['list']['summary']['total_comment_num'] <NEW_LINE> commentpercent = resp_dict['data']['list']['summary']['goodRate'] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self._logger.error(e) <NEW_LINE> comment_list = None <NEW_LINE> commentcount = '' <NEW_LINE> commentpercent = '' <NEW_LINE> <DEDENT> return {'comments':comment_list,'commentcount':commentcount,'commentpercent':commentpercent}
Comment API endpoint. :param productId: :return:
625941bb4f6381625f1148e9
def write_level_to_dat(level, writer): <NEW_LINE> <INDENT> if (level.lower_layer == None or len(level.lower_layer) == 0): <NEW_LINE> <INDENT> level.lower_layer = [0]*1024 <NEW_LINE> <DEDENT> level_bytes = calculate_level_byte_size(level) <NEW_LINE> writer.write(level_bytes.to_bytes(2, cc_data.BYTE_ORDER)) <NEW_LINE> writer.write(level.level_number.to_bytes(2, cc_data.BYTE_ORDER)) <NEW_LINE> writer.write(level.time.to_bytes(2, cc_data.BYTE_ORDER)) <NEW_LINE> writer.write(level.num_chips.to_bytes(2, cc_data.BYTE_ORDER)) <NEW_LINE> writer.write(b'\x01\x00') <NEW_LINE> write_layer_to_dat(level.upper_layer, writer) <NEW_LINE> write_layer_to_dat(level.lower_layer, writer) <NEW_LINE> total_field_byte_size = calculate_total_optional_field_byte_size(level.optional_fields) <NEW_LINE> writer.write(total_field_byte_size.to_bytes(2, cc_data.BYTE_ORDER)) <NEW_LINE> for field in level.optional_fields: <NEW_LINE> <INDENT> write_field_to_dat(field, writer)
Writes the given level in binary form to the given writer Args: level (CCLevel): the level to write writer (BufferedWriter): the active writer in binary write mode
625941bb627d3e7fe0d68cfa
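A minimal sketch of the fixed-width little-endian serialisation used above, assuming cc_data.BYTE_ORDER is 'little' (an assumption; the cc_data module is not shown in the record):

import io

BYTE_ORDER = 'little'  # assumed value of cc_data.BYTE_ORDER
writer = io.BytesIO()
level_number, time_limit, num_chips = 1, 100, 11
for word in (level_number, time_limit, num_chips):
    writer.write(word.to_bytes(2, BYTE_ORDER))
print(writer.getvalue().hex())  # -> 010064000b00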
def calculate_age(birth_year): <NEW_LINE> <INDENT> return datetime.today().year - birth_year
Calculate the age for a given birth year. Args: birth_year: Integer indicating the year of birth. Returns: Integer indicating the age.
625941bb3cc13d1c6d3c722f
def extract_overview_output_json(ifile_in): <NEW_LINE> <INDENT> with open(ifile_in) as f: <NEW_LINE> <INDENT> data = json.load(f) <NEW_LINE> summary_keys = ['source_name', 'testname', 'high_priorities', 'medium_priorities', 'low_priorities'] <NEW_LINE> summary = {key: None for key in summary_keys} <NEW_LINE> for sum_key in summary_keys: <NEW_LINE> <INDENT> summary[sum_key] = extract_from_nested_json(data, sum_key)[0] <NEW_LINE> <DEDENT> summary['file'] = ifile_in <NEW_LINE> <DEDENT> return summary
extracts information from given json file and returns them as a dictionary
625941bb0c0af96317bb8094
@catch_error('Delete all queued images') <NEW_LINE> def delete_all_queued_images(args): <NEW_LINE> <INDENT> if (not args.force and not user_confirm("Delete all queued images?", default=False)): <NEW_LINE> <INDENT> return SUCCESS <NEW_LINE> <DEDENT> client = get_client(args) <NEW_LINE> num_deleted = client.delete_all_queued_images() <NEW_LINE> if args.verbose: <NEW_LINE> <INDENT> print("Deleted %(num_deleted)s queued images" % {'num_deleted': num_deleted}) <NEW_LINE> <DEDENT> return SUCCESS
%(prog)s delete-all-queued-images [options] Remove all images from the cache queue.
625941bb1d351010ab8559c9
def test_lstrip(self): <NEW_LINE> <INDENT> cases = { ' test': 'test', '\n\ttest': 'test', '\n\n\t\t \n\n \t\ttest': 'test', } <NEW_LINE> for s, expected in cases.items(): <NEW_LINE> <INDENT> self.assertCallEqual( ChainedBase(expected), func=ChainedBase(s).lstrip, msg='Failed to lstrip ChainedBase.', )
lstrip() should act like str.lstrip().
625941bbd53ae8145f87a121
def parse_external(ext): <NEW_LINE> <INDENT> def is_source_url(s): <NEW_LINE> <INDENT> return is_svn_absolute(s) or is_svn_relative(s) <NEW_LINE> <DEDENT> parser = OptionParser(add_help_option=False) <NEW_LINE> parser.add_option('-r', dest='rev', default=None) <NEW_LINE> parts = [s.strip() for s in ext.split()] <NEW_LINE> opts, args = parser.parse_args(parts) <NEW_LINE> assert len(args) == 2 <NEW_LINE> if is_source_url(args[1]): <NEW_LINE> <INDENT> target = args[0] <NEW_LINE> source = args[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target = args[1] <NEW_LINE> source = args[0] <NEW_LINE> <DEDENT> return SvnExternal(target=target, source=source, source_rev=opts.rev)
parses externals definition. Returns SvnExternal object
625941bbd99f1b3c44c67442
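The helpers and the SvnExternal result type are not part of the record; here are hypothetical stand-ins that make the function runnable, followed by the two operand orders it accepts:

from collections import namedtuple

SvnExternal = namedtuple('SvnExternal', 'target source source_rev')

def is_svn_absolute(s):
    # Hypothetical: treat URL schemes as absolute externals.
    return s.startswith(('http://', 'https://', 'svn://', 'file://'))

def is_svn_relative(s):
    # Hypothetical: the usual svn:externals relative prefixes.
    return s.startswith(('^/', '//', '/', '../'))

# parse_external('-r 1234 ^/common ext/common')
#   -> SvnExternal(target='ext/common', source='^/common', source_rev='1234')
# parse_external('ext/common ^/common')
#   -> SvnExternal(target='ext/common', source='^/common', source_rev=None)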
def to_markdown(query_number, item): <NEW_LINE> <INDENT> return str(query_number) + " | " + str(item)+"\n"
Take an item and transform it into a markdown table line. Parameters ---------- query_number: int item: int, float or string Returns ------- markdown_line: string
625941bb4527f215b584c306
def info(self): <NEW_LINE> <INDENT> return self.__enroll_repository.info()
:return: a copy of the enrolled dictionary
625941bb460517430c394039
def test_add_strings(self): <NEW_LINE> <INDENT> result = tools.sum('abc', 'def') <NEW_LINE> self.assertEqual(result, 'abcdef')
Test the addition of two strings returns the two strings as one concatenated string
625941bb10dbd63aa1bd2a5a
def parse_version(package): <NEW_LINE> <INDENT> from os.path import dirname, join <NEW_LINE> import ast <NEW_LINE> init_fpath = join(dirname(__file__), package, '__init__.py') <NEW_LINE> with open(init_fpath) as file_: <NEW_LINE> <INDENT> sourcecode = file_.read() <NEW_LINE> <DEDENT> pt = ast.parse(sourcecode) <NEW_LINE> class VersionVisitor(ast.NodeVisitor): <NEW_LINE> <INDENT> def visit_Assign(self, node): <NEW_LINE> <INDENT> for target in node.targets: <NEW_LINE> <INDENT> if isinstance(target, ast.Name) and target.id == '__version__': <NEW_LINE> <INDENT> self.version = node.value.s <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> visitor = VersionVisitor() <NEW_LINE> visitor.visit(pt) <NEW_LINE> return visitor.version
Statically parse the version number from __init__.py CommandLine: python -c "import setup; print(setup.parse_version('danesfield'))"
625941bb3539df3088e2e1f7
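A self-contained check of the same AST approach, parsing __version__ straight out of a source string instead of a package's __init__.py:

import ast

sourcecode = '__version__ = "1.2.3"\n'
pt = ast.parse(sourcecode)

class VersionVisitor(ast.NodeVisitor):
    def visit_Assign(self, node):
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id == '__version__':
                self.version = node.value.s  # node.value.value on newer Pythons

visitor = VersionVisitor()
visitor.visit(pt)
assert visitor.version == '1.2.3'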
def _get_object_inverse(self, inverted_property): <NEW_LINE> <INDENT> inverse = ET.Element('ObjectInverseOf') <NEW_LINE> inverse.append(self.properties[inverted_property]) <NEW_LINE> return inverse
Utility method which returns an ObjectInverseOf property for use in declarations. Assumes that the property name provided has already been created. :param str inverted_property: property to obtain the inverse of
625941bbcc0a2c11143dcd44
def highlightWithWebIDL(text, el): <NEW_LINE> <INDENT> from widlparser import parser <NEW_LINE> class IDLUI: <NEW_LINE> <INDENT> def warn(self, msg): <NEW_LINE> <INDENT> m.die(msg.rstrip()) <NEW_LINE> <DEDENT> <DEDENT> class HighlightMarker: <NEW_LINE> <INDENT> def markup_type_name(self, text, construct): <NEW_LINE> <INDENT> return ("\1n\2", "\3") <NEW_LINE> <DEDENT> def markup_name(self, text, construct): <NEW_LINE> <INDENT> return ("\1g\2", "\3") <NEW_LINE> <DEDENT> def markup_keyword(self, text, construct): <NEW_LINE> <INDENT> return ("\1b\2", "\3") <NEW_LINE> <DEDENT> def markup_enum_value(self, text, construct): <NEW_LINE> <INDENT> return ("\1s\2", "\3") <NEW_LINE> <DEDENT> <DEDENT> if "\1" in text or "\2" in text or "\3" in text: <NEW_LINE> <INDENT> m.die( "WebIDL text contains some U+0001-0003 characters, which are used by the highlighter. This block can't be highlighted. :(", el=el, ) <NEW_LINE> return <NEW_LINE> <DEDENT> widl = parser.Parser(text, IDLUI()) <NEW_LINE> return coloredTextFromWidlStack(str(widl.markup(HighlightMarker())))
Trick the widlparser emitter, which wants to output HTML via wrapping with start/end tags, into instead outputting a stack-based text format. A \x01 indicates a new stack push; the text between the \x01 and the \x02 is the attr to be pushed. A \x03 indicates a stack pop. All other text is colored with the attr currently on top of the stack.
625941bb57b8e32f5248334c
@WithContextSkip.conditionalcontextmanager <NEW_LINE> def only_thread(thread_name): <NEW_LINE> <INDENT> if thread_name == threading.current_thread().name: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise WithContextSkip.SkipStatement()
Runs the controlled block only if the current thread has the given name - otherwise skips it
625941bb4e696a04525c92f8
def argetScstar(self): <NEW_LINE> <INDENT> return _core.CGPkronSumCache_argetScstar(self)
argetScstar(CGPkronSumCache self) Parameters ---------- self: limix::CGPkronSumCache *
625941bbd268445f265b4d1a
def _handle_put(gcs_stub, filename, param_dict, headers, payload): <NEW_LINE> <INDENT> token = _get_param('upload_id', param_dict) <NEW_LINE> content_range = _ContentRange(headers) <NEW_LINE> if not content_range.value: <NEW_LINE> <INDENT> raise ValueError('Missing header content-range.') <NEW_LINE> <DEDENT> gcs_stub.put_continue_creation(token, payload, content_range.range, content_range.last) <NEW_LINE> if content_range.last: <NEW_LINE> <INDENT> filestat = gcs_stub.head_object(filename) <NEW_LINE> response_headers = { 'content-length': filestat.st_size, } <NEW_LINE> response_status = httplib.OK <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response_headers = {} <NEW_LINE> response_status = 308 <NEW_LINE> <DEDENT> return _FakeUrlFetchResult(response_status, response_headers, '')
Handle PUT that continues object creation.
625941bb4d74a7450ccd406f
def check_zmq_connection(init: bool = False, error_logged: bool = False) -> None: <NEW_LINE> <INDENT> global zmq_alive <NEW_LINE> if init: <NEW_LINE> <INDENT> if not zmq_alive: <NEW_LINE> <INDENT> logger.error("Cannot connect to MISP's ZMQ notification channel! The module will be stopped!") <NEW_LINE> sys.exit(2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info("Connection to MISP's ZMQ notification channel works!") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not zmq_alive and not error_logged: <NEW_LINE> <INDENT> logger.error("Cannot connect to MISP's ZMQ notification channel!") <NEW_LINE> error_logged = True <NEW_LINE> <DEDENT> elif zmq_alive and error_logged: <NEW_LINE> <INDENT> logger.info("Connection to MISP's ZMQ notification channel works!") <NEW_LINE> error_logged = False <NEW_LINE> <DEDENT> <DEDENT> zmq_alive = False <NEW_LINE> zmq_availability_timer = threading.Timer(15, check_zmq_connection, (False, error_logged)) <NEW_LINE> zmq_availability_timer.start()
Every 15 seconds a Timer checks whether any notification was received on the ZMQ channel, because at least one keep-alive message should arrive every 10 seconds. If none arrives, something is wrong, so log an error (or exit the program if the first connection does not work). :param init: set to True when the Timer and ZMQ channel are initialized, to exit the program when the connection is not successful :param error_logged: flag indicating whether a connection error has already been logged, to prevent flooding of the log :return: None
625941bb236d856c2ad44689
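The watchdog pattern above in miniature: a timer that re-arms itself and trips when no heartbeat arrived since the last check. This is a standalone sketch; the MISP/ZMQ specifics are not reproduced.

import threading

heartbeat_seen = False

def on_heartbeat():
    # Called from the receive loop whenever any message arrives.
    global heartbeat_seen
    heartbeat_seen = True

def watchdog(interval=15):
    global heartbeat_seen
    if not heartbeat_seen:
        print('no heartbeat in the last %d seconds' % interval)
    heartbeat_seen = False  # must be set again before the next check
    timer = threading.Timer(interval, watchdog, (interval,))
    timer.daemon = True
    timer.start()

# watchdog() arms the first timer; on_heartbeat() feeds it.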
def count_files(path): <NEW_LINE> <INDENT> path = path.replace('"', '') <NEW_LINE> if not os.path.isdir(path): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return sum(count_files(i) for i in paths_in_dir(path))
count the number of files in a directory
625941bb8e71fb1e9831d659
def migrate_from_1_1_14(self, globals_dict): <NEW_LINE> <INDENT> aids_Aid = resolve_model("aids.Aid") <NEW_LINE> def create_aids_aid(id, start_date, end_date, decided_date, board_id, client_id, aid_regime, aid_type_id, category_id, amount): <NEW_LINE> <INDENT> kw = dict() <NEW_LINE> kw.update(id=id) <NEW_LINE> kw.update(start_date=start_date) <NEW_LINE> kw.update(end_date=end_date) <NEW_LINE> kw.update(client_id=client_id) <NEW_LINE> kw.update(aid_regime=aid_regime) <NEW_LINE> kw.update(aid_type_id=aid_type_id) <NEW_LINE> kw.update(category_id=category_id) <NEW_LINE> if amount is not None: amount = Decimal(amount) <NEW_LINE> kw.update(amount=amount) <NEW_LINE> return aids_Aid(**kw) <NEW_LINE> <DEDENT> globals_dict.update(create_aids_aid=create_aids_aid) <NEW_LINE> return '1.1.15'
aids.Aid: removed fields `board` and `date_decided`
625941bb7c178a314d6ef305
def cosine_sim(u, v): <NEW_LINE> <INDENT> return np.dot(u,v) / (norm(u)*norm(v))
Computes the cosine similarity between two vectors u and v. :param u: Numpy ndarray, the vector u. :param v: Numpy ndarray, the vector v. :return: Float between -1 and 1, the cosine similarity score between the vectors u and v.
625941bb5510c4643540f2a0
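The one-liner above depends on numpy imports not shown in the record; restated standalone with a quick numeric check:

import numpy as np
from numpy.linalg import norm

def cosine_sim(u, v):
    return np.dot(u, v) / (norm(u) * norm(v))

u = np.array([1.0, 0.0])
print(cosine_sim(u, u))                      # 1.0, identical direction
print(cosine_sim(u, np.array([1.0, 1.0])))   # ~0.7071, 45 degrees apart
print(cosine_sim(u, np.array([0.0, 1.0])))   # 0.0, orthogonal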
def _single_sample_adjust(self, node_in: Node, node_out: Node, out_value: float): <NEW_LINE> <INDENT> one_hot_input = np.zeros((1, self.N)) <NEW_LINE> one_hot_input[(0, self.nodes_ordered.index(node_in))] = 1 <NEW_LINE> weight_hot_output = np.zeros((1, self.N)) <NEW_LINE> weight_hot_output[(0, self.nodes_ordered.index(node_out))] = out_value <NEW_LINE> self.neural_network.update_input_output(one_hot_input, weight_hot_output) <NEW_LINE> self.neural_network.loop()
Helper function for adjust_network. Given a training sample that can be represented as (Node1, Node2, weight) the input is one-hot vector for Node1 and expected output is one-hot vector for Node2 multiplied by weight. :param node_in: node onto which the window spans. :param node_out: node in context of node_in. :param out_value: importance of node_out for node_in.
625941bb8a349b6b435e8020
def test_cosine_similarity(): <NEW_LINE> <INDENT> rng = np.random.RandomState(0) <NEW_LINE> X = rng.random_sample((5, 4)) <NEW_LINE> Y = rng.random_sample((3, 4)) <NEW_LINE> Xcsr = csr_matrix(X) <NEW_LINE> Ycsr = csr_matrix(Y) <NEW_LINE> for X_, Y_ in ((X, None), (X, Y), (Xcsr, None), (Xcsr, Ycsr)): <NEW_LINE> <INDENT> K1 = pairwise_kernels(X_, Y=Y_, metric="cosine") <NEW_LINE> X_ = normalize(X_) <NEW_LINE> if Y_ is not None: <NEW_LINE> <INDENT> Y_ = normalize(Y_) <NEW_LINE> <DEDENT> K2 = pairwise_kernels(X_, Y=Y_, metric="linear") <NEW_LINE> assert_array_almost_equal(K1, K2)
Test the cosine_similarity.
625941bb6fece00bbac2d5e7
def _ehExport(self): <NEW_LINE> <INDENT> selection = self._listbox.curselection() <NEW_LINE> if(selection == ()): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> items = [] <NEW_LINE> for s in selection: <NEW_LINE> <INDENT> items.append(self._contents[s]) <NEW_LINE> <DEDENT> packfile.export(items, self._wdir)
Export the currently selected items
625941bb3d592f4c4ed1cf2a
def get_all_passwords(self): <NEW_LINE> <INDENT> endpoint = "{}/services/storage/passwords".format(self._splunkd_uri) <NEW_LINE> response, content = rest.splunkd_request( endpoint, self._session_key, method="GET") <NEW_LINE> if response and response.status in (200, 201) and content: <NEW_LINE> <INDENT> return xdp.parse_conf_xml_dom(content) <NEW_LINE> <DEDENT> raise CredException("Failed to get credentials")
:return: a list of dict when successful; raises CredException on failure. the dict at least contains { "realm": xxx, "username": yyy, "clear_password": zzz, }
625941bb31939e2706e4cd1b
def test_pydata_tuple(self): <NEW_LINE> <INDENT> intype = (1, 'two', False) <NEW_LINE> graph = read_pydata(intype) <NEW_LINE> self.assertTupleEqual(intype, write_pydata(graph))
Test import/export of plain python tuple
625941bb046cf37aa974cbf6
def show_line( self, in_line ): <NEW_LINE> <INDENT> aa_list = [ 'ALA', 'CYS', 'ASP', 'GLU', 'PHE', 'GLY', 'HIS', 'ILE', 'LYS', 'LEU', 'MET', 'PRO', 'ARG', 'GLN', 'ASN', 'SER', 'THR', 'TRP', 'TYR', 'VAL' ] <NEW_LINE> water_list = [ 'HOH', 'WAT', 'H2O', 'TP3', 'TP5' ] <NEW_LINE> if not self.noatom and in_line[0:4] == 'ATOM': <NEW_LINE> <INDENT> if self.onlystandard and not in_line[17:20] in aa_list: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> <DEDENT> elif self.hetatm and in_line[0:6] == 'HETATM': <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> elif self.ter and in_line[0:3] == 'TER': <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> elif self.end and in_line[0:3] == 'END': <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> elif self.model and ( in_line[0:5] == 'MODEL' or in_line[0:6] == 'ENDMDL' ): <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> elif self.water and in_line[17:20] in water_list: <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> elif self.x00 and in_line[17:20] == 'X00': <NEW_LINE> <INDENT> return in_line <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Find and return matches according to input option flags.
625941bbd8ef3951e32433e9
def press_forced_error(self): <NEW_LINE> <INDENT> self.detail_context = 'forced_error' <NEW_LINE> self.update_scoreboard(self.winner, self.looser, self.match) <NEW_LINE> self.ids.game_manager.current = 'service'
Called when there is a forced error
625941bb45492302aab5e16c
def _health_k8s_readyz(self) -> Health: <NEW_LINE> <INDENT> health = Health(source=self._instance_id) <NEW_LINE> try: <NEW_LINE> <INDENT> if self.readyz(): <NEW_LINE> <INDENT> health.healthy("KubeAPI: readyz reports ready") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> health.warning("KubeAPI: readyz reports NOT ready.") <NEW_LINE> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> health.error(f"Could not retrieve readyz: {err}") <NEW_LINE> <DEDENT> return health
Check if kubernetes thinks the pod is healthy.
625941bb656771135c3eb71e
def compressive_distr(dens, meandens, sigma, offset=1.5, sigma2=None, secondscale=0.8, rescale=True): <NEW_LINE> <INDENT> if sigma2 is None: sigma2 = sigma <NEW_LINE> distr = np.exp(-((dens-meandens)*ln10)**2/(2.*sigma**2)) + np.exp(-((dens-(meandens+offset/ln10))*ln10)**2/(2.*(sigma2)**2))*secondscale <NEW_LINE> if rescale: <NEW_LINE> <INDENT> distr_mean = (dens*distr).sum()/distr.sum() <NEW_LINE> delta = distr_mean-meandens <NEW_LINE> return compressive_distr(dens, meandens-delta, sigma, offset=offset, sigma2=sigma2, secondscale=secondscale, rescale=False) <NEW_LINE> <DEDENT> return distr/distr.sum()
Two lognormals stuck together. offset is in ln units (log-base-e). For Mach 3, secondscale = 0.8, offset = 1.5. For Mach 10, see federrath_mach10_rescaled_massweighted_fitted: offset = 1.9, secondscale = 1.2, sigma2 = 0.61 sigma
625941bb956e5f7376d70d24
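Evaluating the two-lognormal mixture on a log10-density grid, assuming the function above is in scope and that ln10 is np.log(10) (which matches the base conversion in the exponents):

import numpy as np

ln10 = np.log(10)
dens = np.linspace(-4, 4, 801)  # grid of log10(density)
distr = compressive_distr(dens, meandens=0.0, sigma=1.0,
                          offset=1.5, secondscale=0.8, rescale=False)
print(distr.sum())            # 1.0 -- the result comes back normalised
print(dens[distr.argmax()])   # peak sits near the first lognormal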
def boot_linux(self, rootfs=None, bootargs=""): <NEW_LINE> <INDENT> common.print_bold("\n===== Booting linux for %s =====" % self.model) <NEW_LINE> self.sendline('fdt addr $fdt_addr') <NEW_LINE> self.expect(self.uprompt) <NEW_LINE> self.sendline('fdt get value bcm_bootargs /chosen bootargs') <NEW_LINE> self.expect(self.uprompt) <NEW_LINE> self.sendline('setenv bootargs "$bcm_bootargs %s"' % bootargs) <NEW_LINE> self.expect(self.uprompt) <NEW_LINE> self.sendline( "setenv bootcmd 'fatload mmc 0 ${kernel_addr_r} %s; bootm ${kernel_addr_r} - ${fdt_addr}; booti ${kernel_addr_r} - ${fdt_addr}'" % getattr(self, 'kernel_file', 'uImage')) <NEW_LINE> self.expect(self.uprompt) <NEW_LINE> self.sendline('saveenv') <NEW_LINE> self.expect(self.uprompt) <NEW_LINE> self.sendline('boot') <NEW_LINE> self.delaybetweenchar = None
This method boots the RPi's OS. :param rootfs: Indicates the rootsfs image path if needs to be loaded (parameter to be used at later point), defaults to None. :type rootfs: NA :param bootargs: Indicates the boot parameters to be specified if any (parameter to be used at later point), defaults to empty string "". :type bootargs: string
625941bb24f1403a92600a15
def validate_schema_version(configs): <NEW_LINE> <INDENT> migrations_version = run_script('get_db_version.py', configs=configs) <NEW_LINE> db_version = run_psql_command( 'SELECT version_num FROM alembic_version', 'cloudify_db_name', logger, ) <NEW_LINE> migrations_version = migrations_version.strip() <NEW_LINE> db_version = db_version.strip() <NEW_LINE> if migrations_version != db_version: <NEW_LINE> <INDENT> raise ValidationError( 'Database schema version mismatch: this manager expects schema ' 'revision {0} but the database is {1})' .format(migrations_version, db_version))
Check that the database schema version is the same as the current manager's migrations version.
625941bb63b5f9789fde6f91
def build_rnn_blocks(self): <NEW_LINE> <INDENT> with tf.variable_scope("Encoder"): <NEW_LINE> <INDENT> for i in range(len(self._gru_fms)): <NEW_LINE> <INDENT> self.rnn_blocks.append(ConvGRUCell(num_filter=self._gru_filter[i], b_h_w=(self._batch, self._gru_fms[i], self._gru_fms[i]), h2h_kernel=self._h2h_kernel[i], i2h_kernel=self._i2h_kernel[i], name="e_cgru_" + str(i), chanel=self._gru_in_chanel[i]))
The first rnn changes input channels: input (b, 180, 180, 8), output (b, 180, 180, 64), so set the channel parameter to define gru i2h. Other rnn cells keep the input channel. :return:
625941bb187af65679ca4fc9
def adapt_keys(self): <NEW_LINE> <INDENT> return list(self.map.keys())
Returns a list containing any keys (names) defined by this adapter.
625941bb442bda511e8be2d1
def __str__(self): <NEW_LINE> <INDENT> if self.prettyprint: <NEW_LINE> <INDENT> self.indent(self.xml) <NEW_LINE> <DEDENT> return etree.tostring(self.xml, encoding=self.encoding)
Return the XML as string.
625941bb187af65679ca4fca
def imshow(self, var_name, ax=None, *args, **kwargs): <NEW_LINE> <INDENT> t = kwargs.pop('t', None) <NEW_LINE> mask_var = kwargs.pop('mask_var', None) <NEW_LINE> mask_thold = kwargs.pop('mask_thold', None) <NEW_LINE> try: <NEW_LINE> <INDENT> title = self.data.variables[var_name].long_name <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> title = var_name <NEW_LINE> <DEDENT> title = kwargs.pop('title', title) <NEW_LINE> allow_colorbar = kwargs.pop('allow_colorbar', False) <NEW_LINE> ax = self._get_axes(ax) <NEW_LINE> ax.set_aspect(aspect='equal', adjustable='box-forced') <NEW_LINE> xx, yy = np.meshgrid(self.x, self.y) <NEW_LINE> z = self.get_masked_data(var_name, t=t, mask_var=mask_var, mask_thold=mask_thold) <NEW_LINE> im = ax.imshow(z, cmap=kwargs.pop('cmap', default_cmaps.get(var_name)), origin=kwargs.pop('origin', 'lower'), extent=kwargs.pop('extent', [self.node_x.min(), self.node_x.max(), self.node_y.min(), self.node_y.max()]), **kwargs) <NEW_LINE> ax.set_xlabel('X (km)') <NEW_LINE> ax.set_ylabel('Y (km)') <NEW_LINE> if title is not None: <NEW_LINE> <INDENT> ax.set_title(title) <NEW_LINE> <DEDENT> if allow_colorbar: <NEW_LINE> <INDENT> cbar = plt.colorbar(im, ax=ax, orientation='horizontal', shrink=0.6) <NEW_LINE> try: <NEW_LINE> <INDENT> cbar.set_label(self.data.variables[var_name].units) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return im
Plot mapview of PISM data Parameters ---------- var_name : str Variable name to plot ax : matplotlib axes Axes where data are plotted t : float, optional When the PISM data contains time dimension, t is required mask_var : str, optional Variable to create mask mask_thold : float, optional Variable threshold to create mask title : str, optional Title, default is long_name of the variable, use var_name if long_name does not exist, set None to disable allow_colorbar : bool, optional If True, show colorbar, default False For other parameters, see matplotlib doc
625941bb851cf427c661a3be
def generate_select_direct_fields(*args): <NEW_LINE> <INDENT> multi_field_names = set(args) <NEW_LINE> def select_direct_fields(querydict, queryset, **kwargs): <NEW_LINE> <INDENT> field_names = set( field.name for field in queryset.model._meta.get_fields() ) <NEW_LINE> for key, value in querydict.items(): <NEW_LINE> <INDENT> if key in multi_field_names: <NEW_LINE> <INDENT> filter_value = { key+'__in': querydict.getlist(key) } <NEW_LINE> queryset = queryset.filter(**filter_value) <NEW_LINE> <DEDENT> elif key in field_names: <NEW_LINE> <INDENT> queryset = queryset.filter(**{ key:value }) <NEW_LINE> <DEDENT> <DEDENT> return queryset <NEW_LINE> <DEDENT> return select_direct_fields
Simple filtering on equality and inclusion of fields Any given field that is included in the class's multi_field_names is tested against any of potentially multiple arguments given in the request. Any other (existing) field is tested for equality with the given value.
625941bb498bea3a759b995d
def nativeDeleteSelected(self): <NEW_LINE> <INDENT> gview = self.activeView() <NEW_LINE> if gview: <NEW_LINE> <INDENT> gview.deleteSelected()
TOWRITE
625941bb26068e7796caeb85
def _msg_type(self, value): <NEW_LINE> <INDENT> if type(value) is str: <NEW_LINE> <INDENT> return ord(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value
Convert message type from str to int, if necessary.
625941bb2ae34c7f2600cfde
def _create_from_serialized(states, initial_state, alphabet, transitions, state_groupings, validate=True, **attributes): <NEW_LINE> <INDENT> states = tuple(states) <NEW_LINE> if type(alphabet) is not Alphabet: <NEW_LINE> <INDENT> alphabet = Alphabet(*alphabet) <NEW_LINE> <DEDENT> transitions = tuple(map(lambda edge: Transition(*edge), transitions)) <NEW_LINE> partitionings = tuple(map(lambda grouping: Partitioning(*[Observation(*group) for group in grouping]), state_groupings)) <NEW_LINE> return MultiplayerGame(states, initial_state, alphabet, transitions, partitionings, False, validate, **attributes)
Create a new game from serialized data and validate it
625941bb07d97122c4178738
def load_model(fname='iasp91'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return _MODEL_CACHE[fname] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> fname_key = fname <NEW_LINE> if fname == 'iasp91': <NEW_LINE> <INDENT> fname = resource_filename('rf', 'data/iasp91.dat') <NEW_LINE> <DEDENT> values = np.loadtxt(fname, unpack=True) <NEW_LINE> try: <NEW_LINE> <INDENT> z, vp, vs, n = values <NEW_LINE> n.astype(int) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> n = None <NEW_LINE> z, vp, vs = values <NEW_LINE> <DEDENT> _MODEL_CACHE[fname_key] = model = SimpleModel(z, vp, vs, n) <NEW_LINE> return model
Load model from file. :param fname: path to model file or 'iasp91' :return: `SimpleModel` instance The model file should have 4 columns with depth, vp, vs, n. The model file for iasp91 starts like this:: #IASP91 velocity model #depth vp vs n 0.00 5.800 3.360 0 0.00 5.800 3.360 0 10.00 5.800 3.360 4
625941bb9f2886367277a73d
def GetFixedImageMask(self): <NEW_LINE> <INDENT> return _itkImageToImageMetricPython.itkImageToImageMetricIUC3IUC3_GetFixedImageMask(self)
GetFixedImageMask(self) -> itkSpatialObject3
625941bba17c0f6771cbdf00
def fermat(number): <NEW_LINE> <INDENT> if isOdd(number): <NEW_LINE> <INDENT> a = math.floor(math.sqrt(number)) <NEW_LINE> bsq = a*a - number <NEW_LINE> while not isSquare(bsq): <NEW_LINE> <INDENT> a +=1 <NEW_LINE> bsq = a*a - number <NEW_LINE> <DEDENT> result = a - math.sqrt(bsq) <NEW_LINE> return int(result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1
Returns one of the factors of number
625941bbde87d2750b85fc3b
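The isOdd and isSquare helpers are not in the record; with hypothetical definitions in place, Fermat's method can be checked on N = 5959 = 59 * 101:

import math

def isOdd(n):
    # Hypothetical helper, assumed by the record.
    return n % 2 == 1

def isSquare(n):
    # Hypothetical helper: non-negative perfect-square test.
    if n < 0:
        return False
    r = math.isqrt(n)
    return r * r == n

# Fermat walks a upward from floor(sqrt(N)) until a^2 - N is a perfect
# square b^2, giving N = (a - b)(a + b). For N = 5959: a = 77, 78, 79
# fail; a = 80 gives 6400 - 5959 = 441 = 21^2, so it returns 80 - 21 = 59.
print(fermat(5959))  # -> 59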
def load_case_day_mapping(case_day_file): <NEW_LINE> <INDENT> patient_order = [] <NEW_LINE> patient_days = {} <NEW_LINE> patient_cuttimes = {} <NEW_LINE> loaded_list = load_list(case_day_file) <NEW_LINE> for idx in range(len(loaded_list)): <NEW_LINE> <INDENT> split_line = loaded_list[idx].split(',') <NEW_LINE> if len(split_line) == 3: <NEW_LINE> <INDENT> if split_line[0] not in patient_order: <NEW_LINE> <INDENT> patient_order.append(split_line[0]) <NEW_LINE> patient_days[split_line[0]] = [] <NEW_LINE> patient_cuttimes[split_line[0]] = [] <NEW_LINE> <DEDENT> patient_days[split_line[0]].append(split_line[1]) <NEW_LINE> patient_cuttimes[split_line[0]].append(float(split_line[2])) <NEW_LINE> <DEDENT> <DEDENT> return [patient_order, patient_days, patient_cuttimes]
Loads data from a file that contains each case day in the lab_739 dataset. That file was created with the one_time_scripts.py determine_case_times()
625941bb0fa83653e4656e69
def RunSshCmdWithStdin(cluster_name, node, basecmd, port, data, debug=False, verbose=False, use_cluster_key=False, ask_key=False, strict_host_check=False, ensure_version=False): <NEW_LINE> <INDENT> cmd = [basecmd] <NEW_LINE> if debug: <NEW_LINE> <INDENT> cmd.append("--debug") <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> cmd.append("--verbose") <NEW_LINE> <DEDENT> if ensure_version: <NEW_LINE> <INDENT> all_cmds = _EnsureCorrectGanetiVersion(cmd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> all_cmds = [cmd] <NEW_LINE> <DEDENT> if port is None: <NEW_LINE> <INDENT> port = netutils.GetDaemonPort(constants.SSH) <NEW_LINE> <DEDENT> srun = SshRunner(cluster_name) <NEW_LINE> scmd = srun.BuildCmd(node, constants.SSH_LOGIN_USER, utils.ShellQuoteArgs( utils.ShellCombineCommands(all_cmds)), batch=False, ask_key=ask_key, quiet=False, strict_host_check=strict_host_check, use_cluster_key=use_cluster_key, port=port) <NEW_LINE> tempfh = tempfile.TemporaryFile() <NEW_LINE> try: <NEW_LINE> <INDENT> tempfh.write(serializer.DumpJson(data)) <NEW_LINE> tempfh.seek(0) <NEW_LINE> result = utils.RunCmd(scmd, interactive=True, input_fd=tempfh) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tempfh.close() <NEW_LINE> <DEDENT> if result.failed: <NEW_LINE> <INDENT> raise errors.OpExecError("Command '%s' failed: %s" % (result.cmd, result.fail_reason))
Runs a command on a remote machine via SSH and provides input in stdin. @type cluster_name: string @param cluster_name: Cluster name @type node: string @param node: Node name @type basecmd: string @param basecmd: Base command (path on the remote machine) @type port: int @param port: The SSH port of the remote machine or None for the default @param data: JSON-serializable input data for script (passed to stdin) @type debug: bool @param debug: Enable debug output @type verbose: bool @param verbose: Enable verbose output @type use_cluster_key: bool @param use_cluster_key: See L{ssh.SshRunner.BuildCmd} @type ask_key: bool @param ask_key: See L{ssh.SshRunner.BuildCmd} @type strict_host_check: bool @param strict_host_check: See L{ssh.SshRunner.BuildCmd}
625941bb63f4b57ef0000fce
def get_meta_content(self, doc, meta_name): <NEW_LINE> <INDENT> meta = self.parser.css_select(doc, meta_name) <NEW_LINE> if meta: <NEW_LINE> <INDENT> return meta[0].xpath + '/@content' <NEW_LINE> <DEDENT> return ''
Extract a given meta content from document. Example metaNames: "meta[name=description]" "meta[name=keywords]" "meta[property=og:type]"
625941bbfff4ab517eb2f2e6
def get_template_prefix(self, multiple_subs=False, multiple_objs=False): <NEW_LINE> <INDENT> SINGULAR_KEY = 'sing' <NEW_LINE> PLURAL_KEY = 'plur' <NEW_LINE> pfx = self.slug <NEW_LINE> sub = 'sub_%s' % (PLURAL_KEY if multiple_subs else SINGULAR_KEY) <NEW_LINE> obj = 'obj_%s' % (PLURAL_KEY if multiple_objs else SINGULAR_KEY) <NEW_LINE> return 'notification/%s/%s_%s/' % (pfx, sub, obj)
Return template name prefix based on plurality of subject and objects.
625941bb0a366e3fb873e6c4
def down_sweep(self): <NEW_LINE> <INDENT> x = self.down <NEW_LINE> while x != self: <NEW_LINE> <INDENT> yield x <NEW_LINE> x = x.down <NEW_LINE> <DEDENT> return
Does a down sweep over nodes in the doubly linked list.
625941bb26068e7796caeb86
def get_bucket_with_http_info(self, bucket_id, storage_account_id, **kwargs): <NEW_LINE> <INDENT> all_params = ['bucket_id', 'storage_account_id'] <NEW_LINE> all_params.append('async_req') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in six.iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_bucket" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('bucket_id' not in params or params['bucket_id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `bucket_id` when calling `get_bucket`") <NEW_LINE> <DEDENT> if ('storage_account_id' not in params or params['storage_account_id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `storage_account_id` when calling `get_bucket`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> if 'bucket_id' in params: <NEW_LINE> <INDENT> path_params['bucket-id'] = params['bucket_id'] <NEW_LINE> <DEDENT> if 'storage_account_id' in params: <NEW_LINE> <INDENT> path_params['storage-account-id'] = params['storage_account_id'] <NEW_LINE> <DEDENT> query_params = [] <NEW_LINE> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) <NEW_LINE> auth_settings = ['Bearer'] <NEW_LINE> return self.api_client.call_api( '/backend/rest/storage-accounts/{storage-account-id}/buckets/{bucket-id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Bucket', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
Get detailed stats for the bucket # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_bucket_with_http_info(bucket_id, storage_account_id, async_req=True) >>> result = thread.get() :param async_req bool :param int bucket_id: bucket-id (required) :param int storage_account_id: storage-account-id (required) :return: Bucket If the method is called asynchronously, returns the request thread.
625941bba934411ee3751547
def configure(self, query: str, option: str, *, autoupdate=False): <NEW_LINE> <INDENT> self._close_ad() <NEW_LINE> query = query.lower() <NEW_LINE> if query in self.__selections: <NEW_LINE> <INDENT> self.__selections[query].configure(self.page, option) <NEW_LINE> <DEDENT> elif query in self.__dropdowns: <NEW_LINE> <INDENT> self.__dropdowns[query].configure(self.page, option) <NEW_LINE> <DEDENT> elif query in self.__splits: <NEW_LINE> <INDENT> self.__splits[query].configure(self.page, option) <NEW_LINE> <DEDENT> elif query in self.__switches: <NEW_LINE> <INDENT> options = [o.lower() for o in self.list_options(query)] <NEW_LINE> if option.lower() not in options: <NEW_LINE> <INDENT> raise fangraphs.exceptions.InvalidFilterOption(option) <NEW_LINE> <DEDENT> if option != self.current_option(query)[0].title(): <NEW_LINE> <INDENT> self.page.click(self.__switches[query]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise fangraphs.exceptions.InvalidFilterQuery(query) <NEW_LINE> <DEDENT> if autoupdate: <NEW_LINE> <INDENT> self.update() <NEW_LINE> <DEDENT> self._refresh_parser()
Configures a filter query to a specified option. :param query: The filter query to be configured :param option: The option to set the filter query to :param autoupdate: If ``True``, :py:meth:`update` will be called following configuration :raises FanGraphs.exceptions.InvalidFilterQuery: Invalid argument ``query``
625941bb925a0f43d2549d20
def _calculate_distances(self, new_point): <NEW_LINE> <INDENT> new_point = np.resize(new_point, (self.n, new_point.shape[0])) <NEW_LINE> euclidean_distance = np.sum((self.data[:,0:-1] - new_point)**2, axis=1) <NEW_LINE> return euclidean_distance
Calculate the squared euclidean distance between a new point and all points in self.data Parameters ---------- new_point : (numpy array) An array containing the new point. Returns ------- euclidean_distance : (numpy array) An array containing the squared distances between the new point and all points in the model.
625941bb99fddb7c1c9de23f
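A standalone sketch of the distance computation above: tile the query point to the shape of the data block (minus its trailing label column) and reduce along axis 1.

import numpy as np

data = np.array([[0.0, 0.0, 0],    # last column is the label,
                 [3.0, 4.0, 1],    # mirroring the record's self.data
                 [6.0, 8.0, 0]])
new_point = np.array([0.0, 0.0])
tiled = np.resize(new_point, (data.shape[0], new_point.shape[0]))
sq_dist = np.sum((data[:, 0:-1] - tiled) ** 2, axis=1)
print(sq_dist)  # [  0.  25. 100.] -- squared distances, no sqrt taken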
def _get(self, request, params=dict(), limit=None): <NEW_LINE> <INDENT> responses = list() <NEW_LINE> response = self._get_request(request, params, limit) <NEW_LINE> responses.append(response) <NEW_LINE> payload = response.json() <NEW_LINE> if 'paging' in payload.keys(): <NEW_LINE> <INDENT> pages = int(payload['paging']['pages']) <NEW_LINE> limit = int(payload['paging']['limit']) <NEW_LINE> for i in range(1, pages, 1): <NEW_LINE> <INDENT> params['offset'] = str(int(i) * limit) <NEW_LINE> response_page = self._get_request(request, params, limit) <NEW_LINE> responses.append(response_page) <NEW_LINE> <DEDENT> <DEDENT> return responses
GET Operation that supports paging for FMC REST API. In case of authentication issues session will be refreshed :param request: URL of request that should be performed :param params: dict of parameters for http request :param limit: set custom limit for paging. If not set, api will default to 25 :return: list of requests.Response objects
625941bb7047854f462a12b9
def test_lists_deep_comparison_should_be_unequal(self): <NEW_LINE> <INDENT> actual = [{'key': Decimal(5.5)}, {'key': Decimal(5.4)}] <NEW_LINE> expected = [{'key': Decimal(5.4)}, {'key': Decimal(5.5)}] <NEW_LINE> self.assertion._builtin.should_be_equal = mock.Mock(side_effect=AssertionError()) <NEW_LINE> with self.assertRaises(AssertionError) as context: <NEW_LINE> <INDENT> self.assertion.lists_deep_compare_should_be_equal(actual, expected, 'key') <NEW_LINE> <DEDENT> self.assertEqual(context.exception.message, '')
Lists deep comparison should compare unequally.
625941bbb7558d58953c4dc7
def _push_assign_event(self, event): <NEW_LINE> <INDENT> context = self._stack[-1] <NEW_LINE> value, value_event = event.value, event.value_event <NEW_LINE> if isinstance(event.name, six.string_types): <NEW_LINE> <INDENT> value_id = self.object_tracker.get_id(value) <NEW_LINE> if value_id and value_id in context.output_table: <NEW_LINE> <INDENT> source = context.output_table[value_id] <NEW_LINE> <DEDENT> elif value_event and value_event in context.event_table: <NEW_LINE> <INDENT> source = context.event_table[value_event] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> source = None <NEW_LINE> <DEDENT> if source is not None: <NEW_LINE> <INDENT> context.variable_table[event.name] = source <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i, name in enumerate(event.name): <NEW_LINE> <INDENT> value_id = self.object_tracker.get_id(value[i]) <NEW_LINE> if value_id and value_id in context.output_table: <NEW_LINE> <INDENT> source = context.output_table[value_id] <NEW_LINE> <DEDENT> elif value_event and value_event in context.event_table: <NEW_LINE> <INDENT> src, src_port = context.event_table[value_event] <NEW_LINE> source = (src, src_port + '.' + str(i)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> source = None <NEW_LINE> <DEDENT> if source is not None: <NEW_LINE> <INDENT> context.variable_table[name] = source
Update variable table for variable assign event.
625941bbd164cc6175782bfb
def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> comment = Comment.objects.get(pk=kwargs['pk']) <NEW_LINE> <DEDENT> except Comment.DoesNotExist: <NEW_LINE> <INDENT> return Response( {"message": "Comment not found"}, status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> self.queryset = CommentHistory.objects.filter(parent_comment=comment) <NEW_LINE> return generics.ListAPIView.list(self, request, *args, **kwargs)
Overrides the default GET request from ListAPIView. Returns all comment edits for a particular comment. :param request: :param args: :param kwargs: :return: HTTP Code 200 :return: Response
625941bb50812a4eaa59c1d1
def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.kwargs = kwargs <NEW_LINE> self.request = request <NEW_LINE> if self.owner_object: <NEW_LINE> <INDENT> owner_object = self.owner_object['class'].objects.get( pk=kwargs[self.owner_object['pk']] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> owner_object = self.get_object().get_owner_object() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> owner_object = False <NEW_LINE> <DEDENT> <DEDENT> if owner_object and owner_object.user != self.request.user: <NEW_LINE> <INDENT> return HttpResponseForbidden('You are not allowed to access this object') <NEW_LINE> <DEDENT> return super(WgerFormMixin, self).dispatch(request, *args, **kwargs)
Custom dispatch method. This basically only checks ownership of editable/deletable objects and returns an HttpResponseForbidden response if the user is not the owner.
625941bbb57a9660fec3372d
def write_templates(self): <NEW_LINE> <INDENT> for name, content in self.site.templates.items(): <NEW_LINE> <INDENT> if name.startswith("_"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> path = os.path.join(self.base_path, os.path.dirname(name)) <NEW_LINE> create_path(path) <NEW_LINE> path = os.path.join(self.base_path, name) <NEW_LINE> write(content, path)
write template files which are not prefixed with _
625941bb3eb6a72ae02ec381
def lineWrapColumnOrWidth(self): <NEW_LINE> <INDENT> return 0
lineWrapColumnOrWidth(self) -> int
625941bb76e4537e8c351525
def last_location(self): <NEW_LINE> <INDENT> return self.event_set.order_by("-date").first().location
Returns the location recorded in the most recent event
625941bbe8904600ed9f1dd6
def sem_duplicatas(lista: list) -> list: <NEW_LINE> <INDENT> return list(set(lista))
Removes duplicates from the list using the set() function :param lista: input :type lista: list :return: list without duplicates :rtype: list
625941bb6fb2d068a760ef47
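Note that set() does not preserve element order. If order matters, dict.fromkeys keeps first occurrences (insertion order is guaranteed since Python 3.7); a minimal alternative sketch:

def sem_duplicatas_ordenada(lista: list) -> list:
    # Order-preserving variant of the function above.
    return list(dict.fromkeys(lista))

print(sem_duplicatas_ordenada([3, 1, 3, 2, 1]))  # [3, 1, 2]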
def areStrictlyCompatibleForMul(self, *args): <NEW_LINE> <INDENT> return _MEDCoupling.MEDCouplingTimeDiscretization_areStrictlyCompatibleForMul(self, *args)
areStrictlyCompatibleForMul(self, MEDCouplingTimeDiscretization other) -> bool 1
625941bbbe7bc26dc91cd4b2
def is_valid_channel_total_withdraw(channel_total_withdraw: TokenAmount) -> bool: <NEW_LINE> <INDENT> return channel_total_withdraw <= UINT256_MAX
Sanity check for the channel's total withdraw. The channel's total deposit is: p1.total_deposit + p2.total_deposit The channel's total withdraw is: p1.total_withdraw + p2.total_withdraw The smart contract forces: - The channel's total deposit to fit in a UINT256. - The channel's withdraw must be in the range [0,channel_total_deposit]. Because the `total_withdraw` must be in the range [0,channel_deposit], and the maximum value for channel_deposit is UINT256, the overflow below must never happen, otherwise there is a smart contract bug.
625941bb851cf427c661a3bf
def cwd(): <NEW_LINE> <INDENT> cwd = os.environ.get("BE_CWD") <NEW_LINE> if cwd and not os.path.isdir(cwd): <NEW_LINE> <INDENT> sys.stderr.write("ERROR: %s is not a directory" % cwd) <NEW_LINE> sys.exit(lib.USER_ERROR) <NEW_LINE> <DEDENT> return cwd or os.getcwd().replace("\\", "/")
Return the be current working directory
625941bb5fcc89381b1e1571
def __init__(self, ai_settings, screen): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.ai_settings = ai_settings <NEW_LINE> self.image = pygame.image.load("./images/ship.bmp") <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.screen_rect = screen.get_rect() <NEW_LINE> self.moving_right = False <NEW_LINE> self.moving_left = False <NEW_LINE> self.rect.centerx = self.screen_rect.centerx <NEW_LINE> self.rect.bottom = self.screen_rect.bottom <NEW_LINE> self.center = float(self.rect.centerx)
initialize the ship and its position
625941bb30dc7b7665901817
def get_size(self) -> int: <NEW_LINE> <INDENT> return self._size
Return the size or strength of this part. :return: The size or strength of this part.
625941bb2c8b7c6e89b35670
def on_connection_open_error(self, _unused_connection, err): <NEW_LINE> <INDENT> reconnect_delay = self._get_reconnect_delay() <NEW_LINE> LOGGER.error('Connection open failed, reopening in %d seconds: %s', reconnect_delay, err) <NEW_LINE> self._connection.ioloop.call_later(reconnect_delay, self.reconnect)
This method is called by pika if the connection to RabbitMQ can't be established. :param pika.SelectConnection _unused_connection: The connection :param Exception err: The error
625941bb2eb69b55b151c758
def get_last_open_log(student): <NEW_LINE> <INDENT> logs = StudentSession.objects.filter(student=student, sign_out_timestamp=None) <NEW_LINE> if logs: <NEW_LINE> <INDENT> return logs.latest('sign_in_timestamp')
Get last StudentSession for Student if student isn't logged out.
625941bb5fc7496912cc3833
def checkpoint_now(s): <NEW_LINE> <INDENT> global CHECKPOINT <NEW_LINE> CHECKPOINT = osmt.copy(s)
Save current state
625941bbac7a0e7691ed3f86
def test_copyright(self): <NEW_LINE> <INDENT> self.assertEqual(self.aps._get_copyright(), ('authors', '2015', 'Published by the American Physical Society'))
Check that Copyright is extracted.
625941bbab23a570cc25002d
def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.player_hands = '' <NEW_LINE> self.player_values = '' <NEW_LINE> self.interface_init() <NEW_LINE> self.game.bot.loop.create_task( self.async_init() )
initializer
625941bb379a373c97cfa9f8
def shufflenet_v2_arg_scope(is_training=True, weight_decay=0.00004, stddev=0.09, regularize_depthwise=False): <NEW_LINE> <INDENT> batch_norm_params = { 'is_training': is_training, 'center': True, 'scale': True, 'decay': 0.9997, 'epsilon': 0.001, } <NEW_LINE> weights_init = slim.variance_scaling_initializer() <NEW_LINE> regularizer = tf.contrib.layers.l2_regularizer(weight_decay) <NEW_LINE> if regularize_depthwise: <NEW_LINE> <INDENT> depthwise_regularizer = regularizer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> depthwise_regularizer = None <NEW_LINE> <DEDENT> with slim.arg_scope([slim.conv2d, slim.separable_conv2d], weights_initializer=weights_init, activation_fn=tf.nn.relu, normalizer_fn=slim.batch_norm): <NEW_LINE> <INDENT> with slim.arg_scope([slim.batch_norm], **batch_norm_params): <NEW_LINE> <INDENT> with slim.arg_scope([slim.conv2d], weights_regularizer=regularizer): <NEW_LINE> <INDENT> with slim.arg_scope([slim.separable_conv2d], weights_regularizer=depthwise_regularizer) as sc: <NEW_LINE> <INDENT> return sc
Defines the default ShufflenetV2 arg scope. Args: is_training: Whether or not we're training the model. weight_decay: The weight decay to use for regularizing the model. stddev: The standard deviation of the truncated normal weight initializer. regularize_depthwise: Whether or not apply regularization on depthwise. Returns: An `arg_scope` to use for the shufflenet v2 model.
625941bb0383005118ecf492
def top_errors_days(): <NEW_LINE> <INDENT> days = "\r\n3. %s:\r\n" % questions[2] <NEW_LINE> result = get_result(queries[2]) <NEW_LINE> for day in result: <NEW_LINE> <INDENT> days += day[0] + ' - ' + str(day[1]) + '% errors\r\n' <NEW_LINE> <DEDENT> return days
Get days with an error rate above 1%
625941bbd6c5a10208143ef5
def test_screensaver_missing_command(self): <NEW_LINE> <INDENT> addon.settings['display_method'] = '2' <NEW_LINE> addon.settings['power_method'] = '2' <NEW_LINE> turnoff = screensaver.TurnOffDialog('gui.xml', screensaver.addon_path(), 'default') <NEW_LINE> with self.assertRaises(SystemExit) as init: <NEW_LINE> <INDENT> turnoff.onInit() <NEW_LINE> <DEDENT> self.assertEqual(init.exception.code, 2) <NEW_LINE> time.sleep(2) <NEW_LINE> with self.assertRaises(SystemExit) as resume: <NEW_LINE> <INDENT> turnoff.resume() <NEW_LINE> <DEDENT> self.assertEqual(resume.exception.code, 2)
Test the screensaver when the configured command is missing
625941bb2ae34c7f2600cfdf
def test_add_object_existing(self): <NEW_LINE> <INDENT> self.dbus_mock.AddObject('/obj1', 'org.freedesktop.Test.Sub', {}, []) <NEW_LINE> self.assertRaises(dbus.exceptions.DBusException, self.dbus_mock.AddObject, '/obj1', 'org.freedesktop.Test.Sub', {}, []) <NEW_LINE> self.assertRaises(dbus.exceptions.DBusException, self.dbus_mock.AddObject, '/', 'org.freedesktop.Test.Other', {}, [])
try to add an existing object
625941bbdc8b845886cb53e2
def fit(self, X, y, groups=None): <NEW_LINE> <INDENT> self.regr_ = [clone(x) for x in self.regressors] <NEW_LINE> self.meta_regr_ = clone(self.meta_regressor) <NEW_LINE> kfold = check_cv(self.cv, y) <NEW_LINE> if isinstance(self.cv, int): <NEW_LINE> <INDENT> kfold.shuffle = self.shuffle <NEW_LINE> <DEDENT> meta_features = np.zeros((X.shape[0], len(self.regressors))) <NEW_LINE> for i, regr in enumerate(self.regressors): <NEW_LINE> <INDENT> for train_idx, holdout_idx in kfold.split(X, y, groups): <NEW_LINE> <INDENT> instance = clone(regr) <NEW_LINE> instance.fit(X[train_idx], y[train_idx]) <NEW_LINE> y_pred = instance.predict(X[holdout_idx]) <NEW_LINE> meta_features[holdout_idx, i] = y_pred <NEW_LINE> <DEDENT> <DEDENT> if self.store_train_meta_features: <NEW_LINE> <INDENT> self.train_meta_features_ = meta_features <NEW_LINE> <DEDENT> if self.use_features_in_secondary: <NEW_LINE> <INDENT> self.meta_regr_.fit(np.hstack((X, meta_features)), y) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.meta_regr_.fit(meta_features, y) <NEW_LINE> <DEDENT> for regr in self.regr_: <NEW_LINE> <INDENT> regr.fit(X, y) <NEW_LINE> <DEDENT> return self
Fit ensemble regressors and the meta-regressor. Parameters ---------- X : numpy array, shape = [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : numpy array, shape = [n_samples] Target values. groups : numpy array/None, shape = [n_samples] The group that each sample belongs to. This is used by specific folding strategies such as GroupKFold() Returns ------- self : object
625941bb462c4b4f79d1d57e
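This fit() closely mirrors mlxtend's StackingCVRegressor, so a usage sketch under that assumption (the surrounding class is not shown in the record):

import numpy as np
from sklearn.linear_model import Lasso, Ridge, LinearRegression
from mlxtend.regressor import StackingCVRegressor

X = np.random.RandomState(0).rand(100, 4)
y = X @ np.array([1.0, 2.0, 3.0, 4.0])

stack = StackingCVRegressor(regressors=(Lasso(), Ridge()),
                            meta_regressor=LinearRegression(),
                            cv=5)
stack.fit(X, y)  # out-of-fold predictions train the meta-regressor
print(stack.predict(X[:3]))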
def describe(node): <NEW_LINE> <INDENT> desc = "%s\n\n" % type(node).class_kind <NEW_LINE> if isinstance(node, compass_model.Array): <NEW_LINE> <INDENT> desc += "Shape\n%s\n\nType\n%s\n" % (node.shape, dtype_text(node.dtype)) <NEW_LINE> <DEDENT> elif isinstance(node, compass_model.Container): <NEW_LINE> <INDENT> desc += "%d items\n" % len(node) <NEW_LINE> <DEDENT> if not isinstance(node, compass_model.KeyValue): <NEW_LINE> <INDENT> handlers = node.store.gethandlers(node.key) <NEW_LINE> for h in handlers: <NEW_LINE> <INDENT> kv_node = h(node.store, node.key) <NEW_LINE> if isinstance(kv_node, compass_model.KeyValue): <NEW_LINE> <INDENT> num_keys = len(kv_node.keys) <NEW_LINE> if num_keys > 0: <NEW_LINE> <INDENT> desc += "\n%d %s\n" % (len(kv_node.keys), type(kv_node).class_kind) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return desc
Return a (possibly multi-line) text description of a node.
625941bb3cc13d1c6d3c7231
def solve_rotation_ap(u, v): <NEW_LINE> <INDENT> N = u.size <NEW_LINE> uv = np.stack([u, v], axis=1) <NEW_LINE> M = np.identity(N) <NEW_LINE> if uv[0, 0] < 0: <NEW_LINE> <INDENT> M[0, 0] = -1 <NEW_LINE> M[1, 1] = -1 <NEW_LINE> uv = M.dot(uv) <NEW_LINE> <DEDENT> for c in range(0, 2): <NEW_LINE> <INDENT> for r in range(N - 1, c, -1): <NEW_LINE> <INDENT> if uv[r, c] != 0: <NEW_LINE> <INDENT> theta = np.arctan2(uv[r, c], uv[r - 1, c]) <NEW_LINE> Mk = givens_rotation_matrix(r, r - 1, theta, N) <NEW_LINE> uv = Mk.dot(uv) <NEW_LINE> M = Mk.dot(M) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> theta = 2 * np.arctan2(uv[1, 1], uv[0, 1]) <NEW_LINE> logger.debug( "solve_rotation_ap: {d} degree rotation".format( d=180 * theta / np.pi)) <NEW_LINE> R = givens_rotation_matrix(0, 1, theta, N) <NEW_LINE> M_inverse = M.T <NEW_LINE> R = M_inverse.dot(R.dot(M)) <NEW_LINE> return R
Return the rotation matrix for the rotation in the plane defined by the vectors u and v across TWICE the angle between u and v. This algorithm uses the Aguilera-Perez Algorithm \cite{Aguilera} to generate the rotation matrix. The algorithm works basically as follows: Starting with the Nth component of u, rotate u towards the (N-1)th component until the Nth component is zero. Continue until u is parallel to the 0th basis vector. Next do the same with v until it only has none zero components in the first two dimensions. The result will be something like this: [[u0, 0, 0 ... 0], [v0, v1, 0 ... 0]] Now it is trivial to align u with v. Apply the inverse rotations to return to the original orientation. NOTE: The precision of this method is limited by sin, cos, and arctan functions.
625941bb1f5feb6acb0c4a02
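The givens_rotation_matrix helper the function relies on is not in the record; a minimal sketch consistent with how it is used above (the sign convention is an assumption):

import numpy as np

def givens_rotation_matrix(i, j, theta, N):
    # N x N identity with a 2 x 2 rotation by theta embedded in
    # rows/columns i and j.
    R = np.identity(N)
    c, s = np.cos(theta), np.sin(theta)
    R[i, i] = c
    R[j, j] = c
    R[i, j] = -s
    R[j, i] = s
    return R

# Sanity check in 2-D: rotate [1, 0] by 90 degrees in the (0, 1) plane.
print(givens_rotation_matrix(0, 1, np.pi / 2, 2) @ np.array([1.0, 0.0]))
# -> approximately [0., 1.]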
def __init__( self, *, ip_address: Optional[str] = None, port: Optional[int] = 53, **kwargs ): <NEW_LINE> <INDENT> super(TargetDnsServer, self).__init__(**kwargs) <NEW_LINE> self.ip_address = ip_address <NEW_LINE> self.port = port
:keyword ip_address: DNS server IP address. :paramtype ip_address: str :keyword port: DNS server port. :paramtype port: int
625941bba219f33f34628822
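Instantiation is straightforward; a small sketch grounded in the keyword-only signature above, assuming the Azure SDK model class is importable.

```python
# Both fields are optional keyword arguments; port defaults to 53.
dns = TargetDnsServer(ip_address='10.0.0.4')
print(dns.ip_address, dns.port)  # -> 10.0.0.4 53
```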
def detect_pid_alive(pid): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> proc = psutil.Process(pid) <NEW_LINE> if proc.status() == psutil.STATUS_ZOMBIE: <NEW_LINE> <INDENT> print('pid is zombie:', pid) <NEW_LINE> return False <NEW_LINE> <DEDENT> elif proc.status() == psutil.STATUS_DEAD: <NEW_LINE> <INDENT> print('pid is dead:', pid) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('pid is alive:', pid) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> except psutil.NoSuchProcess as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> print('pid does not exist:', pid) <NEW_LINE> return False
Check for the existence of a Unix pid; returns False if the process is missing, dead, or a zombie.
625941bba17c0f6771cbdf01
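A quick usage sketch, assuming detect_pid_alive is importable alongside psutil:

```python
import os

print(detect_pid_alive(os.getpid()))  # the current interpreter is alive -> True
print(detect_pid_alive(2 ** 22 - 1))  # almost certainly unused pid -> False
```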
def rect_prism(pmass, H, a, b, phic): <NEW_LINE> <INDENT> result = cq.Workplane("XY").box(a, b, H) <NEW_LINE> result.pmass = pmass <NEW_LINE> result = rotate_mesh(result, phic, 0, 0) <NEW_LINE> return result
Rectangular prism centered on the origin with height H and sides of length a and b extending along the x and y axes respectively when phic=0. Inputs ------ pmass : bool Indicates whether the mesh has positive mass H : float Total height of the prism a : float Length of the prism b : float Width of the prism phic : float Average angle of the prism, in radians Returns ------- result : CadQuery object The prism as a solid, with pmass attached as an attribute
625941bb82261d6c526ab350
def get_lambda_derivatives(self, lambda_vals): <NEW_LINE> <INDENT> derivs = 0 <NEW_LINE> for kfold in self.all_kfolds_data: <NEW_LINE> <INDENT> derivs += kfold.get_lambda_derivatives(lambda_vals) <NEW_LINE> <DEDENT> return derivs
Calculate the lambda derivatives, aggregating across all k folds.
625941bb460517430c39403b
def copy(self, port, location): <NEW_LINE> <INDENT> start = time.time() <NEW_LINE> print('LOG: Average transfer speed will be about 30-100MB/sec') <NEW_LINE> print('LOG: Initiating the copy process. Please wait, this could take a few minutes...') <NEW_LINE> if location == 'SJC': <NEW_LINE> <INDENT> ip = 'pxe-sja.cisco.com' <NEW_LINE> user = 'pxe' <NEW_LINE> password = 'pxe' <NEW_LINE> <DEDENT> elif location == 'SJA': <NEW_LINE> <INDENT> ip = 'pxe-sja.cisco.com' <NEW_LINE> user = 'pxe' <NEW_LINE> password = 'pxe' <NEW_LINE> <DEDENT> elif location == 'BGL': <NEW_LINE> <INDENT> ip = 'pxe-bgl.cisco.com' <NEW_LINE> user = 'pxe' <NEW_LINE> password = 'pxe' <NEW_LINE> <DEDENT> elif location == 'AST': <NEW_LINE> <INDENT> ip = 'pxe-ast.cisco.com' <NEW_LINE> user = 'pxe' <NEW_LINE> password = 'pxe' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown location: ' + location) <NEW_LINE> <DEDENT> s = pxssh.pxssh(timeout=3000) <NEW_LINE> s.login(ip, user, password) <NEW_LINE> s.sendline('cd /tftpboot/asa/scratch/kick') <NEW_LINE> s.sendline('tsunami') <NEW_LINE> s.sendline('connect ' + self.docker + ' ' + port) <NEW_LINE> s.sendline('set rate 128M') <NEW_LINE> s.sendline('set verbose no') <NEW_LINE> s.prompt() <NEW_LINE> s.sendline('get *') <NEW_LINE> s.expect('Transfer complete.*') <NEW_LINE> end = time.time() <NEW_LINE> howlong = (end - start) <NEW_LINE> my_file_name = file_name(self.file) <NEW_LINE> print('LOG: Copy complete. Total elapsed time is ' + str(howlong) + ' seconds') <NEW_LINE> print('LOG: Copy complete, please navigate to http://' + ip + '/scratch/kick/' + my_file_name + ' to pull your file') <NEW_LINE> path = 'http://' + ip + '/scratch/kick/' + my_file_name <NEW_LINE> return path
Purpose: Connects via pexpect (pxssh) to the remote site provided and initiates the client-side pull of the file via the tsunami server. Arguments: * self - docker-py connection * port - port the tsunami server is running on * location - three-character location identifier (SJC, SJA, BGL, or AST)
625941bb63d6d428bbe4439d
def mySum(L): <NEW_LINE> <INDENT> current = 0 <NEW_LINE> for i in L: <NEW_LINE> <INDENT> current += i <NEW_LINE> <DEDENT> return current
Input: a list L of numbers Output: sum of the numbers in L Be sure your procedure works for the empty list. Examples: >>> mySum([1,2,3,4]) 10 >>> mySum([3,5,10]) 18
625941bb7cff6e4e81117833
def aggregate_values_from_db(context, host, key_name): <NEW_LINE> <INDENT> aggrlist = aggregate.AggregateList.get_by_host( context.elevated(), host, key=key_name) <NEW_LINE> aggregate_vals = set(aggr.metadata[key_name] for aggr in aggrlist) <NEW_LINE> return aggregate_vals
Returns a set of values based on a metadata key for a specific host.
625941bb97e22403b379ce47
def addEntry(self, dictKey, dictVal): <NEW_LINE> <INDENT> hashBucket = self.buckets[dictKey%self.numBuckets] <NEW_LINE> for i in range(len(hashBucket)): <NEW_LINE> <INDENT> if hashBucket[i][0] == dictKey: <NEW_LINE> <INDENT> hashBucket[i] = (dictKey, dictVal) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> hashBucket.append((dictKey, dictVal))
Assumes dictKey is an int. Adds an entry, replacing the value if the key already exists.
625941bb73bcbd0ca4b2bf2b
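addEntry assumes a bucket-based hash table exposing self.buckets and self.numBuckets. A minimal sketch of the surrounding class; the class name and the getValue accessor are assumptions added for the demo.

```python
class IntDict:
    """Hypothetical host class: dictionary keyed on ints, backed by hash buckets."""

    def __init__(self, numBuckets):
        self.numBuckets = numBuckets
        self.buckets = [[] for _ in range(numBuckets)]  # each bucket holds (key, value) tuples

    def addEntry(self, dictKey, dictVal):
        # The method above, unchanged.
        hashBucket = self.buckets[dictKey % self.numBuckets]
        for i in range(len(hashBucket)):
            if hashBucket[i][0] == dictKey:
                hashBucket[i] = (dictKey, dictVal)  # overwrite existing key
                return
        hashBucket.append((dictKey, dictVal))

    def getValue(self, dictKey):
        # Assumed companion accessor: scan the key's bucket.
        for k, v in self.buckets[dictKey % self.numBuckets]:
            if k == dictKey:
                return v
        return None

d = IntDict(7)
d.addEntry(12, 'a')
d.addEntry(12, 'b')    # same key: the entry is replaced, not duplicated
print(d.getValue(12))  # -> 'b'
```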
def setup(hass, hass_config): <NEW_LINE> <INDENT> conf = hass_config[DOMAIN] <NEW_LINE> host = conf[CONF_HOST] <NEW_LINE> username = conf[CONF_USERNAME] <NEW_LINE> passwd = conf[CONF_PASSWORD] <NEW_LINE> ecoal_contr = ECoalController(host, username, passwd) <NEW_LINE> if ecoal_contr.version is None: <NEW_LINE> <INDENT> _LOGGER.error( "Unable to read controller status from %s@%s (wrong host/credentials)", username, host, ) <NEW_LINE> return False <NEW_LINE> <DEDENT> _LOGGER.debug("Detected controller version: %r @%s", ecoal_contr.version, host) <NEW_LINE> hass.data[DATA_ECOAL_BOILER] = ecoal_contr <NEW_LINE> switches = conf[CONF_SWITCHES][CONF_MONITORED_CONDITIONS] <NEW_LINE> load_platform(hass, "switch", DOMAIN, switches, hass_config) <NEW_LINE> sensors = conf[CONF_SENSORS][CONF_MONITORED_CONDITIONS] <NEW_LINE> load_platform(hass, "sensor", DOMAIN, sensors, hass_config) <NEW_LINE> return True
Set up the global ECoalController instance, shared by the sensors and switches.
625941bbbaa26c4b54cb0fd1
def get_modules(projectpath): <NEW_LINE> <INDENT> modules = [] <NEW_LINE> for i in os.listdir(projectpath): <NEW_LINE> <INDENT> path = os.path.join(projectpath, i) <NEW_LINE> if not os.path.isdir(path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if i[0] == ".": <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> modules.append(i) <NEW_LINE> <DEDENT> return modules
Get all the modules of the project, i.e. its immediate non-hidden subdirectories.
625941bb0a366e3fb873e6c5
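Usage is a one-liner; the path below is illustrative.

```python
# Immediate, non-hidden subdirectories of the project root:
print(get_modules('/path/to/project'))  # e.g. ['app', 'lib', 'tests']
```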
def __main__(): <NEW_LINE> <INDENT> template.format['txt'] = txttemplate() <NEW_LINE> template.format['md'] = mdtemplate() <NEW_LINE> template.format['html'] = htmltemplate() <NEW_LINE> template.format['json'] = jsontemplate()
Internal: Set up default templates.
625941bb31939e2706e4cd1d
def ms_pan(self, viewer, event, data_x, data_y): <NEW_LINE> <INDENT> if not self.canpan: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> x, y = viewer.get_last_win_xy() <NEW_LINE> if event.state == 'move': <NEW_LINE> <INDENT> data_x, data_y = self.get_new_pan(viewer, x, y, ptype=self._pantype) <NEW_LINE> viewer.panset_xy(data_x, data_y) <NEW_LINE> <DEDENT> elif event.state == 'down': <NEW_LINE> <INDENT> self.pan_set_origin(viewer, x, y, data_x, data_y) <NEW_LINE> self.pan_start(viewer, ptype=2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pan_stop(viewer) <NEW_LINE> <DEDENT> return True
A 'drag' or proportional pan, where the image is panned by 'dragging the canvas' up or down. The amount of the pan is proportional to the length of the drag.
625941bb45492302aab5e16e
def write(self, cr, uid, ids, vals, context=None): <NEW_LINE> <INDENT> for vehicle in self.browse(cr, uid, ids, context): <NEW_LINE> <INDENT> changes = [] <NEW_LINE> if 'model_id' in vals and vehicle.model_id.id != vals['model_id']: <NEW_LINE> <INDENT> value = self.pool.get('fleet.vehicle.model').browse(cr,uid,vals['model_id'],context=context).name <NEW_LINE> oldmodel = vehicle.model_id.name or _('None') <NEW_LINE> changes.append(_("Model: from '%s' to '%s'") %(oldmodel, value)) <NEW_LINE> <DEDENT> if 'driver_id' in vals and vehicle.driver_id.id != vals['driver_id']: <NEW_LINE> <INDENT> value = self.pool.get('res.partner').browse(cr,uid,vals['driver_id'],context=context).name <NEW_LINE> olddriver = (vehicle.driver_id.name) or _('None') <NEW_LINE> changes.append(_("Driver: from '%s' to '%s'") %(olddriver, value)) <NEW_LINE> <DEDENT> if 'state_id' in vals and vehicle.state_id.id != vals['state_id']: <NEW_LINE> <INDENT> value = self.pool.get('fleet.vehicle.state').browse(cr,uid,vals['state_id'],context=context).name <NEW_LINE> oldstate = vehicle.state_id.name or _('None') <NEW_LINE> changes.append(_("State: from '%s' to '%s'") %(oldstate, value)) <NEW_LINE> <DEDENT> if 'license_plate' in vals and vehicle.license_plate != vals['license_plate']: <NEW_LINE> <INDENT> old_license_plate = vehicle.license_plate or _('None') <NEW_LINE> changes.append(_("License Plate: from '%s' to '%s'") %(old_license_plate, vals['license_plate'])) <NEW_LINE> <DEDENT> if len(changes) > 0: <NEW_LINE> <INDENT> self.message_post(cr, uid, [vehicle.id], body=", ".join(changes), context=context) <NEW_LINE> <DEDENT> <DEDENT> vehicle_id = super(fleet_vehicle,self).write(cr, uid, ids, vals, context) <NEW_LINE> return True
This function writes an entry in the OpenChatter whenever important information on the vehicle changes, such as the model, the driver, the state of the vehicle, or its license plate.
625941bb046cf37aa974cbf8
def test_instantiation(self): <NEW_LINE> <INDENT> Archive.objects.create( name="My Test Archive", host="archive.example.com", policy="cdimage", transport="ssh", basedir="/var/tmp", username="testing", ssh_credentials=self.credentials)
We can instantiate an Archive.
625941bb656771135c3eb720
def get_type(o): <NEW_LINE> <INDENT> return type(o).__name__
Handy wrapper for logging purposes :param o: any object :return str: Nice type name
625941bb956e5f7376d70d26
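For example:

```python
print(get_type([1, 2, 3]))  # -> 'list'
print(get_type(None))       # -> 'NoneType'
```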
def make_monitoring_log(level, message, timestamp=None, to_logger=False): <NEW_LINE> <INDENT> level = level.lower() <NEW_LINE> if level not in ['debug', 'info', 'warning', 'error', 'critical']: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if to_logger: <NEW_LINE> <INDENT> logging.getLogger(ALIGNAK_LOGGER_NAME).debug("Monitoring log: %s / %s", level, message) <NEW_LINE> message = message.replace('\r', '\\r') <NEW_LINE> message = message.replace('\n', '\\n') <NEW_LINE> logger_ = logging.getLogger(MONITORING_LOGGER_NAME) <NEW_LINE> logging_function = getattr(logger_, level) <NEW_LINE> try: <NEW_LINE> <INDENT> message = message.decode('utf8', 'ignore') <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if timestamp: <NEW_LINE> <INDENT> st = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') <NEW_LINE> logging_function(message, extra={'my_date': st}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging_function(message) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return Brok({'type': 'monitoring_log', 'data': {'level': level, 'message': message}})
Function used to build the monitoring log. Emit a log message with the provided level to the monitoring log logger. Build a Brok typed as monitoring_log with the provided message. When to_logger is True, the information is sent to the Python logger, else a monitoring_log Brok is returned. The Brok is managed by the daemons to build an Event that will be logged by the Arbiter when it collects all the events. TODO: replace with dedicated brok for each event to log - really useful? :param level: log level as defined in logging :type level: str :param message: message to send to the monitoring log logger :type message: str :param to_logger: when set, send to the logger, else return a brok :type to_logger: bool :param timestamp: if set, force the log event timestamp :type timestamp: float :return: a monitoring_log Brok :rtype: alignak.brok.Brok
625941bb30bbd722463cbc71
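A short usage sketch for the two modes; the alert strings are illustrative and an Alignak environment is assumed.

```python
# Default mode: build and return a monitoring_log Brok.
brok = make_monitoring_log('info', 'HOST ALERT: server01;UP;HARD;1;OK')

# to_logger mode: emit straight to the monitoring logger and return True.
ok = make_monitoring_log('warning', 'SERVICE ALERT: server01;http;WARNING', to_logger=True)

# Unknown levels are rejected outright.
assert make_monitoring_log('verbose', 'ignored') is False
```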
def test_zero_kelvin(): <NEW_LINE> <INDENT> npt.assert_almost_equal(fahr_to_kelvin(-459.67), 0.0, decimal=2)
Test that absolute zero (-459.67 F) converts to zero kelvin.
625941bb6fece00bbac2d5ea
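The test assumes a fahr_to_kelvin helper and numpy.testing imported as npt. A minimal sketch of the helper consistent with the assertion, since absolute zero is -459.67 degrees Fahrenheit:

```python
def fahr_to_kelvin(fahr):
    """Convert degrees Fahrenheit to kelvin."""
    return (fahr - 32.0) * 5.0 / 9.0 + 273.15

print(fahr_to_kelvin(-459.67))  # -> 0.0 (absolute zero), as the test asserts
```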
def setupGui(self): <NEW_LINE> <INDENT> self.setWindowTitle('PhoPlay') <NEW_LINE> self.availableMimeTypes = Phonon.BackendCapabilities.availableMimeTypes() <NEW_LINE> self.openAction.triggered.connect(self.openFile) <NEW_LINE> self.exitAction.triggered.connect(qApp.quit) <NEW_LINE> self.infoAction.triggered.connect(self.showInfoDialog) <NEW_LINE> self.aboutAction.triggered.connect(self.showAboutDialog) <NEW_LINE> self.stopButton.clicked.connect(self.stop) <NEW_LINE> self.playButton.clicked.connect(self.play) <NEW_LINE> self.pauseButton.clicked.connect(self.pause) <NEW_LINE> self.mediaObject = Phonon.MediaObject(self) <NEW_LINE> self.mediaObject.setTickInterval(100) <NEW_LINE> self.mediaObject.tick.connect(self.tick) <NEW_LINE> self.mediaObject.finished.connect(self.finished) <NEW_LINE> self.mediaObject.stateChanged.connect(self.catchStateChanged) <NEW_LINE> self.mediaObject.totalTimeChanged.connect(self.totalTime) <NEW_LINE> self.audioOutput = Phonon.AudioOutput(Phonon.MusicCategory, self) <NEW_LINE> Phonon.createPath(self.mediaObject, self.audioOutput) <NEW_LINE> self.seekSlider.setMediaObject(self.mediaObject) <NEW_LINE> self.volumeSlider.setAudioOutput(self.audioOutput) <NEW_LINE> if not self.disableGui: <NEW_LINE> <INDENT> self.show()
Set up the GUI.
625941bb8c0ade5d55d3e86d
def find_date(filename): <NEW_LINE> <INDENT> results = dateRE.search(filename) <NEW_LINE> if results is None: <NEW_LINE> <INDENT> return '0' <NEW_LINE> <DEDENT> return results.group()
find_date(str) --> str Pulls a date out of the input and returns it >>> find_date('giantbombcast-020210.mp3') '020210' >>> find_date('4G1U030510.mp3') '030510' >>> find_date('Rebel_FM_Episode_54_-_031110.mp3') '031110'
625941bbd10714528d5ffb8e
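find_date depends on a module-level dateRE that the snippet does not define. A self-contained sketch whose pattern is an assumption consistent with the doctests (the first run of six digits, MMDDYY):

```python
import re

dateRE = re.compile(r'\d{6}')  # assumed pattern: first run of six digits

def find_date(filename):
    results = dateRE.search(filename)
    if results is None:
        return '0'
    return results.group()

print(find_date('giantbombcast-020210.mp3'))  # -> '020210'
print(find_date('no-date-here.mp3'))          # -> '0'
```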