code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Point: <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "(x: {}, y: {})".format(self.x, self.y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def distance(self, other): <NEW_LINE> <INDENT> return math.sqrt((self.x - other.x)**2 + (self.y - other.y)**2)
Point class represents and manipulates x,y coords.
62598fcb60cbc95b063646e9
class RHRegistrationBulkCheckIn(RHRegistrationsActionBase): <NEW_LINE> <INDENT> def _process(self): <NEW_LINE> <INDENT> check_in = request.form['flag'] == '1' <NEW_LINE> msg = 'checked-in' if check_in else 'not checked-in' <NEW_LINE> for registration in self.registrations: <NEW_LINE> <INDENT> registration.checked_in = check_in <NEW_LINE> signals.event.registration_checkin_updated.send(registration) <NEW_LINE> logger.info('Registration %s marked as %s by %s', registration, msg, session.user) <NEW_LINE> <DEDENT> flash(_("Selected registrations marked as {} successfully.").format(msg), 'success') <NEW_LINE> return jsonify_data(**self.list_generator.render_list())
Bulk apply check-in/not checked-in state to registrations
62598fcbff9c53063f51a9f8
class ImageWindow : <NEW_LINE> <INDENT> def __init__(self, width = 400, height = 400, title = ""): <NEW_LINE> <INDENT> self._isValid = True <NEW_LINE> self._tkwin = tk.Toplevel( _rootWin, width = width, height = height, borderwidth = 0, padx = 0, pady = 0) <NEW_LINE> self._tkwin.protocol("WM_DELETE_WINDOW", self.close) <NEW_LINE> self._tkwin.title(title) <NEW_LINE> self._tkimage = tk.PhotoImage(width = width, height = height) <NEW_LINE> self._tkcanvas = tk.Canvas(self._tkwin, width = width, height = height, bg = "white", bd = 0) <NEW_LINE> self._tkcanvas.create_image(0, 0, anchor="nw", image=self._tkimage) <NEW_LINE> self._tkcanvas.pack() <NEW_LINE> self._tkwin.lift() <NEW_LINE> self._tkwin.resizable(0, 0) <NEW_LINE> self._tkwin.update_idletasks() <NEW_LINE> <DEDENT> def close( self ): <NEW_LINE> <INDENT> if not self._isValid : return <NEW_LINE> self._isValid = False <NEW_LINE> self._tkwin.destroy() <NEW_LINE> self._tkwin.update_idletasks() <NEW_LINE> self._tkwin.quit() <NEW_LINE> <DEDENT> def wait( self ): <NEW_LINE> <INDENT> if not self._isValid : raise GraphicsWinError() <NEW_LINE> self._tkwin.mainloop() <NEW_LINE> <DEDENT> def setPixel(self, row, col, red, green, blue) : <NEW_LINE> <INDENT> self._tkimage.put("#%02x%02x%02x" % (red, green, blue), (col, row)) <NEW_LINE> <DEDENT> def getPixel(self, row, col) : <NEW_LINE> <INDENT> string = self._tkimage.get(col, row) <NEW_LINE> parts = string.split() <NEW_LINE> return (int(parts[0]), int(parts[1]), int(parts[2]))
Defines a simple top-level framed window that contains a photo image.
62598fcb50812a4eaa620dba
class MessageEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> message_key = '__json_message' <NEW_LINE> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, Message): <NEW_LINE> <INDENT> is_safedata = 1 if isinstance(obj.message, SafeData) else 0 <NEW_LINE> message = [self.message_key, is_safedata, obj.level, obj.message] <NEW_LINE> if obj.extra_tags: <NEW_LINE> <INDENT> message.append(obj.extra_tags) <NEW_LINE> <DEDENT> return message <NEW_LINE> <DEDENT> return super().default(obj)
Compactly serialize instances of the ``Message`` class as JSON.
62598fcb167d2b6e312b7324
class BEPostalCodeField(RegexField): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': _( 'Enter a valid postal code in the range and format 1XXX - 9XXX.'), } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(r'^[1-9]\d{3}$', **kwargs)
A form field that validates its input as a belgium postal code. Belgium postal code is a 4 digits string. The first digit indicates the province (except for the 3ddd numbers that are shared by the eastern part of Flemish Brabant and Limburg and the and 1ddd that are shared by the Brussels Capital Region, the western part of Flemish Brabant and Walloon Brabant)
62598fcb3d592f4c4edbb260
class RowStructure(object): <NEW_LINE> <INDENT> def __init__(self, row_struct_dict, filetype): <NEW_LINE> <INDENT> if filetype == 'csv': <NEW_LINE> <INDENT> mandatory_props = CSV_ROW_MANDATORY_PROPS <NEW_LINE> <DEDENT> elif filetype == 'pos': <NEW_LINE> <INDENT> mandatory_props = POS_ROW_MANDATORY_PROPS <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RowStructureParseException("filetype must be 'pos' or 'csv' instead of " + filetype) <NEW_LINE> <DEDENT> keys = [key for key in row_struct_dict] <NEW_LINE> if 'type' not in keys: <NEW_LINE> <INDENT> raise RowStructureParseException("'type' property is mandatory in row structure") <NEW_LINE> <DEDENT> row_type = row_struct_dict['type'] <NEW_LINE> if not set(mandatory_props).issubset(keys): <NEW_LINE> <INDENT> missing_props = [prop for prop in mandatory_props if prop not in keys] <NEW_LINE> raise RowStructureParseException("Missing following properties in row structure for type'" + row_type + "' : " + ", ".join(missing_props) + ".") <NEW_LINE> <DEDENT> for key in keys: <NEW_LINE> <INDENT> self.__dict__[key] = row_struct_dict[key] <NEW_LINE> <DEDENT> if 'type' not in row_struct_dict: <NEW_LINE> <INDENT> raise RowStructureParseException("'type' property is mandatory in row structure")
Structure of a row in a csv flat file
62598fcb956e5f7376df5854
class info_elem_siq(): <NEW_LINE> <INDENT> def __init__(self,spi,bl,sb,nt,iv): <NEW_LINE> <INDENT> self._spi = spi <NEW_LINE> self._bl = bl <NEW_LINE> self._sb = sb <NEW_LINE> self._nt = nt <NEW_LINE> self._iv = iv <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = str(self.spi)+str(self.bl)+str(self.sb) <NEW_LINE> s += str(self.nt)+str(self.iv) <NEW_LINE> return s <NEW_LINE> <DEDENT> @property <NEW_LINE> def spi(self): <NEW_LINE> <INDENT> return self._spi <NEW_LINE> <DEDENT> @property <NEW_LINE> def bl(self): <NEW_LINE> <INDENT> return self._bl <NEW_LINE> <DEDENT> @property <NEW_LINE> def sb(self): <NEW_LINE> <INDENT> return self._sb <NEW_LINE> <DEDENT> @property <NEW_LINE> def nt(self): <NEW_LINE> <INDENT> return self._nt <NEW_LINE> <DEDENT> @property <NEW_LINE> def iv(self): <NEW_LINE> <INDENT> return self._iv
IEC 104 single point information with quality descriptor
62598fcb4a966d76dd5ef284
class FlatPagesPredicate(object): <NEW_LINE> <INDENT> package = None <NEW_LINE> subdir = None <NEW_LINE> def __call__(self, info, request): <NEW_LINE> <INDENT> output = render(request, info) <NEW_LINE> return output
This is a predicate which allows us to only serve up files that actually exist within our data sources through a catch-all route.
62598fcbadb09d7d5dc0a927
class OmsShellCmd(Cmd): <NEW_LINE> <INDENT> command('omsh') <NEW_LINE> def execute(self, args): <NEW_LINE> <INDENT> self.write("nested shell not implemented yet.\n")
This command represents the oms shell. Currently it cannot run a nested shell.
62598fcb7b180e01f3e49226
class ClustersListByResourceGroupOptions(Model): <NEW_LINE> <INDENT> _attribute_map = { 'filter': {'key': '', 'type': 'str'}, 'select': {'key': '', 'type': 'str'}, 'max_results': {'key': '', 'type': 'int'}, } <NEW_LINE> def __init__(self, *, filter: str=None, select: str=None, max_results: int=1000, **kwargs) -> None: <NEW_LINE> <INDENT> super(ClustersListByResourceGroupOptions, self).__init__(**kwargs) <NEW_LINE> self.filter = filter <NEW_LINE> self.select = select <NEW_LINE> self.max_results = max_results
Additional parameters for list_by_resource_group operation. :param filter: An OData $filter clause.. Used to filter results that are returned in the GET respnose. :type filter: str :param select: An OData $select clause. Used to select the properties to be returned in the GET respnose. :type select: str :param max_results: The maximum number of items to return in the response. A maximum of 1000 files can be returned. Default value: 1000 . :type max_results: int
62598fcb60cbc95b063646eb
class BatchConverter(object): <NEW_LINE> <INDENT> def __init__(self, batch_size=1000, threadpool_prefix="batch_processor", threadpool_size=10): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.threadpool_prefix = threadpool_prefix <NEW_LINE> self.threadpool_size = threadpool_size <NEW_LINE> <DEDENT> def ConvertBatch(self, batch): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Convert(self, values, start_index=0, end_index=None): <NEW_LINE> <INDENT> if not values: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> total_batch_count = len(values) // self.batch_size <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> total_batch_count = -1 <NEW_LINE> <DEDENT> pool = ThreadPool.Factory(self.threadpool_prefix, self.threadpool_size) <NEW_LINE> val_iterator = itertools.islice(values, start_index, end_index) <NEW_LINE> pool.Start() <NEW_LINE> try: <NEW_LINE> <INDENT> for batch_index, batch in enumerate( collection.Batch(val_iterator, self.batch_size)): <NEW_LINE> <INDENT> logging.debug("Processing batch %d out of %d", batch_index, total_batch_count) <NEW_LINE> pool.AddTask( target=self.ConvertBatch, args=(batch,), name="batch_%d" % batch_index, inline=False) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> pool.Stop(join_timeout=3600)
Generic class that does multi-threaded values conversion. BatchConverter converts a set of values to a set of different values in batches using a threadpool.
62598fcb851cf427c66b8661
class VolatileParams(AutomationConfig): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.path = "volatile_params.json" <NEW_LINE> if not os.path.isfile(self.path): <NEW_LINE> <INDENT> with open(self.path, "w") as json_file: <NEW_LINE> <INDENT> json.dump({}, json_file) <NEW_LINE> <DEDENT> <DEDENT> super(VolatileParams, self).__init__(self.path) <NEW_LINE> <DEDENT> def change_param(self, param, value=None): <NEW_LINE> <INDENT> with open(self.path, "r") as json_file: <NEW_LINE> <INDENT> data = json.load(json_file) <NEW_LINE> <DEDENT> data[param] = value <NEW_LINE> with open(self.path, "w") as json_file: <NEW_LINE> <INDENT> json.dump(data, json_file)
VolatileParams inherit AutomationConfig Methods: change_param
62598fcb3617ad0b5ee064f6
class Bandit(ABC): <NEW_LINE> <INDENT> def __init__(self, seed = None): <NEW_LINE> <INDENT> self.random = random.Random() <NEW_LINE> self.regenerate(seed) <NEW_LINE> <DEDENT> def regenerate(self, seed = None): <NEW_LINE> <INDENT> self.random.seed(seed) <NEW_LINE> self._random_state = self.random.getstate() <NEW_LINE> self._on_regenerate() <NEW_LINE> <DEDENT> def _on_regenerate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.random.setstate(self._random_state) <NEW_LINE> self._on_reset() <NEW_LINE> <DEDENT> def _on_reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def pull(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def expected_value(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def expected_value_changed(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def describe(self): <NEW_LINE> <INDENT> pass
A bandit arm that can be selected for some reward drawn from a distribution. Contains its own random, so the ith pull of an instance will result in the same reward, no matter the order + how many times other bandits have been pulled. Parameters -------- seed Seed to use for random.seed.
62598fcb4527f215b58ea27d
class Index: <NEW_LINE> <INDENT> def __init__(self, tokenizer=nltk.word_tokenize, stemmer=EnglishStemmer(), stopwords=nltk.corpus.stopwords.words('english')): <NEW_LINE> <INDENT> self.redis_token_client = redis.StrictRedis(db=0) <NEW_LINE> self.redis_docs_client = redis.StrictRedis(db=1) <NEW_LINE> self.redis_docs_client.ping() <NEW_LINE> self.tokenizer = tokenizer <NEW_LINE> self.stemmer = stemmer <NEW_LINE> self.__unique_id = 0 <NEW_LINE> self.stopwords = set(stopwords) if stopwords else set() <NEW_LINE> <DEDENT> def lookup(self, *words): <NEW_LINE> <INDENT> conjunct = set() <NEW_LINE> for word in words: <NEW_LINE> <INDENT> word = word.lower() <NEW_LINE> if self.stemmer: <NEW_LINE> <INDENT> word = self.stemmer.stem(word) <NEW_LINE> <DEDENT> docs_with_word = self.redis_token_client.smembers(word) <NEW_LINE> hits = set([ (id, self.redis_docs_client.get(id)) for id in docs_with_word ]) <NEW_LINE> conjunct = conjunct & hits if conjunct else hits <NEW_LINE> <DEDENT> return conjunct <NEW_LINE> <DEDENT> def delete_key(self): <NEW_LINE> <INDENT> self.redis_token_client.flushall() <NEW_LINE> self.redis_docs_client.flushall() <NEW_LINE> self.redis_docs_client.flushdb() <NEW_LINE> self.redis_token_client.flushdb() <NEW_LINE> <DEDENT> def print_lookup(self, *words): <NEW_LINE> <INDENT> hits = self.lookup(*words) <NEW_LINE> if not hits: <NEW_LINE> <INDENT> print("No hits found.") <NEW_LINE> return <NEW_LINE> <DEDENT> for i in hits: <NEW_LINE> <INDENT> print("*** %s has text:\n%s" % i) <NEW_LINE> <DEDENT> <DEDENT> def document_is_processed(self, filename): <NEW_LINE> <INDENT> res = self.redis_docs_client.get(filename) <NEW_LINE> if res: <NEW_LINE> <INDENT> print("%s already added to index." % filename) <NEW_LINE> return True <NEW_LINE> <DEDENT> if res == '': <NEW_LINE> <INDENT> print('File %s was already checked, and contains no text.' 
% filename) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def set_contains_no_text(self, filename): <NEW_LINE> <INDENT> self.redis_docs_client.set(filename, '') <NEW_LINE> <DEDENT> def add(self, filename, document): <NEW_LINE> <INDENT> for token in [t.lower() for t in nltk.word_tokenize(document)]: <NEW_LINE> <INDENT> if token in self.stopwords: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if token in ['.', ',', ':', '']: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self.stemmer: <NEW_LINE> <INDENT> token = self.stemmer.stem(token) <NEW_LINE> <DEDENT> self.redis_token_client.sadd(token, filename) <NEW_LINE> <DEDENT> self.redis_docs_client.set(filename, document)
Inverted index datastructure
62598fcbd486a94d0ba2c381
class Seg(object): <NEW_LINE> <INDENT> def __init__(self, start: pos.Pos, end: pos.Pos): <NEW_LINE> <INDENT> self.__start = start <NEW_LINE> self.__end = end <NEW_LINE> <DEDENT> @property <NEW_LINE> def start(self) -> pos.Pos: <NEW_LINE> <INDENT> return self.__start <NEW_LINE> <DEDENT> @start.setter <NEW_LINE> def start(self, start: pos.Pos) -> None: <NEW_LINE> <INDENT> self.__start = start <NEW_LINE> <DEDENT> @property <NEW_LINE> def end(self) -> pos.Pos: <NEW_LINE> <INDENT> return self.__end <NEW_LINE> <DEDENT> @end.setter <NEW_LINE> def end(self, end: pos.Pos) -> None: <NEW_LINE> <INDENT> self.__end = end <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return str(self.start) + ":" + str(self.end) <NEW_LINE> <DEDENT> def length(self) -> float: <NEW_LINE> <INDENT> return pos.distance(self.start, self.end) <NEW_LINE> <DEDENT> def direction(self): <NEW_LINE> <INDENT> len = self.length() <NEW_LINE> if len == 0.0: <NEW_LINE> <INDENT> uvec = pos.Pos(np.transpose(np.array([0, 0, 0]))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> uvec = pos.Pos(np.transpose(np.array([(self.end.x - self.start.x) / len, (self.end.y - self.start.y) / len, (self.end.z - self.start.z) / len]))) <NEW_LINE> <DEDENT> return uvec
path segment object, specified by a start and an end position __start : start position __end : end position
62598fcb0fa83653e46f5296
class TranslatedNodeMixin: <NEW_LINE> <INDENT> task_comments = None <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> task_comments = kwargs.pop('task_comments', None) <NEW_LINE> if task_comments: <NEW_LINE> <INDENT> self.task_comments = task_comments <NEW_LINE> <DEDENT> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> name = self.task_title <NEW_LINE> if not name: <NEW_LINE> <INDENT> name = super().__str__() <NEW_LINE> name = l_(name.lower().capitalize()) <NEW_LINE> <DEDENT> return str(name)
При приведении к строке возвращает переведённое имя
62598fcbec188e330fdf8c46
class CmdSpawn(COMMAND_DEFAULT_CLASS): <NEW_LINE> <INDENT> key = "@spawn" <NEW_LINE> aliases = ["spawn"] <NEW_LINE> locks = "cmd:perm(spawn) or perm(Builders)" <NEW_LINE> help_category = "Building" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> def _show_prototypes(prototypes): <NEW_LINE> <INDENT> string = "\nAvailable prototypes:\n %s" <NEW_LINE> string = string % utils.fill(", ".join(sorted(prototypes.keys()))) <NEW_LINE> return string <NEW_LINE> <DEDENT> prototypes = spawn(return_prototypes=True) <NEW_LINE> if not self.args: <NEW_LINE> <INDENT> string = "Usage: @spawn {key:value, key, value, ... }" <NEW_LINE> self.caller.msg(string + _show_prototypes(prototypes)) <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> prototype = _convert_from_string(self, self.args) <NEW_LINE> <DEDENT> except SyntaxError: <NEW_LINE> <INDENT> string = "{RCritical Python syntax error in argument. " <NEW_LINE> string += "Only primitive Python structures are allowed. " <NEW_LINE> string += "\nYou also need to use correct Python syntax. " <NEW_LINE> string += "Remember especially to put quotes around all " <NEW_LINE> string += "strings inside lists and dicts.{n" <NEW_LINE> self.caller.msg(string) <NEW_LINE> return <NEW_LINE> <DEDENT> if isinstance(prototype, basestring): <NEW_LINE> <INDENT> keystr = prototype <NEW_LINE> prototype = prototypes.get(prototype, None) <NEW_LINE> if not prototype: <NEW_LINE> <INDENT> string = "No prototype named '%s'." % keystr <NEW_LINE> self.caller.msg(string + _show_prototypes(prototypes)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> elif not isinstance(prototype, dict): <NEW_LINE> <INDENT> self.caller.msg("The prototype must be a prototype key or a Python dictionary.") <NEW_LINE> return <NEW_LINE> <DEDENT> if not "noloc" in self.switches and not "location" in prototype: <NEW_LINE> <INDENT> prototype["location"] = self.caller.location <NEW_LINE> <DEDENT> for obj in spawn(prototype): <NEW_LINE> <INDENT> self.caller.msg("Spawned %s." 
% obj.get_display_name(self.caller))
spawn objects from prototype Usage: @spawn @spawn[/switch] prototype_name @spawn[/switch] {prototype dictionary} Switch: noloc - allow location to be None if not specified explicitly. Otherwise, location will default to caller's current location. Example: @spawn GOBLIN @spawn {"key":"goblin", "typeclass":"monster.Monster", "location":"#2"} Dictionary keys: {wprototype {n - name of parent prototype to use. Can be a list for multiple inheritance (inherits left to right) {wkey {n - string, the main object identifier {wtypeclass {n - string, if not set, will use settings.BASE_OBJECT_TYPECLASS {wlocation {n - this should be a valid object or #dbref {whome {n - valid object or #dbref {wdestination{n - only valid for exits (object or dbref) {wpermissions{n - string or list of permission strings {wlocks {n - a lock-string {waliases {n - string or list of strings {wndb_{n<name> - value of a nattribute (ndb_ is stripped) any other keywords are interpreted as Attributes and their values. The available prototypes are defined globally in modules set in settings.PROTOTYPE_MODULES. If @spawn is used without arguments it displays a list of available prototypes.
62598fcb3617ad0b5ee064f8
class ClusteringDataGenerator(tf.keras.utils.Sequence): <NEW_LINE> <INDENT> def __init__(self, samples, img_dir, batch_size, n_classes, basenet_preprocess, img_format, img_load_dims=(224, 224)): <NEW_LINE> <INDENT> self.samples = samples <NEW_LINE> self.img_dir = img_dir <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.n_classes = n_classes <NEW_LINE> self.basenet_preprocess = basenet_preprocess <NEW_LINE> self.img_load_dims = img_load_dims <NEW_LINE> self.img_format = img_format <NEW_LINE> self.on_epoch_end() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return int(np.ceil(len(self.samples) / self.batch_size)) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> batch_indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] <NEW_LINE> batch_samples = [self.samples[i] for i in batch_indexes] <NEW_LINE> X, y = self.__data_generator(batch_samples) <NEW_LINE> return X, y <NEW_LINE> <DEDENT> def on_epoch_end(self): <NEW_LINE> <INDENT> self.indexes = np.arange(len(self.samples)) <NEW_LINE> <DEDENT> def __data_generator(self, batch_samples): <NEW_LINE> <INDENT> X = np.empty((len(batch_samples), *self.img_load_dims, 3)) <NEW_LINE> y = np.empty((len(batch_samples), self.n_classes)) <NEW_LINE> for i, sample in enumerate(batch_samples): <NEW_LINE> <INDENT> img = sample['image'].resize(self.img_load_dims) <NEW_LINE> if img is not None: <NEW_LINE> <INDENT> X[i, ] = img <NEW_LINE> <DEDENT> if sample.get('label') is not None: <NEW_LINE> <INDENT> y[i, ] = utils.normalize_labels(sample['label']) <NEW_LINE> <DEDENT> <DEDENT> X = self.basenet_preprocess(X) <NEW_LINE> return X, y
inherits from Keras Sequence base object, allows to use multiprocessing in .fit_generator
62598fcb4c3428357761a66e
class ValueIterationAgent(ValueEstimationAgent): <NEW_LINE> <INDENT> def __init__(self, mdp, discount = 0.9, iterations = 100): <NEW_LINE> <INDENT> self.mdp = mdp <NEW_LINE> self.discount = discount <NEW_LINE> self.iterations = iterations <NEW_LINE> self.values = util.Counter() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> n = self.iterations <NEW_LINE> for i in range(0, n): <NEW_LINE> <INDENT> states = mdp.getStates() <NEW_LINE> valueArr = self.values.copy() <NEW_LINE> for state in states: <NEW_LINE> <INDENT> bestV = None <NEW_LINE> if mdp.isTerminal(state): <NEW_LINE> <INDENT> valueArr[state] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> allActions = mdp.getPossibleActions(state) <NEW_LINE> for action in allActions: <NEW_LINE> <INDENT> v = self.computeQValueFromValues(state, action) <NEW_LINE> if bestV <= v: <NEW_LINE> <INDENT> bestV = v <NEW_LINE> <DEDENT> <DEDENT> valueArr[state] = bestV <NEW_LINE> <DEDENT> <DEDENT> self.values = valueArr <NEW_LINE> <DEDENT> <DEDENT> def getValue(self, state): <NEW_LINE> <INDENT> return self.values[state] <NEW_LINE> <DEDENT> def computeQValueFromValues(self, state, action): <NEW_LINE> <INDENT> q = 0 <NEW_LINE> transitions = self.mdp.getTransitionStatesAndProbs(state, action) <NEW_LINE> for nextState, probability in transitions: <NEW_LINE> <INDENT> reward = self.mdp.getReward(state, action, nextState) <NEW_LINE> d = self.discount <NEW_LINE> newV = self.values[nextState] <NEW_LINE> q += probability*(reward + (d*newV)) <NEW_LINE> <DEDENT> return q <NEW_LINE> <DEDENT> def computeActionFromValues(self, state): <NEW_LINE> <INDENT> if self.mdp.isTerminal(state): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> allActions = self.mdp.getPossibleActions(state) <NEW_LINE> bestV = None <NEW_LINE> bestA = None <NEW_LINE> for action in allActions: <NEW_LINE> <INDENT> newV = self.computeQValueFromValues(state, action) <NEW_LINE> if bestV <= newV: <NEW_LINE> <INDENT> bestV = newV <NEW_LINE> bestA = action 
<NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return bestA <NEW_LINE> <DEDENT> def getPolicy(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getAction(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> return self.computeQValueFromValues(state, action)
* Please read learningAgents.py before reading this.* A ValueIterationAgent takes a Markov decision process (see mdp.py) on initialization and runs value iteration for a given number of iterations using the supplied discount factor.
62598fcbbe7bc26dc9252033
@dataclass(frozen=False, init=False) <NEW_LINE> class FileUpdateRequest(Entity): <NEW_LINE> <INDENT> fileMd5sum: str = None <NEW_LINE> fileSize: int = None <NEW_LINE> fileAccess: str = None <NEW_LINE> info: dict = None
Mutable request object used to update file data. :param str file_md5: MD5 checksum value to update :param int file_size: File size (bytes) to update :param int file_access: Access type to update :param dict file_info: json info metadata to update
62598fcb3346ee7daa337820
class Wxpropgrid(Package, SourceforgePackage): <NEW_LINE> <INDENT> homepage = "http://wxpropgrid.sourceforge.net/" <NEW_LINE> sourceforge_mirror_path = "wxpropgrid/wxpropgrid-1.4.15-src.tar.gz" <NEW_LINE> version('1.4.15', sha256='f0c9a86656828f592c8e57d2c89401f07f0af6a45b17bbca3990e8d29121c2b8') <NEW_LINE> depends_on("wxwidgets") <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wxwidgets'].prefix.bin, "--enable-unicode") <NEW_LINE> make() <NEW_LINE> make("install")
wxPropertyGrid is a property sheet control for wxWidgets. In other words, it is a specialized two-column grid for editing properties such as strings, numbers, flagsets, string arrays, and colours.
62598fcb60cbc95b063646ef
class TagSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Tag <NEW_LINE> fields = ('name', )
Serialize a Cloud instance. Provides statistics for a cloud account.
62598fcb5fcc89381b266326
class PositionWiseFF(nn.Module): <NEW_LINE> <INDENT> def __init__(self, hidden_size, inner_size, ffn_dropout=0.0, hidden_act="relu"): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.dense_in = nn.Linear(hidden_size, inner_size) <NEW_LINE> self.dense_out = nn.Linear(inner_size, hidden_size) <NEW_LINE> self.layer_dropout = nn.Dropout(ffn_dropout) <NEW_LINE> ACT2FN = {"gelu": gelu, "relu": torch.relu} <NEW_LINE> self.act_fn = ACT2FN[hidden_act] <NEW_LINE> <DEDENT> def forward(self, hidden_states): <NEW_LINE> <INDENT> output_states = self.dense_in(hidden_states) <NEW_LINE> output_states = self.act_fn(output_states) <NEW_LINE> output_states = self.dense_out(output_states) <NEW_LINE> output_states = self.layer_dropout(output_states) <NEW_LINE> return output_states
Position-wise feed-forward network of Transformer block. Args: hidden_size: size of the embeddings in the model, also known as d_model inner_size: number of neurons in the intermediate part of feed-forward net, usually is (4-8 x hidden_size) in the papers ffn_dropout: probability of dropout applied to net output hidden_act: activation function used between two linear layers
62598fcbadb09d7d5dc0a92d
class MiscCommands(commands.Cog, name='Misc Commands'): <NEW_LINE> <INDENT> def __init__(self, bot, **options): <NEW_LINE> <INDENT> super().__init__(**options) <NEW_LINE> self.bot = bot <NEW_LINE> <DEDENT> @commands.command(brief="Use bot to display emoji.") <NEW_LINE> async def emoji(self, ctx, emoji_text, number=None): <NEW_LINE> <INDENT> await ctx.message.delete() <NEW_LINE> if number is None: <NEW_LINE> <INDENT> number = 1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> number = int(number) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise(commands.BadArgument) <NEW_LINE> <DEDENT> author = ctx.author <NEW_LINE> if not (emoji_text.startswith(':') and emoji_text.endswith(':')): <NEW_LINE> <INDENT> emoji_text = ':' + emoji_text + ':' <NEW_LINE> <DEDENT> emoji = convert_emoji(self.bot, emoji_text) <NEW_LINE> if number < 1 or number > 10: <NEW_LINE> <INDENT> await ctx.send(f'Hm. {author.mention} Please enter a valid integer. (No overflows)') <NEW_LINE> <DEDENT> elif emoji: <NEW_LINE> <INDENT> emoji *= number <NEW_LINE> wb = await ctx.channel.create_webhook(name=author.display_name) <NEW_LINE> await ctx.message.delete() <NEW_LINE> await wb.send(emoji, username=author.display_name, avatar_url=author.avatar_url) <NEW_LINE> await wb.delete() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await ctx.send(f'Hm. 
{author.mention} I couldn\'t find `{emoji_text}`.') <NEW_LINE> <DEDENT> <DEDENT> @commands.command(brief="Get the ID and name of any emoji.") <NEW_LINE> async def emojiid(self, ctx, emoji_text): <NEW_LINE> <INDENT> author = ctx.author <NEW_LINE> if not (emoji_text.startswith(':') and emoji_text.endswith(':')): <NEW_LINE> <INDENT> emoji_text = ':' + emoji_text + ':' <NEW_LINE> <DEDENT> emoji = convert_emoji(self.bot, emoji_text) <NEW_LINE> if emoji: <NEW_LINE> <INDENT> await ctx.send(f'Emoji name: {emoji.name}, Emoji ID: {emoji.id}') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await ctx.send(f'{author.mention} I couldn\'t find `{emoji_text}`.') <NEW_LINE> <DEDENT> <DEDENT> @commands.command(brief='Current prefix of bot.') <NEW_LINE> async def prefix(self, ctx): <NEW_LINE> <INDENT> prefix = CONFIG.PREFIX <NEW_LINE> await ctx.send(f'{ctx.author.mention}, the prefix is {prefix}') <NEW_LINE> <DEDENT> @commands.command(brief='Show invite link for bot.') <NEW_LINE> async def invite(self, ctx): <NEW_LINE> <INDENT> author = ctx.author <NEW_LINE> url = CONFIG.INVITE_URL <NEW_LINE> inv = f'{author.mention} here is the invite link:\n{url}' <NEW_LINE> inv += '\n\nPlease do not forget to use `!!help invite` to verify permissions required!' <NEW_LINE> await ctx.send(inv)
This category contains all the commands for fun, or are informational.
62598fcb4a966d76dd5ef28a
class BaseCountValidator(object): <NEW_LINE> <INDENT> def __call__(self, value): <NEW_LINE> <INDENT> if not self.get_min_count(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> counter = 0 <NEW_LINE> for character in force_text(value): <NEW_LINE> <INDENT> category = unicodedata.category(character) <NEW_LINE> if category in self.categories: <NEW_LINE> <INDENT> counter += 1 <NEW_LINE> <DEDENT> <DEDENT> if counter < self.get_min_count(): <NEW_LINE> <INDENT> raise ValidationError(self.get_error_message(), code=self.code) <NEW_LINE> <DEDENT> <DEDENT> def get_error_message(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_min_count(self): <NEW_LINE> <INDENT> raise NotImplementedError
Counts the occurrences of characters of a :py:func:`unicodedata.category` and raises a :class:`~django.core.exceptions.ValidationError` if the count is less than :py:func:`~BaseCountValidator.get_min_count`.
62598fcba05bb46b3848ac1e
class Request(object): <NEW_LINE> <INDENT> def __init__(self, request_msg, uuid = 0): <NEW_LINE> <INDENT> self.request_msg = request_msg <NEW_LINE> self.type = 'client' <NEW_LINE> self.uuid = uuid
docstring for Request
62598fcb7cff6e4e811b5dd9
class ComplexExpm(Op): <NEW_LINE> <INDENT> __props__ = () <NEW_LINE> def make_node(self, A): <NEW_LINE> <INDENT> assert imported_scipy, ( "Scipy not available. Scipy is needed for the Expm op") <NEW_LINE> A = as_tensor_variable(A) <NEW_LINE> assert A.ndim == 3 <NEW_LINE> expm = theano.tensor.tensor3(dtype=A.dtype) <NEW_LINE> return Apply(self, [A, ], [expm, ]) <NEW_LINE> <DEDENT> def perform(self, node, inputs, outputs): <NEW_LINE> <INDENT> (A,) = inputs <NEW_LINE> (expm,) = outputs <NEW_LINE> temp = scipy.linalg.expm(A[0, :, :] + 1j * A[1, :, :]) <NEW_LINE> expm[0] = np.stack([temp.real, temp.imag]) <NEW_LINE> <DEDENT> def grad(self, inputs, outputs): <NEW_LINE> <INDENT> (A,) = inputs <NEW_LINE> (g_out,) = outputs <NEW_LINE> return [ComplexExpmGrad()(A, g_out)] <NEW_LINE> <DEDENT> def infer_shape(self, node, shapes): <NEW_LINE> <INDENT> return [shapes[0]]
Compute the matrix exponential of a square array.
62598fcb656771135c489a24
class Tracking(object): <NEW_LINE> <INDENT> def __init__(self, request, metric): <NEW_LINE> <INDENT> self.metric = metric <NEW_LINE> self.RECORD_HASH_KEY = RECORD_HASH_PREFIX + str(metric.pk) <NEW_LINE> record_hash = request.session.get(self.RECORD_HASH_KEY, None) <NEW_LINE> if not record_hash: <NEW_LINE> <INDENT> self.record = self._set_record(metric) <NEW_LINE> self.setup = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.record = self._get_record(record_hash) <NEW_LINE> self.setup = True <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "[%s] %s for %s" % (self.metric, self.option, self.hash) <NEW_LINE> <DEDENT> def _set_record(self, metric): <NEW_LINE> <INDENT> option = metric.get_random_option() <NEW_LINE> record = TrackRecord.objects.create(option=option) <NEW_LINE> return record <NEW_LINE> <DEDENT> def _get_record(self, hash): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return TrackRecord.objects.get(hash=hash) <NEW_LINE> <DEDENT> except TrackRecord.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def hash(self): <NEW_LINE> <INDENT> return self.record.hash <NEW_LINE> <DEDENT> @property <NEW_LINE> def option(self): <NEW_LINE> <INDENT> return self.record.option <NEW_LINE> <DEDENT> def setup_session(self, request): <NEW_LINE> <INDENT> if not self.record: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del request.session[self.RECORD_HASH_KEY] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> request.session[self.RECORD_HASH_KEY] = self.record.hash <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def track(self): <NEW_LINE> <INDENT> return self.record.track_conversion()
Tracks chosen metrics, options and conversions, and keep the data available on the request object.
62598fcb3346ee7daa337821
class RouteFilter(Resource): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'etag': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'}, 'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(RouteFilter, self).__init__(**kwargs) <NEW_LINE> self.rules = kwargs.get('rules', None) <NEW_LINE> self.peerings = kwargs.get('peerings', None) <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.etag = None
Route Filter Resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :param location: Resource location. :type location: str :param tags: Resource tags. :type tags: dict[str, str] :param rules: Collection of RouteFilterRules contained within a route filter. :type rules: list[~azure.mgmt.network.v2018_01_01.models.RouteFilterRule] :param peerings: A collection of references to express route circuit peerings. :type peerings: list[~azure.mgmt.network.v2018_01_01.models.ExpressRouteCircuitPeering] :ivar provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'. :vartype provisioning_state: str :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str
62598fcb9f28863672818a56
class CustomEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, (datetime, date, time)): <NEW_LINE> <INDENT> encoded_obj = obj.isoformat() <NEW_LINE> <DEDENT> elif isinstance(obj, db.Model): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> encoded_obj = json.dumps(obj.to_dict(), cls=CustomEncoder, indent=4) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> encoded_obj = str(obj) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> encoded_obj = json.JSONEncoder.default(self, obj) <NEW_LINE> <DEDENT> return encoded_obj
Define encoding rules for fields that are not readily serializable.
62598fcbbf627c535bcb185e
class SplinePlot(AnnotationPlot): <NEW_LINE> <INDENT> style_opts = ['alpha', 'edgecolor', 'linewidth', 'linestyle', 'visible'] <NEW_LINE> def draw_annotation(self, axis, data, opts): <NEW_LINE> <INDENT> verts, codes = data <NEW_LINE> patch = patches.PathPatch(matplotlib.path.Path(verts, codes), facecolor='none', **opts) <NEW_LINE> axis.add_patch(patch) <NEW_LINE> return [patch]
Draw the supplied Spline annotation (see Spline docstring)
62598fcb71ff763f4b5e7b35
class GW2(commands.Cog): <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def PrintDungeons(self, ctx, amount: int = 3): <NEW_LINE> <INDENT> message = "This Weeks Easy Paths Are: \n" + EPaths(ctx, amount) + "\n" + "This Weeks Medium Paths Are: \n" + MPaths(ctx, amount) + "\n" + "This Weeks Hard Paths Are: \n" + HPaths(ctx, amount) <NEW_LINE> await ctx.send(message)
GW2 Commands
62598fcbd486a94d0ba2c387
class SelectorDIC(ModelSelector): <NEW_LINE> <INDENT> def select(self): <NEW_LINE> <INDENT> warnings.filterwarnings("ignore", category=DeprecationWarning) <NEW_LINE> min_val = float("-inf") <NEW_LINE> best_model = None <NEW_LINE> for n in range(self.min_n_components, self.max_n_components+1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model = self.base_model(n) <NEW_LINE> logL = model.score(self.X, self.lengths) <NEW_LINE> total_other_logL = 0 <NEW_LINE> for word in self.words: <NEW_LINE> <INDENT> other_x, other_lengths = self.hwords[word] <NEW_LINE> total_other_logL += model.score(other_x, other_lengths) <NEW_LINE> <DEDENT> avg_logL = total_other_logL/(len(self.words)-1) <NEW_LINE> dic_score = logL - avg_logL <NEW_LINE> if dic_score > min_val: <NEW_LINE> <INDENT> min_val = dic_score <NEW_LINE> best_model = model <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return best_model
select best model based on Discriminative Information Criterion Biem, Alain. "A model selection criterion for classification: Application to hmm topology optimization." Document Analysis and Recognition, 2003. Proceedings. Seventh International Conference on. IEEE, 2003. http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.58.6208&rep=rep1&type=pdf DIC = log(P(X(i)) - 1/(M-1)SUM(log(P(X(all but i))
62598fcb7c178a314d78d855
class CdnMedia(models.Model): <NEW_LINE> <INDENT> TYPE_CHOICES = ( (1, '图片'), ) <NEW_LINE> CDN_CHOICES = ( (1, '七牛'), ) <NEW_LINE> def display_img(self, width=77): <NEW_LINE> <INDENT> return '<img src="{url}" height="{height}", width="{width}", onclick="window.open(this.src)"/>'. format(url=self.url, width=width, height=width*self.height/self.width if self.width and self.height else width) <NEW_LINE> <DEDENT> display_img.short_description = '图片预览' <NEW_LINE> display_img.allow_tags = True <NEW_LINE> def display_size(self): <NEW_LINE> <INDENT> if self.width and self.height: <NEW_LINE> <INDENT> return '{width} * {height}'.format(width=self.width, height=self.height) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '未知' <NEW_LINE> <DEDENT> <DEDENT> display_size.short_description = '图片尺寸' <NEW_LINE> id = models.AutoField(primary_key=True) <NEW_LINE> image = models.ImageField('图片保存路径', upload_to=path_gen, max_length=255) <NEW_LINE> width = models.SmallIntegerField('宽度', blank=True, null=True) <NEW_LINE> height = models.SmallIntegerField('高度', blank=True, null=True) <NEW_LINE> supplier = models.SmallIntegerField('cdn 服务商', choices=CDN_CHOICES, default=1) <NEW_LINE> url = models.CharField('图片地址', max_length=255) <NEW_LINE> remark = models.CharField('备注', max_length=255, blank=True, null=True) <NEW_LINE> create_time = models.DateTimeField('创建时间', auto_now_add=True) <NEW_LINE> update_time = models.DateTimeField('更新时间', auto_now=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'cdn_image' <NEW_LINE> verbose_name = '上传图片' <NEW_LINE> verbose_name_plural = verbose_name
手工上传图片
62598fcb5fc7496912d48454
class StoreEvalResults(base_handler.PipelineBase): <NEW_LINE> <INDENT> def run(self, resource, output): <NEW_LINE> <INDENT> logging.info("resource is %s, output is %s", resource, str(output)) <NEW_LINE> dataset = DatasetPB.query(DatasetPB.output_link == '/blobstore/'+resource).get() <NEW_LINE> dataset.result_link = output[0] <NEW_LINE> dataset.put() <NEW_LINE> return <NEW_LINE> <DEDENT> def finalized(self): <NEW_LINE> <INDENT> logging.info('StoreEvalResults finalized')
A pipeline to store the result of the Analysis job in the database. Args: encoded_key: the DB key corresponding to the metadata of this job output: the blobstore location where the output of the job is stored
62598fcb71ff763f4b5e7b37
class CommandFormatter: <NEW_LINE> <INDENT> def verbose(self, msg): <NEW_LINE> <INDENT> return add_linesep_if_missing(msg) <NEW_LINE> <DEDENT> def out(self, msg): <NEW_LINE> <INDENT> return add_linesep_if_missing(msg) <NEW_LINE> <DEDENT> def warn(self, msg): <NEW_LINE> <INDENT> return prepend_warning_if_missing(msg) <NEW_LINE> <DEDENT> def err(self, msg): <NEW_LINE> <INDENT> return prepend_error_if_missing(msg) <NEW_LINE> <DEDENT> def start(self, command): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def end(self, suppress, *exc_info): <NEW_LINE> <INDENT> pass
Helper class used to format output from ``Command`` objects. Command subclasses can specify a custom instance of this class to alter the way messages are printed (for example, surrounding messages in html tags). Each method corresponds to the ``Command._<name>()`` method of the ``Command`` class (i.e. ``Command._out()`` calls the ``out(msg)`` method of this class. The formatter is set via the ``Command.formatter`` attribute. This class is the default formatter.
62598fcb5fdd1c0f98e5e344
class Cancel(base.Command): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.Instance(positional=False, text='The ID of the instance the operation is executing on.' ).AddToParser(parser) <NEW_LINE> flags.Database(positional=False, required=False, text='For a database operation, the name of the database ' 'the operation is executing on.').AddToParser(parser) <NEW_LINE> flags.OperationId().AddToParser(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> if args.database: <NEW_LINE> <INDENT> return database_operations.Cancel( args.instance, args.database, args.operation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return instance_operations.Cancel(args.instance, args.operation)
Cloud Spanner operations cancel command.
62598fcb7b180e01f3e4922b
class SparseDataset(torch.utils.data.Dataset): <NEW_LINE> <INDENT> def __init__(self, *csrs): <NEW_LINE> <INDENT> assert all(csrs[0].shape[0] == csr.shape[0] for csr in csrs) <NEW_LINE> self.csrs = csrs <NEW_LINE> <DEDENT> def __getitem__(self, index) -> Tuple: <NEW_LINE> <INDENT> return tuple(csr[index, ...] for csr in self.csrs) <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return self.csrs[0].shape[0]
torch.utils.data.Dataset wrapping a scipy.sparse.csr.csr_matrix Each sample will be retrieved by indexing matrices along the leftmost dimension. Args: *csrs (scipy.sparse.csr.csr_matrix): sparse matrices that have the same size in the leftmost dimension.
62598fcb5fc7496912d48455
class DriverLicenseOCRRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageBase64 = None <NEW_LINE> self.ImageUrl = None <NEW_LINE> self.CardSide = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageBase64 = params.get("ImageBase64") <NEW_LINE> self.ImageUrl = params.get("ImageUrl") <NEW_LINE> self.CardSide = params.get("CardSide")
DriverLicenseOCR请求参数结构体
62598fcbff9c53063f51aa04
class PageDataShortEntityView(object): <NEW_LINE> <INDENT> swagger_types = { 'data': 'list[ShortEntityView]', 'total_pages': 'int', 'total_elements': 'int', 'has_next': 'bool' } <NEW_LINE> attribute_map = { 'data': 'data', 'total_pages': 'totalPages', 'total_elements': 'totalElements', 'has_next': 'hasNext' } <NEW_LINE> def __init__(self, data=None, total_pages=None, total_elements=None, has_next=None): <NEW_LINE> <INDENT> self._data = None <NEW_LINE> self._total_pages = None <NEW_LINE> self._total_elements = None <NEW_LINE> self._has_next = None <NEW_LINE> self.discriminator = None <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> if total_pages is not None: <NEW_LINE> <INDENT> self.total_pages = total_pages <NEW_LINE> <DEDENT> if total_elements is not None: <NEW_LINE> <INDENT> self.total_elements = total_elements <NEW_LINE> <DEDENT> if has_next is not None: <NEW_LINE> <INDENT> self.has_next = has_next <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_pages(self): <NEW_LINE> <INDENT> return self._total_pages <NEW_LINE> <DEDENT> @total_pages.setter <NEW_LINE> def total_pages(self, total_pages): <NEW_LINE> <INDENT> self._total_pages = total_pages <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_elements(self): <NEW_LINE> <INDENT> return self._total_elements <NEW_LINE> <DEDENT> @total_elements.setter <NEW_LINE> def total_elements(self, total_elements): <NEW_LINE> <INDENT> self._total_elements = total_elements <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_next(self): <NEW_LINE> <INDENT> return self._has_next <NEW_LINE> <DEDENT> @has_next.setter <NEW_LINE> def has_next(self, has_next): <NEW_LINE> <INDENT> self._has_next = has_next <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, 
_ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(PageDataShortEntityView, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PageDataShortEntityView): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. from tb_rest_client.api_client import ApiClient Do not edit the class manually.
62598fcb63b5f9789fe8552f
class DictDiffer(object): <NEW_LINE> <INDENT> def __init__(self, current_dict, past_dict): <NEW_LINE> <INDENT> self.current_dict, self.past_dict = current_dict, past_dict <NEW_LINE> self.current_keys, self.past_keys = [ set(d.keys()) for d in (current_dict, past_dict) ] <NEW_LINE> self.intersect = self.current_keys.intersection(self.past_keys) <NEW_LINE> <DEDENT> def added(self): <NEW_LINE> <INDENT> return self.current_keys - self.intersect <NEW_LINE> <DEDENT> def removed(self): <NEW_LINE> <INDENT> return self.past_keys - self.intersect <NEW_LINE> <DEDENT> def changed(self): <NEW_LINE> <INDENT> return set(o for o in self.intersect if self.past_dict[o] != self.current_dict[o]) <NEW_LINE> <DEDENT> def unchanged(self): <NEW_LINE> <INDENT> return set(o for o in self.intersect if self.past_dict[o] == self.current_dict[o])
from https://github.com/hughdbrown/dictdiffer Calculate the difference between two dictionaries as: (1) items added (2) items removed (3) keys same in both but changed values (4) keys same in both and unchanged values
62598fcc091ae35668704fe3
class MyClz: <NEW_LINE> <INDENT> pass
我是 Class 註解 可以擁有多行
62598fcca05bb46b3848ac24
class Fun_path(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'fun_paths' <NEW_LINE> id = db.Column(db.Integer,primary_key=True) <NEW_LINE> fa_id = db.Column(db.String(16)) <NEW_LINE> fun_id = db.Column(db.String(128)) <NEW_LINE> classify = db.Column(db.String(128)) <NEW_LINE> name = db.Column(db.String(128)) <NEW_LINE> def __init__(self,fa_id,fun_id,classify,name): <NEW_LINE> <INDENT> self.fa_id = fa_id <NEW_LINE> self.fun_id = fun_id <NEW_LINE> self.classify = classify <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Fun_path %r>' % self.name
主业务目录 id 目录编号,自增,主键 fa_id 父级目录id, fun_id 关联api或UI的数据id classify 1标识为api,2标识为UI
62598fcc97e22403b383b2bf
class CommandParser: <NEW_LINE> <INDENT> def __init__(self, master): <NEW_LINE> <INDENT> self.master = master <NEW_LINE> self.currentToken = master <NEW_LINE> <DEDENT> def parse(self, string): <NEW_LINE> <INDENT> parsed = [] <NEW_LINE> collections = {} <NEW_LINE> string += " " <NEW_LINE> for c in string: <NEW_LINE> <INDENT> reset = True <NEW_LINE> validNext = self.currentToken.validNext(c) <NEW_LINE> if validNext is not None: <NEW_LINE> <INDENT> self.currentToken = validNext <NEW_LINE> if self.currentToken.doCollect(): <NEW_LINE> <INDENT> if self.currentToken.collect not in collections: <NEW_LINE> <INDENT> collections[self.currentToken.collect] = "" <NEW_LINE> <DEDENT> collections[self.currentToken.collect] += c <NEW_LINE> <DEDENT> if self.currentToken.terminates(): <NEW_LINE> <INDENT> parsed.append(Command(self.currentToken.action, collections)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reset = False <NEW_LINE> <DEDENT> <DEDENT> if reset: <NEW_LINE> <INDENT> self.currentToken = self.master <NEW_LINE> collections = {} <NEW_LINE> <DEDENT> <DEDENT> return parsed
An object that will take a grammar, and parse any strings according to it.
62598fcc7b180e01f3e4922c
class ARSCResTableEntry(object): <NEW_LINE> <INDENT> FLAG_COMPLEX = 1 <NEW_LINE> FLAG_util = 2 <NEW_LINE> FLAG_WEAK = 4 <NEW_LINE> def __init__(self, buff, mResId, parent=None): <NEW_LINE> <INDENT> self.start = buff.get_idx() <NEW_LINE> self.mResId = mResId <NEW_LINE> self.parent = parent <NEW_LINE> self.size = unpack('<H', buff.read(2))[0] <NEW_LINE> self.flags = unpack('<H', buff.read(2))[0] <NEW_LINE> self.index = unpack('<I', buff.read(4))[0] <NEW_LINE> if self.is_complex(): <NEW_LINE> <INDENT> self.item = ARSCComplex(buff, parent) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.key = ARSCResStringPoolRef(buff, self.parent) <NEW_LINE> <DEDENT> <DEDENT> def get_index(self): <NEW_LINE> <INDENT> return self.index <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> return self.parent.mKeyStrings.getString(self.index) <NEW_LINE> <DEDENT> def get_key_data(self): <NEW_LINE> <INDENT> return self.key.get_data_value() <NEW_LINE> <DEDENT> def is_util(self): <NEW_LINE> <INDENT> return (self.flags & self.FLAG.util) != 0 <NEW_LINE> <DEDENT> def is_complex(self): <NEW_LINE> <INDENT> return (self.flags & self.FLAG_COMPLEX) != 0 <NEW_LINE> <DEDENT> def is_weak(self): <NEW_LINE> <INDENT> return (self.flags & self.FLAG_WEAK) != 0 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<ARSCResTableEntry idx='0x{:08x}' mResId='0x{:08x}' size='{}' flags='0x{:02x}' index='0x{:x}' holding={}>".format( self.start, self.mResId, self.size, self.flags, self.index, self.item if self.is_complex() else self.key )
See https://github.com/LineageOS/android_frameworks_base/blob/df2898d9ce306bb2fe922d3beaa34a9cf6873d27/include/androidfw/ResourceTypes.h#L1370
62598fcc9f28863672818a59
class NetworkSegment(model_base.BASEV2, models_v2.HasId): <NEW_LINE> <INDENT> __tablename__ = 'ml2_network_segments' <NEW_LINE> network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id', ondelete="CASCADE"), nullable=False) <NEW_LINE> network_type = sa.Column(sa.String(32), nullable=False) <NEW_LINE> physical_network = sa.Column(sa.String(64)) <NEW_LINE> segmentation_id = sa.Column(sa.Integer)
Represent persistent state of a network segment. A network segment is a portion of a neutron network with a specific physical realization. A neutron network can consist of one or more segments.
62598fccbf627c535bcb1864
class AliasContextKindValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> ANY = 0 <NEW_LINE> FIXED = 1 <NEW_LINE> MOVABLE = 2 <NEW_LINE> OTHER = 3
The alias kind. Values: ANY: <no description> FIXED: <no description> MOVABLE: <no description> OTHER: <no description>
62598fcc8a349b6b436865fa
class Car(): <NEW_LINE> <INDENT> def __init__(self, make, model, year): <NEW_LINE> <INDENT> self.make = make <NEW_LINE> self.model = model <NEW_LINE> self.year = year <NEW_LINE> self.odometer_reading = 0 <NEW_LINE> <DEDENT> def get_descriptive_name(self): <NEW_LINE> <INDENT> long_name = str(self.year) + ' ' + self.make + ' ' + self.model <NEW_LINE> return long_name.title() <NEW_LINE> <DEDENT> def read_odometer(self): <NEW_LINE> <INDENT> print("This car has " + str(self.odometer_reading) + " miles on it.") <NEW_LINE> <DEDENT> def update_odometer(self, mileage): <NEW_LINE> <INDENT> self.odometer_reading = mileage
一次模拟汽车的简单尝试
62598fcc50812a4eaa620dc1
class NumpyEmpty(NumpyAutoFill): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> name = 'empty' <NEW_LINE> @property <NEW_LINE> def fill_value(self): <NEW_LINE> <INDENT> return None
Represents a call to numpy.empty for code generation.
62598fcc4a966d76dd5ef292
class TestInitialConditionDomain(TestComponent): <NEW_LINE> <INDENT> _class = InitialConditionDomain <NEW_LINE> _factory = initial_conditions
Unit testing of InitialConditionDomain object.
62598fccab23a570cc2d4f4b
class ExtendedTargetParser(target_parser_class): <NEW_LINE> <INDENT> def enter_optional(self): <NEW_LINE> <INDENT> self.trigger_listener('enter_optional') <NEW_LINE> <DEDENT> def exit_optional(self): <NEW_LINE> <INDENT> self.trigger_listener('exit_optional') <NEW_LINE> <DEDENT> def enterRecursionRule(self, localctx, state, ruleIndex, precedence): <NEW_LINE> <INDENT> super().enterRecursionRule(localctx, state, ruleIndex, precedence) <NEW_LINE> self.trigger_listener('recursion_enter') <NEW_LINE> <DEDENT> def pushNewRecursionContext(self, localctx, state, ruleIndex): <NEW_LINE> <INDENT> super().pushNewRecursionContext(localctx, state, ruleIndex) <NEW_LINE> self.trigger_listener('recursion_push') <NEW_LINE> <DEDENT> def unrollRecursionContexts(self, parentCtx): <NEW_LINE> <INDENT> super().unrollRecursionContexts(parentCtx) <NEW_LINE> self.trigger_listener('recursion_unroll') <NEW_LINE> <DEDENT> def trigger_listener(self, event): <NEW_LINE> <INDENT> for listener in self.getParseListeners(): <NEW_LINE> <INDENT> if hasattr(listener, event): <NEW_LINE> <INDENT> getattr(listener, event)() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def syntax_error_warning(self): <NEW_LINE> <INDENT> if self.getNumberOfSyntaxErrors() > 0: <NEW_LINE> <INDENT> logger.warning('%s finished with %d syntax errors. This may decrease reduce quality.', target_parser_class.__name__, self.getNumberOfSyntaxErrors())
ExtendedTargetParser is a subclass of the original parser implementation. It can trigger state changes that are needed to identify parts of the input that are not needed to keep it syntactically correct.
62598fcc55399d3f056268d5
class SimpleProcProxy(ProcProxy): <NEW_LINE> <INDENT> def __init__(self, f, args, stdin=None, stdout=None, stderr=None, universal_newlines=False): <NEW_LINE> <INDENT> f = wrap_simple_command(f, args, stdin, stdout, stderr) <NEW_LINE> super().__init__(f, args, stdin, stdout, stderr, universal_newlines)
Variant of `ProcProxy` for simpler functions. The function passed into the initializer for `SimpleProcProxy` should have the form described in the xonsh tutorial. This function is then wrapped to make a new function of the form expected by `ProcProxy`.
62598fccbe7bc26dc9252038
class Admin(commands.Cog): <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self.startup = datetime.datetime.now() <NEW_LINE> <DEDENT> def timedelta_str(self, dt): <NEW_LINE> <INDENT> days = dt.days <NEW_LINE> hours, r = divmod(dt.seconds, 3600) <NEW_LINE> minutes, sec = divmod(r, 60) <NEW_LINE> return f"{days} days, {hours} hours, {minutes} minutes and {sec} seconds." <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def extensions(self, ctx): <NEW_LINE> <INDENT> active_ext = "\n".join(tuple(self.bot.extensions)) <NEW_LINE> await ctx.send(active_ext) <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def reload(self, ctx, extension_name: str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.bot.reload_extension(extension_name) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> await ctx.send(f"Error reloading {extension_name}: {e}.") <NEW_LINE> return <NEW_LINE> <DEDENT> await ctx.send(f"Reloaded {extension_name}.") <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def uptime(self, ctx): <NEW_LINE> <INDENT> delta = datetime.datetime.now()-self.startup <NEW_LINE> delta_str = self.timedelta_str(delta) <NEW_LINE> await ctx.send(f"Uptime: {delta_str}") <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def ping(self, ctx): <NEW_LINE> <INDENT> ping = int(self.bot.latency*1000) <NEW_LINE> await ctx.send(f"Pong! (took {ping} ms)")
Several admin commands
62598fcc956e5f7376df585b
class StringProperty(Property): <NEW_LINE> <INDENT> def __init__(self, choices=None, **kwargs): <NEW_LINE> <INDENT> super(StringProperty, self).__init__(**kwargs) <NEW_LINE> self.choices = choices <NEW_LINE> if self.choices: <NEW_LINE> <INDENT> if not isinstance(self.choices, tuple): <NEW_LINE> <INDENT> raise ValueError("Choices must be a tuple of tuples") <NEW_LINE> <DEDENT> self.choice_map = dict(self.choices) <NEW_LINE> self.form_field_class = 'TypedChoiceField' <NEW_LINE> <DEDENT> <DEDENT> @validator <NEW_LINE> def inflate(self, value): <NEW_LINE> <INDENT> if self.choices and value not in self.choice_map: <NEW_LINE> <INDENT> raise ValueError("Invalid choice {}".format(value)) <NEW_LINE> <DEDENT> return unicode(value) <NEW_LINE> <DEDENT> @validator <NEW_LINE> def deflate(self, value): <NEW_LINE> <INDENT> if self.choices and value not in self.choice_map: <NEW_LINE> <INDENT> raise ValueError("Invalid choice {}".format(value)) <NEW_LINE> <DEDENT> return unicode(value) <NEW_LINE> <DEDENT> def default_value(self): <NEW_LINE> <INDENT> return unicode(super(StringProperty, self).default_value())
Store strings
62598fcc5fdd1c0f98e5e348
class TestTransactionsApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = zuora_client.api.transactions_api.TransactionsApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_g_et_transaction_invoice(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_g_et_transaction_payment(self): <NEW_LINE> <INDENT> pass
TransactionsApi unit test stubs
62598fccd8ef3951e32c8039
class ItemCreateForm(BaseItemProcessForm): <NEW_LINE> <INDENT> company = forms.ModelChoiceField(queryset=companies_models.Company.objects.all(), required=False) <NEW_LINE> class Meta(BaseItemProcessForm.Meta): <NEW_LINE> <INDENT> fields = BaseItemProcessForm.Meta.fields + ('company',) <NEW_LINE> <DEDENT> def clean_company(self): <NEW_LINE> <INDENT> return self.initial['company'] <NEW_LINE> <DEDENT> def clean_sku(self): <NEW_LINE> <INDENT> return self._validate_sku(self.cleaned_data['sku'], self.initial['company'])
Form for item create page
62598fcc60cbc95b063646f9
class mode: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from collections import Counter <NEW_LINE> self.counter=Counter() <NEW_LINE> <DEDENT> def step(self, value): <NEW_LINE> <INDENT> if isfloat(value): <NEW_LINE> <INDENT> v=float(value) <NEW_LINE> if not isnan(v): <NEW_LINE> <INDENT> self.counter[v]+=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if self.counter=={}: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.counter.most_common()[0][0]
Returns the mode of the elements.
62598fcc5fc7496912d48457
class LogSnoop: <NEW_LINE> <INDENT> def __init__(self, orb, nearcast_schema): <NEW_LINE> <INDENT> self.orb = orb <NEW_LINE> self.nearcast_schema = nearcast_schema <NEW_LINE> <DEDENT> def orb_close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_nearcast_message(self, cog_h, message_h, d_fields): <NEW_LINE> <INDENT> def format_message(): <NEW_LINE> <INDENT> sb = [] <NEW_LINE> sb.append('[%s/%s/%s] '%(self.orb.schema_h, cog_h, message_h)) <NEW_LINE> for idx, key in enumerate(self.nearcast_schema[message_h]): <NEW_LINE> <INDENT> if idx > 0: <NEW_LINE> <INDENT> sb.append(', ') <NEW_LINE> <DEDENT> sb.append('%s:%s'%(key, d_fields[key])) <NEW_LINE> <DEDENT> return ''.join(sb) <NEW_LINE> <DEDENT> nice = format_message() <NEW_LINE> log(nice)
Logs any message seen on the associated nearcast.
62598fcc9f28863672818a5a
class CodeBlockPreprocessor(preprocessors.Preprocessor): <NEW_LINE> <INDENT> KIND = 'sourcecode' <NEW_LINE> pattern_tag = re.compile(r'\[sourcecode:(.+?)\](.+?)\[/sourcecode\]', re.S) <NEW_LINE> pattern_ticks = re.compile(r'```(.+?)\n(.+?)\n```', re.S) <NEW_LINE> class Config(messages.Message): <NEW_LINE> <INDENT> classes = messages.BooleanField(1, default=False) <NEW_LINE> class_name = messages.StringField(2, default='code') <NEW_LINE> highlighter = messages.StringField(3, default='pygments') <NEW_LINE> <DEDENT> def __init__(self, pod, markdown_instance): <NEW_LINE> <INDENT> self.pod = pod <NEW_LINE> self.markdown = markdown_instance <NEW_LINE> <DEDENT> @property <NEW_LINE> @utils.memoize <NEW_LINE> def formatter(self): <NEW_LINE> <INDENT> return html.HtmlFormatter(noclasses=(not self.config.classes)) <NEW_LINE> <DEDENT> @property <NEW_LINE> @utils.memoize <NEW_LINE> def config(self): <NEW_LINE> <INDENT> return get_config( CodeBlockPreprocessor.KIND, CodeBlockPreprocessor.Config, self.pod) <NEW_LINE> <DEDENT> def run(self, lines): <NEW_LINE> <INDENT> class_name = self.config.class_name <NEW_LINE> def repl(m): <NEW_LINE> <INDENT> language = m.group(1) <NEW_LINE> content = m.group(2) <NEW_LINE> if self.config.highlighter == 'pygments': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> lexer = lexers.get_lexer_by_name(language) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> lexer = lexers.TextLexer() <NEW_LINE> <DEDENT> code = highlight(content, lexer, self.formatter) <NEW_LINE> return '\n\n<div class="%s">%s</div>\n\n' % (class_name, code) <NEW_LINE> <DEDENT> elif self.config.highlighter == 'plain': <NEW_LINE> <INDENT> return '\n\n<pre><code class="%s">%s</code></pre>\n\n' % (language, content) <NEW_LINE> <DEDENT> text = '{} is an invalid highlighter. Valid choices are: pygments, plain.' 
<NEW_LINE> raise ValueError(text.format(self.config.highlighter)) <NEW_LINE> <DEDENT> content = '\n'.join(lines) <NEW_LINE> content = self.pattern_tag.sub(repl, content) <NEW_LINE> content = self.pattern_ticks.sub(repl, content) <NEW_LINE> return content.split('\n')
Adapted from: https://bitbucket.org/birkenfeld/pygments-main/ src/e79a7126551c39d5f8c1b83a79c14e86992155a4/external/markdown-processor.py
62598fcc63b5f9789fe85533
class Kernel: <NEW_LINE> <INDENT> ipkg = None <NEW_LINE> running = None <NEW_LINE> variant = None <NEW_LINE> fpath = None <NEW_LINE> name = None <NEW_LINE> def __init__(self, ipkg, variant, fpath): <NEW_LINE> <INDENT> self.ipkg = ipkg <NEW_LINE> self.name = self.ipkg.name <NEW_LINE> self.running = False <NEW_LINE> self.variant = variant <NEW_LINE> self.fpath = fpath
Kernel object to map real kernels to package manager
62598fcc3617ad0b5ee06504
class Profile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> reg_date = models.DateField(verbose_name='Registration date', auto_now_add=True) <NEW_LINE> avatar = models.ImageField(verbose_name='Avatar', blank=True, null=True) <NEW_LINE> def avatar_url(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.avatar.url <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return settings.STATIC_URL + '/img/avatar.png'
Дополнительные поля к пользователю: аватарка, дата регистрации.
62598fcc167d2b6e312b7334
class BlogPostSitemap(Sitemap): <NEW_LINE> <INDENT> changefreq = "never" <NEW_LINE> priority = 0.8 <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return Post.objects.filter(is_public=True) <NEW_LINE> <DEDENT> def location(self, obj): <NEW_LINE> <INDENT> return reverse('cms:post_detail', args=[obj.pk]) <NEW_LINE> <DEDENT> def lastmod(self, obj): <NEW_LINE> <INDENT> return obj.published_at
ブログ記事のサイトマップ
62598fcc377c676e912f6f55
class Worker(object): <NEW_LINE> <INDENT> def __init__(self, mqtt): <NEW_LINE> <INDENT> self.mqtt = mqtt <NEW_LINE> self.motor = MotorCtrlService() <NEW_LINE> self.speakService = SpeakService() <NEW_LINE> self.vid = CameraService() <NEW_LINE> self.mqtt.reg(Config.MQTT_MOTOR_MOVE_TOPIC_NAME, self.motor_move) <NEW_LINE> self.mqtt.reg(Config.MQTT_CAM_TOPIC_NAME, self.start_cam) <NEW_LINE> self.mqtt.reg(Config.MQTT_SPEAK_TOPIC_NAME, self.speak) <NEW_LINE> <DEDENT> def motor_move(self, msg): <NEW_LINE> <INDENT> self.motor.act(msg) <NEW_LINE> <DEDENT> def start_cam(self, msg): <NEW_LINE> <INDENT> self.vid.act(msg) <NEW_LINE> <DEDENT> def speak(self, msg): <NEW_LINE> <INDENT> self.speakService.act(msg)
Service Worker
62598fcc0fa83653e46f52a4
class Services: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.services = { "domain": { "name": "NonRootDomain" }, "domain_admin_account": { "email": "newtest@test.com", "firstname": "Test", "lastname": "User", "username": "doadmintest", "password": "password" }, "account": { "email": "newtest@test.com", "firstname": "Test", "lastname": "User", "username": "acc", "password": "password" }, "account_not_in_project": { "email": "newtest@test.com", "firstname": "Test", "lastname": "User", "username": "account_not_in_project", "password": "password" }, "project": { "name": "Project", "displaytext": "Project" }, "project2": { "name": "Project2", "displaytext": "Project2" }, "service_offering": { "name": "Tiny Instance", "displaytext": "Tiny Instance", "cpunumber": 1, "cpuspeed": 100, "memory": 64 }, "ostype": 'CentOS 5.3 (64-bit)', "host_anti_affinity": { "name": "", "type": "host anti-affinity" }, "virtual_machine" : { } }
Test Account Services
62598fccab23a570cc2d4f4d
class PackageCopyJobDerived(BaseRunnableJob): <NEW_LINE> <INDENT> __metaclass__ = EnumeratedSubclass <NEW_LINE> delegates(IPackageCopyJob) <NEW_LINE> def __init__(self, job): <NEW_LINE> <INDENT> self.context = job <NEW_LINE> self.logger = logging.getLogger() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls, job_id): <NEW_LINE> <INDENT> job = IStore(PackageCopyJob).get(PackageCopyJob, job_id) <NEW_LINE> if job.job_type != cls.class_job_type: <NEW_LINE> <INDENT> raise NotFoundError( 'No object found with id %d and type %s' % (job_id, cls.class_job_type.title)) <NEW_LINE> <DEDENT> return cls(job) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def iterReady(cls): <NEW_LINE> <INDENT> seen = set() <NEW_LINE> while True: <NEW_LINE> <INDENT> jobs = IStore(PackageCopyJob).find( PackageCopyJob, PackageCopyJob.job_type == cls.class_job_type, PackageCopyJob.job == Job.id, Job.id.is_in(Job.ready_jobs), Not(Job.id.is_in(seen))) <NEW_LINE> jobs.order_by(PackageCopyJob.copy_policy) <NEW_LINE> job = jobs.first() <NEW_LINE> if job is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> seen.add(job.job_id) <NEW_LINE> yield cls(job) <NEW_LINE> <DEDENT> <DEDENT> def getOopsVars(self): <NEW_LINE> <INDENT> vars = super(PackageCopyJobDerived, self).getOopsVars() <NEW_LINE> vars.extend([ ('source_archive_id', self.context.source_archive_id), ('target_archive_id', self.context.target_archive_id), ('target_distroseries_id', self.context.target_distroseries_id), ('package_copy_job_id', self.context.id), ('package_copy_job_type', self.context.job_type.title), ]) <NEW_LINE> return vars <NEW_LINE> <DEDENT> def getOperationDescription(self): <NEW_LINE> <INDENT> return "copying a package" <NEW_LINE> <DEDENT> def getErrorRecipients(self): <NEW_LINE> <INDENT> return [format_address_for_person(self.requester)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def copy_policy(self): <NEW_LINE> <INDENT> return self.context.copy_policy
Abstract class for deriving from PackageCopyJob.
62598fccfbf16365ca794479
class Settings: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 800 <NEW_LINE> self.screen_height = 600 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_limit = 1 <NEW_LINE> self.bullet_width = 5 <NEW_LINE> self.bullet_height = 10 <NEW_LINE> self.bullet_color = 60, 60, 60 <NEW_LINE> self.bullet_allowed = 3 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.speedup_scale=1.1 <NEW_LINE> self.score_scale=1.5 <NEW_LINE> self.initialize_dynamic_settings() <NEW_LINE> <DEDENT> def initialize_dynamic_settings(self): <NEW_LINE> <INDENT> self.ship_speed_factor=1.5 <NEW_LINE> self.bullet_speed_factor=3 <NEW_LINE> self.alien_speed_factor=1 <NEW_LINE> self.fleet_direction = 1 <NEW_LINE> self.alien_points=50 <NEW_LINE> <DEDENT> def increase_speed(self): <NEW_LINE> <INDENT> self.ship_speed_factor*=self.speedup_scale <NEW_LINE> self.bullet_speed_factor*=self.speedup_scale <NEW_LINE> self.alien_speed_factor*=self.speedup_scale <NEW_LINE> self.alien_points=int(self.alien_points*self.score_scale)
存储所有设置的类
62598fcc0fa83653e46f52a6
class Error(exceptions.Error): <NEW_LINE> <INDENT> pass
Errors for this module.
62598fccbf627c535bcb186a
class pyHViewTranslationTool(object): <NEW_LINE> <INDENT> def __init__(self,v): <NEW_LINE> <INDENT> self.view=v <NEW_LINE> <DEDENT> def getView(self): <NEW_LINE> <INDENT> return self.view <NEW_LINE> <DEDENT> def onMouseDown(self,e): <NEW_LINE> <INDENT> self.p=pyHPoint(e.getX(),e.getY()) <NEW_LINE> <DEDENT> def onMouseUp(self,e): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onMouseMove(self,e): <NEW_LINE> <INDENT> t=self.view.getTransform() <NEW_LINE> dx=e.getX()-self.p.getX() <NEW_LINE> dy=e.getY()-self.p.getY() <NEW_LINE> dxp,dyp=t.scale(dx,dy) <NEW_LINE> t.tx+=dxp <NEW_LINE> t.ty+=dyp <NEW_LINE> <DEDENT> def onMouseDobleClick(self,e): <NEW_LINE> <INDENT> v=self.getView() <NEW_LINE> p=pyHPoint(e.getX(),e.getY()) <NEW_LINE> try: <NEW_LINE> <INDENT> f=v.findFigure(p) <NEW_LINE> r=f.getDisplayBox() <NEW_LINE> v.setTransformFitToRectangle(r) <NEW_LINE> <DEDENT> except (pyHFigureNotFound): <NEW_LINE> <INDENT> v.setTransformFitToDrawing() <NEW_LINE> <DEDENT> <DEDENT> def onMouseWheel(self,e): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onKeyPressed(self,e): <NEW_LINE> <INDENT> pass
classdocs
62598fcc4527f215b58ea28f
class JSONField(models.TextField): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(JSONField, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse_json(cls, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(value, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return json.loads(value) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> raise exceptions.ValidationError(_("Enter valid JSON.")) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def print_json(cls, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> if isinstance(value, str): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return json.dumps(value) <NEW_LINE> <DEDENT> def from_db_value(self, value, expression, connection, context): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return JSONField.parse_json(value) <NEW_LINE> <DEDENT> except (exceptions.ValidationError): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_prep_value(self, value): <NEW_LINE> <INDENT> return JSONField.print_json(value) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> return JSONField.parse_json(value) <NEW_LINE> <DEDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> defaults = { 'form_class': JSONFormField, } <NEW_LINE> defaults.update(kwargs) <NEW_LINE> field = super(JSONField, self).formfield(**defaults) <NEW_LINE> if not field.help_text: <NEW_LINE> <INDENT> field.help_text = _("Enter valid JSON.") <NEW_LINE> <DEDENT> return field
Stores JSON object in a text field.
62598fccfbf16365ca79447b
class Logger(logging.Logger): <NEW_LINE> <INDENT> def __init__(self, name, level=logging.NOTSET): <NEW_LINE> <INDENT> super(Logger, self).__init__(name, level) <NEW_LINE> <DEDENT> def _compose_msg(self, *args, **kwargs): <NEW_LINE> <INDENT> global CODE <NEW_LINE> msg_list = [] <NEW_LINE> if len(args) > 0: <NEW_LINE> <INDENT> msg_list.extend(map(lambda x: str(x), args)) <NEW_LINE> <DEDENT> if len(kwargs) > 0: <NEW_LINE> <INDENT> for k, v in kwargs.items(): <NEW_LINE> <INDENT> k = k.encode(CODE) if isinstance(k, unicode) else str(k) <NEW_LINE> v = v.encode(CODE) if isinstance(v, unicode) else str(v) <NEW_LINE> msg_list.append('{0}={1}'.format(k, v)) <NEW_LINE> <DEDENT> <DEDENT> return '\t'.join(msg_list) <NEW_LINE> <DEDENT> def debug(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isEnabledFor(logging.DEBUG): <NEW_LINE> <INDENT> msg = self._compose_msg(*args, **kwargs) <NEW_LINE> self._log(logging.DEBUG, msg, []) <NEW_LINE> <DEDENT> <DEDENT> def info(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isEnabledFor(logging.INFO): <NEW_LINE> <INDENT> msg = self._compose_msg(*args, **kwargs) <NEW_LINE> self._log(logging.INFO, msg, []) <NEW_LINE> <DEDENT> <DEDENT> def warning(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isEnabledFor(logging.WARNING): <NEW_LINE> <INDENT> msg = self._compose_msg(*args, **kwargs) <NEW_LINE> self._log(logging.WARNING, msg, []) <NEW_LINE> <DEDENT> <DEDENT> def error(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isEnabledFor(logging.ERROR): <NEW_LINE> <INDENT> msg = self._compose_msg(*args, **kwargs) <NEW_LINE> self._log(logging.ERROR, msg, []) <NEW_LINE> <DEDENT> <DEDENT> def critical(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isEnabledFor(logging.CRITICAL): <NEW_LINE> <INDENT> msg = self._compose_msg(*args, **kwargs) <NEW_LINE> self._log(logging.CRITICAL, msg, []) <NEW_LINE> <DEDENT> <DEDENT> def exc_log(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.isEnabledFor(logging.ERROR): <NEW_LINE> <INDENT> return 
<NEW_LINE> <DEDENT> exc = traceback.format_exc().rstrip() <NEW_LINE> exc_index = exc.rfind('\n') <NEW_LINE> exc = exc[exc_index + 1:] <NEW_LINE> log_contents = ['EXC_LOG', exc] <NEW_LINE> exc_list = list(traceback.extract_tb(exc_info()[2])) <NEW_LINE> for i, (file, lineno, func, msg) in enumerate(exc_list): <NEW_LINE> <INDENT> stack_info = { 'file': file, 'lineno': lineno, 'func': func, 'msg': msg, 'i': i, } <NEW_LINE> log_contents.append( '{i}|{file}|{lineno}|{func} {msg}'.format(**stack_info) ) <NEW_LINE> <DEDENT> msg = self._compose_msg(*log_contents) <NEW_LINE> self._log(logging.ERROR, msg, [])
A smart logger with many useful functions
62598fcc0fa83653e46f52a8
class MeshSegmentation(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "mesh.mesh_segmentation" <NEW_LINE> bl_label = "Segment Mesh" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> action: bpy.props.EnumProperty(name ="Action", items = [('assignMaterials', "Assign materials", "Assigns a different material for " "each found segment")], description = "What to do with the " "segmentation", default = 'assignMaterials') <NEW_LINE> k: bpy.props.IntProperty(name = "Clusters", description = "Amount of clusters", min = 2, default = 2) <NEW_LINE> delta: bpy.props.FloatProperty(name = "Delta", description = "Set close to zero for more " "importance on the angular " "distance, set close to one " "for more importance on the " "geodesic distance.", default = 0.03, min = 0, max = 1, subtype = 'FACTOR') <NEW_LINE> eta: bpy.props.FloatProperty(name = "Weight of convexity", description = "Set close to zero for more " "importance on concave angles, " "set close to one to treat " "concave and convex angles " "equally.", default = 0.15, min = 1e-10, max = 1, subtype = 'FACTOR') <NEW_LINE> ev_method: bpy.props.EnumProperty(name = "EV method", items = [('sparse', "Sparse", "Sparse method for eigenvector " "computation (scipy.sparse.linalg.eigsh)"), ('dense', "Dense", "Dense method for eigenvector computation " "(scipy.linalg.eigh)")], description = "Method to use for eigenvector computation. 'Sparse' " "should usually preferred, as it tends to be much faster " "without sacrificing quality. 'Dense' can be tried as " "fallback. 
Default", default = 'sparse') <NEW_LINE> kmeans_init: bpy.props.EnumProperty(name = "k-means initialization", items = [('liu_zhang', "Liu & Zhang", "Initialization by Liu & Zhang"), ('kmeans++', "k-means++", "Initialization from k-means++")], description = "Method to use for initializing centroids for k-means.", default = 'liu_zhang') <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> if bpy.ops.mesh.separate(type='LOOSE') != {'CANCELLED'}: <NEW_LINE> <INDENT> self.report({'ERROR'}, "Separated not connected parts, choose " "one of them for segmentation!") <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> segmentation.segment_mesh(mesh = context.active_object.data, k = self.k, coefficients = (self.delta, self.eta), action = getattr(actions, self.action), ev_method = self.ev_method, kmeans_init = self.kmeans_init) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> if context.active_object.type == 'MESH': <NEW_LINE> <INDENT> return context.window_manager.invoke_props_dialog(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.report({'ERROR'}, "Selected object is not a mesh!") <NEW_LINE> return {'CANCELLED'}
Segment a mesh
62598fcc283ffb24f3cf3c47
class GenomicsV1ApiClient(GenomicsApiClient): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(GenomicsV1ApiClient, self).__init__('v1') <NEW_LINE> <DEDENT> def ResourceFromName(self, name): <NEW_LINE> <INDENT> return self._registry.Parse(name, collection='genomics.operations') <NEW_LINE> <DEDENT> def Poller(self): <NEW_LINE> <INDENT> return waiter.CloudOperationPollerNoResources(self._client.operations) <NEW_LINE> <DEDENT> def GetOperation(self, resource): <NEW_LINE> <INDENT> return self._client.operations.Get( self._messages.GenomicsOperationsGetRequest( name=resource.RelativeName())) <NEW_LINE> <DEDENT> def CancelOperation(self, resource): <NEW_LINE> <INDENT> return self._client.operations.Cancel( self._messages.GenomicsOperationsCancelRequest( name=resource.RelativeName()))
Client for accessing the V1 genomics API.
62598fcc099cdd3c636755c2
class PermuteBijectorTest(test.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._rng = np.random.RandomState(42) <NEW_LINE> <DEDENT> def testBijector(self): <NEW_LINE> <INDENT> expected_permutation = np.int32([2, 0, 1]) <NEW_LINE> expected_x = np.random.randn(4, 2, 3) <NEW_LINE> expected_y = expected_x[..., expected_permutation] <NEW_LINE> with self.test_session() as sess: <NEW_LINE> <INDENT> permutation_ph = array_ops.placeholder(dtype=dtypes.int32) <NEW_LINE> bijector = Permute( permutation=permutation_ph, validate_args=True) <NEW_LINE> [ permutation_, x_, y_, fldj, ildj, ] = sess.run([ bijector.permutation, bijector.inverse(expected_y), bijector.forward(expected_x), bijector.forward_log_det_jacobian(expected_x, event_ndims=1), bijector.inverse_log_det_jacobian(expected_y, event_ndims=1), ], feed_dict={permutation_ph: expected_permutation}) <NEW_LINE> self.assertEqual("permute", bijector.name) <NEW_LINE> self.assertAllEqual(expected_permutation, permutation_) <NEW_LINE> self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0) <NEW_LINE> self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0) <NEW_LINE> self.assertAllClose(0., fldj, rtol=1e-6, atol=0) <NEW_LINE> self.assertAllClose(0., ildj, rtol=1e-6, atol=0) <NEW_LINE> <DEDENT> <DEDENT> def testRaisesOpError(self): <NEW_LINE> <INDENT> with self.test_session() as sess: <NEW_LINE> <INDENT> with self.assertRaisesOpError("Permutation over `d` must contain"): <NEW_LINE> <INDENT> permutation_ph = array_ops.placeholder(dtype=dtypes.int32) <NEW_LINE> bijector = Permute( permutation=permutation_ph, validate_args=True) <NEW_LINE> sess.run(bijector.inverse([1.]), feed_dict={permutation_ph: [1, 2]}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def testBijectiveAndFinite(self): <NEW_LINE> <INDENT> permutation = np.int32([2, 0, 1]) <NEW_LINE> x = np.random.randn(4, 2, 3) <NEW_LINE> y = x[..., permutation] <NEW_LINE> with self.test_session(): <NEW_LINE> <INDENT> bijector = Permute(permutation=permutation, 
validate_args=True) <NEW_LINE> assert_bijective_and_finite( bijector, x, y, event_ndims=1, rtol=1e-6, atol=0)
Tests correctness of the Permute bijector.
62598fcc3346ee7daa337828
class AmmoFactory(object): <NEW_LINE> <INDENT> def __init__(self, factory): <NEW_LINE> <INDENT> self.factory = factory <NEW_LINE> self.load_plan = factory.get_load_plan() <NEW_LINE> self.ammo_generator = factory.get_ammo_generator() <NEW_LINE> self.filter = lambda missile: True <NEW_LINE> self.marker = factory.get_marker() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for ammo_tuple in ( (timestamp, marker or self.marker(missile), missile) for timestamp, (missile, marker) in izip(self.load_plan, self.ammo_generator) ): <NEW_LINE> <INDENT> yield ammo_tuple
A generator that produces ammo.
62598fcc5fcc89381b26632d
class RPIOFactory(LocalPiFactory): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(RPIOFactory, self).__init__() <NEW_LINE> RPIO.setmode(RPIO.BCM) <NEW_LINE> RPIO.setwarnings(False) <NEW_LINE> RPIO.wait_for_interrupts(threaded=True) <NEW_LINE> RPIO.PWM.setup() <NEW_LINE> RPIO.PWM.init_channel(0, 10000) <NEW_LINE> self.pin_class = RPIOPin <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> RPIO.PWM.cleanup() <NEW_LINE> RPIO.stop_waiting_for_interrupts() <NEW_LINE> RPIO.cleanup()
Uses the `RPIO`_ library to interface to the Pi's GPIO pins. This is the default pin implementation if the RPi.GPIO library is not installed, but RPIO is. Supports all features including PWM (hardware via DMA). .. note:: Please note that at the time of writing, RPIO is only compatible with Pi 1's; the Raspberry Pi 2 Model B is *not* supported. Also note that root access is required so scripts must typically be run with ``sudo``. You can construct RPIO pins manually like so:: from gpiozero.pins.rpio import RPIOFactory from gpiozero import LED factory = RPIOFactory() led = LED(12, pin_factory=factory) .. _RPIO: https://pythonhosted.org/RPIO/
62598fccff9c53063f51aa0e
class AbstractStreamAdapter(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def Adapt(cls, stream): <NEW_LINE> <INDENT> if not isinstance(stream, cls): <NEW_LINE> <INDENT> return cls(stream) <NEW_LINE> <DEDENT> return stream <NEW_LINE> <DEDENT> def __init__(self, stream): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> <DEDENT> def available(self): <NEW_LINE> <INDENT> curPos = self._stream.tell() <NEW_LINE> self._stream.seek(0, SEEK_END) <NEW_LINE> endPos = self._stream.tell() <NEW_LINE> self._stream.seek(curPos) <NEW_LINE> return endPos-curPos <NEW_LINE> <DEDENT> def seek(self, *args): <NEW_LINE> <INDENT> return self._stream.seek(*args) <NEW_LINE> <DEDENT> def tell(self, *args): <NEW_LINE> <INDENT> return self._stream.tell(*args) <NEW_LINE> <DEDENT> def read(self, count): <NEW_LINE> <INDENT> return self._stream.read(count) <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> return self._stream.write(data)
TODO: Does this class need to exist? This class represents the necessary functions used for typical encoding. Right now, it's only used in PlaygroundStandardPacketEncoder. Does that mean we should make it a module?
62598fcc4527f215b58ea291
class AudioPlayerInterface(object): <NEW_LINE> <INDENT> deserialized_types = { } <NEW_LINE> attribute_map = { } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AudioPlayerInterface): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated. Do not edit the class manually.
62598fcc7cff6e4e811b5de9
class ImagesBrowser(QueueCommandBase): <NEW_LINE> <INDENT> def getImagesList(self): <NEW_LINE> <INDENT> images = {} <NEW_LINE> for img_id in range(20): <NEW_LINE> <INDENT> images[img_id] = dict(name='Image%03d' % img_id, desc='Image %s made by John Doe' % img_id, url='static/images/Image%03d.jpg' % img_id) <NEW_LINE> <DEDENT> return images
#TODO: document Class
62598fccab23a570cc2d4f4f
class AudioDescriptor(ABC): <NEW_LINE> <INDENT> pass
Abstract base class for audio descriptors (playback call specifiers)
62598fccaad79263cf42eb91
class BasePageElement(object): <NEW_LINE> <INDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> driver = obj.driver <NEW_LINE> WebDriverWait(driver, 100).until(lambda driver: driver.find_element_by_name(self.locator)) <NEW_LINE> driver.find_element_by_name(self.locator).send_keys(value) <NEW_LINE> <DEDENT> def __get__(self, obj, typ=None): <NEW_LINE> <INDENT> driver = obj.driver <NEW_LINE> WebDriverWait(driver, 100).until(lambda driver: driver.find_element_by_name(self.locator)) <NEW_LINE> element = driver.find_element_by_name(self.locator) <NEW_LINE> return element.get_attrbute("value")
Base pages class that is initalized on every pages obejct class.
62598fccec188e330fdf8c59
class CursorWrapper(): <NEW_LINE> <INDENT> def __init__(self, conn, NAME): <NEW_LINE> <INDENT> from .mongodb_serializer import TransformDjango <NEW_LINE> self.conn = conn <NEW_LINE> self.db_name = NAME <NEW_LINE> self.db = conn[NAME] <NEW_LINE> self.db.add_son_manipulator(TransformDjango()) <NEW_LINE> <DEDENT> def execute(self, query, args=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def commit(self, *args, **kw): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetchone(self): <NEW_LINE> <INDENT> return {"test":1} <NEW_LINE> <DEDENT> def sql_flush(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if not attr in self.__dict__: <NEW_LINE> <INDENT> return getattr(self.db, attr) <NEW_LINE> <DEDENT> self.__dict__[attr]
Connection is essentially a cursor in mongoDB. Let's imitate the methods cursor has
62598fcc3346ee7daa337829
class RedisDict(RedisCollection, SyncedDict): <NEW_LINE> <INDENT> _validators = (require_string_key,) <NEW_LINE> def __init__(self, client=None, key=None, data=None, parent=None, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__( client=client, key=key, data=data, parent=parent, *args, **kwargs )
A dict-like data structure that synchronizes with a persistent Redis database. Examples -------- >>> doc = RedisDict('data') >>> doc['foo'] = "bar" >>> assert doc['foo'] == "bar" >>> assert 'foo' in doc >>> del doc['foo'] >>> doc['foo'] = dict(bar=True) >>> doc {'foo': {'bar': True}} Parameters ---------- client : redis.Redis, optional A redis client (Default value = None). key : str, optional The key of the collection (Default value = None). data : :class:`collections.abc.Mapping`, optional The initial data passed to :class:`RedisDict`. If ``None``, defaults to ``{}`` (Default value = None). parent : RedisCollection, optional A parent instance of :class:`RedisCollection` or ``None``. If ``None``, the collection owns its own data (Default value = None). \*args : Positional arguments forwarded to parent constructors. \*\*kwargs : Keyword arguments forwarded to parent constructors. Warnings -------- While the :class:`RedisDict` object behaves like a :class:`dict`, there are important distinctions to remember. In particular, because operations are reflected as changes to an underlying database, copying a :class:`RedisDict` instance may exhibit unexpected behavior. If a true copy is required, you should use the call operator to get a dictionary representation, and if necessary construct a new :class:`RedisDict` instance.
62598fcc63b5f9789fe8553b
class SlowThrower(ThrowerAnt): <NEW_LINE> <INDENT> name = 'Slow' <NEW_LINE> implemented = False <NEW_LINE> damage = 0 <NEW_LINE> def throw_at(self, target): <NEW_LINE> <INDENT> if target: <NEW_LINE> <INDENT> apply_effect(make_slow, target, 3)
ThrowerAnt that causes Slow on Bees.
62598fccab23a570cc2d4f50
class DifferentialDriveTrajectoryTracker: <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def track(self, reference_location, reference_speed, robot_location): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> pass
Abstract class to make trajectory tracking of a differential drive mobile robot
62598fcc956e5f7376df5860
class TestSQLStatementCount(StorageDALTestCase): <NEW_LINE> <INDENT> mimetype = 'image/jpeg' <NEW_LINE> def _create_directory_with_five_files(self): <NEW_LINE> <INDENT> user = self.create_user() <NEW_LINE> directory = user.root.make_subdirectory('test') <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> directory.make_file_with_content( file_name='file-%s' % i, hash=b'hash', crc32=0, size=0, deflated_size=0, storage_key=uuid.uuid4(), mimetype=self.mimetype) <NEW_LINE> <DEDENT> return directory <NEW_LINE> <DEDENT> def test_move_directory_with_files(self): <NEW_LINE> <INDENT> directory = self._create_directory_with_five_files() <NEW_LINE> new_parent = directory.owner.root.make_subdirectory('test2') <NEW_LINE> with self.assertNumQueries(48): <NEW_LINE> <INDENT> directory.move(new_parent.id, directory.name) <NEW_LINE> <DEDENT> <DEDENT> def test_delete_directory_with_files(self): <NEW_LINE> <INDENT> directory = self._create_directory_with_five_files() <NEW_LINE> with self.assertNumQueries(41): <NEW_LINE> <INDENT> directory.delete(cascade=True) <NEW_LINE> <DEDENT> <DEDENT> def test_delete_file(self): <NEW_LINE> <INDENT> f = self.factory.make_file(mimetype=self.mimetype) <NEW_LINE> with self.assertNumQueries(6): <NEW_LINE> <INDENT> f.delete() <NEW_LINE> <DEDENT> <DEDENT> def test_make_file_with_content(self): <NEW_LINE> <INDENT> user = self.create_user() <NEW_LINE> directory = user.root.make_subdirectory('test') <NEW_LINE> hash_ = self.factory.get_fake_hash() <NEW_LINE> name = self.factory.get_unique_unicode() <NEW_LINE> size = self.factory.get_unique_integer() <NEW_LINE> crc32 = self.factory.get_unique_integer() <NEW_LINE> storage_key = uuid.uuid4() <NEW_LINE> with self.assertNumQueries(37): <NEW_LINE> <INDENT> directory.make_file_with_content( name, hash_, crc32, size, size, storage_key, mimetype=self.mimetype)
Test the number of SQL statements issued by some critical operations. The tests here should just assert that the number of SQL statements issued by some performance-sensitive operations are what we expect. This is necessary because when using an ORM it's way too easy to make changes that seem innocuous but in fact affect the performance in a significant (and bad) way. When that happens, one or more tests here may break and developers will then be forced to assess the consequences of their changes on those operations, and either provide a good reason for them or tweak their changes to avoid the extra SQL statement(s).
62598fcca05bb46b3848ac30
class RandomControl(CPUElement): <NEW_LINE> <INDENT> def connect(self, inputSources, outputValueNames, control, outputSignalNames): <NEW_LINE> <INDENT> CPUElement.connect(self, inputSources, outputValueNames, control, outputSignalNames) <NEW_LINE> assert(len(inputSources) == 0), 'Random control does not have any inputs' <NEW_LINE> assert(len(outputValueNames) == 0), 'Random control does not have output' <NEW_LINE> assert(len(control) == 0), 'Random control does not have any control signals' <NEW_LINE> assert(len(outputSignalNames) == 1), 'Random control has one control output' <NEW_LINE> self.signalName = outputSignalNames[0] <NEW_LINE> <DEDENT> def writeOutput(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setControlSignals(self): <NEW_LINE> <INDENT> self.outputControlSignals[self.signalName] = random.randint(0, 1)
Random control unit. It randomly sets it's output signal
62598fccadb09d7d5dc0a940
class SubTaskDetail(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Name = None <NEW_LINE> self.Result = None <NEW_LINE> self.ErrMsg = None <NEW_LINE> self.Type = None <NEW_LINE> self.Status = None <NEW_LINE> self.FailedIndices = None <NEW_LINE> self.FinishTime = None <NEW_LINE> self.Level = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Name = params.get("Name") <NEW_LINE> self.Result = params.get("Result") <NEW_LINE> self.ErrMsg = params.get("ErrMsg") <NEW_LINE> self.Type = params.get("Type") <NEW_LINE> self.Status = params.get("Status") <NEW_LINE> self.FailedIndices = params.get("FailedIndices") <NEW_LINE> self.FinishTime = params.get("FinishTime") <NEW_LINE> self.Level = params.get("Level") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
实例操作记录流程任务中的子任务信息(如升级检查任务中的各个检查项)
62598fccbe7bc26dc925203c
class Children: <NEW_LINE> <INDENT> def __init__(self, cids, gids): <NEW_LINE> <INDENT> self.children = [] <NEW_LINE> self.gifts = defaultdict(list) <NEW_LINE> for cid, gid in zip(tqdm(cids, miniters=9999), gids): <NEW_LINE> <INDENT> self.children.append(Child(cid, gid)) <NEW_LINE> self.gifts[gid].append(cid) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.children[index] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.children) <NEW_LINE> <DEDENT> def replace(self, cid1, cid2): <NEW_LINE> <INDENT> gid1 = self[cid1].gid <NEW_LINE> gid2 = self[cid2].gid <NEW_LINE> self[cid1].gid = gid2 <NEW_LINE> self[cid2].gid = gid1 <NEW_LINE> <DEDENT> def mk_sub(self, path): <NEW_LINE> <INDENT> idset = [(c.id,c.gid) for c in self.children] <NEW_LINE> sub = pd.DataFrame(idset, columns=['ChildId', 'GiftId']) <NEW_LINE> sub.to_csv(path, index=False, compression='gzip') <NEW_LINE> return
cid is ChildId gid is GiftId
62598fcc5fcc89381b26632f
class DropQueryBuilder: <NEW_LINE> <INDENT> QUOTE_CHAR = '"' <NEW_LINE> SECONDARY_QUOTE_CHAR = "'" <NEW_LINE> ALIAS_QUOTE_CHAR = None <NEW_LINE> QUERY_CLS = Query <NEW_LINE> def __init__(self, dialect: Optional[Dialects] = None) -> None: <NEW_LINE> <INDENT> self._drop_target_kind = None <NEW_LINE> self._drop_target: Union[Database, Table, str] = "" <NEW_LINE> self._if_exists = None <NEW_LINE> self.dialect = dialect <NEW_LINE> <DEDENT> def _set_kwargs_defaults(self, kwargs: dict) -> None: <NEW_LINE> <INDENT> kwargs.setdefault("quote_char", self.QUOTE_CHAR) <NEW_LINE> kwargs.setdefault("secondary_quote_char", self.SECONDARY_QUOTE_CHAR) <NEW_LINE> kwargs.setdefault("dialect", self.dialect) <NEW_LINE> <DEDENT> @builder <NEW_LINE> def drop_database(self, database: Union[Database, str]) -> "DropQueryBuilder": <NEW_LINE> <INDENT> target = database if isinstance(database, Database) else Database(database) <NEW_LINE> self._set_target('DATABASE', target) <NEW_LINE> <DEDENT> @builder <NEW_LINE> def drop_table(self, table: Union[Table, str]) -> "DropQueryBuilder": <NEW_LINE> <INDENT> target = table if isinstance(table, Table) else Table(table) <NEW_LINE> self._set_target('TABLE', target) <NEW_LINE> <DEDENT> @builder <NEW_LINE> def drop_user(self, user: str) -> "DropQueryBuilder": <NEW_LINE> <INDENT> self._set_target('USER', user) <NEW_LINE> <DEDENT> @builder <NEW_LINE> def drop_view(self, view: str) -> "DropQueryBuilder": <NEW_LINE> <INDENT> self._set_target('VIEW', view) <NEW_LINE> <DEDENT> @builder <NEW_LINE> def if_exists(self) -> "DropQueryBuilder": <NEW_LINE> <INDENT> self._if_exists = True <NEW_LINE> <DEDENT> def _set_target(self, kind: str, target: Union[Database, Table, str]) -> None: <NEW_LINE> <INDENT> if self._drop_target: <NEW_LINE> <INDENT> raise AttributeError("'DropQuery' object already has attribute drop_target") <NEW_LINE> <DEDENT> self._drop_target_kind = kind <NEW_LINE> self._drop_target = target <NEW_LINE> <DEDENT> def get_sql(self, **kwargs: Any) -> str: 
<NEW_LINE> <INDENT> self._set_kwargs_defaults(kwargs) <NEW_LINE> if_exists = 'IF EXISTS ' if self._if_exists else '' <NEW_LINE> target_name: str = "" <NEW_LINE> if isinstance(self._drop_target, Database): <NEW_LINE> <INDENT> target_name = self._drop_target.get_sql(**kwargs) <NEW_LINE> <DEDENT> elif isinstance(self._drop_target, Table): <NEW_LINE> <INDENT> target_name = self._drop_target.get_sql(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target_name = format_quotes(self._drop_target, self.QUOTE_CHAR) <NEW_LINE> <DEDENT> return "DROP {kind} {if_exists}{name}".format( kind=self._drop_target_kind, if_exists=if_exists, name=target_name ) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.get_sql() <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return self.__str__()
Query builder used to build DROP queries.
62598fcc63b5f9789fe8553d
class BinaryTree: <NEW_LINE> <INDENT> def __init__(self, root_value): <NEW_LINE> <INDENT> self.tree = BinaryTreeNode(root_value) <NEW_LINE> <DEDENT> def add_child(self, new_value): <NEW_LINE> <INDENT> current_node = self.tree <NEW_LINE> if not current_node.has_left_child(): <NEW_LINE> <INDENT> current_node.set_left_child(BinaryTreeNode(new_value)) <NEW_LINE> current_node = current_node.get_left_child() <NEW_LINE> <DEDENT> elif not current_node.has_right_child(): <NEW_LINE> <INDENT> current_node.set_right_child(BinaryTreeNode(new_value)) <NEW_LINE> current_node = current_node.get_right_child() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_node = current_node[1]
Implementation of a binary tree Args: root_value (Object): The value of the root node Attributes: tree (BinaryTreeNode): The root node that represents the tree
62598fcc8a349b6b43686605
class KillBufferIterClass: <NEW_LINE> <INDENT> def __init__(self,c): <NEW_LINE> <INDENT> self.c = c <NEW_LINE> self.index = 0 <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> commands = self.c.killBufferCommands <NEW_LINE> aList = g.app.globalKillBuffer <NEW_LINE> if not aList: <NEW_LINE> <INDENT> self.index = 0 <NEW_LINE> return None <NEW_LINE> <DEDENT> if commands.reset is None: <NEW_LINE> <INDENT> i = self.index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i = commands.reset <NEW_LINE> commands.reset = None <NEW_LINE> <DEDENT> if i < 0 or i >= len(aList): i = 0 <NEW_LINE> val = aList[i] <NEW_LINE> self.index = i + 1 <NEW_LINE> return val <NEW_LINE> <DEDENT> __next__ = next
Iterator class over the entries of the global kill buffer.
62598fcc4527f215b58ea295
class CsmUsageQuotaCollection(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[CsmUsageQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: List["CsmUsageQuota"], **kwargs ): <NEW_LINE> <INDENT> super(CsmUsageQuotaCollection, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None
Collection of CSM usage quotas. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar value: Required. Collection of resources. :vartype value: list[~azure.mgmt.web.v2020_09_01.models.CsmUsageQuota] :ivar next_link: Link to next page of resources. :vartype next_link: str
62598fcc7cff6e4e811b5ded
class Block(base.ManagedResource): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> klass = self.__class__.__name__ <NEW_LINE> return '<{}(blocked_user_id={!r})>'.format(klass, self.blocked_user_id) <NEW_LINE> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return self.manager.between(other_user_id=self.blocked_user_id) <NEW_LINE> <DEDENT> def unblock(self): <NEW_LINE> <INDENT> return self.manager.unblock(other_user_id=self.blocked_user_id)
A block between you and another user.
62598fcc656771135c489a38
class VectorPrinter: <NEW_LINE> <INDENT> class Iterator: <NEW_LINE> <INDENT> def __init__ (self, start, finish, bit_vector): <NEW_LINE> <INDENT> self.bit_vector = bit_vector <NEW_LINE> self.count = 0 <NEW_LINE> if bit_vector: <NEW_LINE> <INDENT> self.item = start['_M_p'] <NEW_LINE> self.io = start['_M_offset'] <NEW_LINE> self.finish = finish['_M_p'] <NEW_LINE> self.fo = finish['_M_offset'] <NEW_LINE> self.isize = 8 * self.item.dereference().type.sizeof <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.item = start <NEW_LINE> self.finish = finish <NEW_LINE> <DEDENT> <DEDENT> def __iter__ (self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next (self): <NEW_LINE> <INDENT> count = self.count <NEW_LINE> self.count += 1 <NEW_LINE> if self.bit_vector: <NEW_LINE> <INDENT> if self.item == self.finish and self.io == self.fo: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> element = self.item.dereference() <NEW_LINE> value = 0 <NEW_LINE> if element & (1 << self.io): <NEW_LINE> <INDENT> value = 1 <NEW_LINE> <DEDENT> self.io += 1 <NEW_LINE> if self.io >= self.isize: <NEW_LINE> <INDENT> self.item += 1 <NEW_LINE> self.io = 0 <NEW_LINE> <DEDENT> return ('[%d]' % count, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.item == self.finish: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> element = self.item.dereference() <NEW_LINE> self.item += 1 <NEW_LINE> return ('[%d]' % count, element) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __init__ (self, typename, val): <NEW_LINE> <INDENT> self.typename = typename <NEW_LINE> self.val = get_non_debug_impl (val) <NEW_LINE> self.bit_vector = val.type.template_argument (0).code == gdb.TYPE_CODE_BOOL <NEW_LINE> <DEDENT> def children (self): <NEW_LINE> <INDENT> start = self.val['_M_start'] <NEW_LINE> finish = self.val['_M_finish'] <NEW_LINE> return self.Iterator (start, finish, self.bit_vector) <NEW_LINE> <DEDENT> def to_string (self): <NEW_LINE> <INDENT> if self.bit_vector: <NEW_LINE> <INDENT> 
start = self.val['_M_start']['_M_p'] <NEW_LINE> so = self.val['_M_start']['_M_offset'] <NEW_LINE> finish = self.val['_M_finish']['_M_p'] <NEW_LINE> fo = self.val['_M_finish']['_M_offset'] <NEW_LINE> end = self.val['_M_end_of_storage']['_M_data'] <NEW_LINE> isize = 8 * start.dereference().type.sizeof <NEW_LINE> length = (isize - so) + isize * (finish - start - 1) + fo <NEW_LINE> capacity = isize * (end - start) <NEW_LINE> return ('%s<bool> of length %d, capacity %d' % (self.typename, length, capacity)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = self.val['_M_start'] <NEW_LINE> finish = self.val['_M_finish'] <NEW_LINE> end = self.val['_M_end_of_storage']['_M_data'] <NEW_LINE> length = finish - start <NEW_LINE> capacity = end - start <NEW_LINE> ta0 = self.val.type.template_argument (0) <NEW_LINE> return ('%s<%s> of length %d, capacity %d' % (self.typename, ta0, length, capacity)) <NEW_LINE> <DEDENT> <DEDENT> def display_hint (self): <NEW_LINE> <INDENT> if print_vector_with_indices: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'array'
Pretty printer for std::vector.
62598fccec188e330fdf8c5d
class PreActBottleneck(nn.Module): <NEW_LINE> <INDENT> expansion = 4 <NEW_LINE> def __init__(self, in_planes, planes, stride=1): <NEW_LINE> <INDENT> super(PreActBottleneck, self).__init__() <NEW_LINE> self.bn1 = nn.BatchNorm2d(in_planes) <NEW_LINE> self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) <NEW_LINE> self.bn2 = nn.BatchNorm2d(planes) <NEW_LINE> self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) <NEW_LINE> self.bn3 = nn.BatchNorm2d(planes) <NEW_LINE> self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) <NEW_LINE> if stride != 1 or in_planes != self.expansion*planes: <NEW_LINE> <INDENT> self.shortcut = nn.Sequential( nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False) ) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> out = F.relu(self.bn1(x)) <NEW_LINE> shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x <NEW_LINE> out = self.conv1(out) <NEW_LINE> out = self.conv2(F.relu(self.bn2(out))) <NEW_LINE> out = self.conv3(F.relu(self.bn3(out))) <NEW_LINE> out += shortcut <NEW_LINE> return out
Pre-Activation ResNet bottleneck block.
62598fccbe7bc26dc925203d
class GetMessageEditData(Object): <NEW_LINE> <INDENT> ID = 0xfda68d36 <NEW_LINE> def __init__(self, peer, id: int): <NEW_LINE> <INDENT> self.peer = peer <NEW_LINE> self.id = id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(b: BytesIO, *args) -> "GetMessageEditData": <NEW_LINE> <INDENT> peer = Object.read(b) <NEW_LINE> id = Int.read(b) <NEW_LINE> return GetMessageEditData(peer, id) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> b = BytesIO() <NEW_LINE> b.write(Int(self.ID, False)) <NEW_LINE> b.write(self.peer.write()) <NEW_LINE> b.write(Int(self.id)) <NEW_LINE> return b.getvalue()
Attributes: ID: ``0xfda68d36`` Args: peer: Either :obj:`InputPeerEmpty <pyrogram.api.types.InputPeerEmpty>`, :obj:`InputPeerSelf <pyrogram.api.types.InputPeerSelf>`, :obj:`InputPeerChat <pyrogram.api.types.InputPeerChat>`, :obj:`InputPeerUser <pyrogram.api.types.InputPeerUser>` or :obj:`InputPeerChannel <pyrogram.api.types.InputPeerChannel>` id: ``int`` ``32-bit`` Raises: :obj:`Error <pyrogram.Error>` Returns: :obj:`messages.MessageEditData <pyrogram.api.types.messages.MessageEditData>`
62598fcc60cbc95b06364705
class ShopifyObjectDataset(Dataset): <NEW_LINE> <INDENT> _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } <NEW_LINE> def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None): <NEW_LINE> <INDENT> super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations) <NEW_LINE> self.type = 'ShopifyObject'
Shopify Service dataset. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param type: Constant filled by server. :type type: str
62598fcc377c676e912f6f5b
class TestTPR402(_TestTopology, TPR402): <NEW_LINE> <INDENT> pass
Testing TPR version 58
62598fcc4527f215b58ea297
class FcoeNetworkSpec(unittest.TestCase, ModuleContructorTestCase, ValidateEtagTestCase, ErrorHandlingTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.configure_mocks(self, FcoeNetworkModule) <NEW_LINE> self.resource = self.mock_ov_client.fcoe_networks <NEW_LINE> ErrorHandlingTestCase.configure(self, method_to_fire=self.resource.get_by) <NEW_LINE> <DEDENT> def test_should_create_new_fcoe_network(self): <NEW_LINE> <INDENT> self.resource.get_by.return_value = [] <NEW_LINE> self.resource.create.return_value = DEFAULT_FCOE_NETWORK_TEMPLATE <NEW_LINE> self.mock_ansible_module.params = PARAMS_FOR_PRESENT <NEW_LINE> FcoeNetworkModule().run() <NEW_LINE> self.mock_ansible_module.exit_json.assert_called_once_with( changed=True, msg=FCOE_NETWORK_CREATED, ansible_facts=dict(fcoe_network=DEFAULT_FCOE_NETWORK_TEMPLATE) ) <NEW_LINE> <DEDENT> def test_should_not_update_when_data_is_equals(self): <NEW_LINE> <INDENT> self.resource.get_by.return_value = [DEFAULT_FCOE_NETWORK_TEMPLATE] <NEW_LINE> self.mock_ansible_module.params = PARAMS_FOR_PRESENT.copy() <NEW_LINE> FcoeNetworkModule().run() <NEW_LINE> self.mock_ansible_module.exit_json.assert_called_once_with( changed=False, msg=FCOE_NETWORK_ALREADY_EXIST, ansible_facts=dict(fcoe_network=DEFAULT_FCOE_NETWORK_TEMPLATE) ) <NEW_LINE> <DEDENT> def test_update_when_data_has_modified_attributes(self): <NEW_LINE> <INDENT> data_merged = DEFAULT_FCOE_NETWORK_TEMPLATE.copy() <NEW_LINE> data_merged['fabricType'] = 'DirectAttach' <NEW_LINE> self.resource.get_by.return_value = [DEFAULT_FCOE_NETWORK_TEMPLATE] <NEW_LINE> self.resource.update.return_value = data_merged <NEW_LINE> self.mock_ansible_module.params = PARAMS_WITH_CHANGES <NEW_LINE> FcoeNetworkModule().run() <NEW_LINE> self.mock_ansible_module.exit_json.assert_called_once_with( changed=True, msg=FCOE_NETWORK_UPDATED, ansible_facts=dict(fcoe_network=data_merged) ) <NEW_LINE> <DEDENT> def test_should_remove_fcoe_network(self): <NEW_LINE> <INDENT> 
self.resource.get_by.return_value = [DEFAULT_FCOE_NETWORK_TEMPLATE] <NEW_LINE> self.mock_ansible_module.params = PARAMS_FOR_ABSENT <NEW_LINE> FcoeNetworkModule().run() <NEW_LINE> self.mock_ansible_module.exit_json.assert_called_once_with( changed=True, msg=FCOE_NETWORK_DELETED, ansible_facts={} ) <NEW_LINE> <DEDENT> def test_should_do_nothing_when_fcoe_network_not_exist(self): <NEW_LINE> <INDENT> self.resource.get_by.return_value = [] <NEW_LINE> self.mock_ansible_module.params = PARAMS_FOR_ABSENT <NEW_LINE> FcoeNetworkModule().run() <NEW_LINE> self.mock_ansible_module.exit_json.assert_called_once_with( changed=False, msg=FCOE_NETWORK_ALREADY_ABSENT, ansible_facts={} )
ModuleContructorTestCase has common tests for class constructor and main function, also provides the mocks used in this test case ValidateEtagTestCase has common tests for the validate_etag attribute. ErrorHandlingTestCase has common tests for the module error handling.
62598fcc55399d3f056268e2
class RTidyr(RPackage): <NEW_LINE> <INDENT> homepage = "https://cloud.r-project.org/src/contrib/tidyr_1.0.2.tar.gz" <NEW_LINE> url = "https://cloud.r-project.org/src/contrib/tidyr_1.0.2.tar.gz" <NEW_LINE> version('1.0.2', md5='9118722418f48877650f6dcf9e160606') <NEW_LINE> depends_on('r-assertthat@0.2.1:', type=('build', 'run')) <NEW_LINE> depends_on('r-cli@2.0.2:', type=('build', 'run')) <NEW_LINE> depends_on('r-crayon@1.3.4:', type=('build', 'run')) <NEW_LINE> depends_on('r-digest@0.6.25:', type=('build', 'run')) <NEW_LINE> depends_on('r-dplyr@0.8.5:', type=('build', 'run')) <NEW_LINE> depends_on('r-ellipsis@0.3.0:', type=('build', 'run')) <NEW_LINE> depends_on('r-fansi@0.4.1:', type=('build', 'run')) <NEW_LINE> depends_on('r-glue@1.3.1:', type=('build', 'run')) <NEW_LINE> depends_on('r-lifecycle@0.2.0:', type=('build', 'run')) <NEW_LINE> depends_on('r-lobstr@1.1.1:', type=('build', 'run')) <NEW_LINE> depends_on('r-magrittr@1.5:', type=('build', 'run')) <NEW_LINE> depends_on('r-pillar@1.4.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-pkgconfig@2.0.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-purrr@0.3.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-r6@2.4.1:', type=('build', 'run')) <NEW_LINE> depends_on('r-rcpp@1.0.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-rlang@0.4.5:', type=('build', 'run')) <NEW_LINE> depends_on('r-stringi@1.4.6:', type=('build', 'run')) <NEW_LINE> depends_on('r-tibble@2.1.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-tidyselect@1.0.0:', type=('build', 'run')) <NEW_LINE> depends_on('r-utf8@1.1.4:', type=('build', 'run')) <NEW_LINE> depends_on('r-vctrs@0.2.3:', type=('build', 'run')) <NEW_LINE> depends_on('r-zeallot@0.1.0:', type=('build', 'run'))
Tidy Messy Data
62598fcc283ffb24f3cf3c4f