code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class ClockEvent(object): <NEW_LINE> <INDENT> def __init__(self, clock, loop, callback, timeout, starttime, cid, trigger=False): <NEW_LINE> <INDENT> self.clock = clock <NEW_LINE> self.cid = cid <NEW_LINE> self.loop = loop <NEW_LINE> self.weak_callback = None <NEW_LINE> self.callback = callback <NEW_LINE> self.timeout = timeout <NEW_LINE> self._is_triggered = trigger <NEW_LINE> self._last_dt = starttime <NEW_LINE> self._dt = 0. <NEW_LINE> if trigger: <NEW_LINE> <INDENT> clock._events[cid].append(self) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, *largs): <NEW_LINE> <INDENT> if not self._is_triggered: <NEW_LINE> <INDENT> self._is_triggered = True <NEW_LINE> self._last_dt = self.clock._last_tick <NEW_LINE> self.clock._events[self.cid].append(self) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> def get_callback(self): <NEW_LINE> <INDENT> callback = self.callback <NEW_LINE> if callback is not None: <NEW_LINE> <INDENT> return callback <NEW_LINE> <DEDENT> callback = self.weak_callback <NEW_LINE> if callback.is_dead(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return callback() <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_triggered(self): <NEW_LINE> <INDENT> return self._is_triggered <NEW_LINE> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> if self._is_triggered: <NEW_LINE> <INDENT> self._is_triggered = False <NEW_LINE> try: <NEW_LINE> <INDENT> self.clock._events[self.cid].remove(self) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def release(self): <NEW_LINE> <INDENT> self.weak_callback = WeakMethod(self.callback) <NEW_LINE> self.callback = None <NEW_LINE> <DEDENT> def tick(self, curtime, remove): <NEW_LINE> <INDENT> if curtime - self._last_dt < self.timeout - 0.005: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self._dt = curtime - self._last_dt <NEW_LINE> self._last_dt = curtime <NEW_LINE> loop = self.loop <NEW_LINE> callback = self.get_callback() <NEW_LINE> if callback is None: 
<NEW_LINE> <INDENT> self._is_triggered = False <NEW_LINE> try: <NEW_LINE> <INDENT> remove(self) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> if not loop: <NEW_LINE> <INDENT> self._is_triggered = False <NEW_LINE> try: <NEW_LINE> <INDENT> remove(self) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> ret = callback(self._dt) <NEW_LINE> if loop and ret is False: <NEW_LINE> <INDENT> self._is_triggered = False <NEW_LINE> try: <NEW_LINE> <INDENT> remove(self) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> return loop <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<ClockEvent callback=%r>' % self.get_callback()
A class that describes a callback scheduled with kivy's :attr:`Clock`. This class is never created by the user; instead, kivy creates and returns an instance of this class when scheduling a callback. .. warning:: Most of the methods of this class are internal and can change without notice. The only exception are the :meth:`cancel` and :meth:`__call__` methods.
62598fa476e4537e8c3ef459
class IndirectMpich(Package): <NEW_LINE> <INDENT> homepage = "http://www.example.com" <NEW_LINE> url = "http://www.example.com/indirect_mpich-1.0.tar.gz" <NEW_LINE> version(1.0, 'foobarbaz') <NEW_LINE> depends_on('mpi') <NEW_LINE> depends_on('direct-mpich')
Test case for a package that depends on MPI and one of its dependencies requires a *particular version* of MPI.
62598fa4e64d504609df930f
class PooledDedicatedDBConnection: <NEW_LINE> <INDENT> def __init__(self, pool, con): <NEW_LINE> <INDENT> self._con = None <NEW_LINE> if not con.threadsafety(): <NEW_LINE> <INDENT> raise NotSupportedError("Database module is not thread-safe.") <NEW_LINE> <DEDENT> self._pool = pool <NEW_LINE> self._con = con <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self._con: <NEW_LINE> <INDENT> self._pool.cache(self._con) <NEW_LINE> self._con = None <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if self._con: <NEW_LINE> <INDENT> return getattr(self._con, name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidConnection <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass
Auxiliary proxy class for pooled dedicated connections.
62598fa421bff66bcd722b12
class CNN(Chain): <NEW_LINE> <INDENT> def __init__(self, input_channel, output_channel, filter_height, filter_width, mid_units, n_units, n_label): <NEW_LINE> <INDENT> super(CNN, self).__init__( conv1 = L.Convolution2D(input_channel, output_channel, (filter_height, filter_width)), l1 = L.Linear(mid_units, n_units), l2 = L.Linear(n_units, n_label), ) <NEW_LINE> <DEDENT> def forward(self, x, t, train=True): <NEW_LINE> <INDENT> h1 = F.max_pooling_2d(F.relu(self.conv1(x)), 3) <NEW_LINE> h2 = F.dropout(F.relu(self.l1(h1)), train=True) <NEW_LINE> y = self.l2(h2) <NEW_LINE> if train: <NEW_LINE> <INDENT> return F.softmax_cross_entropy(y, t) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return F.softmax_cross_entropy(y, t), F.accuracy(y, t) <NEW_LINE> <DEDENT> <DEDENT> def predict(self, x): <NEW_LINE> <INDENT> h1 = F.max_pooling_2d(F.relu(self.conv1(x)), 3) <NEW_LINE> h2 = F.dropout(F.relu(self.l1(h1))) <NEW_LINE> y = self.l2(h2) <NEW_LINE> return F.softmax(y).data
Convolutional Neural Network のモデル input_channel : 入力するチャンネル数(通常のカラー画像なら3) output_channel : 畳み込み後のチャンネル数 filter_height : 畳み込みに使用するフィルターの縦方向のサイズ filter_width : 畳み込みに使用するフィルターの横方向のサイズ mid_units : 全結合の隠れ層1のノード数 n_units : 全結合の隠れ層2のノード数 n_label : ラベルの出力数(今回は2)
62598fa410dbd63aa1c70a5c
class TestingConfig(Config): <NEW_LINE> <INDENT> TESTING = True <NEW_LINE> DEBUG = True <NEW_LINE> DATABASE_URL = os.getenv("DATABASE_TEST_URL")
Configurations for Testing,
62598fa40a50d4780f705288
class Alphabet: <NEW_LINE> <INDENT> def __init__(self, chars, encoding, chars_rc=None, encoding_rc=None, missing=255): <NEW_LINE> <INDENT> self.chars = np.frombuffer(chars, dtype=np.uint8) <NEW_LINE> self.encoding = np.zeros(256, dtype=np.uint8) + missing <NEW_LINE> self.encoding[self.chars] = encoding <NEW_LINE> if chars_rc is not None: <NEW_LINE> <INDENT> self.chars_rc = np.frombuffer(chars_rc, dtype=np.uint8) <NEW_LINE> self.encoding_rc = np.zeros(256, dtype=np.uint8) + missing <NEW_LINE> self.encoding_rc[self.chars_rc] = encoding_rc <NEW_LINE> <DEDENT> self.size = encoding.max() + 1 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.size <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> return chr(self.chars[i]) <NEW_LINE> <DEDENT> def encode(self, x, reverse_complement=False): <NEW_LINE> <INDENT> if not reverse_complement: <NEW_LINE> <INDENT> x = np.frombuffer(x, dtype=np.uint8) <NEW_LINE> string = self.encoding[x] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = np.frombuffer(x, dtype=np.uint8)[::-1] <NEW_LINE> string = self.encoding_rc[x] <NEW_LINE> <DEDENT> return string <NEW_LINE> <DEDENT> def decode(self, x, reverse_complement=False): <NEW_LINE> <INDENT> if not reverse_complement: <NEW_LINE> <INDENT> string = self.chars[x-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> string = self.chars_rc[x[::-1]-1] <NEW_LINE> <DEDENT> return string.tobytes()
biological sequence encoder
62598fa47d847024c075c272
class SeriesForm(forms.Form): <NEW_LINE> <INDENT> host = forms.CharField(max_length=constants.NAME_MAX_LENGTH) <NEW_LINE> timestamp = forms.FloatField(min_value=0) <NEW_LINE> samples = forms.Field() <NEW_LINE> def clean_samples(self): <NEW_LINE> <INDENT> cleaned = [] <NEW_LINE> for row in self.data['samples']: <NEW_LINE> <INDENT> form = SampleForm(data=row) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> cleaned.append(form.cleaned_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValidationError(_('Invalid value.'), code='invalid') <NEW_LINE> <DEDENT> <DEDENT> return cleaned
Roughly validates received series of data.
62598fa46fb2d068a7693d8b
class AntiTank(fAirDefenceAntiTank): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.unitID = 18
...
62598fa42c8b7c6e89bd3672
class BrokerConnection(object): <NEW_LINE> <INDENT> def __init__(self, host, port, handler, buffer_size=1024 * 1024, source_host='', source_port=0, ssl_config=None): <NEW_LINE> <INDENT> self._buff = bytearray(buffer_size) <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self._handler = handler <NEW_LINE> self._socket = None <NEW_LINE> self.source_host = source_host <NEW_LINE> self.source_port = source_port <NEW_LINE> self._wrap_socket = ( ssl_config.wrap_socket if ssl_config else lambda x: x) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> <DEDENT> @property <NEW_LINE> def connected(self): <NEW_LINE> <INDENT> return self._socket is not None <NEW_LINE> <DEDENT> def connect(self, timeout): <NEW_LINE> <INDENT> log.debug("Connecting to %s:%s", self.host, self.port) <NEW_LINE> self._socket = self._wrap_socket( self._handler.Socket.create_connection( (self.host, self.port), timeout / 1000, (self.source_host, self.source_port) )) <NEW_LINE> if self._socket is not None: <NEW_LINE> <INDENT> log.debug("Successfully connected to %s:%s", self.host, self.port) <NEW_LINE> <DEDENT> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> if self._socket is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._socket.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._socket = None <NEW_LINE> <DEDENT> <DEDENT> def reconnect(self): <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> self.connect(10 * 1000) <NEW_LINE> <DEDENT> def request(self, request): <NEW_LINE> <INDENT> bytes_ = request.get_bytes() <NEW_LINE> if not self._socket: <NEW_LINE> <INDENT> raise SocketDisconnectedError <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._socket.sendall(bytes_) <NEW_LINE> <DEDENT> except SocketDisconnectedError: <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def response(self): <NEW_LINE> <INDENT> size = 
bytes() <NEW_LINE> expected_len = 4 <NEW_LINE> while len(size) != expected_len: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r = self._socket.recv(expected_len - len(size)) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> r = None <NEW_LINE> <DEDENT> if r is None or len(r) == 0: <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> raise SocketDisconnectedError <NEW_LINE> <DEDENT> size += r <NEW_LINE> <DEDENT> size = struct.unpack('!i', size)[0] <NEW_LINE> try: <NEW_LINE> <INDENT> recvall_into(self._socket, self._buff, size) <NEW_LINE> <DEDENT> except SocketDisconnectedError: <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> raise <NEW_LINE> <DEDENT> return buffer(self._buff[4:4 + size])
BrokerConnection thinly wraps a `socket.create_connection` call and handles the sending and receiving of data that conform to the kafka binary protocol over that socket.
62598fa41f5feb6acb162ace
class MissingImage(Exception): <NEW_LINE> <INDENT> pass
Raised when an expected image is not present.
62598fa401c39578d7f12c2b
class QualityRunner(Executor): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def _get_quality_scores(self, asset): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _read_result(self, asset): <NEW_LINE> <INDENT> result = {} <NEW_LINE> result.update(self._get_quality_scores(asset)) <NEW_LINE> executor_id = self.executor_id <NEW_LINE> return Result(asset, executor_id, result) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_scores_key(cls): <NEW_LINE> <INDENT> return f"{cls.TYPE}_scores" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_score_key(cls): <NEW_LINE> <INDENT> return f"{cls.TYPE}_score"
QualityRunner takes in a list of assets, and run quality assessment on them, and return a list of corresponding results. A QualityRunner must specify a unique type and version combination (by the TYPE and VERSION attribute), so that the Result generated by it can be identified and stored by ResultStore class. There are two ways to create a derived class of QualityRunner: a) Call a command-line executable directly, very similar to what FeatureExtractor does. You must: 1) Override TYPE and VERSION 2) Override _generate_result(self, asset), which call a command-line executable and generate quality scores in a log file. 3) Override _get_quality_scores(self, asset), which read the quality scores from the log file, and return the scores in a dictionary format. 4) If necessary, override _remove_log(self, asset) if Executor._remove_log(self, asset) doesn't work for your purpose (sometimes the command-line executable could generate output log files in some different format, like multiple files). For an example, follow PsnrQualityRunner. b) Override the Executor._run_on_asset(self, asset) method to bypass the regular routine, but instead, in the method construct a FeatureAssembler (which calls a FeatureExtractor (or many) and assembles a list of features, followed by using a TrainTestModel (pre-trained somewhere else) to predict the final quality score. You must: 1) Override TYPE and VERSION 2) Override _run_on_asset(self, asset), which runs a FeatureAssembler, collect a feature vector, run TrainTestModel.predict() on it, and return a Result object (in this case, both Executor._run_on_asset(self, asset) and QualityRunner._read_result(self, asset) get bypassed. 3) Override _remove_log(self, asset) by redirecting it to the FeatureAssembler. 4) Override _remove_result(self, asset) by redirecting it to the FeatureAssembler. For an example, follow VmafQualityRunner.
62598fa4009cb60464d013d1
class BgpPeerStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'local_address': {'readonly': True}, 'neighbor': {'readonly': True}, 'asn': {'readonly': True, 'maximum': 4294967295, 'minimum': 0}, 'state': {'readonly': True}, 'connected_duration': {'readonly': True}, 'routes_received': {'readonly': True}, 'messages_sent': {'readonly': True}, 'messages_received': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'local_address': {'key': 'localAddress', 'type': 'str'}, 'neighbor': {'key': 'neighbor', 'type': 'str'}, 'asn': {'key': 'asn', 'type': 'long'}, 'state': {'key': 'state', 'type': 'str'}, 'connected_duration': {'key': 'connectedDuration', 'type': 'str'}, 'routes_received': {'key': 'routesReceived', 'type': 'long'}, 'messages_sent': {'key': 'messagesSent', 'type': 'long'}, 'messages_received': {'key': 'messagesReceived', 'type': 'long'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(BgpPeerStatus, self).__init__(**kwargs) <NEW_LINE> self.local_address = None <NEW_LINE> self.neighbor = None <NEW_LINE> self.asn = None <NEW_LINE> self.state = None <NEW_LINE> self.connected_duration = None <NEW_LINE> self.routes_received = None <NEW_LINE> self.messages_sent = None <NEW_LINE> self.messages_received = None
BGP peer status details. Variables are only populated by the server, and will be ignored when sending a request. :ivar local_address: The virtual network gateway's local address. :vartype local_address: str :ivar neighbor: The remote BGP peer. :vartype neighbor: str :ivar asn: The autonomous system number of the remote BGP peer. :vartype asn: long :ivar state: The BGP peer state. Possible values include: "Unknown", "Stopped", "Idle", "Connecting", "Connected". :vartype state: str or ~azure.mgmt.network.v2020_07_01.models.BgpPeerState :ivar connected_duration: For how long the peering has been up. :vartype connected_duration: str :ivar routes_received: The number of routes learned from this peer. :vartype routes_received: long :ivar messages_sent: The number of BGP messages sent. :vartype messages_sent: long :ivar messages_received: The number of BGP messages received. :vartype messages_received: long
62598fa432920d7e50bc5f03
class VOCSegmentation(Dataset): <NEW_LINE> <INDENT> NUM_CLASSES = 6 <NEW_LINE> def __init__(self, args, base_dir=Path.db_root_dir('pascal'), split='train', ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._base_dir = base_dir <NEW_LINE> self._image_dir = os.path.join(self._base_dir, 'JPEGImages') <NEW_LINE> self._cat_dir = os.path.join(self._base_dir, 'SegmentationClass') <NEW_LINE> if isinstance(split, str): <NEW_LINE> <INDENT> self.split = [split] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> split.sort() <NEW_LINE> self.split = split <NEW_LINE> <DEDENT> self.args = args <NEW_LINE> _splits_dir = os.path.join(self._base_dir, 'ImageSets', 'Segmentation') <NEW_LINE> self.im_ids = [] <NEW_LINE> self.images = [] <NEW_LINE> self.categories = [] <NEW_LINE> for splt in self.split: <NEW_LINE> <INDENT> with open(os.path.join(os.path.join(_splits_dir, splt + '.txt')), "r") as f: <NEW_LINE> <INDENT> lines = f.read().splitlines() <NEW_LINE> <DEDENT> for ii, line in enumerate(lines): <NEW_LINE> <INDENT> _image = os.path.join(self._image_dir, line + ".jpg") <NEW_LINE> _cat = os.path.join(self._cat_dir, line + ".png") <NEW_LINE> assert os.path.isfile(_image) <NEW_LINE> assert os.path.isfile(_cat) <NEW_LINE> self.im_ids.append(line) <NEW_LINE> self.images.append(_image) <NEW_LINE> self.categories.append(_cat) <NEW_LINE> <DEDENT> <DEDENT> assert (len(self.images) == len(self.categories)) <NEW_LINE> print('Number of images in {}: {:d}'.format(split, len(self.images))) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.images) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> _img, _target = self._make_img_gt_point_pair(index) <NEW_LINE> sample = {'image': _img, 'label': _target} <NEW_LINE> for split in self.split: <NEW_LINE> <INDENT> if split == "train": <NEW_LINE> <INDENT> return self.transform_tr(sample) <NEW_LINE> <DEDENT> elif split == 'val': <NEW_LINE> <INDENT> return self.transform_val(sample) <NEW_LINE> <DEDENT> <DEDENT> 
<DEDENT> def _make_img_gt_point_pair(self, index): <NEW_LINE> <INDENT> _img = Image.open(self.images[index]).convert('RGB') <NEW_LINE> _target = Image.open(self.categories[index]) <NEW_LINE> return _img, _target <NEW_LINE> <DEDENT> def transform_tr(self, sample): <NEW_LINE> <INDENT> composed_transforms = transforms.Compose([ tr.RandomHorizontalFlip(), tr.RandomScaleCrop(base_size=self.args.base_size, crop_size=self.args.crop_size), tr.RandomGaussianBlur(), tr.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)), tr.ToTensor()]) <NEW_LINE> return composed_transforms(sample) <NEW_LINE> <DEDENT> def transform_val(self, sample): <NEW_LINE> <INDENT> composed_transforms = transforms.Compose([ tr.FixScaleCrop(crop_size=self.args.crop_size), tr.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)), tr.ToTensor()]) <NEW_LINE> return composed_transforms(sample) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'VOC2012(split=' + str(self.split) + ')'
PascalVoc dataset
62598fa4442bda511e95c302
class itkContourMeanDistanceImageFilterIUC3IUC3(itkImageToImageFilterAPython.itkImageToImageFilterIUC3IUC3): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> ImageDimension = _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_ImageDimension <NEW_LINE> InputHasNumericTraitsCheck = _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_InputHasNumericTraitsCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetInput1(self, *args): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_SetInput1(self, *args) <NEW_LINE> <DEDENT> def SetInput2(self, *args): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_SetInput2(self, *args) <NEW_LINE> <DEDENT> def GetInput1(self): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_GetInput1(self) <NEW_LINE> <DEDENT> def GetInput2(self): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_GetInput2(self) <NEW_LINE> <DEDENT> def GetMeanDistance(self): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_GetMeanDistance(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkContourMeanDistanceImageFilterPython.delete_itkContourMeanDistanceImageFilterIUC3IUC3 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_cast(*args) 
<NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkContourMeanDistanceImageFilterPython.itkContourMeanDistanceImageFilterIUC3IUC3_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkContourMeanDistanceImageFilterIUC3IUC3.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkContourMeanDistanceImageFilterIUC3IUC3 class
62598fa43539df3088ecc161
class PluginUrlRewriting(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.disabled_rewriters = [] <NEW_LINE> <DEDENT> def on_task_urlrewrite(self, task, config): <NEW_LINE> <INDENT> log.debug('Checking %s entries', len(task.accepted)) <NEW_LINE> for entry in task.accepted: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.url_rewrite(task, entry) <NEW_LINE> <DEDENT> except UrlRewritingError as e: <NEW_LINE> <INDENT> log.warning(e.value) <NEW_LINE> entry.fail() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def url_rewritable(self, task, entry): <NEW_LINE> <INDENT> for urlrewriter in plugin.get_plugins(interface='urlrewriter'): <NEW_LINE> <INDENT> if urlrewriter.name in self.disabled_rewriters: <NEW_LINE> <INDENT> log.trace('Skipping rewriter %s since it\'s disabled', urlrewriter.name) <NEW_LINE> continue <NEW_LINE> <DEDENT> log.trace('checking urlrewriter %s', urlrewriter.name) <NEW_LINE> if urlrewriter.instance.url_rewritable(task, entry): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> @plugin.priority(255) <NEW_LINE> def url_rewrite(self, task, entry): <NEW_LINE> <INDENT> tries = 0 <NEW_LINE> while self.url_rewritable(task, entry) and entry.accepted: <NEW_LINE> <INDENT> tries += 1 <NEW_LINE> if tries > 20: <NEW_LINE> <INDENT> raise UrlRewritingError('URL rewriting was left in infinite loop while rewriting url for %s, ' 'some rewriter is returning always True' % entry) <NEW_LINE> <DEDENT> for urlrewriter in plugin.get_plugins(interface='urlrewriter'): <NEW_LINE> <INDENT> name = urlrewriter.name <NEW_LINE> if name in self.disabled_rewriters: <NEW_LINE> <INDENT> log.trace('Skipping rewriter %s since it\'s disabled', name) <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if urlrewriter.instance.url_rewritable(task, entry): <NEW_LINE> <INDENT> old_url = entry['url'] <NEW_LINE> log.debug('Url rewriting %s' % entry['url']) <NEW_LINE> urlrewriter.instance.url_rewrite(task, entry) <NEW_LINE> if 
entry['url'] != old_url: <NEW_LINE> <INDENT> if entry.get('urls') and old_url in entry.get('urls'): <NEW_LINE> <INDENT> entry['urls'][entry['urls'].index(old_url)] = entry['url'] <NEW_LINE> <DEDENT> log.info('Entry \'%s\' URL rewritten to %s (with %s)', entry['title'], entry['url'], name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except UrlRewritingError as r: <NEW_LINE> <INDENT> raise UrlRewritingError('URL rewriting %s failed: %s' % (name, r.value)) <NEW_LINE> <DEDENT> except plugin.PluginError as e: <NEW_LINE> <INDENT> raise UrlRewritingError('URL rewriting %s failed: %s' % (name, e.value)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.exception(e) <NEW_LINE> raise UrlRewritingError('%s: Internal error with url %s' % (name, entry['url']))
Provides URL rewriting framework
62598fa4d7e4931a7ef3bf48
class LayerIterator(object): <NEW_LINE> <INDENT> def __init__(self, layer): <NEW_LINE> <INDENT> self.layer = layer <NEW_LINE> self.i, self.j = 0, 0 <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.i == self.layer.width: <NEW_LINE> <INDENT> self.j += 1 <NEW_LINE> self.i = 0 <NEW_LINE> <DEDENT> if self.j == self.layer.height: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> value = self.layer[self.i, self.j] <NEW_LINE> self.i += 1 <NEW_LINE> return value <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.i == self.layer.width: <NEW_LINE> <INDENT> self.j += 1 <NEW_LINE> self.i = 0 <NEW_LINE> <DEDENT> if self.j == self.layer.height: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> value = self.layer[self.i, self.j] <NEW_LINE> self.i += 1 <NEW_LINE> return value
Iterates over all the cells in a layer in column,row order.
62598fa497e22403b383adb9
class InitializeOAuthInputSet(InputSet): <NEW_LINE> <INDENT> def set_ConsumerKey(self, value): <NEW_LINE> <INDENT> super(InitializeOAuthInputSet, self)._set_input('ConsumerKey', value) <NEW_LINE> <DEDENT> def set_ConsumerSecret(self, value): <NEW_LINE> <INDENT> super(InitializeOAuthInputSet, self)._set_input('ConsumerSecret', value) <NEW_LINE> <DEDENT> def set_ForwardingURL(self, value): <NEW_LINE> <INDENT> super(InitializeOAuthInputSet, self)._set_input('ForwardingURL', value)
An InputSet with methods appropriate for specifying the inputs to the InitializeOAuth Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
62598fa4eab8aa0e5d30bc36
class _Coordinate: <NEW_LINE> <INDENT> def __init__(self, coord): <NEW_LINE> <INDENT> self.coord = coord <NEW_LINE> <DEDENT> def cart(self): <NEW_LINE> <INDENT> return self.coord <NEW_LINE> <DEDENT> def frac(self, lat): <NEW_LINE> <INDENT> return _xtal.cartesian_to_fractional(self.coord, lat) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_fractional(cls, coords, lat): <NEW_LINE> <INDENT> cartesian_coords = _xtal.fractional_to_cartesian(coords, lat) <NEW_LINE> return cls(cartesian_coords) <NEW_LINE> <DEDENT> def set_compare_method(self, method, *args): <NEW_LINE> <INDENT> self._equals = method(self, *args) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if hasattr(self, '_equals') is False: <NEW_LINE> <INDENT> self.set_compare_method(Equals, globaldef.tol) <NEW_LINE> <DEDENT> return self._equals(other) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return other.__class__(np.add(self.coord, other.cart())) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.coord.__str__() <NEW_LINE> <DEDENT> def __rmul__(self, CartOp): <NEW_LINE> <INDENT> return self.__class__(CartOp * self.coord)
Base class for both mutable and immutable Coordinate classes. Defines the functions that should be common for both.
62598fa491f36d47f2230df9
class JobsQueue(object): <NEW_LINE> <INDENT> def __init__(self, env): <NEW_LINE> <INDENT> self.env = env <NEW_LINE> self._serialized_jobs = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def job(self): <NEW_LINE> <INDENT> assert self._serialized_jobs, "Cannot create queue job for empty list" <NEW_LINE> return functools.partial(queue_jobs, self.env, self._serialized_jobs) <NEW_LINE> <DEDENT> def add(self, job_func, *args, **kwargs): <NEW_LINE> <INDENT> job_func = functools.partial(job_func, *args, **kwargs) <NEW_LINE> self._serialized_jobs.append( self.env.scheduler.serializer.dumps(job_func))
Jobs queue collects jobs list to run one-by-one
62598fa44f6381625f199414
class TestEnvironment(unittest.TestCase): <NEW_LINE> <INDENT> def rounded_compare(self, val1, val2): <NEW_LINE> <INDENT> print('Comparing {} and {} using round()'.format(val1, val2)) <NEW_LINE> return builtins.round(val1) == builtins.round(val2) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.resource_dir = resource_dir() <NEW_LINE> cls.spark = spark_test_session() <NEW_LINE> cls.img_path = os.path.join(cls.resource_dir, 'L8-B8-Robinson-IL.tiff') <NEW_LINE> cls.img_uri = 'file://' + cls.img_path <NEW_LINE> cls.img_rgb_path = os.path.join(cls.resource_dir, 'L8-B4_3_2-Elkton-VA.tiff') <NEW_LINE> cls.img_rgb_uri = 'file://' + cls.img_rgb_path <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def l8band_uri(cls, band_index): <NEW_LINE> <INDENT> return 'file://' + os.path.join(cls.resource_dir, 'L8-B{}-Elkton-VA.tiff'.format(band_index)) <NEW_LINE> <DEDENT> def create_layer(self): <NEW_LINE> <INDENT> from pyrasterframes.rasterfunctions import rf_convert_cell_type <NEW_LINE> rf = self.spark.read.geotiff(self.img_uri) .with_bounds() .with_center() <NEW_LINE> self.rf = rf.withColumn('tile2', rf_convert_cell_type('tile', 'float32')) .drop('tile') .withColumnRenamed('tile2', 'tile').as_layer() <NEW_LINE> self.prdf = self.spark.read.raster(self.img_uri) <NEW_LINE> self.df = self.prdf.withColumn('tile', rf_convert_cell_type('proj_raster', 'float32')) .drop('proj_raster') <NEW_LINE> <DEDENT> def assert_png(self, bytes): <NEW_LINE> <INDENT> self.assertEqual(bytes[0:8], bytearray([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]), "png header")
Base class for tests.
62598fa47047854f4633f286
class ApiError(Error): <NEW_LINE> <INDENT> def __init__(self, response): <NEW_LINE> <INDENT> self.response = response
Exception raised for errors in a GET request. Attributes: response -- response object
62598fa4d7e4931a7ef3bf49
class Cell(LdapObject):
    """Cell object: LDAP-backed cell entity with master-host sub-records."""

    # Per-master attributes; LDAP names carry a 'master-' prefix which is
    # stripped in the dict representation.
    _master_host_schema = [
        ('master-idx', 'idx', int),
        ('master-hostname', 'hostname', str),
        ('master-zk-client-port', 'zk-client-port', int),
        ('master-zk-jmx-port', 'zk-jmx-port', int),
        ('master-zk-followers-port', 'zk-followers-port', int),
        ('master-zk-election-port', 'zk-election-port', int),
    ]

    # (ldap attribute, dict key, type) triples for the cell itself.
    _schema = [
        ('cell', '_id', str),
        ('archive-server', 'archive-server', str),
        ('archive-username', 'archive-username', str),
        ('location', 'location', str),
        ('ssq-namespace', 'ssq-namespace', str),
        ('username', 'username', str),
        ('version', 'version', str),
        ('root', 'root', str),
        ('data', 'data', dict),
        ('status', 'status', str),
        ('trait', 'traits', [str]),
        ('zk-auth-scheme', 'zk-auth-scheme', str),
    ]

    _oc = 'tmCell'
    _ou = 'cells'
    _entity = 'cell'

    @staticmethod
    def schema():
        """Full schema: cell attributes plus name-only master attributes."""
        def _name_only(schema_rec):
            return (schema_rec[0], None, None)

        return (
            Cell._schema
            + [_name_only(e) for e in Cell._master_host_schema]
        )

    def get(self, ident, dirty=False):
        """Fetch the cell and attach its child partitions."""
        obj = super(Cell, self).get(ident, dirty=dirty)
        obj['partitions'] = self.partitions(ident, dirty=dirty)
        return obj

    def partitions(self, ident, dirty=False):
        """Return partition children of the given cell."""
        return self.children(ident, Partition, dirty=dirty)

    def from_entry(self, entry, dn=None):
        """Convert an LDAP entry to a dict, grouping master sub-records."""
        obj = super(Cell, self).from_entry(entry, dn)
        grouped = _group_entry_by_opt(entry)
        masters = _grouped_to_list_of_dict(
            grouped, 'tm-master-', Cell._master_host_schema)
        obj.update({'masters': masters})
        return obj

    def to_entry(self, obj):
        """Convert a dict back to an LDAP entry, expanding master records."""
        entry = super(Cell, self).to_entry(obj)
        for master in obj.get('masters', []):
            master_entry = _dict_2_entry(
                master, Cell._master_host_schema, 'tm-master', master['idx'])
            entry.update(master_entry)
        return entry

    def delete(self, ident):
        """Delete the cell, removing its partition sub-entries first."""
        dn = self.dn(ident)
        cell_partitions = self.admin.paged_search(
            search_base=dn,
            search_filter='(objectclass=tmPartition)',
            attributes=[]
        )
        for dn, _entry in cell_partitions:
            self.admin.delete(dn)
        return super(Cell, self).delete(ident)
Cell object.
62598fa4009cb60464d013d2
class TimeThrottle(object):
    """Time based throttling class.

    ``is_throttled`` returns False (and records the current time) at most once
    per ``min_time_delta``; in between it returns True.
    """

    def __init__(self, min_time_delta):
        # Normalized once to a number of seconds (accepts e.g. a timedelta).
        self.min_time_delta = total_seconds(min_time_delta)
        self.previous_time = None

    def is_throttled(self):
        """Return False when enough time has elapsed, else True.

        BUG FIX: the original returned ``self._update()`` on the elapsed
        branch — ``_update`` returns None, which only worked because None is
        falsy; the trailing ``return False`` was unreachable. Also removed the
        redundant second ``total_seconds`` call: ``min_time_delta`` is already
        normalized in ``__init__``.
        """
        if not self.previous_time:
            self._update()
            return False
        if time.time() - self.previous_time >= self.min_time_delta:
            self._update()
            return False
        return True

    def throttle_iterable(self, objects, callback=lambda o: None):
        """Yield throttled items from ``objects``; always yield the last one.

        ``callback`` may return extra positional args for ``is_throttled``.
        """
        current_object = Missing
        for obj in objects:
            current_object = obj
            args = callback(obj) or ()
            if not self.is_throttled(*args):
                current_object = Missing
                yield obj
        # Ensure the final item is emitted even if it was throttled.
        if current_object is not Missing:
            yield current_object

    def _update(self):
        # Record "now" as the reference point for the next throttle window.
        self.previous_time = time.time()
Time based throttling class. >>> import datetime >>> from pytoolbox.unittest import asserts >>> def slow_range(*args): ... for i in xrange(*args): ... time.sleep(0.5) ... yield i >>> t1, t2 = (TimeThrottle(t) for t in (datetime.timedelta(minutes=1), 0.2)) >>> asserts.list_equal(list(t1.throttle_iterable((i, i) for i in xrange(10))), [(0, 0), (9, 9)]) >>> asserts.list_equal(list(t2.throttle_iterable(slow_range(3))), [0, 1, 2])
62598fa43539df3088ecc162
class SelectorBIC(ModelSelector):
    """Select the model with the lowest Bayesian Information Criterion (BIC).

    BIC = -2 * logL + p * logN
    http://www2.imm.dtu.dk/courses/02433/doc/ch6_slides.pdf
    """

    def calculate_bic(self, state_num, l, n_features):
        """Return the BIC score for a model with ``state_num`` states.

        ``l`` is the model's log-likelihood on the training data.
        """
        # Free-parameter count: n^2 transition terms plus 2*n*d emission
        # parameters, minus 1 (as used by the original formula).
        p = state_num ** 2 + (2 * state_num * n_features) - 1
        return (-2) * l + p * math.log(len(self.X))

    def select(self):
        """Return the fitted model with the lowest BIC, or None."""
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        best_score = float("inf")
        best_model = None
        for n in range(self.min_n_components, self.max_n_components + 1):
            try:
                model = self.base_model(n)
                bic = self.calculate_bic(
                    n, model.score(self.X, self.lengths), model.n_features)
            except Exception:
                # FIX: was a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit. Fitting/scoring can
                # legitimately fail for some n; skip those candidates.
                continue
            if bic < best_score:
                best_score = bic
                best_model = model
        return best_model
select the model with the lowest Bayesian Information Criterion(BIC) score http://www2.imm.dtu.dk/courses/02433/doc/ch6_slides.pdf Bayesian information criteria: BIC = -2 * logL + p * logN
62598fa460cbc95b063641f9
class BravoIRCClient(IRCClient):
    """Simple bot bridging Bravo worlds to IRC.

    This bot is heavily inspired by Cory Kolbeck's mc-bot, available at
    https://github.com/ckolbeck/mc-bot.
    """

    def __init__(self, worlds, config):
        self.factories = worlds
        # Register as a chat consumer of every world so `write` gets called.
        for factory in self.factories.itervalues():
            factory.chat_consumers.add(self)
        self.config = "irc %s" % config
        self.name = self.config
        self.host = configuration.get(self.config, "server")
        self.nickname = configuration.get(self.config, "nick")
        self.channels = set()
        log.msg("Spawned IRC client '%s'!" % config)

    def signedOn(self):
        """Join each configured channel, with its key when one is set."""
        for channel in configuration.get(self.config, "channels").split(","):
            if configuration.has_option(self.config, "%s_key" % channel):
                key = configuration.get(self.config, "%s_key" % channel)
            else:
                key = None
            self.join(channel, key)

    def joined(self, channel):
        log.msg("Joined %s on %s" % (channel, self.host))
        self.channels.add(channel)

    def left(self, channel):
        log.msg("Parted %s on %s" % (channel, self.host))
        self.channels.discard(channel)

    def privmsg(self, user, channel, message):
        """Handle the &help and &list bot commands; ignore everything else."""
        response = []
        if message.startswith("&"):
            if message.startswith("&help"):
                response.append("I only know &help and &list, sorry.")
            elif message.startswith("&list"):
                for factory in self.factories.itervalues():
                    response.append("World %s:" % factory.name)
                    players = ", ".join(factory.protocols.iterkeys())
                    response.append("Connected players: %s" % players)
        if response:
            for line in response:
                self.msg(channel, line.encode("utf8"))

    def write(self, data):
        """Relay a (factory, message) chat event to every joined channel."""
        factory, message = data
        for channel in self.channels:
            self.msg(channel, message.encode("utf8"))
Simple bot. This bot is heavily inspired by Cory Kolbeck's mc-bot, available at https://github.com/ckolbeck/mc-bot.
62598fa410dbd63aa1c70a5e
class Spectrogram(torch.nn.Module):
    """Create a spectrogram from an audio signal.

    Args:
        n_fft (int): Size of FFT; creates ``n_fft // 2 + 1`` bins.
        win_length (int, optional): Window size. (Default: ``n_fft``)
        hop_length (int, optional): Length of hop between STFT windows.
            (Default: ``win_length // 2``)
        pad (int): Two sided padding of signal. (Default: ``0``)
        window_fn (callable): Creates the window tensor applied to each frame.
            (Default: ``torch.hann_window``)
        power (int): Exponent for the magnitude spectrogram, must be > 0.
            (Default: ``2``)
        normalized (bool): Whether to normalize by magnitude after stft.
            (Default: ``False``)
        wkwargs (dict, optional): Extra arguments for ``window_fn``.
    """

    __constants__ = ['n_fft', 'win_length', 'hop_length', 'pad',
                     'power', 'normalized']

    def __init__(self, n_fft=400, win_length=None, hop_length=None, pad=0,
                 window_fn=torch.hann_window, power=2, normalized=False,
                 wkwargs=None):
        super(Spectrogram, self).__init__()
        self.n_fft = n_fft
        # Defaults cascade: win_length <- n_fft, hop_length <- win_length // 2.
        self.win_length = n_fft if win_length is None else win_length
        self.hop_length = (self.win_length // 2 if hop_length is None
                           else hop_length)
        if wkwargs is None:
            self.window = window_fn(self.win_length)
        else:
            self.window = window_fn(self.win_length, **wkwargs)
        self.pad = pad
        self.power = power
        self.normalized = normalized

    def forward(self, waveform):
        """Return the spectrogram of ``waveform``."""
        return F.spectrogram(waveform, self.pad, self.window, self.n_fft,
                             self.hop_length, self.win_length, self.power,
                             self.normalized)
Create a spectrogram from a audio signal Args: n_fft (int, optional): Size of FFT, creates ``n_fft // 2 + 1`` bins win_length (int): Window size. (Default: ``n_fft``) hop_length (int, optional): Length of hop between STFT windows. ( Default: ``win_length // 2``) pad (int): Two sided padding of signal. (Default: ``0``) window_fn (Callable[[...], torch.Tensor]): A function to create a window tensor that is applied/multiplied to each frame/window. (Default: ``torch.hann_window``) power (int): Exponent for the magnitude spectrogram, (must be > 0) e.g., 1 for energy, 2 for power, etc. (Default: ``2``) normalized (bool): Whether to normalize by magnitude after stft. (Default: ``False``) wkwargs (Dict[..., ...]): Arguments for window function. (Default: ``None``)
62598fa4e5267d203ee6b7bb
class JavaGradleWorkflow(BaseWorkflow):
    """A Lambda builder workflow that knows how to build Java projects using Gradle."""

    NAME = "JavaGradleWorkflow"

    CAPABILITY = Capability(language="java",
                            dependency_manager="gradle",
                            application_framework=None)

    INIT_FILE = "lambda-build-init.gradle"

    def __init__(self, source_dir, artifacts_dir, scratch_dir, manifest_path, **kwargs):
        super(JavaGradleWorkflow, self).__init__(
            source_dir, artifacts_dir, scratch_dir, manifest_path, **kwargs)

        self.os_utils = OSUtils()
        self.build_dir = None

        subprocess_gradle = SubprocessGradle(
            gradle_binary=self.binaries["gradle"], os_utils=self.os_utils)

        # Layer builds use a dedicated copy action when the experimental
        # maven-scope-and-layers flag is active.
        copy_artifacts_action = JavaGradleCopyArtifactsAction(
            source_dir, artifacts_dir, self.build_output_dir, self.os_utils)
        if self.is_building_layer and is_experimental_maven_scope_and_layers_active(
                self.experimental_flags):
            copy_artifacts_action = JavaGradleCopyLayerArtifactsAction(
                source_dir, artifacts_dir, self.build_output_dir, self.os_utils)

        self.actions = [
            JavaGradleBuildAction(source_dir, manifest_path, subprocess_gradle,
                                  scratch_dir, self.os_utils),
            copy_artifacts_action,
        ]

        if self.dependencies_dir:
            # Clean the dependencies dir, then either copy (combine) or move
            # the resolved dependencies into it.
            self.actions.append(CleanUpAction(self.dependencies_dir))
            if self.combine_dependencies:
                self.actions.append(JavaCopyDependenciesAction(
                    artifacts_dir, self.dependencies_dir, self.os_utils))
            else:
                self.actions.append(JavaMoveDependenciesAction(
                    artifacts_dir, self.dependencies_dir, self.os_utils))

    def get_resolvers(self):
        return [GradleResolver(executable_search_paths=self.executable_search_paths)]

    def get_validators(self):
        return [GradleValidator(self.runtime, self.architecture, self.os_utils)]

    @property
    def build_output_dir(self):
        """Lazily computed per-project build directory under scratch_dir."""
        if self.build_dir is None:
            self.build_dir = os.path.join(self.scratch_dir,
                                          self._compute_scratch_subdir())
        return self.build_dir

    def _compute_scratch_subdir(self):
        # Stable subdirectory name derived from the absolute source path.
        sha1 = hashlib.sha1()
        sha1.update(os.path.abspath(self.source_dir).encode("utf8"))
        return sha1.hexdigest()
A Lambda builder workflow that knows how to build Java projects using Gradle.
62598fa463d6d428bbee2660
class BundlesField(datatype('BundlesField',
                            ['address', 'bundles', 'filespecs_list',
                             'path_globs_list', 'excluded_path_globs_list']),
                   Field):
    """Represents the `bundles` argument, each of which has a PathGlobs to
    represent its `fileset`.

    Equality and hashing are by ``address`` only, ignoring the other fields.
    """

    def __eq__(self, other):
        return type(self) == type(other) and self.address == other.address

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.address)
Represents the `bundles` argument, each of which has a PathGlobs to represent its `fileset`.
62598fa4d486a94d0ba2be7b
class pattern_matched_exceptions(CodeTransformer):
    """Allows usage of arbitrary expressions and matching functions in
    ``except`` blocks.

    When an exception is raised in a function decorated with
    ``pattern_matched_exceptions``, the ``matcher`` is called with the except
    block's expression and the three values returned by ``sys.exc_info()``;
    the block is entered only if the matcher returns True.

    Parameters
    ----------
    matcher : function, optional
        A function accepting an expression and the values of sys.exc_info,
        returning True on a match. Defaults to
        ``transformers.exc_patterns.match``.
    """

    def __init__(self, matcher=match):
        self._matcher = matcher

    def visit_COMPARE_OP(self, instr):
        # Rewrite only the EXCEPTION_MATCH comparison; pass everything else
        # through untouched.
        if instr.arg != Comparisons.EXCEPTION_MATCH:
            yield instr
            return
        # Replace `exc EXCEPTION_MATCH pattern` with a call to
        # `matcher(pattern, *exc_info())`. NOTE(review): stack choreography
        # below mirrors the original exactly — presumably it drops the raised
        # exception object and substitutes the matcher call; confirm against
        # the codetransformer docs before altering the order.
        yield ROT_TWO().steal(instr)
        yield POP_TOP()
        yield self.LOAD_CONST(self._matcher)
        yield ROT_TWO()
        yield self.LOAD_CONST(exc_info)
        yield CALL_FUNCTION(0)
        yield CALL_FUNCTION_VAR(1)
Allows usage of arbitrary expressions and matching functions in `except` blocks. When an exception is raised in an except block in a function decorated with `pattern_matched_exceptions`, a matching function will be called with the block's expression and the three values returned by sys.exc_info(). If the matching function returns `True`, we enter the corresponding except-block, otherwise we continue to the next block, or re-raise if there are no more blocks to check Parameters ---------- matcher : function, optional, default is `transformers.exc_patterns.match`. A function accepting an expression and the values of sys.exc_info, returning True if the exception info "matches" the expression. The default behavior is to emulate standard python when the match expression is a *subtype* of Exception, and to compare exc.type and exc.args when the match expression is an *instance* of Exception. Example ------- >>> @pattern_matched_exceptions() ... def foo(): ... try: ... raise ValueError('bar') ... except ValueError('buzz'): ... return 'buzz' ... except ValueError('bar'): ... return 'bar' >>> foo() 'bar'
62598fa42c8b7c6e89bd3673
class LexicalError(Error):
    """Lexical error: an error detected during tokenization.

    Carries the line number where the error occurred in ``self.line``.
    """

    def __init__(self, error_info, error_line):
        super().__init__(error_info)
        self.line = error_line
词法错误
62598fa40c0af96317c56230
class ManageExistingTask(flow_utils.CinderTask):
    """Brings an existing volume under Cinder management."""

    default_provides = set(['volume'])

    def __init__(self, db, driver):
        super(ManageExistingTask, self).__init__(addons=[ACTION])
        self.db = db
        self.driver = driver

    def execute(self, context, volume, manage_existing_ref, size):
        """Ask the driver to adopt the volume, then persist the model update.

        Returns a dict providing ``volume`` for downstream flow tasks; raises
        the underlying ``CinderException`` after logging if the save fails.
        """
        model_update = self.driver.manage_existing(volume, manage_existing_ref)
        if not model_update:
            model_update = {}
        model_update.update({'size': size})
        try:
            volume.update(model_update)
            volume.save()
        except exception.CinderException:
            LOG.exception("Failed updating model of volume %(volume_id)s"
                          " with creation provided model %(model)s",
                          {'volume_id': volume.id, 'model': model_update})
            raise
        return {'volume': volume}
Brings an existing volume under Cinder management.
62598fa4be383301e02536a6
class recipe:
    """A class representing a recipe.

    Holds a name, id, metadata (prep/cook time, servings, type, tags),
    a list of ingredient objects and an instruction string.
    """

    def __init__(self, name=None, id=0, meta_data=None,
                 ingredient_list=None, instruction=''):
        # FIX: the original used mutable default arguments ({...} and []),
        # which were shared across all instances created without explicit
        # values. Fresh objects are now created per instance.
        self._name = name
        self._id = id
        self._meta_data = meta_data if meta_data is not None else {
            'preptime': 0, 'cooktime': 0, 'serve': 0, 'type': [], 'tags': []}
        self._ingredient_list = (
            ingredient_list if ingredient_list is not None else [])
        self._instruction = instruction

    def add_ingredient(self, ingredient_obj):
        """Append an ingredient object to the recipe."""
        self._ingredient_list.append(ingredient_obj)

    def remove_ingredient(self, ingredient_name):
        """Remove an ingredient by name.

        TODO: not implemented in the original either; the ingredient object's
        shape (which attribute holds the name) is not visible here.
        """
        pass

    def add_tag(self, tag):
        """Append a tag to the recipe's metadata.

        FIX: the original appended to ``self._tags``, an attribute that was
        never defined, raising AttributeError on every call.
        """
        self._meta_data['tags'].append(tag)

    def update_meta_data(self, preptime=0, cooktime=0, serve=0,
                         type=[], tags=[]):
        # NOTE: defaults here are only read, never mutated, so the shared
        # default objects are harmless.
        self._meta_data['preptime'] = preptime
        self._meta_data['cooktime'] = cooktime
        self._meta_data['serve'] = serve
        self._meta_data['type'] = type
        self._meta_data['tags'] = tags

    def set_name(self, name):
        self._name = name

    def dictify(self):
        """Return a deep-copied dict representation of the recipe."""
        temp_recipe = copy.deepcopy(self)
        dict_ingredient_list = [ing.__dict__
                                for ing in temp_recipe._ingredient_list]
        temp_dict = temp_recipe.__dict__
        temp_dict['_ingredient_list'] = dict_ingredient_list
        return temp_dict

    def print_recipe(self):
        """Print all recipe fields (debug helper)."""
        print(self._name)
        print(self._id)
        print(self._meta_data)
        print(self._ingredient_list)
        print(self._instruction)

    def clear_recipe(self):
        """Reset the recipe to its pristine state."""
        self._name = None
        self._id = 0
        # FIX: the original reset dropped the 'type' and 'tags' keys,
        # leaving the metadata inconsistent with __init__'s shape.
        self._meta_data = {'preptime': 0, 'cooktime': 0, 'serve': 0,
                           'type': [], 'tags': []}
        self._ingredient_list = []
        self._instruction = ''
A class representing a recipe.
62598fa43d592f4c4edbad7c
class GroupWithName(Matcher): <NEW_LINE> <INDENT> def __init__( self, name ): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __eq__(self, group): <NEW_LINE> <INDENT> if not isinstance(group, Group): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return group.name == self.name
Matches any Group with the given name.
62598fa438b623060ffa8f43
class findQuestionByID_args:
    """Thrift argument struct for findQuestionByID.

    Attributes:
        qid -- question identifier (string)
    """

    thrift_spec = (
        None,  # field id 0 is unused by Thrift
        (1, TType.STRING, 'qid', None, None, ),  # 1: qid
    )

    def __init__(self, qid=None,):
        self.qid = qid

    def read(self, iprot):
        # Fast path: C-accelerated decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.qid = iprot.readString();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('findQuestionByID_args')
        if self.qid is not None:
            oprot.writeFieldBegin('qid', TType.STRING, 1)
            oprot.writeString(self.qid)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.qid)
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes: - qid
62598fa4d268445f26639ada
class VolumeGetResponse(object):
    """Generated API response model for a volume listing.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """

    swagger_types = {
        'continuation_token': 'str',
        'total_item_count': 'int',
        'items': 'list[Volume]'
    }

    attribute_map = {
        'continuation_token': 'continuation_token',
        'total_item_count': 'total_item_count',
        'items': 'items'
    }

    required_args = {
    }

    def __init__(
        self,
        continuation_token=None,
        total_item_count=None,
        items=None,
    ):
        # Only set the attributes that were explicitly provided, so unset
        # ones raise AttributeError via __getattribute__.
        if continuation_token is not None:
            self.continuation_token = continuation_token
        if total_item_count is not None:
            self.total_item_count = total_item_count
        if items is not None:
            self.items = items

    def __setattr__(self, key, value):
        # Reject attributes that are not part of the model definition.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `VolumeGetResponse`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        value = object.__getattribute__(self, item)
        # Unresolved lazy Property placeholders read as "attribute not set".
        if isinstance(value, Property):
            raise AttributeError
        return value

    def to_dict(self):
        """Return a plain-dict representation, recursing into nested models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(VolumeGetResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return a pretty-printed string of the model's dict form."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, VolumeGetResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition.
62598fa41f5feb6acb162ad0
class BaseModel(object):
    """Base Model for all model classes (SQLAlchemy helpers)."""

    @classmethod
    def _get_keys(cls):
        # Column names from the SQLAlchemy mapper.
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """Return a dict of column values, merged with optional __json__."""
        d = {k: getattr(self, k) for k in self._get_keys()}
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """Return (key, value) pairs for all mapped columns."""
        return [(k, getattr(self, k),) for k in self._get_keys()]

    def populate_obj(self, populate_dict):
        """Set mapped-column attributes from matching keys in the dict."""
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Return the instance with the given id, or None for falsy ids."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Return the instance with the given id or raise HTTPNotFound."""
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound
        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        # Legacy alias kept for backwards compatibility.
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
Base Model for all classess
62598fa4f548e778e596b453
class MonitoringSupport(cap.CapabilityNegotiationSupport):
    """Support for monitoring the online/offline status of certain targets."""

    def _reset_attributes(self):
        super()._reset_attributes()
        # Nicknames currently on the MONITOR list.
        self._monitoring = set()

    def _destroy_user(self, nickname, channel=None, monitor_override=False):
        """Remove a user from channel state; drop the user record entirely
        unless they are monitored (or ``monitor_override`` forces removal)."""
        if channel:
            channels = [self.channels[channel]]
        else:
            channels = self.channels.values()

        for ch in channels:
            ch['users'].discard(nickname)
            for status in self._nickname_prefixes.values():
                if status in ch['modes'] and nickname in ch['modes'][status]:
                    ch['modes'][status].remove(nickname)

        # Keep the user record alive while monitored or still in a channel.
        if (monitor_override or not self.is_monitoring(nickname)) and (
                not channel or not any(nickname in ch['users']
                                       for ch in self.channels.values())):
            del self.users[nickname]

    def monitor(self, target):
        """Start monitoring ``target``; returns True if a request was sent."""
        if 'monitor-notify' in self._capabilities and not self.is_monitoring(target):
            yield from self.rawmsg('MONITOR', '+', target)
            self._monitoring.add(target)
            return True
        else:
            return False

    def unmonitor(self, target):
        """Stop monitoring ``target``; returns True if a request was sent."""
        if 'monitor-notify' in self._capabilities and self.is_monitoring(target):
            yield from self.rawmsg('MONITOR', '-', target)
            self._monitoring.remove(target)
            return True
        else:
            return False

    def is_monitoring(self, target):
        """Return whether ``target`` is on the monitor list."""
        return target in self._monitoring

    async def on_user_online(self, nickname):
        """Callback: a monitored user came online."""
        pass

    async def on_user_offline(self, nickname):
        """Callback: a monitored user went offline."""
        pass

    async def on_capability_monitor_notify_available(self, value):
        return True

    async def on_raw_730(self, message):
        """RPL_MONONLINE: monitored users are online."""
        for nick in message.params[1].split(','):
            self._create_user(nick)
            # FIX: was ``self.on_user_online(nickname)`` — `nickname` is
            # undefined here (the loop variable is `nick`), a NameError.
            await self.on_user_online(nick)

    async def on_raw_731(self, message):
        """RPL_MONOFFLINE: monitored users are offline."""
        for nick in message.params[1].split(','):
            self._destroy_user(nick, monitor_override=True)
            # FIX: same undefined-name bug as on_raw_730.
            await self.on_user_offline(nick)

    async def on_raw_732(self, message):
        """RPL_MONLIST: server-reported monitor list entries."""
        self._monitoring.update(message.params[1].split(','))

    on_raw_733 = cap.CapabilityNegotiationSupport._ignored  # end of MONITOR list

    async def on_raw_734(self, message):
        """ERR_MONLISTFULL: entries the server refused to monitor."""
        self._monitoring.difference_update(message.params[1].split(','))
Support for monitoring the online/offline status of certain targets.
62598fa4d486a94d0ba2be7c
class shmarray(numpy.ndarray):
    """Subclass of ndarray with overridden pickling functions which record
    dtype, shape etc. but defer pickling of the underlying data to the
    original shared-memory ctypes array.

    Doesn't handle allocation of the shared memory itself — that is done in
    ``create``; use ``zeros``/``ones``/``create_copy`` to make new instances.
    """

    def __new__(cls, ctypesArray, shape, dtype=float, strides=None, offset=0,
                order=None):
        tp = type(ctypesArray)
        # Ensure the ctypes type exposes __array_interface__ so numpy can
        # wrap its buffer without copying.
        try:
            tp.__array_interface__
        except AttributeError:
            ctypeslib.prep_array(tp)
        obj = numpy.ndarray.__new__(cls, shape, dtype, ctypesArray, offset,
                                    strides, order)
        obj.ctypesArray = ctypesArray
        return obj

    def __array_finalize__(self, obj):
        if obj is None:
            return
        # Views/slices inherit the backing ctypes array reference.
        self.ctypesArray = getattr(obj, 'ctypesArray', None)

    def __reduce_ex__(self, protocol):
        # Reconstruct from the ctypes array (pickled by its own machinery)
        # plus shape/dtype/strides metadata.
        return shmarray, (self.ctypesArray, self.shape, self.dtype,
                          self.strides)

    def __reduce__(self):
        # FIX: the original called the bare name ``__reduce_ex__(self, 0)``,
        # which raises NameError — it must be invoked on self.
        return self.__reduce_ex__(0)
subclass of ndarray with overridden pickling functions which record dtype, shape etc... but defer pickling of the underlying data to the original data source. Doesn't actually handle allocation of the shared memory - this is done in create, and zeros, ones, (or create_copy) are the functions which should be used for creating a new shared memory array. TODO - add argument checking to ensure that the user is passing reasonable values.
62598fa456ac1b37e630209b
class RegressDims(keras.layers.Layer):
    """Keras layer for applying regression values to dimensions.

    ``mean`` and ``std`` de-normalize the regression outputs; defaults are
    the dataset statistics baked into the original implementation.
    """

    def __init__(self, mean=None, std=None, *args, **kwargs):
        if mean is None:
            mean = np.array([1.6570, 1.7999, 4.2907])
        if std is None:
            std = np.array([0.2681, 0.2243, 0.6281])

        # Accept list/tuple inputs; anything else must already be an ndarray.
        if isinstance(mean, (list, tuple)):
            mean = np.array(mean)
        elif not isinstance(mean, np.ndarray):
            raise ValueError('Expected mean to be a np.ndarray, list or tuple. Received: {}'.format(type(mean)))

        if isinstance(std, (list, tuple)):
            std = np.array(std)
        elif not isinstance(std, np.ndarray):
            raise ValueError('Expected std to be a np.ndarray, list or tuple. Received: {}'.format(type(std)))

        self.mean = mean
        self.std = std
        super(RegressDims, self).__init__(*args, **kwargs)

    def call(self, inputs, **kwargs):
        return backend.dim_transform_inv(inputs, mean=self.mean, std=self.std)

    def compute_output_shape(self, input_shape):
        # Element-wise transform: shape is unchanged.
        return input_shape

    def get_config(self):
        config = super(RegressDims, self).get_config()
        config.update({
            'mean': self.mean.tolist(),
            'std': self.std.tolist(),
        })
        return config
Keras layer for applying regression values to dimensions.
62598fa4b7558d58954634de
class Token:
    """An individual token."""
An individual token.
62598fa48da39b475be03090
class AdminUserTest(base_test.BaseTest): <NEW_LINE> <INDENT> FAKE_ADMIN_EMAIL = 'fake_admin@email.com' <NEW_LINE> FAKE_ADMIN_EMAIL_2 = 'fake_admin_2@email.com' <NEW_LINE> FAKE_ADMIN_PASSWORD = 'fake admin password' <NEW_LINE> def testAdminUserToDict(self): <NEW_LINE> <INDENT> admin_user = models.AdminUser() <NEW_LINE> admin_user.email = self.FAKE_ADMIN_EMAIL <NEW_LINE> admin_user.set_password(self.FAKE_ADMIN_PASSWORD) <NEW_LINE> admin_user.save() <NEW_LINE> admin_dict = admin_user.to_dict() <NEW_LINE> self.assertEqual(admin_dict['id'], admin_user.id) <NEW_LINE> self.assertEqual(admin_dict['email'], admin_user.email) <NEW_LINE> self.assertNotIn('password', admin_dict) <NEW_LINE> <DEDENT> def testGetAdminByUsername(self): <NEW_LINE> <INDENT> self.assertIsNone(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL)) <NEW_LINE> admin_user = models.AdminUser() <NEW_LINE> admin_user.email = self.FAKE_ADMIN_EMAIL <NEW_LINE> admin_user.set_password(self.FAKE_ADMIN_PASSWORD) <NEW_LINE> admin_user.save() <NEW_LINE> self.assertIsNotNone(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL)) <NEW_LINE> self.assertEquals(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL), admin_user) <NEW_LINE> admin_user_2 = models.AdminUser() <NEW_LINE> admin_user_2.email = self.FAKE_ADMIN_EMAIL_2 <NEW_LINE> admin_user_2.set_password(self.FAKE_ADMIN_PASSWORD) <NEW_LINE> admin_user_2.save() <NEW_LINE> self.assertIsNotNone(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL_2)) <NEW_LINE> self.assertEquals(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL_2), admin_user_2) <NEW_LINE> admin_user.delete() <NEW_LINE> self.assertIsNone(models.AdminUser.get_by_email( self.FAKE_ADMIN_EMAIL)) <NEW_LINE> <DEDENT> def testAdminPassword(self): <NEW_LINE> <INDENT> other_password = 'random new password that doesnt match the old one' <NEW_LINE> self.assertNotEqual(other_password, self.FAKE_ADMIN_PASSWORD) <NEW_LINE> admin_user = models.AdminUser() <NEW_LINE> admin_user.email = self.FAKE_ADMIN_EMAIL 
<NEW_LINE> admin_user.set_password(self.FAKE_ADMIN_PASSWORD) <NEW_LINE> admin_user.save() <NEW_LINE> self.assertTrue(admin_user.does_password_match(self.FAKE_ADMIN_PASSWORD)) <NEW_LINE> self.assertFalse(admin_user.does_password_match(other_password)) <NEW_LINE> admin_user.set_password(other_password) <NEW_LINE> admin_user.save() <NEW_LINE> self.assertTrue(admin_user.does_password_match(other_password)) <NEW_LINE> self.assertFalse(admin_user.does_password_match(self.FAKE_ADMIN_PASSWORD))
Test for admin user model class functionality.
62598fa46aa9bd52df0d4d79
class User(Base, UserMixin): <NEW_LINE> <INDENT> __tablename__ = 'user' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> username = Column(String(250), nullable=True) <NEW_LINE> password = Column(String(250), nullable=True) <NEW_LINE> email = Column(String(250), nullable=True) <NEW_LINE> isoauth = Column(Boolean, nullable=True) <NEW_LINE> is_active = Column(Boolean, nullable=True, default=True) <NEW_LINE> __table_args__ = (UniqueConstraint('username', 'email', 'isoauth', name='_username_email_isoauth'),) <NEW_LINE> @property <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'name': self.username, 'email': self.email, 'isoauth': self.isoauth, 'is_active': self.is_active, } <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_authenticated(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_active(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_anonymous(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return unicode(self.id)
Model of a user. The UserMixin allows flask_login to use this class for a global authentication check mechanism.
62598fa456b00c62f0fb2761
class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)): <NEW_LINE> <INDENT> def to_python(self, value): <NEW_LINE> <INDENT> if value == "": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if isinstance(value, six.string_types): <NEW_LINE> <INDENT> return json.loads(value) <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get_prep_value(self, value): <NEW_LINE> <INDENT> if value == "": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> value = json.dumps(value, cls=DjangoJSONEncoder) <NEW_LINE> <DEDENT> return super(JSONField, self).get_prep_value(value)
JSONField is a generic textfield that neatly serializes/unserializes JSON objects seamlessly. Django snippet #1478
62598fa4ac7a0e7691f723ba
class CustomSlide(BaseModel): <NEW_LINE> <INDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return get_locale_key(self.title) < get_locale_key(other.title) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return get_locale_key(self.title) == get_locale_key(other.title) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return self.id
CustomSlide model
62598fa4eab8aa0e5d30bc38
class CouldNotCompile(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, message, stderr): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.stderr = stderr <NEW_LINE> RuntimeError.__init__(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> output = [ self.message, "---BEGIN STDERR---", self.stderr, "---END STDERR---" ] <NEW_LINE> return "\n".join(output)
Exception raised when a student's code could not be compiled into a single library file. :ivar message: A short message describing the exception. :ivar stderr: The output that was received through standard error. This is output by ``distutils.core.setup``.
62598fa499cbb53fe6830d84
class NamespaceFormatter(Formatter): <NEW_LINE> <INDENT> def __init__(self, namespace): <NEW_LINE> <INDENT> Formatter.__init__(self) <NEW_LINE> self.initial_namespace = namespace <NEW_LINE> self.namespace = self.initial_namespace <NEW_LINE> <DEDENT> def format(self, format_string, *args, **kwargs): <NEW_LINE> <INDENT> def escape_envvar(matchobj): <NEW_LINE> <INDENT> value = next((x for x in matchobj.groups() if x is not None)) <NEW_LINE> return "${{%s}}" % value <NEW_LINE> <DEDENT> regex = kwargs.get("regex") or ActionInterpreter.ENV_VAR_REGEX <NEW_LINE> format_string_ = re.sub(regex, escape_envvar, format_string) <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> prev_namespace = self.namespace <NEW_LINE> self.namespace = dict(prev_namespace) <NEW_LINE> self.namespace.update(kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prev_namespace = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return Formatter.format(self, format_string_, *args, **kwargs) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if prev_namespace is not None: <NEW_LINE> <INDENT> self.namespace = prev_namespace <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def format_field(self, value, format_spec): <NEW_LINE> <INDENT> if isinstance(value, EscapedString): <NEW_LINE> <INDENT> value = str(value.formatted(str)) <NEW_LINE> <DEDENT> if isinstance(value, str): <NEW_LINE> <INDENT> value = self.format(value) <NEW_LINE> <DEDENT> return format(value, format_spec) <NEW_LINE> <DEDENT> def get_value(self, key, args, kwds): <NEW_LINE> <INDENT> if isinstance(key, str): <NEW_LINE> <INDENT> if key: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return kwds[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self.namespace[key] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("zero length field name in format") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return Formatter.get_value(self, key, args, kwds)
String formatter that, as well as expanding '{variable}' strings, also protects environment variable references such as ${THIS} so they do not get expanded as though {THIS} is a formatting target. Also, environment variable references such as $THIS are converted to ${THIS}, which gives consistency across shells, and avoids some problems with non-curly-braced variables in some situations.
62598fa48e7ae83300ee8f50
class ShowPolicyMapTargetClass(ShowPolicyMapTypeSuperParser, ShowPolicyMapTypeSchema): <NEW_LINE> <INDENT> cli_command = ['show policy-map target service-group {num}'] <NEW_LINE> def cli(self, num='', output=None): <NEW_LINE> <INDENT> if output is None: <NEW_LINE> <INDENT> if num : <NEW_LINE> <INDENT> cmd = self.cli_command[0].format(num=num) <NEW_LINE> <DEDENT> show_output = self.device.execute(cmd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> show_output = output <NEW_LINE> <DEDENT> return super().cli(output=show_output, num=num)
Parser for: * 'show policy-map target service-group {num}'
62598fa4cc0a2c111447aebe
class CarListPlugin(CMSPluginBase): <NEW_LINE> <INDENT> name = _("List of all cars") <NEW_LINE> render_template = "plugins/car_list.html" <NEW_LINE> def render(self, context, instance, placeholder): <NEW_LINE> <INDENT> context['plugin_id'] = instance.pk <NEW_LINE> car_details = {c.engage_id: c for c in VehicleDetails.objects.all()} <NEW_LINE> fleet_data = cse_api.get_fleet() <NEW_LINE> all_cars = [] <NEW_LINE> for loc in fleet_data["locations"].values(): <NEW_LINE> <INDENT> for car in loc['cars'].values(): <NEW_LINE> <INDENT> car["location"] = loc <NEW_LINE> all_cars.append(car) <NEW_LINE> if car["id"] in car_details: <NEW_LINE> <INDENT> car["description"] = car_details[car["id"]].description <NEW_LINE> car["image"] = car_details[car["id"]].image <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> all_cars = sorted(all_cars, key=lambda car: car["id"]) <NEW_LINE> all_car_ids = [car["id"] for car in all_cars] <NEW_LINE> live_availability = cse_api.get_availability(all_car_ids) <NEW_LINE> context['car_count'] = fleet_data["car_count"] <NEW_LINE> context['cars'] = all_cars <NEW_LINE> context['locations'] = fleet_data["locations"].values() <NEW_LINE> context['locations_count'] = len(fleet_data["locations"]) <NEW_LINE> if live_availability: <NEW_LINE> <INDENT> for car in all_cars: <NEW_LINE> <INDENT> if car["id"] in live_availability: <NEW_LINE> <INDENT> car["availability"] = live_availability[car["id"]] <NEW_LINE> <DEDENT> <DEDENT> context["updated_time"] = live_availability["updated_at"] <NEW_LINE> <DEDENT> return context
Plugin that displays a list of all the cars, with pictures and live availability.
62598fa4d53ae8145f91833c
class TestCustomerSort(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCustomerSort(self): <NEW_LINE> <INDENT> model = squareconnect.models.customer_sort.CustomerSort()
CustomerSort unit test stubs
62598fa456ac1b37e630209c
class ComponentRegistry(object): <NEW_LINE> <INDENT> log.info('Registry loaded') <NEW_LINE> __instance__ = None <NEW_LINE> def __new__(cls): <NEW_LINE> <INDENT> if not cls.__instance__: <NEW_LINE> <INDENT> cls.__instance__ = object.__new__(cls) <NEW_LINE> <DEDENT> return cls.__instance__ <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls): <NEW_LINE> <INDENT> log.info('Registry Initialising') <NEW_LINE> registry = cls() <NEW_LINE> registry.component_list = {} <NEW_LINE> registry.functions_list = {} <NEW_LINE> registry.running_under_test = 'nose' in sys.argv[0] <NEW_LINE> registry.initialising = True <NEW_LINE> return registry <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> if key in self.component_list: <NEW_LINE> <INDENT> return self.component_list[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not self.initialising: <NEW_LINE> <INDENT> trace_error_handler(log) <NEW_LINE> log.error('Service %s not found in list' % key) <NEW_LINE> raise KeyError('Service %s not found in list' % key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def register(self, key, reference): <NEW_LINE> <INDENT> if key in self.component_list: <NEW_LINE> <INDENT> trace_error_handler(log) <NEW_LINE> log.error('Duplicate component exception %s' % key) <NEW_LINE> raise KeyError('Duplicate component exception %s' % key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.component_list[key] = reference <NEW_LINE> <DEDENT> <DEDENT> def remove(self, key): <NEW_LINE> <INDENT> if key in self.component_list: <NEW_LINE> <INDENT> del self.component_list[key] <NEW_LINE> <DEDENT> <DEDENT> def register_function(self, event, function): <NEW_LINE> <INDENT> if event in self.functions_list: <NEW_LINE> <INDENT> self.functions_list[event].append(function) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.functions_list[event] = [function] <NEW_LINE> <DEDENT> <DEDENT> def remove_function(self, event, function): <NEW_LINE> <INDENT> if event in self.functions_list: <NEW_LINE> <INDENT> 
self.functions_list[event].remove(function) <NEW_LINE> <DEDENT> <DEDENT> def execute(self, event, *args, **kwargs): <NEW_LINE> <INDENT> results = [] <NEW_LINE> if event in self.functions_list: <NEW_LINE> <INDENT> for function in self.functions_list[event]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = function(*args, **kwargs) <NEW_LINE> if result: <NEW_LINE> <INDENT> results.append(result) <NEW_LINE> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> trace_error_handler(log) <NEW_LINE> log.exception('Exception for function %s', function) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> trace_error_handler(log) <NEW_LINE> log.error("Event %s called but not registered" % event) <NEW_LINE> <DEDENT> return results
This is the Component Registry. It is a singleton object and is used to provide a look up component for common objects.
62598fa4a79ad16197769f11
class BaseFeature(ABC): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def apply(self, audio_data): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for audio feature extraction All abstractmethod needs to be common methods in feature extractors
62598fa4627d3e7fe0e06d5c
class Pin(object): <NEW_LINE> <INDENT> __VALID_PORT_DIR = ['input', 'output'] <NEW_LINE> def __init__(self, name=None, direction=None, description=''): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._description = description <NEW_LINE> self.direction = direction <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @property <NEW_LINE> def direction(self): <NEW_LINE> <INDENT> return self._direction <NEW_LINE> <DEDENT> @direction.setter <NEW_LINE> def direction(self, val): <NEW_LINE> <INDENT> assert val in self.__VALID_PORT_DIR, mcode.ERR_004 % self.name <NEW_LINE> self._direction = val
Pin class defines I/O of a circuit. Note that only "input" and "output" are valid io type of the system. Since Verilog only allows uni-directional signal flow, "inout" pin is not allowed.
62598fa4e5267d203ee6b7bd
class CrawledResource: <NEW_LINE> <INDENT> def __init__(self, resource, origin_urls:list, id_in_origin=""): <NEW_LINE> <INDENT> if not origin_urls: <NEW_LINE> <INDENT> raise ValueError("Expected the resource to have an origin.") <NEW_LINE> <DEDENT> self._resource = resource <NEW_LINE> self._origin_urls = origin_urls <NEW_LINE> self._id_in_origin = id_in_origin <NEW_LINE> <DEDENT> @property <NEW_LINE> def crawled_resource(self): <NEW_LINE> <INDENT> return self._resource <NEW_LINE> <DEDENT> @property <NEW_LINE> def origin_urls(self): <NEW_LINE> <INDENT> return self._origin_urls <NEW_LINE> <DEDENT> @property <NEW_LINE> def provider(self): <NEW_LINE> <INDENT> return {"description": "This crawler crawls urls and adds them to the database.", "name": __name__.split(".", 1)[0], "url": "https://github.com/schul-cloud/schul_cloud_url_crawler", "url_trace": self.origin_urls} <NEW_LINE> <DEDENT> @property <NEW_LINE> def resource(self): <NEW_LINE> <INDENT> resource = self._resource.copy() <NEW_LINE> resource.setdefault("providers", []) <NEW_LINE> resource["providers"] = resource["providers"] + [self.provider] <NEW_LINE> return resource <NEW_LINE> <DEDENT> def get_api_resource_post(self, id_prefix=""): <NEW_LINE> <INDENT> return {"data":{"attributes":self.resource, "id": id_prefix+self.id, "type":"resource"}} <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self.origin_id + self._id_in_origin <NEW_LINE> <DEDENT> @property <NEW_LINE> def origin_id(self): <NEW_LINE> <INDENT> return hashlib.sha256(self.origin_url.encode()).hexdigest() <NEW_LINE> <DEDENT> @property <NEW_LINE> def origin_url(self): <NEW_LINE> <INDENT> return self._origin_urls[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def id_in_origin(self): <NEW_LINE> <INDENT> return self._id_in_origin <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{} {}>".format(self.__class__.__name__, self.id)
A resource crawled by the crawler. This is an adapter bewteen crawler and API. The id is computed by the originating url and the id it has in the url.
62598fa42c8b7c6e89bd3675
class PortfolioModule(BarGraphModule): <NEW_LINE> <INDENT> def __init__(self, series: List[Dict[str, str]], height: int = 150, width: int = 500, data_collector_name: str = "datacollector", fiat_values: bool = False, desc: str = "", title: str = "", group: str = "") -> None: <NEW_LINE> <INDENT> super().__init__(series, height, width, data_collector_name, desc, title, group) <NEW_LINE> self.fiat_values = fiat_values <NEW_LINE> self.sent_data = False <NEW_LINE> <DEDENT> def render(self, model: HavvenModel) -> PortfolioTuple: <NEW_LINE> <INDENT> data_collector: "DataCollector" = getattr( model, self.data_collector_name ) <NEW_LINE> if len(data_collector.agent_vars["Agents"]) <= 1: <NEW_LINE> <INDENT> self.sent_data = False <NEW_LINE> <DEDENT> if not self.sent_data: <NEW_LINE> <INDENT> vals: PortfolioTuple = (["Fiat", "Escrowed Havvens", "Havvens", "Nomins", "Issued Nomins"], ["darkgreen", "darkred", "red", "deepskyblue", "blue"], [1, 1, 1, 1, 1], [], [], [], [], [], []) <NEW_LINE> static_val_len = 4 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vals = ([], [], [], [], []) <NEW_LINE> static_val_len = 0 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> agents = sorted( data_collector.agent_vars["Agents"][-1], key=lambda x: x[0] ) <NEW_LINE> for item in agents: <NEW_LINE> <INDENT> if not self.sent_data: <NEW_LINE> <INDENT> vals[3].append(item[1].name) <NEW_LINE> <DEDENT> breakdown = item[1].portfolio(self.fiat_values) <NEW_LINE> for i in range(len(breakdown)): <NEW_LINE> <INDENT> if i+1 == len(breakdown): <NEW_LINE> <INDENT> vals[i + static_val_len].append(-float(breakdown[i])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vals[i + static_val_len].append(float(breakdown[i])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.sent_data = True <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> vals = [] <NEW_LINE> <DEDENT> return vals
A bar graph that will show the bars stacked in terms of wealth of different types: escrowed_havvens, unescrowed_havvens, nomins, fiat
62598fa43cc13d1c6d46561c
class ScoreForm(messages.Message): <NEW_LINE> <INDENT> user_name = messages.StringField(1, required=True) <NEW_LINE> date = messages.StringField(2, required=True) <NEW_LINE> won = messages.BooleanField(3, required=True) <NEW_LINE> guesses = messages.IntegerField(4, required=True) <NEW_LINE> performance=messages.FloatField(5,required=True)
ScoreForm for outbound Score information
62598fa40c0af96317c56232
class KarmaAssistantUnitTests(BaseTest): <NEW_LINE> <INDENT> def test_check_if_correlates_to_userid(self): <NEW_LINE> <INDENT> mock_handler_succeed = mock.MagicMock(name="MockSlackHandler") <NEW_LINE> mock_handler_succeed.get_userid_from_name.return_value = "SOME USER ID" <NEW_LINE> mock_handler_succeed.get_user_obj_from_id.return_value = { "team_id": "SOME TEAM ID", } <NEW_LINE> mock_handler_fail = mock.MagicMock(name="MockSlackHandler") <NEW_LINE> mock_handler_fail.get_userid_from_name.return_value = None <NEW_LINE> fake_event = { "team_id": "SOME TEAM ID", } <NEW_LINE> ka = KarmaAssistant() <NEW_LINE> with mock.patch.object(ka, "bot", mock_handler_succeed): <NEW_LINE> <INDENT> userid = ka.check_if_correlates_to_userid( fake_event, "SOME USERNAME" ) <NEW_LINE> self.assertEqual( "SOME USER ID", userid ) <NEW_LINE> <DEDENT> with mock.patch.object(ka, "bot", mock_handler_fail): <NEW_LINE> <INDENT> userid = ka.check_if_correlates_to_userid( fake_event, "SOME USERNAME" ) <NEW_LINE> self.assertIsNone(userid) <NEW_LINE> <DEDENT> <DEDENT> def test_check_if_correlates_to_username(self): <NEW_LINE> <INDENT> mock_handler_succeed = mock.MagicMock(name="MockSlackHandler") <NEW_LINE> mock_handler_succeed.get_username_from_id.return_value = "SOME USERNAME" <NEW_LINE> mock_handler_succeed.get_user_obj_from_id.return_value = { "team_id": "SOME TEAM ID", } <NEW_LINE> mock_handler_fail = mock.MagicMock(name="MockSlackHandler") <NEW_LINE> mock_handler_fail.get_user_from_id.return_value = None <NEW_LINE> fake_event = { "team_id": "SOME TEAM ID", } <NEW_LINE> ka = KarmaAssistant() <NEW_LINE> with mock.patch.object(ka, "bot", mock_handler_succeed): <NEW_LINE> <INDENT> userid = ka.check_if_correlates_to_username( fake_event, "SOME USERNAME" ) <NEW_LINE> self.assertEqual( "SOME USERNAME", userid ) <NEW_LINE> <DEDENT> with mock.patch.object(ka, "bot", mock_handler_fail): <NEW_LINE> <INDENT> userid = ka.check_if_correlates_to_username( fake_event, "SOME USERNAME" ) <NEW_LINE> 
self.assertIsNone(userid)
Tests for the KarmaAssistant.
62598fa43617ad0b5ee06003
class RecomResultItem(object): <NEW_LINE> <INDENT> swagger_types = { 'item': 'Item', 'rank': 'float', 'recommendation_id': 'str' } <NEW_LINE> attribute_map = { 'item': 'item', 'rank': 'rank', 'recommendation_id': 'recommendationId' } <NEW_LINE> def __init__(self, item=None, rank=None, recommendation_id=None, _configuration=None): <NEW_LINE> <INDENT> if _configuration is None: <NEW_LINE> <INDENT> _configuration = Configuration() <NEW_LINE> <DEDENT> self._configuration = _configuration <NEW_LINE> self._item = None <NEW_LINE> self._rank = None <NEW_LINE> self._recommendation_id = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.item = item <NEW_LINE> self.rank = rank <NEW_LINE> self.recommendation_id = recommendation_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def item(self): <NEW_LINE> <INDENT> return self._item <NEW_LINE> <DEDENT> @item.setter <NEW_LINE> def item(self, item): <NEW_LINE> <INDENT> if self._configuration.client_side_validation and item is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `item`, must not be `None`") <NEW_LINE> <DEDENT> self._item = item <NEW_LINE> <DEDENT> @property <NEW_LINE> def rank(self): <NEW_LINE> <INDENT> return self._rank <NEW_LINE> <DEDENT> @rank.setter <NEW_LINE> def rank(self, rank): <NEW_LINE> <INDENT> if self._configuration.client_side_validation and rank is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `rank`, must not be `None`") <NEW_LINE> <DEDENT> self._rank = rank <NEW_LINE> <DEDENT> @property <NEW_LINE> def recommendation_id(self): <NEW_LINE> <INDENT> return self._recommendation_id <NEW_LINE> <DEDENT> @recommendation_id.setter <NEW_LINE> def recommendation_id(self, recommendation_id): <NEW_LINE> <INDENT> if self._configuration.client_side_validation and recommendation_id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `recommendation_id`, must not be `None`") <NEW_LINE> <DEDENT> self._recommendation_id = recommendation_id <NEW_LINE> <DEDENT> def to_dict(self): 
<NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(RecomResultItem, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, RecomResultItem): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, RecomResultItem): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa43d592f4c4edbad7e
class ConcreteClientData(ClientData): <NEW_LINE> <INDENT> def __init__( self, client_ids: Iterable[str], create_tf_dataset_for_client_fn: Callable[[str], tf.data.Dataset], ): <NEW_LINE> <INDENT> py_typecheck.check_type(client_ids, collections.Iterable) <NEW_LINE> py_typecheck.check_callable(create_tf_dataset_for_client_fn) <NEW_LINE> if not client_ids: <NEW_LINE> <INDENT> raise ValueError('At least one client_id is required.') <NEW_LINE> <DEDENT> self._client_ids = list(client_ids) <NEW_LINE> self._create_tf_dataset_for_client_fn = create_tf_dataset_for_client_fn <NEW_LINE> if isinstance(self._create_tf_dataset_for_client_fn, computation_base.Computation): <NEW_LINE> <INDENT> self._dataset_computation = self._create_tf_dataset_for_client_fn <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._dataset_computation = None <NEW_LINE> <DEDENT> example_dataset = create_tf_dataset_for_client_fn(next(iter(client_ids))) <NEW_LINE> self._element_type_structure = example_dataset.element_spec <NEW_LINE> <DEDENT> @property <NEW_LINE> def client_ids(self) -> List[str]: <NEW_LINE> <INDENT> return self._client_ids <NEW_LINE> <DEDENT> def create_tf_dataset_for_client(self, client_id: str) -> tf.data.Dataset: <NEW_LINE> <INDENT> return self._create_tf_dataset_for_client_fn(client_id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def element_type_structure(self): <NEW_LINE> <INDENT> return self._element_type_structure <NEW_LINE> <DEDENT> @property <NEW_LINE> def dataset_computation(self): <NEW_LINE> <INDENT> if self._dataset_computation is not None: <NEW_LINE> <INDENT> return self._dataset_computation <NEW_LINE> <DEDENT> raise NotImplementedError
A generic `ClientData` object. This is a simple implementation of client_data, where Datasets are specified as a function from client_id to Dataset. The `ConcreteClientData.preprocess` classmethod is provided as a utility used to wrap another `ClientData` with an additional preprocessing function.
62598fa4460517430c431fb3
class Order(BaseModel, db.Model): <NEW_LINE> <INDENT> __tablename__ = "ih_order_info" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey("ih_user_profile.id"), nullable=False) <NEW_LINE> house_id = db.Column(db.Integer, db.ForeignKey("ih_house_info.id"), nullable=False) <NEW_LINE> begin_date = db.Column(db.DateTime, nullable=False) <NEW_LINE> end_date = db.Column(db.DateTime, nullable=False) <NEW_LINE> days = db.Column(db.Integer, nullable=False) <NEW_LINE> house_price = db.Column(db.Integer, nullable=False) <NEW_LINE> amount = db.Column(db.Integer, nullable=False) <NEW_LINE> status = db.Column( db.Enum( "WAIT_ACCEPT", "WAIT_PAYMENT", "PAID", "WAIT_COMMENT", "COMPLETE", "CANCELED", "REJECTED" ), default="WAIT_ACCEPT", index=True) <NEW_LINE> comment = db.Column(db.Text) <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> order_dict = { "order_id":self.id, "title":self.house.title, "img_url":constants.QINIU_URL_DOMAIN + self.house.index_image_url if self.house.index_image_url else "" ,"start_date":self.begin_date.strftime("%Y-%m-%d"), "end_date":self.end_date.strftime("%Y-%m-%d"), "ctime":self.create_time.strftime("%Y-%m-%d %H:%M:%S"), "days":self.days, "amount":self.amount, "comment":self.comment if self.comment else "" } <NEW_LINE> return order_dict
订单
62598fa44e4d5625663722d5
class timelogEvent(object): <NEW_LINE> <INDENT> def __init__(self, name, timestamp, duration): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self.duration = duration <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{name: %s, date: %s, duration: %s}" % (self.name, self.timestamp, self.duration) <NEW_LINE> <DEDENT> def subtract_logged_time(self, logged_time): <NEW_LINE> <INDENT> self.duration = self.duration - self.logged_time
a custom data structure for shotgun time log
62598fa48c0ade5d55dc35e8
class TestProcessingStateContext(unittest.TestCase): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> state.reset_instance() <NEW_LINE> <DEDENT> def test_basic_functionality(self): <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> n_jobs = 2 <NEW_LINE> pool = multiprocessing.Pool(processes=4) <NEW_LINE> results = [] <NEW_LINE> for i in range(n_jobs): <NEW_LINE> <INDENT> results.append(pool.apply_async(worker1, (i,))) <NEW_LINE> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> for i in range(n_jobs): <NEW_LINE> <INDENT> self.assertTrue(results[i].get()) <NEW_LINE> self.assertEqual(state[str(i)], i) <NEW_LINE> <DEDENT> <DEDENT> def test_deleting(self): <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> n_jobs = 2 <NEW_LINE> pool = multiprocessing.Pool(processes=4) <NEW_LINE> results = [] <NEW_LINE> for i in range(n_jobs): <NEW_LINE> <INDENT> results.append(pool.apply_async(worker2, (i,))) <NEW_LINE> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> for i in range(n_jobs): <NEW_LINE> <INDENT> self.assertNotIn(str(i), state) <NEW_LINE> <DEDENT> <DEDENT> def test_raises_on_getting(self): <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> n_jobs = 200 <NEW_LINE> pool = multiprocessing.Pool(processes=4) <NEW_LINE> results = [] <NEW_LINE> for i in range(n_jobs): <NEW_LINE> <INDENT> results.append(pool.apply_async(worker3, (i,))) <NEW_LINE> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> for i in range(n_jobs): <NEW_LINE> <INDENT> self.assertTrue(results[i].get()) <NEW_LINE> <DEDENT> <DEDENT> def test_raises_on_setting(self): <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> n_jobs = 200 <NEW_LINE> pool = multiprocessing.Pool(processes=4) <NEW_LINE> results = [] <NEW_LINE> for _ in range(n_jobs): <NEW_LINE> <INDENT> results.append(pool.apply_async(worker4)) <NEW_LINE> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> 
<DEDENT> for i in range(n_jobs): <NEW_LINE> <INDENT> self.assertTrue(results[i].get()) <NEW_LINE> <DEDENT> <DEDENT> def test_raises_on_deleting(self): <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> n_jobs = 200 <NEW_LINE> pool = multiprocessing.Pool(processes=4) <NEW_LINE> results = [] <NEW_LINE> for _ in range(n_jobs): <NEW_LINE> <INDENT> results.append(pool.apply_async(worker5)) <NEW_LINE> <DEDENT> pool.close() <NEW_LINE> pool.join() <NEW_LINE> <DEDENT> for i in range(n_jobs): <NEW_LINE> <INDENT> self.assertTrue(results[i].get()) <NEW_LINE> <DEDENT> <DEDENT> def test_after_exception(self): <NEW_LINE> <INDENT> state['a'] = 12 <NEW_LINE> try: <NEW_LINE> <INDENT> with processing_state_context(): <NEW_LINE> <INDENT> raise RuntimeError <NEW_LINE> <DEDENT> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertEqual(state['a'], 12)
Test the processing_state_context
62598fa44428ac0f6e6583dc
@attr.s(auto_attribs=True) <NEW_LINE> class ThirdPartyReleaseInfoValidator(ReleaseInfoValidatorBase): <NEW_LINE> <INDENT> def __attrs_post_init__(self): <NEW_LINE> <INDENT> self.content_validator = ContentFileValidator( scheme=ThirdPartyReleaseInfoContentScheme, content_type=self.content_type)
Special alias for `THIRD_PARTY_RELEASE.INFO` file validator.
62598fa4e5267d203ee6b7be
class TestPrettyPrintRows(unittest.TestCase): <NEW_LINE> <INDENT> def test_pretty_print_rows_empty_inputs(self): <NEW_LINE> <INDENT> self.assertEqual(pretty_print_rows([]),"") <NEW_LINE> <DEDENT> def test_pretty_print_rows_single_row(self): <NEW_LINE> <INDENT> rows = [['hello:','A salutation']] <NEW_LINE> self.assertEqual(pretty_print_rows(rows), "hello: A salutation") <NEW_LINE> <DEDENT> def test_pretty_print_rows_multiple_rows(self): <NEW_LINE> <INDENT> rows = [['-','hello:','A salutation'], ['-','goodbye:','The End']] <NEW_LINE> self.assertEqual(pretty_print_rows(rows), "- hello: A salutation\n- goodbye: The End ") <NEW_LINE> <DEDENT> def test_pretty_print_rows_prepend(self): <NEW_LINE> <INDENT> rows = [['-','hello:','A salutation'], ['-','goodbye:','The End']] <NEW_LINE> self.assertEqual(pretty_print_rows(rows,prepend=True), "- hello: A salutation\n- goodbye: The End")
Tests for the pretty_print_rows function
62598fa4d486a94d0ba2be7e
class TestInlineResponse20088Site(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testInlineResponse20088Site(self): <NEW_LINE> <INDENT> pass
InlineResponse20088Site unit test stubs
62598fa4aad79263cf42e689
class ElementWrapper(object): <NEW_LINE> <INDENT> def __init__(self, wrapped): <NEW_LINE> <INDENT> self.wrapped = wrapped <NEW_LINE> <DEDENT> def xpath(self, xpath): <NEW_LINE> <INDENT> return [ElementWrapper(sel) for sel in self.wrapped.find_elements_by_xpath(xpath)] <NEW_LINE> <DEDENT> def text_content(self): <NEW_LINE> <INDENT> return self.wrapped.text <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def itertext(self): <NEW_LINE> <INDENT> return [self.wrapped.text] <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.wrapped, attr) <NEW_LINE> <DEDENT> @property <NEW_LINE> class attrib(object): <NEW_LINE> <INDENT> def __init__(self, el): <NEW_LINE> <INDENT> self.el = el <NEW_LINE> <DEDENT> def __getitem__(self, k): <NEW_LINE> <INDENT> v = self.el.get_attribute(k) <NEW_LINE> if v is None: <NEW_LINE> <INDENT> raise KeyError('Attribute %r was not found' % k) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def get(self, k, default=None): <NEW_LINE> <INDENT> v = self.el.get_attribute(k) <NEW_LINE> if v is None: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> return v
Wrapper to Selenium element to ressemble lxml. Some differences: - only a subset of lxml's Element class are available - cannot access XPath "text()", only Elements See https://seleniumhq.github.io/selenium/docs/api/py/webdriver_remote/selenium.webdriver.remote.webelement.html
62598fa4236d856c2adc9393
@python_2_unicode_compatible <NEW_LINE> class Heuristic(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> category_code = models.CharField(max_length=20, choices=HEURISTIC_CATEGORIES.items()) <NEW_LINE> @property <NEW_LINE> def category(self): <NEW_LINE> <INDENT> return HEURISTIC_CATEGORIES[self.category_code] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
A heuristic, measured by answers to questions
62598fa432920d7e50bc5f07
class AddressCompleteness: <NEW_LINE> <INDENT> implements(ICompleteness) <NEW_LINE> adapts(IAddress) <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> def isComplete(self): <NEW_LINE> <INDENT> if self.context.address_1 and self.context.zip_code and self.context.city and self.context.country: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self.context.plone_utils.addPortalMessage( _("Some address data is missing. Please check your Address")) <NEW_LINE> return False
Provides ICompleteness for address content objects
62598fa497e22403b383adbd
class BaseGeometry: <NEW_LINE> <INDENT> def area(self): <NEW_LINE> <INDENT> raise Exception("area() is not implemented")
BaseGeometry: empty class
62598fa4435de62698e9bca6
class UploadForm(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.form_fields = [] <NEW_LINE> self.files = [] <NEW_LINE> self.boundary = self.make_upload_from_boundary() <NEW_LINE> self.content_type = 'multipart/form-data; boundary=%s' % self.boundary <NEW_LINE> <DEDENT> def make_upload_from_boundary(self, text=None): <NEW_LINE> <INDENT> _width = len(repr(sys.maxint - 1)) <NEW_LINE> _fmt = '%%0%dd' % _width <NEW_LINE> token = random.randrange(sys.maxint) <NEW_LINE> boundary = '----' + (_fmt % token) <NEW_LINE> if text is None: <NEW_LINE> <INDENT> return boundary <NEW_LINE> <DEDENT> b = boundary <NEW_LINE> counter = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> cre = re.compile('^--' + re.escape(b) + '(--)?$', re.MULTILINE) <NEW_LINE> if not cre.search(text): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> b = boundary + '.' + str(counter) <NEW_LINE> counter += 1 <NEW_LINE> <DEDENT> return b <NEW_LINE> <DEDENT> def get_content_type(self): <NEW_LINE> <INDENT> return self.content_type <NEW_LINE> <DEDENT> def add_field(self, name, value): <NEW_LINE> <INDENT> self.form_fields.append((str(name), str(value))) <NEW_LINE> return <NEW_LINE> <DEDENT> def add_file(self, fieldname, filename, fileHandle, mimetype=None): <NEW_LINE> <INDENT> body = fileHandle.read() <NEW_LINE> if mimetype is None: <NEW_LINE> <INDENT> mimetype = (mimetypes.guess_type(filename)[0] or 'applicatioin/octet-stream') <NEW_LINE> <DEDENT> self.files.append((fieldname, filename, mimetype, body)) <NEW_LINE> return <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> import sys <NEW_LINE> reload(sys) <NEW_LINE> sys.setdefaultencoding('utf-8') <NEW_LINE> parts = [] <NEW_LINE> part_boundary = '--' + self.boundary <NEW_LINE> parts.extend( [part_boundary, 'Content-Disposition: form-data; name="%s"' % name, '', value, ] for name, value in self.form_fields) <NEW_LINE> if self.files: <NEW_LINE> <INDENT> parts.extend([ part_boundary, 'Content-Disposition: form-data; name="%s"; 
filename="%s"' % (field_name, filename), 'Content-Type: %s' % content_type, '', body, ] for field_name, filename, content_type, body in self.files) <NEW_LINE> <DEDENT> flattened = list(itertools.chain(*parts)) <NEW_LINE> flattened.append('--' + self.boundary + '--') <NEW_LINE> flattened.append('') <NEW_LINE> return '\r\n'.join(flattened)
上传对象构造
62598fa491f36d47f2230dfb
class PerfectForecastConfig(ConfigBase): <NEW_LINE> <INDENT> def __init__(self, source, derived): <NEW_LINE> <INDENT> self._observed = GHCN_CAMS_PRECL(source) <NEW_LINE> self._forecast = {'Observed': PerfectForecast(self._observed)} <NEW_LINE> self._static = DefaultStatic(source) <NEW_LINE> self._workspace = paths.DefaultWorkspace(derived) <NEW_LINE> <DEDENT> def historical_years(self): <NEW_LINE> <INDENT> return range(1948, 2018) <NEW_LINE> <DEDENT> def result_fit_years(self): <NEW_LINE> <INDENT> return range(1950, 2010) <NEW_LINE> <DEDENT> def models(self): <NEW_LINE> <INDENT> return self._forecast.keys() <NEW_LINE> <DEDENT> def forecast_ensemble_members(self, model, yearmon, *, lag_hours: Optional[int] = None): <NEW_LINE> <INDENT> assert model in self.models() <NEW_LINE> return '1' <NEW_LINE> <DEDENT> def forecast_targets(self, yearmon): <NEW_LINE> <INDENT> return dates.get_next_yearmons(yearmon, 9) <NEW_LINE> <DEDENT> def forecast_data(self, model: str): <NEW_LINE> <INDENT> return self._forecast[model] <NEW_LINE> <DEDENT> def observed_data(self): <NEW_LINE> <INDENT> return self._observed <NEW_LINE> <DEDENT> def static_data(self): <NEW_LINE> <INDENT> return self._static <NEW_LINE> <DEDENT> def workspace(self) -> paths.DefaultWorkspace: <NEW_LINE> <INDENT> return self._workspace
Configuration that uses observed data as a forecast. Can be used for evaluating the agricultural assessment (which must use forecasts) retrospectively.
62598fa48e7ae83300ee8f52
class TOCMacro(List): <NEW_LINE> <INDENT> grammar = contiguous( "TableOfContents", optional( "(", optional(attr("maxDepth", re.compile(r"\d+"))), ")")) <NEW_LINE> def compose(self, parser, attr_of): <NEW_LINE> <INDENT> global pageYaml <NEW_LINE> pageYaml["autotoc"] = "true" <NEW_LINE> return("") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def test(cls): <NEW_LINE> <INDENT> parse("TableOfContents", cls) <NEW_LINE> parse("TableOfContents(2)", cls)
TableOfContents Macros insert TOC's. There ya go. <<TableOfContents>> <<TableOfContents([maxdepth])>> <<TableOfContents(2)>>
62598fa491af0d3eaad39cbf
class CodeContext(object): <NEW_LINE> <INDENT> def __init__(self, code, path): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.path = path <NEW_LINE> self._file = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.code is None: <NEW_LINE> <INDENT> self._file = open(self.path, 'rU') <NEW_LINE> self.code = self._file.read() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, t, value, traceback): <NEW_LINE> <INDENT> if not self._file is None: <NEW_LINE> <INDENT> self._file.close() <NEW_LINE> <DEDENT> if t and LOGGER.level == logging.DEBUG: <NEW_LINE> <INDENT> LOGGER.debug(traceback)
Read file if code is None.
62598fa463d6d428bbee2663
class ValuesXLS(Values): <NEW_LINE> <INDENT> __doc__ %= QUERY_LIMIT <NEW_LINE> def header(self, ctx, req): <NEW_LINE> <INDENT> return ['ID', 'Language', 'Concept', 'Form', 'Reference', 'Comment'] <NEW_LINE> <DEDENT> def row(self, ctx, req, item): <NEW_LINE> <INDENT> res = super(Values, self).row(ctx, req, item) <NEW_LINE> res.insert(1, hyperlink(req.resource_url(item.valueset.parameter), item.valueset.parameter.__unicode__())) <NEW_LINE> res.insert(2, hyperlink(req.resource_url(item.valueset.language), item.valueset.language.__unicode__())) <NEW_LINE> res.append(hyperlink(req.resource_url(item.valueset.contribution), item.valueset.contribution.__unicode__())) <NEW_LINE> res.append(item.comment) <NEW_LINE> return res
Represent table of Value instances as excel sheet (maximal %d rows).
62598fa476e4537e8c3ef45f
class LRUCache1: <NEW_LINE> <INDENT> def __init__(self, capacity: int): <NEW_LINE> <INDENT> self.cache = OrderedDict() <NEW_LINE> self.capacity = capacity <NEW_LINE> <DEDENT> def get(self, key: int) -> int: <NEW_LINE> <INDENT> val = self.cache.get(key, -1) <NEW_LINE> if val != -1: self.cache.move_to_end(key, last=False) <NEW_LINE> return val <NEW_LINE> <DEDENT> def put(self, key: int, value: int) -> None: <NEW_LINE> <INDENT> if len(self.cache) == self.capacity and key not in self.cache: <NEW_LINE> <INDENT> self.cache.popitem() <NEW_LINE> <DEDENT> self.cache[key] = value <NEW_LINE> self.cache.move_to_end(key, last=False)
基于顺序哈希表,通过
62598fa4498bea3a75a579d4
class FreeType(_MetaType): <NEW_LINE> <INDENT> def __init__(self, **options): <NEW_LINE> <INDENT> _MetaType.__init__(self, FreeType.FreeTypeInstance) <NEW_LINE> keys = list(options.keys()) <NEW_LINE> keys.sort() <NEW_LINE> orderedopts = [(k, options[k]) for k in keys if isinstance(options[k], Forward)] + [(k, options[k]) for k in keys if not isinstance(options[k], Forward)] <NEW_LINE> self.options = OrderedDict(orderedopts) <NEW_LINE> <DEDENT> def assign(self, val): <NEW_LINE> <INDENT> for key, value_type in self.options.items(): <NEW_LINE> <INDENT> if value_type is None and val is None: <NEW_LINE> <INDENT> return self.__call__(**{key: None}) <NEW_LINE> <DEDENT> elif isinstance(value_type, Iterable) and val in value_type: <NEW_LINE> <INDENT> return self.__call__(**{key: val}) <NEW_LINE> <DEDENT> elif value_type.has_member(val): <NEW_LINE> <INDENT> return self.__call__(**{key: val}) <NEW_LINE> <DEDENT> <DEDENT> assert False, "Unknown value type" <NEW_LINE> <DEDENT> def has_member(self, other): <NEW_LINE> <INDENT> if isinstance(other, _Instance) and other.cls == self: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for k, v in self.options.items(): <NEW_LINE> <INDENT> if (not isinstance(v, Forward) or v.ref != self) and _MetaType.is_instance(other, v): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, FreeType) and other.options == self.options <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "\n\t" + '\n\t\t'.join(["%s= ⟨⟨ %s ⟩⟩" % e for e in self.options.items()]) <NEW_LINE> <DEDENT> class FreeTypeInstance(_Instance): <NEW_LINE> <INDENT> def __init__(self, cls, *args, **kwargs): <NEW_LINE> <INDENT> _Instance.__init__(self, cls) <NEW_LINE> assert len(args) == 0, "Must use a keyword for constructing a free type" <NEW_LINE> assert len(kwargs) == 1, "Only one constructor allowed" <NEW_LINE> key, val = kwargs.popitem() <NEW_LINE> assert 
key in self.cls.options, "Unknown constructor type (%s)" % key <NEW_LINE> self.__dict__['_key'] = key <NEW_LINE> self._hash = hash(frozenset(self.__dict__)) <NEW_LINE> value_type = self.cls.options[key] <NEW_LINE> self.__dict__['_value'] = val <NEW_LINE> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> if item.startswith('_'): <NEW_LINE> <INDENT> return AttributeError <NEW_LINE> <DEDENT> assert self._key == item, "Wrong type (%s, %s)" % (self._key, item) <NEW_LINE> return self._value <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(self, FreeType.FreeTypeInstance) or not isinstance(other, FreeType.FreeTypeInstance): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self._key == other._key and self._value == other._value <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return self._hash <NEW_LINE> <DEDENT> def inran(self, item): <NEW_LINE> <INDENT> assert item in self.cls.options, "Unknown constructor type" <NEW_LINE> return self._key == item <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._key + ' ⟨⟨ ' + str(self._value) + " ⟩⟩\n"
Free type declaration Z: Ans ::= ok<<Z>> | error Python: Ans = FreeType(ok=int, error=None) Z: Degree ::= status <<0..3>> ba = status 0 Python: Degree = FreeType(status=range(0,4)) ba = Degree('status', 0)
62598fa460cbc95b063641fd
@skipIf(HAS_BOTO is False, "The boto module must be installed.") <NEW_LINE> @skipIf( _has_required_boto() is False, ( "The boto3 module must be greater than or equal to version {}, " "and botocore must be greater than or equal to {}".format( required_boto3_version, required_botocore_version ) ), ) <NEW_LINE> class BotoLambdaAliasTestCase(BotoLambdaStateTestCaseBase, BotoLambdaTestCaseMixin): <NEW_LINE> <INDENT> def test_present_when_alias_does_not_exist(self): <NEW_LINE> <INDENT> self.conn.list_aliases.side_effect = [{"Aliases": []}, {"Aliases": [alias_ret]}] <NEW_LINE> self.conn.create_alias.return_value = alias_ret <NEW_LINE> result = self.salt_states["boto_lambda.alias_present"]( "alias present", FunctionName="testfunc", Name=alias_ret["Name"], FunctionVersion=alias_ret["FunctionVersion"], ) <NEW_LINE> self.assertTrue(result["result"]) <NEW_LINE> self.assertEqual(result["changes"]["new"]["alias"]["Name"], alias_ret["Name"]) <NEW_LINE> <DEDENT> def test_present_when_alias_exists(self): <NEW_LINE> <INDENT> self.conn.list_aliases.return_value = {"Aliases": [alias_ret]} <NEW_LINE> self.conn.create_alias.return_value = alias_ret <NEW_LINE> result = self.salt_states["boto_lambda.alias_present"]( "alias present", FunctionName="testfunc", Name=alias_ret["Name"], FunctionVersion=alias_ret["FunctionVersion"], Description=alias_ret["Description"], ) <NEW_LINE> self.assertTrue(result["result"]) <NEW_LINE> self.assertEqual(result["changes"], {}) <NEW_LINE> <DEDENT> @pytest.mark.slow_test <NEW_LINE> def test_present_with_failure(self): <NEW_LINE> <INDENT> self.conn.list_aliases.side_effect = [{"Aliases": []}, {"Aliases": [alias_ret]}] <NEW_LINE> self.conn.create_alias.side_effect = ClientError(error_content, "create_alias") <NEW_LINE> result = self.salt_states["boto_lambda.alias_present"]( "alias present", FunctionName="testfunc", Name=alias_ret["Name"], FunctionVersion=alias_ret["FunctionVersion"], ) <NEW_LINE> self.assertFalse(result["result"]) <NEW_LINE> 
self.assertTrue("An error occurred" in result["comment"]) <NEW_LINE> <DEDENT> def test_absent_when_alias_does_not_exist(self): <NEW_LINE> <INDENT> self.conn.list_aliases.return_value = {"Aliases": [alias_ret]} <NEW_LINE> result = self.salt_states["boto_lambda.alias_absent"]( "alias absent", FunctionName="testfunc", Name="myalias" ) <NEW_LINE> self.assertTrue(result["result"]) <NEW_LINE> self.assertEqual(result["changes"], {}) <NEW_LINE> <DEDENT> def test_absent_when_alias_exists(self): <NEW_LINE> <INDENT> self.conn.list_aliases.return_value = {"Aliases": [alias_ret]} <NEW_LINE> result = self.salt_states["boto_lambda.alias_absent"]( "alias absent", FunctionName="testfunc", Name=alias_ret["Name"] ) <NEW_LINE> self.assertTrue(result["result"]) <NEW_LINE> self.assertEqual(result["changes"]["new"]["alias"], None) <NEW_LINE> <DEDENT> def test_absent_with_failure(self): <NEW_LINE> <INDENT> self.conn.list_aliases.return_value = {"Aliases": [alias_ret]} <NEW_LINE> self.conn.delete_alias.side_effect = ClientError(error_content, "delete_alias") <NEW_LINE> result = self.salt_states["boto_lambda.alias_absent"]( "alias absent", FunctionName="testfunc", Name=alias_ret["Name"] ) <NEW_LINE> self.assertFalse(result["result"]) <NEW_LINE> self.assertTrue("An error occurred" in result["comment"])
TestCase for salt.modules.boto_lambda state.module aliases
62598fa4e76e3b2f99fd88e8
class FairModel: <NEW_LINE> <INDENT> def __init__(self, model, inds, times, last_hist=5, nsteps=6, nydims=None): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.y_hist_inds = inds <NEW_LINE> self.times = times <NEW_LINE> self.last_hist = last_hist <NEW_LINE> self.nsteps = nsteps <NEW_LINE> self.nydims = nydims <NEW_LINE> <DEDENT> def predict(self, X, batch_size=None): <NEW_LINE> <INDENT> return predict_sequential(self.model, X, self.y_hist_inds, self.times, last_hist=self.last_hist, nsteps=self.nsteps, nydims=self.nydims) <NEW_LINE> <DEDENT> def evaluate(self, X, y, batch_size=None): <NEW_LINE> <INDENT> yhat = self.predict(X) <NEW_LINE> idx = ~np.isnan(yhat).any(axis=-1) <NEW_LINE> return np.mean(np.square(yhat[idx] - y[idx]), axis=-1).mean()
returns sequential prediction
62598fa4925a0f43d25e7ef0
class Action: <NEW_LINE> <INDENT> def __init__(self, data, limit=None): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> self._limit = limit <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @property <NEW_LINE> def limit(self): <NEW_LINE> <INDENT> return self._limit <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return copy(self)
The class for the action object that is returned by each task. The action object encapsulates the information that is returned by a task to the system. It contains the data that should be passed on to the successor tasks and a list of immediate successor tasks that should be executed. The latter allows to limit the execution of successor tasks.
62598fa4e1aae11d1e7ce77c
class Dialog_Info(Gtk.Dialog): <NEW_LINE> <INDENT> def __init__(self, parent, title, message): <NEW_LINE> <INDENT> Gtk.Dialog.__init__(self, title, parent, 0, (Gtk.STOCK_OK, Gtk.ResponseType.OK)) <NEW_LINE> self.set_default_size(150, 100) <NEW_LINE> self.label = Gtk.Label(message) <NEW_LINE> self.box = self.get_content_area() <NEW_LINE> self.box.set_spacing(6) <NEW_LINE> self.box.set_margin_start(6) <NEW_LINE> self.box.set_margin_end(6) <NEW_LINE> self.box.set_margin_top(6) <NEW_LINE> self.box.set_margin_bottom(6) <NEW_LINE> self.box.add(self.label) <NEW_LINE> self.show_all()
A simple dialog to inform about the process of saving the current list. It pops up after fullfilled saving of the list.
62598fa43d592f4c4edbad80
class StringSplitter(DFPBase): <NEW_LINE> <INDENT> def __init__( self, inputs=[], outputs=[], separator=None, index=None, keep=0 ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.inputs = inputs <NEW_LINE> self.outputs = outputs <NEW_LINE> self.separator = separator <NEW_LINE> self.index = index <NEW_LINE> self.keep = keep <NEW_LINE> <DEDENT> def transform(self, df): <NEW_LINE> <INDENT> if self.separator is not None: <NEW_LINE> <INDENT> for input, output in zip(self.inputs, self.outputs): <NEW_LINE> <INDENT> df[output] = df[input].map(lambda x: str(x).split(self.separator)[self.keep]) <NEW_LINE> <DEDENT> <DEDENT> elif self.index is not None: <NEW_LINE> <INDENT> if self.keep == 0: <NEW_LINE> <INDENT> for input, output in zip(self.inputs, self.outputs): <NEW_LINE> <INDENT> df[output] = df[input].map(lambda x: str(x)[:self.index]) <NEW_LINE> <DEDENT> <DEDENT> elif self.keep == -1: <NEW_LINE> <INDENT> for input, output in zip(self.inputs, self.outputs): <NEW_LINE> <INDENT> df[output] = df[input].map(lambda x: str(x)[self.index:]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert False, 'keep can be set only to 0 or -1' <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert False, 'Specify separator or index' <NEW_LINE> <DEDENT> return df <NEW_LINE> <DEDENT> def to_onnx_operator(self, graph): <NEW_LINE> <INDENT> for input_column, output_column in zip(self.inputs, self.outputs): <NEW_LINE> <INDENT> ops = [] <NEW_LINE> input_tensor = graph.get_current_tensor(input_column) <NEW_LINE> input_tensor_name = input_tensor.name <NEW_LINE> if input_tensor.type != TensorProto.STRING: <NEW_LINE> <INDENT> cast_kwargs = {'to': TensorProto.STRING} <NEW_LINE> cast_tensor = graph.get_tmp_tensor() <NEW_LINE> ops.append(helper.make_node('Cast', [input_tensor.name], [cast_tensor], graph.get_node_name('Cast'), **cast_kwargs)) <NEW_LINE> input_tensor_name = cast_tensor.name <NEW_LINE> <DEDENT> output_tensor = graph.get_next_tensor(output_column, TensorProto.STRING) 
<NEW_LINE> kwargs = {} <NEW_LINE> if self.separator is not None: <NEW_LINE> <INDENT> kwargs['separator'] = self.separator <NEW_LINE> <DEDENT> elif self.index is not None: <NEW_LINE> <INDENT> kwargs['index'] = self.index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, 'Seprator or index needs to be specified' <NEW_LINE> <DEDENT> kwargs['keep'] = self.keep <NEW_LINE> ops.append(helper.make_node('StringSplit', [input_tensor_name], [output_tensor.name], graph.get_node_name('StringSplit'), domain='ai.onnx.ml', **kwargs)) <NEW_LINE> graph.add([input_tensor], [output_tensor], ops)
Split strings in a colum. Parameters ---------- inputs : List of strings Column labels. outputs: List of strings Column labels. separator: String A string to separate a string. index: Int An index to split a string. keep: Int (0 or -1), default is 0 When this value is 0, the first string is stored to the output column. When this value is -1, the last string is stored to the output column. Examples: ---------- >>> df = pd.DataFrame({'Email': ['taro.jp.com', 'alice.us.com', 'bob.us']}) Keep the first string >>> tf1 = StringSplitter(inputs=['Email'], outputs=['Email_prefix'], separator='.', keep=0) >>> tf1.fit_transform(df) Email Email_prefix 0 taro.jp.com taro 1 alice.us.com alice 2 bob.us bob Keep the last string >>> tf2 = StringSplitter(inputs=['Email'], outputs=['Email_suffix'], separator='.', keep=-1) >>> tf2.fit_transform(df) Email Email_suffix 0 taro.jp.com com 1 alice.us.com com 2 bob.us us
62598fa4fff4ab517ebcd697
class CLMFitter(MultiScaleParametricFitter): <NEW_LINE> <INDENT> def __init__(self, clm, algorithms): <NEW_LINE> <INDENT> self._model = clm <NEW_LINE> super(CLMFitter, self).__init__( scales=clm.scales, reference_shape=clm.reference_shape, holistic_features=clm.holistic_features, algorithms=algorithms) <NEW_LINE> <DEDENT> @property <NEW_LINE> def clm(self): <NEW_LINE> <INDENT> return self._model
Abstract class for defining a CLM fitter. .. note:: When using a method with a parametric shape model, the first step is to **reconstruct the initial shape** using the shape model. The generated reconstructed shape is then used as initialisation for the iterative optimisation. This step takes place at each scale and it is not considered as an iteration, thus it is not counted for the provided `max_iters`. Parameters ---------- clm : :map:`CLM` or `subclass` The trained CLM model. algorithms : `list` of `class` The list of algorithm objects that will perform the fitting per scale.
62598fa47cff6e4e811b58da
class PlainPickle(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.pjc = {} <NEW_LINE> self.pjd = {} <NEW_LINE> <DEDENT> def save(self, name='params.txt'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(name, 'w') as save_file: <NEW_LINE> <INDENT> for key, value in self.pjd.items(): <NEW_LINE> <INDENT> comment_char = '#' <NEW_LINE> if value == None: <NEW_LINE> <INDENT> value = '' <NEW_LINE> <DEDENT> comment = '' <NEW_LINE> try: <NEW_LINE> <INDENT> comment = self.get_comment(key) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if comment == None: <NEW_LINE> <INDENT> comment = '' <NEW_LINE> <DEDENT> save_file.write('{}: {} {}{}\n'.format(key, value, comment_char, comment)) <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> def add(self, key, value=None, comment=None): <NEW_LINE> <INDENT> self.pjd[key] = value <NEW_LINE> self.pjc[key] = comment <NEW_LINE> <DEDENT> def get_set(self, key): <NEW_LINE> <INDENT> return [self.get_value(key), self.get_comment(key)] <NEW_LINE> <DEDENT> def get_value(self, key): <NEW_LINE> <INDENT> return self.pjd[key] <NEW_LINE> <DEDENT> def get_comment(self, key): <NEW_LINE> <INDENT> return self.pjc[key] <NEW_LINE> <DEDENT> def exists(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.get_value(key) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def read(self, name='params.txt'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(name, 'r') as read_file: <NEW_LINE> <INDENT> txt = read_file.read() <NEW_LINE> lines = txt.split('\n') <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> key = None <NEW_LINE> value = None <NEW_LINE> comment = None <NEW_LINE> try: <NEW_LINE> <INDENT> parts = line.split(':',1) <NEW_LINE> key = (parts[0]).strip() <NEW_LINE> vals = (parts[1].strip()).split('#', 1) <NEW_LINE> value = (vals[0]).strip() <NEW_LINE> if 
value == '': <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> comment = (vals[1]).strip() <NEW_LINE> if comment == '': <NEW_LINE> <INDENT> comment = None <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.add(key, value, comment) <NEW_LINE> <DEDENT> <DEDENT> return 0 <NEW_LINE> <DEDENT> except SyntaxError: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.pjc.clear() <NEW_LINE> self.pjd.clear()
A class to create and read sets of project parameters in a readable text format. The pickled files can be externally edited and re-read.
62598fa4f548e778e596b456
class Attrib(object): <NEW_LINE> <INDENT> NetType = enum(WIRELESS = 'wireless', ETHERNET = 'ethernet', PTP_WIRED = 'point-to-point-wired', PTP_WIRELESS = 'point-to-point-wireless') <NEW_LINE> MembType = enum(INTERFACE = 'interface', CHANNEL = 'channel', SWITCH = 'switch', HUB = 'hub', TUNNEL = 'tunnel', NETWORK = "network") <NEW_LINE> DevType = enum(HOST = 'host', ROUTER = 'router', SWITCH = 'switch', HUB = 'hub') <NEW_LINE> NodeType = enum(ROUTER = 'router', HOST = 'host', MDR = 'mdr', PC = 'PC', RJ45 = 'rj45') <NEW_LINE> Alias = enum(ID = "COREID")
scenario plan attribute constants
62598fa4dd821e528d6d8de6
class MaxRowReducer(MultiStatementReducer): <NEW_LINE> <INDENT> prepare_first = ( "if {0} is not None:", " %(result)s = ({0}, %(row)s)", ) <NEW_LINE> reduce = ( "if {1} is not None and {0}[0] < {1}:", " %(result)s = ({1}, %(row)s)", ) <NEW_LINE> default = None <NEW_LINE> post_conversion = GetItem(1)
Reducer which finds an item with max value of the expression and returns this item
62598fa4be8e80087fbbef14
class ResearchExperimentReplicateListFilter(FilterSet): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = ResearchExperimentReplicate <NEW_LINE> fields = {'experimentreplicate': ['exact'], 'object_id': ['exact'], 'content_type': ['exact'], } <NEW_LINE> order_by = ['experimentreplicate']
Filter query list from research experiment replicate database table
62598fa4d486a94d0ba2be80
class AnnounceThread(StoppableThread): <NEW_LINE> <INDENT> name = "Announcer" <NEW_LINE> announceInterval = 60 <NEW_LINE> def run(self): <NEW_LINE> <INDENT> lastSelfAnnounced = 0 <NEW_LINE> while not self._stopped and state.shutdown == 0: <NEW_LINE> <INDENT> processed = 0 <NEW_LINE> if lastSelfAnnounced < time.time() - self.announceInterval: <NEW_LINE> <INDENT> self.announceSelf() <NEW_LINE> lastSelfAnnounced = time.time() <NEW_LINE> <DEDENT> if processed == 0: <NEW_LINE> <INDENT> self.stop.wait(10) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def announceSelf(): <NEW_LINE> <INDENT> for connection in BMConnectionPool().udpSockets.values(): <NEW_LINE> <INDENT> if not connection.announcing: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for stream in state.streamsInWhichIAmParticipating: <NEW_LINE> <INDENT> addr = ( stream, Peer( '127.0.0.1', config.safeGetInt( 'bitmessagesettings', 'port')), time.time()) <NEW_LINE> connection.append_write_buf(assemble_addr([addr]))
A thread to manage regular announcing of this node
62598fa4009cb60464d013d7
class Insert(ValuesBase): <NEW_LINE> <INDENT> __visit_name__ = 'insert' <NEW_LINE> _supports_multi_parameters = True <NEW_LINE> def __init__(self, table, values=None, inline=False, bind=None, prefixes=None, returning=None, **kwargs): <NEW_LINE> <INDENT> ValuesBase.__init__(self, table, values, prefixes) <NEW_LINE> self._bind = bind <NEW_LINE> self.select = self.select_names = None <NEW_LINE> self.inline = inline <NEW_LINE> self._returning = returning <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def get_children(self, **kwargs): <NEW_LINE> <INDENT> if self.select is not None: <NEW_LINE> <INDENT> return self.select, <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> <DEDENT> @_generative <NEW_LINE> def from_select(self, names, select): <NEW_LINE> <INDENT> if self.parameters: <NEW_LINE> <INDENT> raise exc.InvalidRequestError( "This construct already inserts value expressions") <NEW_LINE> <DEDENT> self.parameters, self._has_multi_parameters = self._process_colparams(dict((n, null()) for n in names)) <NEW_LINE> self.select_names = names <NEW_LINE> self.select = _interpret_as_select(select) <NEW_LINE> <DEDENT> def _copy_internals(self, clone=_clone, **kw): <NEW_LINE> <INDENT> self.parameters = self.parameters.copy() <NEW_LINE> if self.select is not None: <NEW_LINE> <INDENT> self.select = _clone(self.select)
Represent an INSERT construct. The :class:`.Insert` object is created using the :func:`~.expression.insert()` function. .. seealso:: :ref:`coretutorial_insert_expressions`
62598fa4aad79263cf42e68a
class FileLikeTests(unittest.TestCase): <NEW_LINE> <INDENT> def testReadFileGivenFileObject(self): <NEW_LINE> <INDENT> f = open(ct_name, 'rb') <NEW_LINE> ct = read_file(f) <NEW_LINE> got = ct.ImagePositionPatient <NEW_LINE> DS = pydicom.valuerep.DS <NEW_LINE> expected = [DS('-158.135803'), DS('-179.035797'), DS('-75.699997')] <NEW_LINE> self.assertTrue(got == expected, "ImagePosition(Patient) values not as expected") <NEW_LINE> self.assertEqual(ct.file_meta.ImplementationClassUID, '1.3.6.1.4.1.5962.2', "ImplementationClassUID not the expected value") <NEW_LINE> self.assertEqual(ct.file_meta.ImplementationClassUID, ct.file_meta[0x2, 0x12].value, "ImplementationClassUID does not match the value accessed by tag number") <NEW_LINE> got = ct.ImagePositionPatient <NEW_LINE> expected = [DS('-158.135803'), DS('-179.035797'), DS('-75.699997')] <NEW_LINE> self.assertTrue(got == expected, "ImagePosition(Patient) values not as expected") <NEW_LINE> self.assertEqual(ct.Rows, 128, "Rows not 128") <NEW_LINE> self.assertEqual(ct.Columns, 128, "Columns not 128") <NEW_LINE> self.assertEqual(ct.BitsStored, 16, "Bits Stored not 16") <NEW_LINE> self.assertEqual(len(ct.PixelData), 128 * 128 * 2, "Pixel data not expected length") <NEW_LINE> f.close() <NEW_LINE> <DEDENT> def testReadFileGivenFileLikeObject(self): <NEW_LINE> <INDENT> with open(ct_name, 'rb') as f: <NEW_LINE> <INDENT> file_like = BytesIO(f.read()) <NEW_LINE> <DEDENT> ct = read_file(file_like) <NEW_LINE> got = ct.ImagePositionPatient <NEW_LINE> DS = pydicom.valuerep.DS <NEW_LINE> expected = [DS('-158.135803'), DS('-179.035797'), DS('-75.699997')] <NEW_LINE> self.assertTrue(got == expected, "ImagePosition(Patient) values not as expected") <NEW_LINE> self.assertEqual(len(ct.PixelData), 128 * 128 * 2, "Pixel data not expected length") <NEW_LINE> file_like.close()
Test that can read DICOM files with file-like object rather than filename
62598fa43539df3088ecc167
class PdfError(LookupError): <NEW_LINE> <INDENT> pass
raise this when there's a pdf error for my app
62598fa4097d151d1a2c0eda
class ChannelType(IntEnum): <NEW_LINE> <INDENT> GUILD_TEXT = 0 <NEW_LINE> DM = 1 <NEW_LINE> GUILD_VOICE = 2 <NEW_LINE> GROUP_DM = 3 <NEW_LINE> GUILD_CATEGORY = 4 <NEW_LINE> GUILD_NEWS = 5 <NEW_LINE> GUILD_STORE = 6 <NEW_LINE> GUILD_NEWS_THREAD = 10 <NEW_LINE> GUILD_PUBLIC_THREAD = 11 <NEW_LINE> GUILD_PRIVATE_THREAD = 12 <NEW_LINE> GUILD_STAGE_VOICE = 13
An enumerable object representing the type of channels.
62598fa45fdd1c0f98e5de4b
class _OutputTextBoxWidget(QPlainTextEdit): <NEW_LINE> <INDENT> def __init__(self, master, light_up_plug, letter_group_plug): <NEW_LINE> <INDENT> super().__init__(master) <NEW_LINE> self.setPlaceholderText("Encrypted message will appear here") <NEW_LINE> self.setReadOnly(True) <NEW_LINE> self.setStyleSheet("background-color: white;") <NEW_LINE> self.__light_up_plug = light_up_plug <NEW_LINE> self.__letter_group_plug = letter_group_plug <NEW_LINE> font = QFont("Monospace", 12) <NEW_LINE> self.setFont(font) <NEW_LINE> <DEDENT> def sync_length(self, length): <NEW_LINE> <INDENT> text = letter_groups( self.toPlainText().replace(" ", "")[:length], self.__letter_group_plug() ) <NEW_LINE> self.setPlainText(text) <NEW_LINE> self.moveCursor(QTextCursor.End) <NEW_LINE> try: <NEW_LINE> <INDENT> self.__light_up_plug(self.toPlainText()[-1]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.__light_up_plug("") <NEW_LINE> <DEDENT> <DEDENT> def insert(self, text): <NEW_LINE> <INDENT> text = self.toPlainText().replace(" ", "") + text <NEW_LINE> self.setPlainText(letter_groups(text, self.__letter_group_plug())) <NEW_LINE> self.__light_up_plug(text[-1]) <NEW_LINE> self.moveCursor(QTextCursor.End) <NEW_LINE> <DEDENT> def text(self): <NEW_LINE> <INDENT> return self.toPlainText()
Displays read-only text, allows synchronized scrolling and text selection
62598fa466673b3332c3027c
class PyLintCodeReviewer(CodeReviewer): <NEW_LINE> <INDENT> def __init__(self, out_stream): <NEW_LINE> <INDENT> super().__init__(out_stream) <NEW_LINE> <DEDENT> def _name(self): <NEW_LINE> <INDENT> return "PyLint Code Reviewer" <NEW_LINE> <DEDENT> def _execute_review(self, file_path, out_stream): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with Popen(['pylint.exe', file_path], stdout=PIPE, stderr=PIPE) as proc: <NEW_LINE> <INDENT> for line in io.TextIOWrapper(proc.stdout, encoding="utf-8"): <NEW_LINE> <INDENT> out_stream.write(line) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except (OSError, ValueError, SubprocessError): <NEW_LINE> <INDENT> raise EFailedToReview("error in subprocess")
A code reviewer class, which uses a PyLint tool to review python scripts.
62598fa4cc0a2c111447aec3
class Base: <NEW_LINE> <INDENT> __nb_objects = 0 <NEW_LINE> def __init__(self, id=None): <NEW_LINE> <INDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Base.__nb_objects += 1 <NEW_LINE> self.id = Base.__nb_objects <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def to_json_string(list_dictionaries): <NEW_LINE> <INDENT> if list_dictionaries is None or not len(list_dictionaries): <NEW_LINE> <INDENT> return "[]" <NEW_LINE> <DEDENT> return json.dumps(list_dictionaries) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json_string(json_string): <NEW_LINE> <INDENT> if not isinstance(json_string, str) or len(json_string) == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return json.loads(json_string)
A `Base` class using a private class attribute `__nb_objects` to manage the public instance attribute `id` in all our future classes and to avoid duplicating the same code (by extension, same bugs).
62598fa4090684286d593635
class ReplicapoolupdaterUpdatesCancelRequest(messages.Message): <NEW_LINE> <INDENT> instanceGroupManager = messages.StringField(1, required=True) <NEW_LINE> project = messages.StringField(2, required=True) <NEW_LINE> update = messages.StringField(3, required=True) <NEW_LINE> zone = messages.StringField(4, required=True)
A ReplicapoolupdaterUpdatesCancelRequest object. Fields: instanceGroupManager: Name of the instance group manager for this request. project: Project ID for this request. update: Unique (in the context of a group) handle of an update. zone: Zone for the instance group manager.
62598fa421bff66bcd722b19
class TestRecurring: <NEW_LINE> <INDENT> def test_print_recurring_report( self, runner: CliRunner, repo_e2e: Repository ) -> None: <NEW_LINE> <INDENT> parent = RecurrentTaskFactory.create(description="D", priority=1, area="A") <NEW_LINE> repo_e2e.add(parent) <NEW_LINE> repo_e2e.commit() <NEW_LINE> expected_output = [ r".*", r" +ID +│ +Descr.* +│ +Recur +│ +RecurType +│ +Area +| +Pri +│ +Due.*", r".*", fr" +{parent.id_} +│ +{parent.description} +│ +{parent.recurrence} +│ +" fr"{parent.recurrence_type.value.title()} +│ +{parent.area} +│ +" fr"{parent.priority} +│ +{parent.due.year}.*", r".*", ] <NEW_LINE> result = runner.invoke(cli, ["recurring"]) <NEW_LINE> assert result.exit_code == 0 <NEW_LINE> assert report_prints_expected(result.stdout, expected_output, result.stderr) <NEW_LINE> <DEDENT> def test_print_recurring_report_can_specify_filter( self, runner: CliRunner, insert_parent_tasks_e2e: Tuple[List[RecurrentTask], List[Task]], repo_e2e: Repository, ) -> None: <NEW_LINE> <INDENT> parent = RecurrentTaskFactory.create( description="D", area="special", priority=1 ) <NEW_LINE> repo_e2e.add(parent) <NEW_LINE> repo_e2e.commit() <NEW_LINE> expected_output = [ r".*", r" +ID +│ +Descri.* +│ +Recur +│ +RecurType +│ +Area +| +Pri +│ +Due.*", r".*", fr" +{parent.id_} +│ +{parent.description} +│ +{parent.recurrence} +│ +" fr"{parent.recurrence_type.value.title()} +│ +{parent.area} +│ +" fr"{parent.priority} +│ +{parent.due.year}.*", r".*", ] <NEW_LINE> result = runner.invoke(cli, ["recurring", "area:special"]) <NEW_LINE> assert result.exit_code == 0 <NEW_LINE> assert report_prints_expected(result.stdout, expected_output, result.stderr)
Test the implementation of the recurring report. It's an alias to `report open`, so we only need to test that it works as expected by default and that it accepts a task filter.
62598fa445492302aabfc384
class UrlDispatcherNonRootTests(AppTestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTree(cls): <NEW_LINE> <INDENT> cls.root = SimpleTextPage.objects.create( title="Text1", slug="sibling1", status=SimpleTextPage.PUBLISHED, author=cls.user, contents="TEST_CONTENTS", ) <NEW_LINE> <DEDENT> @override_settings(ROOT_URLCONF="fluent_pages.tests.testapp.urls_nonroot") <NEW_LINE> def test_urlconf_root(self): <NEW_LINE> <INDENT> sibling1 = Page.objects.get_for_path( "/sibling1/" ) <NEW_LINE> self.assert200("/pages/sibling1/") <NEW_LINE> self.assert404("/sibling1/") <NEW_LINE> self.assertEqual( sibling1.get_absolute_url(), "/pages/sibling1/", "UrlNode.get_absolute_url() should other URLConf root into account (got: {}).".format( sibling1.get_absolute_url() ), ) <NEW_LINE> sibling1.save() <NEW_LINE> self.assertEqual( sibling1._cached_url, "/sibling1/", "UrlNode keeps paths relative to the include()", ) <NEW_LINE> <DEDENT> @override_settings(ROOT_URLCONF="fluent_pages.tests.testapp.urls_nonroot") <NEW_LINE> def test_admin_redirect(self): <NEW_LINE> <INDENT> admin_url = "http://testserver/admin/fluent_pages/page/{pk}/change/".format( pk=self.root.pk ) <NEW_LINE> self.assertRedirects( self.client.get("/pages/sibling1/@admin"), admin_url, status_code=302, target_status_code=302, ) <NEW_LINE> self.assertRedirects( self.client.get("/pages/non-existent/@admin"), "http://testserver/pages/non-existent/", status_code=302, target_status_code=404, )
Tests for URL resolving with a non-root URL include.
62598fa410dbd63aa1c70a64
class MixtureNQExpr(nql.NeuralQueryExpression): <NEW_LINE> <INDENT> def _follow_relation_set(self, rel_expr, inverted): <NEW_LINE> <INDENT> if not self.context.is_group(rel_expr.type_name): <NEW_LINE> <INDENT> raise nql.RelationNameError(rel_expr.type_name, 'Expression type is not a relation group.') <NEW_LINE> <DEDENT> scope_qualifier = '' if inverted else '_inverse' <NEW_LINE> scope = 'follow_group_%s_%s' % (rel_expr.type_name, scope_qualifier) <NEW_LINE> with tf.name_scope(scope): <NEW_LINE> <INDENT> mixture = None <NEW_LINE> for r_id in range(self.context.get_max_id(rel_expr.type_name)): <NEW_LINE> <INDENT> r_name = self.context.get_entity_name(r_id, rel_expr.type_name) <NEW_LINE> addend = self._follow_named_rel(r_name, inverted) * rel_expr.tf[:, r_id] <NEW_LINE> if mixture is None: <NEW_LINE> <INDENT> mixture = addend <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mixture = mixture + addend <NEW_LINE> <DEDENT> <DEDENT> g = self.context.get_group(rel_expr.type_name) <NEW_LINE> output_type = self.context.get_range(g.object_rel) <NEW_LINE> return self.context.as_nql(mixture, output_type)
Implements x.follow(r) as sum_i r[i] x.dot(M_i). Here r[i] is scalar weight of relation i in vector r, M_i is sparse matrix for relation i, and x.dot(M_i) is vector-matrix product. This is the 'late mixing' method.
62598fa4fff4ab517ebcd698
class DeviceResource(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return map(Device.to_json, Device.query.all())
The device resource handles API requests relating to robomussel data. Will output all device information.
62598fa4627d3e7fe0e06d60
class Solution: <NEW_LINE> <INDENT> def findFirstBadVersion(self, n): <NEW_LINE> <INDENT> pass
@param n: An integer @return: An integer which is the first bad version.
62598fa44a966d76dd5eed96
class EnCat(PetCat): <NEW_LINE> <INDENT> def eat(self): <NEW_LINE> <INDENT> print("英短啥都吃")
英国短毛猫
62598fa4851cf427c66b817d