code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class PassInfo(JsonDeserializable):
    """Information about the password for the qiwi.com site.

    Attributes:
        raw: The raw parsed JSON object.
        last_pass_change: Date/time of the last password change, or None.
        next_pass_change: Date/time of the next (scheduled) password
            change, or None.
        password_used: Whether a password is used (i.e. the user actually
            logs in on the site).
    """

    @classmethod
    def de_json(cls, json_type):
        """Build a PassInfo from a JSON payload.

        Dates are decoded only when a password is in use and the
        corresponding fields are truthy.  (Fix: removed the dead
        re-assignment of both dates to None inside the ``if`` block —
        they are already None at that point.)
        """
        obj = cls.check_json(json_type)
        last_pass_change = None
        next_pass_change = None
        password_used = obj['passwordUsed']
        if password_used:
            # decode_date is inherited from JsonDeserializable.
            if obj['lastPassChange']:
                last_pass_change = cls.decode_date(obj['lastPassChange'])
            if obj['nextPassChange']:
                next_pass_change = cls.decode_date(obj['nextPassChange'])
        return cls(last_pass_change, next_pass_change, password_used, obj)

    def __init__(self, last_pass_change, next_pass_change, password_used, obj):
        self.raw = obj
        self.last_pass_change = last_pass_change
        self.next_pass_change = next_pass_change
        self.password_used = password_used
Данные о пароле к сайту qiwi.com Attributes ---------- last_pass_change : str Дата/время последнего изменения пароля сайта qiwi.com next_pass_change : str Дата/время следующего (планового) изменения пароля сайта qiwi.com password_used : bool Логический признак использования пароля (фактически означает, что пользователь заходит на сайт)
62598fc057b8e32f5250823d
class NavServer(models.Model):
    """Holds the host and port of an available Nav server."""

    # Human-readable server name; also used as the string representation.
    name = models.CharField(max_length=20)
    # Hostname or IP address of the Nav server.
    host = models.CharField(max_length=20)
    # TCP port; defaults to 0 — presumably meaning "not configured" (TODO confirm).
    port = models.IntegerField(default=0)

    def __str__(self):
        return self.name
Holds the IP and Port of the available Nav Servers
62598fc04f88993c371f0629
class processer(ProcesserBase):
    """Normalize the main fields of a crawled car item.

    Each key field is validated via Process66Main; when a field is missing
    ("lose"), the raw item is archived through ``dr.insert_data`` and the
    item is dropped (``process`` returns None).
    """

    seq = "p66"

    def __init__(self):
        ProcesserBase.__init__(self)

    def _drop(self, data, what):
        """Archive a rejected item and log the field that was missing."""
        dr.insert_data(data)
        self.logger.debug("(%s) Item ignore, lose %s." % (data['domain'], what))

    def process(self, data):
        """Validate the item's key fields; return the item, or None if dropped.

        The checks run in the same order as before and stay lazy (each one
        is only evaluated if all earlier checks passed), so field access
        order and side effects are unchanged.
        """
        checks = (
            (lambda: Process66Main._is_car_title_lose(data['car_title']),
             "car_title"),
            (lambda: Process66Main._is_car_brand_car_series_lose(
                data['car_brand'], data['car_series']),
             "car_series"),
            (lambda: Process66Main._is_car_price_lose(data['car_price']),
             "car_price"),
            (lambda: Process66Main._is_contact_phone_contact_mobile_contact_mail_contact_qq_lose(
                data['contact_phone'], data['contact_mobile'],
                data['contact_mail'], data['contact_qq']),
             "contact"),
            (lambda: Process66Main._is_source_province_source_zone_lose(
                data['source_province'], data['source_zone']),
             "source"),
        )
        for check, what in checks:
            if check():
                self._drop(data, what)
                return None
        # Throttle downstream processing, as in the original implementation.
        time.sleep(0.08)
        return data
主要字段的规整化、涉及到分词提取
62598fc063d6d428bbee29f1
class itkMaskImageFilterICVF33IUL3ICVF33(itkMaskImageFilterICVF33IUL3ICVF33_Superclass):
    """Proxy of the C++ itkMaskImageFilterICVF33IUL3ICVF33 class.

    SWIG-generated wrapper: every method forwards to the matching free
    function in the ``_itkMaskImageFilterPython`` extension module.
    """

    # SWIG ownership flag for the underlying C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created via New()/__New_orig__(), never directly.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Concept checks exported from the C++ side.
    MaskEqualityComparableCheck = _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_MaskEqualityComparableCheck
    InputConvertibleToOutputCheck = _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_InputConvertibleToOutputCheck

    def __New_orig__():
        """Create a new instance through the wrapped C++ factory."""
        return _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)

    def SetOutsideValue(self, *args):
        """Forward SetOutsideValue to the wrapped C++ filter."""
        return _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_SetOutsideValue(self, *args)

    def GetOutsideValue(self):
        """Forward GetOutsideValue to the wrapped C++ filter."""
        return _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_GetOutsideValue(self)

    __swig_destroy__ = _itkMaskImageFilterPython.delete_itkMaskImageFilterICVF33IUL3ICVF33

    def cast(*args):
        """Cast a compatible object to this wrapper type."""
        return _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_cast(*args)
    cast = staticmethod(cast)

    def GetPointer(self):
        """Return the pointer wrapper for the underlying C++ object."""
        return _itkMaskImageFilterPython.itkMaskImageFilterICVF33IUL3ICVF33_GetPointer(self)

    def New(*args, **kargs):
        """Instantiate the filter and initialize it with itkTemplate-style
        positional/keyword arguments."""
        obj = itkMaskImageFilterICVF33IUL3ICVF33.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
Proxy of C++ itkMaskImageFilterICVF33IUL3ICVF33 class
62598fc02c8b7c6e89bd3a01
class UpSampling1D(ZooKerasLayer):
    """UpSampling layer for 1D (3D-shaped) inputs.

    Repeats each temporal step ``length`` times along the time axis.
    When used as the first layer of a model, provide ``input_shape``
    (a shape tuple, excluding the batch dimension).

    # Arguments
    length: Int. UpSampling factor. Default is 2.
    input_shape: A shape tuple, not including batch.
    """
    def __init__(self, length=2, input_shape=None, **kwargs):
        # NOTE(review): the leading None positional argument appears to be a
        # placeholder expected by ZooKerasLayer — confirm against the base
        # class signature.
        super(UpSampling1D, self).__init__(None, length, list(input_shape) if input_shape else None, **kwargs)
UpSampling layer for 1D inputs. Repeats each temporal step 'length' times along the time axis. The input of this layer should be 3D. When you use this layer as the first layer of a model, you need to provide the argument input_shape (a shape tuple, does not include the batch dimension). # Arguments length: Int. UpSampling factor. Default is 2. input_shape: A shape tuple, not including batch. name: String to set the name of the layer. If not specified, its name will by default to be a generated string. >>> upsampling1d = UpSampling1D(length=3, input_shape=(3, 12)) creating: createZooKerasUpSampling1D
62598fc0d486a94d0ba2c210
class LatchingBehaviour(ImmediateBehaviour):
    """Mode-button behaviour that latches on a quick press.

    If the button is released after being held for some time, jump back
    to the previous mode; if it is quickly pressed, the selected mode
    stays active.
    """

    def release_immediate(self, component, mode):
        # Quick press: keep the selected mode, discard the unselected ones.
        component.pop_unselected_modes()

    def release_delayed(self, component, mode):
        # Held press: return to the previously selected mode.
        pop_last_mode(component, mode)
Behaviour that will jump back to the previous mode when the button is released after having been held for some time. If the button is quickly pressed, the selected mode will stay.
62598fc0d8ef3951e32c7f7d
class PasswordStoreError(Exception):
    """Error in the execution of password store."""
Error in the execution of password store.
62598fc07047854f4633f614
class Checkpoint(Callback):
    """Save the model during training if the given metric improved.

    By default this cooperates with the validation scoring callback, which
    records a ``valid_loss_best`` flag in the history; that flag decides
    whether the current epoch is save-worthy.

    Parameters
    ----------
    target : file-like object, str
      Where to save the parameters. String targets support format
      specifiers with ``net``, ``last_epoch`` and ``last_batch``.
    monitor : str, function, None
      History key, callable taking the net, or None (save every epoch).
    """

    def __init__(
            self,
            target='model.pt',
            monitor='valid_loss_best',
    ):
        self.monitor = monitor
        self.target = target

    def _save_this_epoch(self, net):
        """Evaluate the monitor for the most recent epoch."""
        if self.monitor is None:
            return True
        if callable(self.monitor):
            return self.monitor(net)
        try:
            return net.history[-1, self.monitor]
        except KeyError as e:
            raise SkorchException(
                "Monitor value '{}' cannot be found in history. "
                "Make sure you have validation data if you use "
                "validation scores for checkpointing.".format(e.args[0]))

    def _expand_target(self, net):
        """Apply format specifiers when the target is a string."""
        if not isinstance(self.target, str):
            return self.target
        return self.target.format(
            net=net,
            last_epoch=net.history[-1],
            last_batch=net.history[-1, 'batches', -1],
        )

    def on_epoch_end(self, net, **kwargs):
        if not self._save_this_epoch(net):
            return
        target = self._expand_target(net)
        if net.verbose > 0:
            print("Checkpoint! Saving model to {}.".format(target))
        net.save_params(target)
Save the model during training if the given metric improved. This callback works by default in conjunction with the validation scoring callback since it creates a ``valid_loss_best`` value in the history which the callback uses to determine if this epoch is save-worthy. You can also specify your own metric to monitor or supply a callback that dynamically evaluates whether the model should be saved in this epoch. Example: >>> net = MyNet(callbacks=[Checkpoint()]) >>> net.fit(X, y) Example using a custom monitor where only models are saved in epochs where the validation *and* the train loss is best: >>> monitor = lambda net: all(net.history[-1, ( ... 'train_loss_best', 'valid_loss_best')]) >>> net = MyNet(callbacks=[Checkpoint(monitor=monitor)]) >>> net.fit(X, y) Parameters ---------- target : file-like object, str File path to the file or file-like object. See NeuralNet.save_params for details what this value may be. If the value is a string you can also use format specifiers to, for example, indicate the current epoch. Accessible format values are ``net``, ``last_epoch`` and ``last_batch``. Example to include last epoch number in file name: >>> cb = Checkpoint(target="target_{last_epoch[epoch]}.pt") monitor : str, function, None Value of the history to monitor or callback that determines whether this epoch should to a checkpoint. The callback takes the network instance as parameter. In case ``monitor`` is set to ``None``, the callback will save the network at every epoch. **Note:** If you supply a lambda expression as monitor, you cannot pickle the wrapper anymore as lambdas cannot be pickled. You can mitigate this problem by using importable functions instead.
62598fc05fdd1c0f98e5e1d3
class TimeDeltaParameter(Parameter):
    """Parameter mapping strings to :class:`datetime.timedelta`.

    Accepts either the simple form ``n {w|d|h|m|s}`` (full or abbreviated
    unit names, longest-to-shortest order) or ISO 8601 durations
    (``PnDTnHnMnS`` with optional fields, or ``PnW``).

    Fixes: regex patterns are now raw strings (``"\\d"`` in a plain string
    is an invalid escape sequence and warns on modern Python), and
    ``six.iteritems`` is replaced by ``dict.items`` (identical behavior).
    """

    def _apply_regex(self, regex, input):
        """Match *input* against *regex* and build a timedelta from the
        named groups; return None when nothing non-zero matched."""
        from datetime import timedelta
        import re
        re_match = re.match(regex, input)
        if re_match:
            kwargs = {}
            has_val = False
            for k, v in re_match.groupdict(default="0").items():
                val = int(v)
                has_val = has_val or val != 0
                kwargs[k] = val
            if has_val:
                return timedelta(**kwargs)

    def _parseIso8601(self, input):
        def field(key):
            # e.g. "(?P<weeks>\d+)W"
            return r"(?P<%s>\d+)%s" % (key, key[0].upper())

        def optional_field(key):
            return "(%s)?" % field(key)
        # ISO 8601: PnW, or PnDTnHnMnS with every field optional.
        regex = "P(%s|%s(T%s)?)" % (field("weeks"), optional_field("days"), "".join([optional_field(key) for key in ["hours", "minutes", "seconds"]]))
        return self._apply_regex(regex, input)

    def _parseSimple(self, input):
        keys = ["weeks", "days", "hours", "minutes", "seconds"]
        # Matches e.g. "1 w", "2 weeks", "3d" — unit name may be the first
        # letter, the full singular, or the plural form.
        regex = "".join([r"((?P<%s>\d+) ?%s(%s)?(%s)? ?)?" % (k, k[0], k[1:-1], k[-1]) for k in keys])
        return self._apply_regex(regex, input)

    def parse(self, input):
        """Parse *input* as a timedelta; ISO 8601 form is tried first.

        Raises:
            ParameterException: if neither format matches.
        """
        result = self._parseIso8601(input)
        if not result:
            result = self._parseSimple(input)
        if result:
            return result
        else:
            raise ParameterException("Invalid time delta - could not parse %s" % input)
Class that maps to timedelta using strings in any of the following forms: * ``n {w[eek[s]]|d[ay[s]]|h[our[s]]|m[inute[s]]|s[econd[s]]}`` (e.g. "1 week 2 days" or "1 h") Note: multiple arguments must be supplied in longest to shortest unit order * ISO 8601 duration ``PnDTnHnMnS`` (each field optional, years and months not supported) * ISO 8601 duration ``PnW`` See https://en.wikipedia.org/wiki/ISO_8601#Durations
62598fc0f9cc0f698b1c53f0
class Error(Exception):
    """Raised when a wrong parameter is supplied."""
wrong parameter
62598fc0f548e778e596b7e1
class CSDM_Leaf_Dynamics(SimulationObject):
    """Leaf dynamics according to the Canopy Structure Dynamic Model (CSDM).

    LAI is computed directly from a double-logistic curve of the day
    number since the start of the model (the original CSDM drives the
    curve with a temperature sum instead).

    Reference: Koetz et al. 2005, Remote Sensing of Environment 95(1),
    115-124. http://dx.doi.org/10.1016/j.rse.2004.11.017
    """

    class Parameters(ParamTemplate):
        # Upper and lower LAI bounds of the CSDM curve.
        CSDM_MAX = Float()
        CSDM_MIN = Float()
        # Rate coefficients: CSDM_A drives the senescence exponential,
        # CSDM_B the growth logistic (see _CSDM below).
        CSDM_A = Float()
        CSDM_B = Float()
        # Curve shift points: T1 for the growth term, T2 for senescence.
        CSDM_T1 = Float()
        CSDM_T2 = Float()

    class StateVariable(StatesTemplate):
        # Current leaf area index.
        LAI = Float()
        # Day number since model start.
        DAYNR = Int()
        # Maximum LAI reached so far.
        LAIMAX = Float()

    def _CSDM(self, daynr):
        """Return the LAI for *daynr* from the CSDM double-logistic curve,
        clamped below at CSDM_MIN (with a warning)."""
        p = self.params
        LAI_growth = 1./(1. + exp(-p.CSDM_B*(daynr - p.CSDM_T1)))**2
        LAI_senescence = -exp(p.CSDM_A*(daynr - p.CSDM_T2))
        LAI = p.CSDM_MIN + p.CSDM_MAX*(LAI_growth + LAI_senescence)
        if LAI < p.CSDM_MIN:
            msg = ("LAI of CSDM model smaller then lower LAI limit "+
                   "(CSDM_MIN)! Adjusting LAI to CSDM_MIN.")
            self.logger.warn(msg)
            LAI = max(p.CSDM_MIN, LAI)
        return LAI

    def initialize(self, day, kiosk, parvalues):
        """Set up the parameters and the initial state at day number 1."""
        self.params = self.Parameters(parvalues)
        LAI = self._CSDM(1)
        self.states = self.StateVariable(kiosk, LAI=LAI, DAYNR=1, LAIMAX=self.params.CSDM_MIN, publish="LAI")

    @prepare_rates
    def calc_rates(self, day, drv):
        # No rate variables: the CSDM curve is evaluated directly in
        # integrate().
        pass

    @prepare_states
    def integrate(self, day, delt=1.0):
        """Advance one day: recompute LAI, track LAIMAX, and finish the
        crop once day number exceeds the senescence point CSDM_T2."""
        self.states.DAYNR += 1
        self.states.LAI = self._CSDM(self.states.DAYNR)
        if self.states.LAI > self.states.LAIMAX:
            self.states.LAIMAX = self.states.LAI
        if self.states.DAYNR > self.params.CSDM_T2:
            self._send_signal(signal=signals.crop_finish, day=day, finish_type="Canopy died according to CSDM leaf model.", crop_delete=True)
Leaf dynamics according to the Canopy Structure Dynamic Model. The only difference is that in the real CSDM the temperature sum is the driving variable, while in this case it is simply the day number since the start of the model. Reference: Koetz et al. 2005. Use of coupled canopy structure dynamic and radiative transfer models to estimate biophysical canopy characteristics. Remote Sensing of Environment. Volume 95, Issue 1, 15 March 2005, Pages 115-124. http://dx.doi.org/10.1016/j.rse.2004.11.017
62598fc03317a56b869be670
class DescribeRelatedIngressesRequest(AbstractModel):
    """DescribeRelatedIngresses request parameter structure."""

    def __init__(self):
        # Environment ID.
        self.EnvironmentId = None
        # Cluster namespace.
        self.ClusterNamespace = None
        # Source channel of the request.
        self.SourceChannel = None
        # Application ID.
        self.ApplicationId = None

    def _deserialize(self, params):
        """Populate the attributes from *params*; warn about unknown keys.

        Fixes: the local was misspelled ``memeber_set`` and the warning
        said "fileds"; the remove-loop is replaced by an equivalent set
        difference.
        """
        self.EnvironmentId = params.get("EnvironmentId")
        self.ClusterNamespace = params.get("ClusterNamespace")
        self.SourceChannel = params.get("SourceChannel")
        self.ApplicationId = params.get("ApplicationId")
        # Keys present in the payload but not modeled as attributes.
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
DescribeRelatedIngresses请求参数结构体
62598fc07c178a314d78d6e0
class VSarsa(ParallelAgent):
    """Vanilla SARSA: on-policy TD control.

    Args:
        q (QNetwork): An approximation of the Q-function (with a target
            network accessible as ``q.target``).
        policy (GreedyPolicy): A policy derived from the Q-function.
        discount_factor (float): Discount factor for future rewards.
    """

    def __init__(self, q, policy, discount_factor=0.99):
        self.q = q
        self.policy = policy
        self.discount_factor = discount_factor
        # Previous transition, consumed by the SARSA update on the next act().
        self._state = None
        self._action = None

    def act(self, state):
        """Select an action, train on the previous transition, and remember
        the new (state, action) pair."""
        action = self.policy.no_grad(state)
        self._train(state.reward, state, action)
        self._state = state
        self._action = action
        return action

    def eval(self, state):
        """Action selection in evaluation mode (no training)."""
        return self.policy.eval(state)

    def _train(self, reward, next_state, next_action):
        """One SARSA update: move Q(s,a) toward r + gamma * Q_target(s',a')."""
        # NOTE(review): truthiness test — this assumes a valid stored state
        # is always truthy; `is not None` would be safer. Confirm the State
        # type's boolean semantics.
        if self._state:
            value = self.q(self._state, self._action)
            target = reward + self.discount_factor * self.q.target(next_state, next_action)
            loss = mse_loss(value, target)
            self.q.reinforce(loss)
Vanilla SARSA (VSarsa). SARSA (State-Action-Reward-State-Action) is an on-policy alternative to Q-learning. Unlike Q-learning, SARSA attempts to learn the Q-function for the current policy rather than the optimal policy. This approach is more stable but may not result in the optimal policy. However, this problem can be mitigated by decaying the exploration rate over time. Args: q (QNetwork): An Approximation of the Q function. policy (GreedyPolicy): A policy derived from the Q-function. discount_factor (float): Discount factor for future rewards.
62598fc05166f23b2e243621
class RFLight(Light):
    """A RF 433 MHz light service.

    Drives lights plugged into an RF-controllable power socket through the
    `rpi-rf <https://github.com/milaq/rpi-rf>`_ module.
    """

    def __init__(self, location: str, uid: str, room: str, gpio: int, code_on: int, code_off: int) -> None:
        super(RFLight, self).__init__(location, uid, room)
        self._gpio = gpio
        self._code_on = code_on
        self._code_off = code_off

    def status_setter(self, status: dict):
        """Transmit the on/off code matching ``status['state']``."""
        logging.debug('Enable TX for device \'%s\'...' % self.uid)
        transmitter = rpi_rf.RFDevice(self._gpio)
        if not transmitter.enable_tx():
            logging.error('Failed to enable TX for \'%s\'' % self.uid)
        code = self._code_on if status['state'] else self._code_off
        transmitter.tx_code(code)
        transmitter.cleanup()
        del transmitter
        super(RFLight, self).status_setter(status)
A RF 433 MHz light service. Provide a light service for lights plugged into a RF 433 MHz controllable power socket. This class uses the `rpi-rf <https://github.com/milaq/rpi-rf>`_ module to control a RF 433 MHz transmitter.
62598fc063d6d428bbee29f3
class CDU_ConvRepIndexing(object):
    """Manage the inference of problem dimensions and the roles of
    :class:`numpy.ndarray` indices for convolutional representations in
    convolutional dictionary update problems.

    The working-array axis layout is
    (spatial axes ..., channel, instance, filter).
    """

    def __init__(self, dsz, S, dimK=None, dimN=2):
        """Infer dimensions from the dictionary size *dsz* and signal *S*.

        Parameters
        ----------
        dsz : tuple
          Dictionary size specification.
        S : array_like
          Signal array.
        dimK : int or None
          Number of signal-instance axes; inferred from ``S.ndim`` if None.
        dimN : int
          Number of spatial/temporal axes (default 2).

        Raises
        ------
        ValueError
          If a multi-channel dictionary is paired with a signal whose
          channel count does not match.
        """
        ds = DictionarySize(dsz, dimN)
        self.dimCd = ds.ndim - dimN - 1  # number of dict channel axes (0 or 1)
        self.Cd = ds.nchn                # number of dictionary channels
        self.M = ds.nflt                 # number of filters
        self.dsz = dsz
        if dimK is None:
            # Infer dimC/dimK from how many axes S has beyond the spatial ones.
            rdim = S.ndim - dimN
            if rdim == 0:
                (dimC, dimK) = (0, 0)
            elif rdim == 1:
                # One extra axis: it is a channel axis iff the dictionary
                # has one, otherwise an instance axis.
                dimC = self.dimCd
                dimK = S.ndim - dimN - dimC
            else:
                (dimC, dimK) = (1, 1)
        else:
            dimC = S.ndim - dimN - dimK
        self.dimN = dimN
        self.dimC = dimC
        self.dimK = dimK
        # Number of signal channels.
        if self.dimC == 1:
            self.C = S.shape[dimN]
        else:
            self.C = 1
        self.Cx = self.C - self.Cd + 1
        if self.Cd > 1 and self.C != self.Cd:
            raise ValueError("Multi-channel dictionary with signal with " "mismatched number of channels (Cd=%d, C=%d)" % (self.Cd, self.C))
        # Number of signal instances.
        if self.dimK == 1:
            self.K = S.shape[self.dimN + self.dimC]
        else:
            self.K = 1
        # Spatial shape and total number of spatial samples.
        self.Nv = S.shape[0:dimN]
        self.N = np.prod(np.array(self.Nv))
        # Axis indices for the (spatial..., C, K, M) working layout.
        self.axisN = tuple(range(0, dimN))
        self.axisC = dimN
        self.axisK = dimN + 1
        self.axisM = dimN + 2
        # Reshape targets for the dictionary, signal, and coefficient arrays.
        self.shpD = self.Nv + (self.Cd,) + (1,) + (self.M,)
        self.shpS = self.Nv + (self.C,) + (self.K,) + (1,)
        self.shpX = self.Nv + (self.Cx,) + (self.K,) + (self.M,)

    def __str__(self):
        """Pretty-print all inferred attributes."""
        return pprint.pformat(vars(self))
Manage the inference of problem dimensions and the roles of :class:`numpy.ndarray` indices for convolutional representations in convolutional dictionary update problems (e.g. :class:`.ConvCnstrMODBase` and derived classes).
62598fc0aad79263cf42ea16
class Ssh(object):
    """SSH utilities.

    - Execute commands on a remote host.
    - Copy files from and to a remote host over SFTP.
    """

    def __init__(self, host, port=None, user=None, password=None, private_key=None):
        """Store connection settings; no connection is opened yet.

        Raises:
            TypeError: if *port* cannot be converted to an integer.
        """
        ssh_port = 22
        if port is not None:
            try:
                ssh_port = int(port)
            # Fix: narrowed the original bare `except:` to the two
            # exceptions int() can actually raise.
            except (TypeError, ValueError):
                raise TypeError("ssh port should be an integer")
        self.log = logging.getLogger("niav")
        self.connection = None
        self.host = host
        self.port = ssh_port
        self.user = "root" if user is None else user
        self.password = password
        self.private_key = private_key

    def connect(self):
        """Open the SSH connection if it is not already open."""
        if self.connection is None:
            self.log.debug("opening connection to '%s'" % self.host)
            self.connection = paramiko.SSHClient()
            self.connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.connection.connect(self.host, port=self.port, username=self.user, password=self.password, key_filename=self.private_key)
        self.log.debug("connected to '%s'" % self.host)

    def execute(self, cmd, encoding="utf-8"):
        """Run *cmd* remotely; return (stdout, stderr, exit_code)."""
        self.connect()
        self.log.debug("executing command '%s'" % cmd.encode(encoding))
        c_stdin, c_stdout, c_stderr = self.connection.exec_command(cmd)
        exit_code = c_stdout.channel.recv_exit_status()
        stdout = c_stdout.read().decode(encoding)
        stderr = c_stderr.read().decode(encoding)
        self.log.debug("exit code: %s" % exit_code)
        self.log.debug("stdout: %s" % stdout)
        self.log.debug("stderr: %s" % stderr)
        return stdout, stderr, exit_code

    def disconnect(self):
        """Close the connection if one is open."""
        if self.connection:
            self.connection.close()

    def sftp_get(self, remote_path, local_path):
        """Download *remote_path* from the host to *local_path*."""
        self.connect()
        # Fix: the debug message had remote_path and local_path swapped.
        self.log.debug("copying file '%s:%s' to '%s'" % (self.host, remote_path, local_path))
        sftp = self.connection.open_sftp()
        sftp.get(remote_path, local_path)
        sftp.close()

    def sftp_put(self, local_path, remote_path):
        """Upload *local_path* to *remote_path* on the host."""
        self.connect()
        self.log.debug("copying file '%s' to '%s:%s'" % (local_path, self.host, remote_path))
        sftp = self.connection.open_sftp()
        sftp.put(local_path, remote_path)
        sftp.close()
SSH utilities - Execute commands on remote host. - Copy files from and to remote host.
62598fc07cff6e4e811b5c65
class jsonparser(object):
    """Parse an SFC (service function chain) request JSON file."""

    def __init__(self, filename):
        """Load and parse ``<config.abs_dir>/json/sfc/<filename>``."""
        file_path = config.abs_dir
        # Fix: `with` guarantees the handle is closed even if json.load
        # raises (the original leaked the handle on a parse error).
        with open(file_path + "json/sfc/" + filename) as f:
            self.req = json.load(f)

    def get_vnf_list(self):
        """Return the names of all VNFs in the request."""
        return [vnf["name"] for vnf in self.req["VNF"]]

    def get_constrain_list(self):
        """Return ordering constraints as [former, later] pairs."""
        return [[cons["former"], cons["later"]] for cons in self.req["constrain"]]

    def get_sfc_name(self):
        """Return the SFC name."""
        return self.req["name"]

    def get_sfc_operation(self):
        """Return the requested operation."""
        return self.req["operation"]

    def get_vnf_by_name(self, name):
        """Return the VNF entry with the given name, or None if absent."""
        for vnf in self.req["VNF"]:
            if vnf["name"] == name:
                return vnf

    def get_sfc_load(self):
        """Return the total cpu/memory/disk demand over all VNF flavors."""
        total = {"cpu": 0, "memory": 0, "disk": 0}
        for vnf in self.req["VNF"]:
            for resource in total:
                total[resource] += vnf["flavor"][resource]
        return total

    def get_qos(self):
        """Return the QoS requirements."""
        return self.req["QoS"]

    def get_sfc_objective(self):
        """Return the optimization objective."""
        return self.req["objective"]
The class is used to parse json file.
62598fc099fddb7c1ca62f0d
class Dojo:
    """Main module responsible for training models from a blueprint CSV."""

    arg_parser = configargparse.get_argument_parser()

    def __init__(self):
        known_args = self.arg_parser.parse_known_args()[0]
        self.verbose = known_args.verbose

    def train(self, blueprint=None, automatic_search=True, models=None, minimum_score=0.7):
        """Train one model per trading pair found in the blueprint CSV."""
        if not blueprint:
            print("Required blueprint csv. argument value is missing, nothing to do here.")
            return
        print('Loading dataset')
        for pair, frame in self.load_blueprint(blueprint):
            self.train_pair(pair, frame, automatic_search, models, minimum_score)
            print(pair)

    @staticmethod
    def train_pair(pair, df, automatic_search, models, minimum_score):
        """Placeholder per-pair training step; returns (pair, model)."""
        print('Training model for pair:', pair)
        return pair, None

    @staticmethod
    def load_blueprint(blueprint_file):
        """Read the blueprint CSV and group its rows by trading pair."""
        print('Loading dataset')
        frame = pd.read_csv(blueprint_file)
        grouped = frame.groupby(['pair'])
        pair_names = list(grouped.groups.keys())
        print('Training total of: ', len(pair_names), 'pairs and ', frame.shape[0], 'records')
        return grouped
Main module responsible for training
62598fc03346ee7daa337769
class AlertResource(Resource):
    """Alert Resource: GET/PUT/DELETE a single alert by its uid."""

    # Applied to every HTTP method handler (auth + per-alert access check).
    method_decorators = [user_alert_access, login_required]

    def get(self, alert_id, company_id=None):
        """Return the serialized, non-deleted alert with the given uid."""
        alert = Alert.query.filter(Alert.uid == alert_id).filter(Alert.deleted == False).first()
        data, errors = AlertSchema().dump(alert)
        return data

    def put(self, alert_id, company_id=None):
        """Update the alert from the JSON request body.

        Returns 422 on schema validation errors, 500 on database errors.
        """
        data, errors = AlertSchema().load(request.get_json())
        _LOGGER.debug(data)
        if errors:
            return errors, 422
        try:
            # NOTE(review): Query.update returns the number of matched rows,
            # not an Alert instance — the name 'alert' is misleading here.
            alert = Alert.query.filter(Alert.uid == alert_id).update(data)
            db.session.commit()
            return {'status': alert}
        except Exception as e:
            _LOGGER.error(e)
            return {'message': 'Internal Server Error', 'code': 500}, 500

    def delete(self, alert_id, company_id=None):
        """Soft-delete the alert (sets deleted=True) and record an event log."""
        try:
            alert = Alert.query.filter(Alert.uid == alert_id).filter(Alert.deleted == False).first()
            alert.deleted = True
            db.session.add(alert)
            db.session.commit()
            add_event_log(company_id=company_id, alert_id=alert.uid, log='Alert deleted: {}'.format(alert.name))
            return {}, 204
        except Exception as e:
            db.session.rollback()
            _LOGGER.error(e)
            return {"error": "Server Error"}, 500
Alert Resource
62598fc0fff4ab517ebcda27
class CASBackend(object):
    """CAS authentication backend.

    Verifies a CAS ticket and maps it onto a Django ``User``, creating
    the account on first login.
    """

    # Django auth-backend capability flags.
    supports_object_permissions = False
    supports_anonymous_user = False
    supports_inactive_user = False

    def authenticate(self, ticket, service):
        """Verify *ticket* against *service*; return the User or None."""
        username = _verify(ticket, service)
        if not username:
            return None
        try:
            user = User.objects.get(username__iexact=username)
        except User.DoesNotExist:
            # First login: create the account with an empty email.
            user = User.objects.create_user(username, '')
            # NOTE(review): removing 'psu_gcal' from a freshly created user
            # looks like a no-op unless create_user grants it by default —
            # confirm the intent.
            use_site = Permission.objects.get( codename='psu_gcal' )
            user.user_permissions.remove( use_site )
            user.save()
        return user

    def get_user(self, user_id):
        """Return the User with primary key *user_id*, or None."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
CAS authentication backend
62598fc07d847024c075c5fe
class GithubInlineWriter(GithubWriter):
    """Writes inline review comments for pylint errors found in the
    analyzed files, skipping messages that were already posted."""

    def handle_pylint_error(self, path, line, code, message):
        existing = self.github.get_review_comments(code, path)
        already_posted = any(message == comment.body for comment in existing)
        if not already_posted:
            self.github.create_review_comment(code, path, message)

    def flush(self):
        # Comments are posted immediately; nothing is buffered.
        pass
Writes inline comments with the pylint errors found in the analyzed files
62598fc055399d3f05626758
class CustomUser(AbstractUser, BaseModel, UserManager):
    """User model keyed by phone number instead of username.

    NOTE(review): this class mixes the model (AbstractUser) with manager
    behaviour (UserManager) — the create_user/create_superuser methods
    below follow the manager protocol (they use ``self.model`` and
    ``self._db``). Confirm this hybrid is intentional.
    """

    # Username is removed; the phone number is the unique identifier.
    username = None
    phone = models.CharField(_('phone number'), max_length=10, unique=True)
    USERNAME_FIELD = 'phone'
    REQUIRED_FIELDS = []

    def _create_user(self, phone, password=None, **extra_fields):
        """Create and persist a user with the given phone and password.

        Raises:
            ValueError: if *phone* is empty.
        """
        if not phone:
            raise ValueError('The given phone must be set')
        # NOTE(review): this assigns phone on the receiver, not on the
        # created user — looks redundant; confirm before removing.
        self.phone=phone
        user = self.model(phone=phone, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_user(self, phone, password=None, **extra_fields):
        """Create a regular (non-staff, non-superuser) user."""
        extra_fields.setdefault('is_staff', False)
        extra_fields.setdefault('is_superuser', False)
        return self._create_user(phone, password, **extra_fields)

    def create_superuser(self, phone, password=None, **extra_fields):
        """Create a superuser; is_staff/is_superuser must be True."""
        extra_fields.setdefault('is_staff', True)
        extra_fields.setdefault('is_superuser', True)
        if extra_fields.get('is_staff') is not True:
            raise ValueError('Superuser must have is_staff=True.')
        if extra_fields.get('is_superuser') is not True:
            raise ValueError('Superuser must have is_superuser=True.')
        return self._create_user(phone, password, **extra_fields )
Model for user with no username field
62598fc04f6381625f1995e3
class FileHandleFactory(object):
    """Creates file handles, allowing for abstraction to virtual files."""

    def __init__(self):
        pass

    @staticmethod
    def create_file_handle(filename, mode):
        """Open *filename* with *mode*, creating parent directories as needed.

        Returns the open file object; the caller is responsible for
        closing it.
        """
        folder = os.path.dirname(filename)
        # Fixes two issues in the original exists()/makedirs() sequence:
        # a check-then-create race (exist_ok=True is atomic with respect
        # to concurrent creators) and a crash on a bare filename, whose
        # empty dirname os.makedirs rejects.
        if folder:
            os.makedirs(folder, exist_ok=True)
        return open(filename, mode)
Creates file handles, allowing for abstraction to virtual files
62598fc0656771135c4898b2
class TestWritableSite(unittest.TestCase):
    """WritableSite unit test stubs (auto-generated placeholders)."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testWritableSite(self):
        # Placeholder: construction/serialization checks for WritableSite
        # belong here.
        pass
WritableSite unit test stubs
62598fc04a966d76dd5ef117
class ColortermOverrideTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> colortermsFile = os.path.join(os.path.dirname(__file__), "../config", "colorterms.py") <NEW_LINE> self.photoCalConf = photoCal.PhotoCalConfig() <NEW_LINE> self.photoCalConf.colorterms.load(colortermsFile) <NEW_LINE> <DEDENT> def testHscColorterms(self): <NEW_LINE> <INDENT> hscReferenceFilters = ["g", "r", "i", "z", "y"] <NEW_LINE> hscPhysicalFilters = ["HSC-G", "HSC-R", "HSC-I", "HSC-Z", "HSC-Y"] <NEW_LINE> for filter in hscPhysicalFilters: <NEW_LINE> <INDENT> ct = self.photoCalConf.colorterms.getColorterm(filter, photoCatName="hsc") <NEW_LINE> self.assertIn(ct.primary, hscReferenceFilters) <NEW_LINE> self.assertIn(ct.secondary, hscReferenceFilters) <NEW_LINE> self.assertIsInstance(ct.c0, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c1, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c2, numbers.Number) <NEW_LINE> <DEDENT> <DEDENT> def testSdssColorterms(self): <NEW_LINE> <INDENT> sdssReferenceFilters = ["g", "r", "i", "z", "y"] <NEW_LINE> hscPhysicalFilters = ["HSC-G", "HSC-R", "HSC-I", "HSC-I2", "HSC-Z", "HSC-Y", "NB0816", "NB0921"] <NEW_LINE> for filter in hscPhysicalFilters: <NEW_LINE> <INDENT> ct = self.photoCalConf.colorterms.getColorterm(filter, photoCatName="sdss") <NEW_LINE> self.assertIn(ct.primary, sdssReferenceFilters) <NEW_LINE> self.assertIn(ct.secondary, sdssReferenceFilters) <NEW_LINE> self.assertIsInstance(ct.c0, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c1, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c2, numbers.Number) <NEW_LINE> <DEDENT> <DEDENT> def testPs1Colorterms(self): <NEW_LINE> <INDENT> ps1ReferenceFilters = ["g", "r", "i", "z", "y"] <NEW_LINE> hscPhysicalFilters = ["HSC-G", "HSC-R", "HSC-R2", "HSC-I", "HSC-I2", "HSC-Z", "HSC-Y", "IB0945", "NB0387", "NB0468", "NB0515", "NB0527", "NB0656", "NB0718", "NB0816", "NB0921", "NB0973", "NB01010"] <NEW_LINE> for filter in hscPhysicalFilters: <NEW_LINE> <INDENT> 
ct = self.photoCalConf.colorterms.getColorterm(filter, photoCatName="ps1") <NEW_LINE> self.assertIn(ct.primary, ps1ReferenceFilters) <NEW_LINE> self.assertIn(ct.secondary, ps1ReferenceFilters) <NEW_LINE> self.assertIsInstance(ct.c0, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c1, numbers.Number) <NEW_LINE> self.assertIsInstance(ct.c2, numbers.Number)
Test that the HSC colorterms have been set consistently.
62598fc0f548e778e596b7e2
class MambuValueObject(MambuMapObj): <NEW_LINE> <INDENT> pass
A Mambu object with some schema but that you won't interact directly with in Mambu web, but through some entity.
62598fc0283ffb24f3cf3ac7
class IndriDocs(gensim.interfaces.CorpusABC): <NEW_LINE> <INDENT> def __init__(self, index, dictionary, max_documents=None): <NEW_LINE> <INDENT> assert isinstance(index, pyndri.Index) <NEW_LINE> self.index = index <NEW_LINE> self.dictionary = dictionary <NEW_LINE> self.max_documents = max_documents <NEW_LINE> _,self.id2token,_ = index.get_dictionary() <NEW_LINE> <DEDENT> def _maximum_document(self): <NEW_LINE> <INDENT> if self.max_documents is None: <NEW_LINE> <INDENT> return self.index.maximum_document() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return min( self.max_documents + self.index.document_base(), self.index.maximum_document()) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for int_doc_id in range(self.index.document_base(), self._maximum_document()): <NEW_LINE> <INDENT> doc = self.index.document(int_doc_id)[1] <NEW_LINE> tokens = [self.id2token[word_id] for word_id in doc if word_id != 0] <NEW_LINE> yield TaggedDocument(tokens, [int_doc_id]) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._maximum_document() - self.index.document_base()
Integrates an Index with Gensim's Doc2Vec implementation.
62598fc026068e7796d4cb9e
class Products(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=140, blank=False) <NEW_LINE> description = models.TextField(max_length=1000, null=True, blank=True) <NEW_LINE> price = models.DecimalField(max_digits=100, default=29.99, decimal_places=2) <NEW_LINE> sale_price = models.DecimalField(max_digits=100, null=True, blank=True, decimal_places=2) <NEW_LINE> slug = models.SlugField(unique=True) <NEW_LINE> timestamp = models.DateTimeField(auto_now_add=True, auto_now=False) <NEW_LINE> updated = models.DateTimeField(auto_now_add=False, auto_now=True) <NEW_LINE> active = models.BooleanField(default=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = ("title", "slug") <NEW_LINE> <DEDENT> def get_price(self): <NEW_LINE> <INDENT> return self.price <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse("single_product", kwargs={"slug": self.slug})
these will be the models that will include all the products that will be used by the site. It will borrow from the class of Django Models and in the end it will return the title of product as a __unicode__ method. In addition to this a few things need to be included. 1. presence of a Slug field which will be a unique field that will be synchronized with the title. This is what will identify a product for instance at the URL 2.
62598fc099fddb7c1ca62f0e
class HerokuWorker(Worker): <NEW_LINE> <INDENT> imminent_shutdown_delay = 6 <NEW_LINE> frame_properties = ['f_code', 'f_lasti', 'f_lineno', 'f_locals', 'f_trace'] <NEW_LINE> def setup_work_horse_signals(self): <NEW_LINE> <INDENT> signal.signal(signal.SIGRTMIN, self.request_stop_sigrtmin) <NEW_LINE> signal.signal(signal.SIGINT, signal.SIG_IGN) <NEW_LINE> signal.signal(signal.SIGTERM, signal.SIG_IGN) <NEW_LINE> <DEDENT> def handle_warm_shutdown_request(self): <NEW_LINE> <INDENT> if self.horse_pid != 0: <NEW_LINE> <INDENT> self.log.info( 'Worker %s: warm shut down requested, sending horse SIGRTMIN signal', self.key ) <NEW_LINE> self.kill_horse(sig=signal.SIGRTMIN) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.warning('Warm shut down requested, no horse found') <NEW_LINE> <DEDENT> <DEDENT> def request_stop_sigrtmin(self, signum, frame): <NEW_LINE> <INDENT> if self.imminent_shutdown_delay == 0: <NEW_LINE> <INDENT> self.log.warning('Imminent shutdown, raising ShutDownImminentException immediately') <NEW_LINE> self.request_force_stop_sigrtmin(signum, frame) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.warning('Imminent shutdown, raising ShutDownImminentException in %d seconds', self.imminent_shutdown_delay) <NEW_LINE> signal.signal(signal.SIGRTMIN, self.request_force_stop_sigrtmin) <NEW_LINE> signal.signal(signal.SIGALRM, self.request_force_stop_sigrtmin) <NEW_LINE> signal.alarm(self.imminent_shutdown_delay) <NEW_LINE> <DEDENT> <DEDENT> def request_force_stop_sigrtmin(self, signum, frame): <NEW_LINE> <INDENT> info = dict((attr, getattr(frame, attr)) for attr in self.frame_properties) <NEW_LINE> self.log.warning('raising ShutDownImminentException to cancel job...') <NEW_LINE> raise ShutDownImminentException('shut down imminent (signal: %s)' % signal_name(signum), info)
Modified version of rq worker which: * stops work horses getting killed with SIGTERM * sends SIGRTMIN to work horses on SIGTERM to the main process which in turn causes the horse to crash `imminent_shutdown_delay` seconds later
62598fc0dc8b845886d537fe
class A2CAgent(A3CAgent): <NEW_LINE> <INDENT> _agent_name = "A2C" <NEW_LINE> _default_config = A2C_DEFAULT_CONFIG <NEW_LINE> def _make_optimizer(self): <NEW_LINE> <INDENT> return SyncSamplesOptimizer(self.local_evaluator, self.remote_evaluators, self.config["optimizer"]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def default_resource_request(cls, config): <NEW_LINE> <INDENT> cf = merge_dicts(cls._default_config, config) <NEW_LINE> return Resources( cpu=1, gpu=cf["gpu_fraction"] if cf["gpu"] else 0, extra_cpu=cf["num_workers"], extra_gpu=cf["use_gpu_for_workers"] and cf["num_workers"] or 0)
Synchronous variant of the A3CAgent.
62598fc03346ee7daa33776a
class Experiment: <NEW_LINE> <INDENT> def __init__(self, label): <NEW_LINE> <INDENT> self._label = label <NEW_LINE> <DEDENT> def get_label(self): <NEW_LINE> <INDENT> return self._label
An experiment that can be chosen by the agent.
62598fc0fff4ab517ebcda29
@singleton <NEW_LINE> class TempestConfig: <NEW_LINE> <INDENT> DEFAULT_CONFIG_DIR = os.path.join( os.path.abspath(os.path.dirname(os.path.dirname(__file__))), "etc") <NEW_LINE> DEFAULT_CONFIG_FILE = "tempest.conf" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> config_files = [] <NEW_LINE> failsafe_path = "/etc/tempest/" + self.DEFAULT_CONFIG_FILE <NEW_LINE> conf_dir = os.environ.get('TEMPEST_CONFIG_DIR', self.DEFAULT_CONFIG_DIR) <NEW_LINE> conf_file = os.environ.get('TEMPEST_CONFIG', self.DEFAULT_CONFIG_FILE) <NEW_LINE> path = os.path.join(conf_dir, conf_file) <NEW_LINE> if not (os.path.isfile(path) or 'TEMPEST_CONFIG_DIR' in os.environ or 'TEMPEST_CONFIG' in os.environ): <NEW_LINE> <INDENT> path = failsafe_path <NEW_LINE> <DEDENT> if not os.path.exists(path): <NEW_LINE> <INDENT> msg = "Config file %s not found" % path <NEW_LINE> print(RuntimeError(msg), file=sys.stderr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> config_files.append(path) <NEW_LINE> <DEDENT> cfg.CONF([], project='tempest', default_config_files=config_files) <NEW_LINE> logging.setup('tempest') <NEW_LINE> LOG = logging.getLogger('tempest') <NEW_LINE> LOG.info("Using tempest config file %s" % path) <NEW_LINE> register_opt_group(cfg.CONF, compute_group, ComputeGroup) <NEW_LINE> register_opt_group(cfg.CONF, identity_group, IdentityGroup) <NEW_LINE> register_opt_group(cfg.CONF, image_group, ImageGroup) <NEW_LINE> register_opt_group(cfg.CONF, network_group, NetworkGroup) <NEW_LINE> register_opt_group(cfg.CONF, volume_group, VolumeGroup) <NEW_LINE> register_opt_group(cfg.CONF, object_storage_group, ObjectStoreGroup) <NEW_LINE> register_opt_group(cfg.CONF, orchestration_group, OrchestrationGroup) <NEW_LINE> register_opt_group(cfg.CONF, dashboard_group, DashboardGroup) <NEW_LINE> register_opt_group(cfg.CONF, boto_group, BotoGroup) <NEW_LINE> register_opt_group(cfg.CONF, compute_admin_group, ComputeAdminGroup) <NEW_LINE> register_opt_group(cfg.CONF, stress_group, StressGroup) <NEW_LINE> 
register_opt_group(cfg.CONF, scenario_group, ScenarioGroup) <NEW_LINE> register_opt_group(cfg.CONF, service_available_group, ServiceAvailableGroup) <NEW_LINE> register_opt_group(cfg.CONF, debug_group, DebugGroup) <NEW_LINE> self.compute = cfg.CONF.compute <NEW_LINE> self.identity = cfg.CONF.identity <NEW_LINE> self.images = cfg.CONF.image <NEW_LINE> self.network = cfg.CONF.network <NEW_LINE> self.volume = cfg.CONF.volume <NEW_LINE> self.object_storage = cfg.CONF['object-storage'] <NEW_LINE> self.orchestration = cfg.CONF.orchestration <NEW_LINE> self.dashboard = cfg.CONF.dashboard <NEW_LINE> self.boto = cfg.CONF.boto <NEW_LINE> self.compute_admin = cfg.CONF['compute-admin'] <NEW_LINE> self.stress = cfg.CONF.stress <NEW_LINE> self.scenario = cfg.CONF.scenario <NEW_LINE> self.service_available = cfg.CONF.service_available <NEW_LINE> self.debug = cfg.CONF.debug <NEW_LINE> if not self.compute_admin.username: <NEW_LINE> <INDENT> self.compute_admin.username = self.identity.admin_username <NEW_LINE> self.compute_admin.password = self.identity.admin_password <NEW_LINE> self.compute_admin.tenant_name = self.identity.admin_tenant_name
Provides OpenStack configuration information.
62598fc0ff9c53063f51a892
class CalculatorsReturnValue(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.solarcell = ibei.DeVosSolarcell(input_params) <NEW_LINE> <DEDENT> def test_calc_power_density(self): <NEW_LINE> <INDENT> self.solarcell.bandgap = 0 <NEW_LINE> self.assertEqual(0, self.solarcell.calc_power_density())
Tests special values of the calculator methods.
62598fc066673b3332c30617
class Signage(BaseInfrastructure): <NEW_LINE> <INDENT> objects = BaseInfrastructure.get_manager_cls(SignageGISManager)() <NEW_LINE> in_structure = SignageStructureManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> proxy = True <NEW_LINE> verbose_name = _(u"Signage") <NEW_LINE> verbose_name_plural = _(u"Signages") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def path_signages(cls, path): <NEW_LINE> <INDENT> return cls.objects.filter(aggregations__path=path).distinct('pk') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def topology_signages(cls, topology): <NEW_LINE> <INDENT> return cls.overlapping(topology)
An infrastructure in the park, which is of type SIGNAGE
62598fc0ad47b63b2c5a7a9b
class _LockWrapper(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.lock = threading.Lock() <NEW_LINE> <DEDENT> def acquire(self, wait=True): <NEW_LINE> <INDENT> return self.lock.acquire(wait) <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> self.lock.release()
weakref-capable wrapper for threading.Lock
62598fc0f548e778e596b7e4
class Attribute(websettings.Attribute): <NEW_LINE> <INDENT> GLOBAL_SETTINGS = QWebEngineSettings.globalSettings <NEW_LINE> ENUM_BASE = QWebEngineSettings
A setting set via QWebEngineSettings::setAttribute.
62598fc05166f23b2e243625
class SharedResources: <NEW_LINE> <INDENT> __instance = None <NEW_LINE> def __init__(self, vid_src=0): <NEW_LINE> <INDENT> if SharedResources.__instance is not None: <NEW_LINE> <INDENT> raise Exception("This class is a singleton!") <NEW_LINE> <DEDENT> SharedResources.__instance = self <NEW_LINE> self.output_frame = None <NEW_LINE> self.lock = Lock() <NEW_LINE> self.vid_stream = StreamManager(src=vid_src) <NEW_LINE> self.filter_m = FilterManager() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_instance(): <NEW_LINE> <INDENT> if SharedResources.__instance is None: <NEW_LINE> <INDENT> SharedResources() <NEW_LINE> <DEDENT> return SharedResources.__instance <NEW_LINE> <DEDENT> def start_stream(self): <NEW_LINE> <INDENT> self.vid_stream.start() <NEW_LINE> <DEDENT> def stop_stream(self): <NEW_LINE> <INDENT> self.vid_stream.stop() <NEW_LINE> <DEDENT> def get_filters(self): <NEW_LINE> <INDENT> return self.filter_m.filters <NEW_LINE> <DEDENT> def toggle_filter(self, input_filter): <NEW_LINE> <INDENT> self.filter_m.toggle_filter(input_filter)
Class for maintaining shared variables
62598fc04f88993c371f062c
class AllSubDevicesGetLampStrikes(TestMixins.AllSubDevicesGetMixin, ResponderTestFixture): <NEW_LINE> <INDENT> CATEGORY = TestCategory.SUB_DEVICES <NEW_LINE> PID = 'LAMP_STRIKES'
Send a Get LAMP_STRIKES to ALL_SUB_DEVICES.
62598fc0796e427e5384e9da
class ProtocolCustomSettingsFormat(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'protocol': {'key': 'protocol', 'type': 'str'}, 'trigger_rate_override': {'key': 'triggerRateOverride', 'type': 'str'}, 'source_rate_override': {'key': 'sourceRateOverride', 'type': 'str'}, 'trigger_sensitivity_override': {'key': 'triggerSensitivityOverride', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, protocol: Optional[Union[str, "DdosCustomPolicyProtocol"]] = None, trigger_rate_override: Optional[str] = None, source_rate_override: Optional[str] = None, trigger_sensitivity_override: Optional[Union[str, "DdosCustomPolicyTriggerSensitivityOverride"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ProtocolCustomSettingsFormat, self).__init__(**kwargs) <NEW_LINE> self.protocol = protocol <NEW_LINE> self.trigger_rate_override = trigger_rate_override <NEW_LINE> self.source_rate_override = source_rate_override <NEW_LINE> self.trigger_sensitivity_override = trigger_sensitivity_override
DDoS custom policy properties. :param protocol: The protocol for which the DDoS protection policy is being customized. Possible values include: "Tcp", "Udp", "Syn". :type protocol: str or ~azure.mgmt.network.v2019_02_01.models.DdosCustomPolicyProtocol :param trigger_rate_override: The customized DDoS protection trigger rate. :type trigger_rate_override: str :param source_rate_override: The customized DDoS protection source rate. :type source_rate_override: str :param trigger_sensitivity_override: The customized DDoS protection trigger rate sensitivity degrees. High: Trigger rate set with most sensitivity w.r.t. normal traffic. Default: Trigger rate set with moderate sensitivity w.r.t. normal traffic. Low: Trigger rate set with less sensitivity w.r.t. normal traffic. Relaxed: Trigger rate set with least sensitivity w.r.t. normal traffic. Possible values include: "Relaxed", "Low", "Default", "High". :type trigger_sensitivity_override: str or ~azure.mgmt.network.v2019_02_01.models.DdosCustomPolicyTriggerSensitivityOverride
62598fc08a349b6b43686483
@adapter_config(name='label', required=(ITask, IAdminLayer, TaskNotificationsTable), provides=IColumn) <NEW_LINE> class TaskNotificationsTableNameColumn(NameColumn): <NEW_LINE> <INDENT> i18n_header = _("Target")
Task notifications table label column
62598fc056ac1b37e6302432
class CrawlerParseException(Exception): <NEW_LINE> <INDENT> pass
Exception from Crawler parse errors
62598fc03346ee7daa33776b
class Spider(NewsSpider): <NEW_LINE> <INDENT> name = "海大主页" <NEW_LINE> list_urls = [ "http://211.64.142.8/article_js.asp?ClassID=2&IncludeChild=true&ArticleNum=10" "&ShowTitle=true&ShowUpdateTime=true&OrderField=UpdateTime&OrderType=desc", ] <NEW_LINE> list_extract_pattern = r"<a href='(.*?)' title='(.*?)' target='_blank'>" <NEW_LINE> item_url_pattern = r"http://211.64.142.8/Article_Show\.asp" <NEW_LINE> item_extract_scope = "" <NEW_LINE> item_extract_field = { 'title': ".tdbg_right2[height='50'] b", 'content': ".tdbg_right[height='260']", } <NEW_LINE> item_max_count = 10 <NEW_LINE> datetime_format = "%Y-%m-%d %H:%M:%S" <NEW_LINE> response_encoding = 'gbk' <NEW_LINE> def __init__(self, *a, **kw): <NEW_LINE> <INDENT> super(Spider, self).__init__(*a, **kw) <NEW_LINE> self.list_extract_pattern = re.compile(self.list_extract_pattern) <NEW_LINE> <DEDENT> def _extract_fields(self, scope_selector, field_selectors): <NEW_LINE> <INDENT> response = self.current_response <NEW_LINE> if response.meta['type'] == 'list': <NEW_LINE> <INDENT> links = [] <NEW_LINE> titles = [] <NEW_LINE> datetimes = [] <NEW_LINE> for item in self.list_extract_pattern.findall(response.body_as_unicode()): <NEW_LINE> <INDENT> links.append(self.process_link(item[0])) <NEW_LINE> info = item[1].split("\\n") <NEW_LINE> titles.append(self.process_title(info[0][5:])) <NEW_LINE> datetimes.append(self.process_datetime(info[2][5:])) <NEW_LINE> <DEDENT> yield 'link', links <NEW_LINE> yield 'title', titles <NEW_LINE> yield 'datetime', datetimes <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for field, values in super(Spider, self)._extract_fields( scope_selector, field_selectors): <NEW_LINE> <INDENT> yield field, values
海大主页 置顶文章比较多,需要多抓取几项 列表页是脚本生成的,应当直接从返回的脚本里提取
62598fc0fff4ab517ebcda2b
class DscastarGenerator: <NEW_LINE> <INDENT> def __init__(self, output_root): <NEW_LINE> <INDENT> self.output_root = join(output_root, "train") <NEW_LINE> self.seq = 0 <NEW_LINE> self.frame_length = None <NEW_LINE> <DEDENT> def output_rgb_files(self, src_rgb_paths): <NEW_LINE> <INDENT> self.frame_length = len(src_rgb_paths) <NEW_LINE> rgb_folder = join(self.output_root, 'rgb') <NEW_LINE> os.makedirs(rgb_folder, exist_ok=True) <NEW_LINE> for i, src_rgb_path in enumerate(src_rgb_paths): <NEW_LINE> <INDENT> shutil.copy( src_rgb_path, join(rgb_folder, "{}-frame-{:06}.color.jpg".format(self.seq, i)) ) <NEW_LINE> <DEDENT> <DEDENT> def output_depth_files(self, src_depth_paths): <NEW_LINE> <INDENT> depth_folder = join(self.output_root, 'depth') <NEW_LINE> os.makedirs(depth_folder, exist_ok=True) <NEW_LINE> for i, src_depth_path in enumerate(src_depth_paths): <NEW_LINE> <INDENT> shutil.copy( src_depth_path, join(depth_folder, "{}-frame-{:06}.depth.png".format(self.seq, i)) ) <NEW_LINE> <DEDENT> <DEDENT> def output_trajectory(self, pose_matrixes): <NEW_LINE> <INDENT> poses_folder = join(self.output_root, 'poses') <NEW_LINE> os.makedirs(poses_folder, exist_ok=True) <NEW_LINE> for i, pose_matrix in enumerate(pose_matrixes): <NEW_LINE> <INDENT> lines = [] <NEW_LINE> for row in pose_matrix: <NEW_LINE> <INDENT> lines.append(" ".join(row.astype(np.str)) + "\n") <NEW_LINE> <DEDENT> open(join(poses_folder, "{}-frame-{:06}.pose.txt".format(self.seq, i)), "w").writelines(lines) <NEW_LINE> <DEDENT> <DEDENT> def output_intrinsic(self, intrinsic): <NEW_LINE> <INDENT> calibration_folder = join(self.output_root, 'calibration') <NEW_LINE> os.makedirs(calibration_folder, exist_ok=True) <NEW_LINE> assert (self.frame_length) <NEW_LINE> for i in range(0, self.frame_length): <NEW_LINE> <INDENT> open(join(calibration_folder, "{}-frame-{:06}.calibration.txt".format(self.seq, i)), "w") .write(str(intrinsic["intrinsic_matrix"][0]))
This class generate a data structure for DSACSTAR. The format is https://github.com/vislearn/dsacstar#data-structure.
62598fc07b180e01f3e49172
class DescribeUpgradableNodeVersionsRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(DescribeUpgradableNodeVersionsRequest, self).__init__( '/regions/{regionId}/clusters/{clusterId}/upgradableNodeVersions', 'GET', header, version) <NEW_LINE> self.parameters = parameters
查询可升级的节点版本
62598fc05fdd1c0f98e5e1d9
class Export_Xps_Pose_Op(bpy.types.Operator, ExportHelper): <NEW_LINE> <INDENT> bl_idname = "xps_tools.export_pose" <NEW_LINE> bl_label = "Export XNALara/XPS Pose" <NEW_LINE> bl_space_type = "PROPERTIES" <NEW_LINE> bl_region_type = "WINDOW" <NEW_LINE> filename_ext = '.pose' <NEW_LINE> filepath = bpy.props.StringProperty( name="File Path", description="Filepath used for exporting the file", maxlen=1024, default="", ) <NEW_LINE> filter_glob = bpy.props.StringProperty( default="*.pose", options={'HIDDEN'}, ) <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object and context.active_object.type == 'ARMATURE' <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> export_xnalara_pose.getOutputFilename(self.filepath) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> wm.fileselect_add(self) <NEW_LINE> return {'RUNNING_MODAL'}
Save an XNALara pose File
62598fc04428ac0f6e65876a
class Solution: <NEW_LINE> <INDENT> def orangesRotting(self, grid: List[List[int]]) -> int: <NEW_LINE> <INDENT> m, n = len(grid), len(grid[0]) <NEW_LINE> result = 0 <NEW_LINE> queue = list() <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> for j in range(n): <NEW_LINE> <INDENT> if grid[i][j] == 2: <NEW_LINE> <INDENT> queue.append((i, j, 0)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> while queue: <NEW_LINE> <INDENT> i, j, result = queue.pop(0) <NEW_LINE> for x, y in [(i-1, j), (i+1, j), (i, j-1), (i, j+1)]: <NEW_LINE> <INDENT> if 0 <= x < m and 0 <= y < n and grid[x][y] == 1: <NEW_LINE> <INDENT> grid[x][y] = 2 <NEW_LINE> queue.append((x, y, result+1)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for i in grid: <NEW_LINE> <INDENT> if 1 in i: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> return result
找到所有腐烂的橘子 所有腐烂的橘子往上下左右扩散,扩散到的新鲜橘子转成腐烂的橘子,并且记时间为1 新转成腐烂的橘子再往外扩散,直到能扩散到的位置都扩散完毕 找列表中是否在新鲜橘子,存在返回-1 不存在返回时间
62598fc03617ad0b5ee0638d
class Constant(_Value): <NEW_LINE> <INDENT> def __init__(self, value, bits_sign=None): <NEW_LINE> <INDENT> from migen.fhdl.bitcontainer import bits_for <NEW_LINE> _Value.__init__(self) <NEW_LINE> self.value = int(value) <NEW_LINE> if bits_sign is None: <NEW_LINE> <INDENT> bits_sign = bits_for(self.value), self.value < 0 <NEW_LINE> <DEDENT> elif isinstance(bits_sign, int): <NEW_LINE> <INDENT> bits_sign = bits_sign, self.value < 0 <NEW_LINE> <DEDENT> self.nbits, self.signed = bits_sign <NEW_LINE> if not isinstance(self.nbits, int) or self.nbits <= 0: <NEW_LINE> <INDENT> raise TypeError("Width must be a strictly positive integer") <NEW_LINE> <DEDENT> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return self.value
A constant, HDL-literal integer `_Value` Parameters ---------- value : int bits_sign : int or tuple or None Either an integer `bits` or a tuple `(bits, signed)` specifying the number of bits in this `Constant` and whether it is signed (can represent negative values). `bits_sign` defaults to the minimum width and signedness of `value`.
62598fc0f548e778e596b7e6
class UserProfile( models.Model ): <NEW_LINE> <INDENT> user = models.OneToOneField( User, ) <NEW_LINE> privacy = models.CharField( blank = False, help_text = "If you choose private, other users will not be able to see you", max_length = 50 ) <NEW_LINE> groups = models.ManyToManyField( WorkoutGroup, blank = True, related_name = "members", ) <NEW_LINE> tags = models.ManyToManyField( Tag, blank = True, ) <NEW_LINE> playlists = models.ManyToManyField( Playlist, blank = True, ) <NEW_LINE> goals = models.ManyToManyField( WorkoutGoal, null = True, blank = True, help_text = "Your Goal (e.g. Weight Loss)", related_name = "users", ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s" % self.user.username <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('users.views.view_user', args=[str(self.pk)])
This UserProfile model wraps the default Django auth User model with additional information, including their groups, playlists, tags (interests), etc.
62598fc0796e427e5384e9dc
class GPPro(ToolWrapper, ABC, EnforceOverrides): <NEW_LINE> <INDENT> GP_BIN = "java -jar " + Paths.GPPRO <NEW_LINE> @final <NEW_LINE> def run_gppro(self, args, outfile): <NEW_LINE> <INDENT> outpath = self.get_outpath(outfile) <NEW_LINE> cmd_line = self.GP_BIN + " " + " ".join(args) + " > " + outpath <NEW_LINE> if isfile(outpath) and not self.force_mode: <NEW_LINE> <INDENT> print("Skipping " + cmd_line + " (results found).") <NEW_LINE> return 0 <NEW_LINE> <DEDENT> print("Running " + cmd_line + ".") <NEW_LINE> return execute_cmd(cmd_line)
SCRUTINY ToolWrapper for GlobalPlatformPro
62598fc0aad79263cf42ea1c
class Tag(VersionedPanObject): <NEW_LINE> <INDENT> ROOT = Root.VSYS <NEW_LINE> SUFFIX = ENTRY <NEW_LINE> def _setup(self): <NEW_LINE> <INDENT> self._xpaths.add_profile(value='/tag') <NEW_LINE> params = [] <NEW_LINE> params.append(VersionedParamPath( 'color', path='color')) <NEW_LINE> params.append(VersionedParamPath( 'comments', path='comments')) <NEW_LINE> self._params = tuple(params) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def color_code(color_name): <NEW_LINE> <INDENT> colors = { 'red': 1, 'green': 2, 'blue': 3, 'yellow': 4, 'copper': 5, 'orange': 6, 'purple': 7, 'gray': 8, 'light green': 9, 'cyan': 10, 'light gray': 11, 'blue gray': 12, 'lime': 13, 'black': 14, 'gold': 15, 'brown': 16, } <NEW_LINE> if color_name not in colors: <NEW_LINE> <INDENT> raise ValueError("Color '{0}' is not valid".format(color_name)) <NEW_LINE> <DEDENT> return "color"+str(colors[color_name])
Administrative tag Args: name (str): Name of the tag color (str): Color ID (eg. 'color1', 'color4', etc). You can use :func:`~pandevice.objects.Tag.color_code` to generate the ID. comments (str): Comments
62598fc0956e5f7376df57a2
class Softmax: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.params = [] <NEW_LINE> self.grads = [] <NEW_LINE> self.out = None <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> self.out = softmax(x) <NEW_LINE> return self.out <NEW_LINE> <DEDENT> def backward(self, dout): <NEW_LINE> <INDENT> dx = self.out * dout <NEW_LINE> sumdx = np.sum(dx, axis=1, keepdims=True) <NEW_LINE> dx -= self.out * sumdx <NEW_LINE> return dx
Softmax layer There are no parameters and graduations in Softmax Layer. Softmax layer converts the input "score batch" to a related "probability batch", in which each row indicates a probability vector. A probability vector is a vector whose length is equal to the number of classes. Each element in the vector is a probability to a corresponding class and the highest probability denotes the class predicted by the current model.
62598fc03346ee7daa33776c
class NoPreemptionPolicy(PreemptabilityPolicy): <NEW_LINE> <INDENT> def __init__(self, slottable): <NEW_LINE> <INDENT> PreemptabilityPolicy.__init__(self, slottable) <NEW_LINE> <DEDENT> def get_lease_preemptability_score(self, preemptor, preemptee, time): <NEW_LINE> <INDENT> return -1
Simple preemption policy: preemption is never allowed.
62598fc097e22403b383b150
@attr.s(auto_attribs=True, init=False) <NEW_LINE> class Snippet(model.Model): <NEW_LINE> <INDENT> name: Optional[str] = None <NEW_LINE> label: Optional[str] = None <NEW_LINE> sql: Optional[str] = None <NEW_LINE> can: Optional[MutableMapping[str, bool]] = None <NEW_LINE> def __init__( self, *, name: Optional[str] = None, label: Optional[str] = None, sql: Optional[str] = None, can: Optional[MutableMapping[str, bool]] = None ): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.label = label <NEW_LINE> self.sql = sql <NEW_LINE> self.can = can
Attributes: name: Name of the snippet label: Label of the snippet sql: SQL text of the snippet can: Operations the current user is able to perform on this object
62598fc060cbc95b06364584
class Countdown(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.app = tk.Tk() <NEW_LINE> self.timeleft = 0 <NEW_LINE> self.counting = False <NEW_LINE> self.job = None <NEW_LINE> self.layout_setup() <NEW_LINE> self.update_display(MSG_WELCOME) <NEW_LINE> <DEDENT> def layout_setup(self): <NEW_LINE> <INDENT> self.app.title('Countdown') <NEW_LINE> self.app.attributes('-topmost', True) <NEW_LINE> self.display = tk.Label(self.app, font=(FONT, FONT_SIZE), bg=CLR_BG, fg=CLR_FG) <NEW_LINE> self.display.pack() <NEW_LINE> for minute in COUNTDOWNS: <NEW_LINE> <INDENT> btn = tk.Button(self.app, text=minute, width=2, command=partial(self.btn_time, minute) ) <NEW_LINE> btn.pack(side=tk.LEFT) <NEW_LINE> <DEDENT> <DEDENT> def update_display(self, msg): <NEW_LINE> <INDENT> self.display['text'] = '{:^{width}}'.format(msg, width=DISPLAY_WIDTH) <NEW_LINE> <DEDENT> def done(self): <NEW_LINE> <INDENT> self.update_display(MSG_DONE) <NEW_LINE> winsound.PlaySound(WIN_SOUND, winsound.SND_ALIAS) <NEW_LINE> self.stop() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.counting = False <NEW_LINE> self.update_display(MSG_DONE) <NEW_LINE> if self.job: <NEW_LINE> <INDENT> self.app.after_cancel(self.job) <NEW_LINE> self.job = None <NEW_LINE> <DEDENT> <DEDENT> def tick(self): <NEW_LINE> <INDENT> if self.counting: <NEW_LINE> <INDENT> if self.timeleft == 0: <NEW_LINE> <INDENT> self.done() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timeleft = self.timeleft - TICK_INTERVAL <NEW_LINE> self.update_display(sec_to_msg(self.timeleft)) <NEW_LINE> self.job = self.app.after(int(TICK_INTERVAL * 1000), self.tick) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def btn_time(self, minutes): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> if minutes: <NEW_LINE> <INDENT> self.timeleft = minutes * 60 <NEW_LINE> self.counting = True <NEW_LINE> self.tick()
class implmenting the GUI and timing functionality of the timer
62598fc023849d37ff8512fb
@attr.s(slots=True, auto_attribs=True) <NEW_LINE> class RarityComponent: <NEW_LINE> <INDENT> rarity: RARITIES
Component that holds the rarity of the item.
62598fc07b180e01f3e49173
class InvalidSlot(results.MetadataError): <NEW_LINE> <INDENT> _attr = 'slot'
Package's SLOT is invalid.
62598fc097e22403b383b151
class Leaf: <NEW_LINE> <INDENT> def __init__(self, symbol): <NEW_LINE> <INDENT> self.symbol = symbol
Leaf of a regex syntax tree
62598fc063b5f9789fe853ba
class ReadOnlyFieldData(FieldData): <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> self._source = source <NEW_LINE> <DEDENT> def get(self, block, name): <NEW_LINE> <INDENT> return self._source.get(block, name) <NEW_LINE> <DEDENT> def set(self, block, name, value): <NEW_LINE> <INDENT> raise InvalidScopeError(f"{block}.{name} is read-only, cannot set") <NEW_LINE> <DEDENT> def delete(self, block, name): <NEW_LINE> <INDENT> raise InvalidScopeError(f"{block}.{name} is read-only, cannot delete") <NEW_LINE> <DEDENT> def has(self, block, name): <NEW_LINE> <INDENT> return self._source.has(block, name) <NEW_LINE> <DEDENT> def default(self, block, name): <NEW_LINE> <INDENT> return self._source.default(block, name) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"ReadOnlyFieldData({self._source!r})"
A FieldData that wraps another FieldData and makes all calls to set and delete raise :class:`~xblock.exceptions.InvalidScopeError`s.
62598fc05fdd1c0f98e5e1da
class Meta: <NEW_LINE> <INDENT> model = User
Define a model instance class.
62598fc0be7bc26dc9251f80
class CustomQuestionView(APIView): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> questions = Question.objects.filter(created_by=request.user) <NEW_LINE> world_id = request.query_params.get("world_id") <NEW_LINE> if world_id: <NEW_LINE> <INDENT> world = CustomWorld.objects.get(id=world_id) <NEW_LINE> if world.created_by != request.user: <NEW_LINE> <INDENT> raise PermissionDenied(detail="You do not have access to this Custom World") <NEW_LINE> <DEDENT> section = Section.objects.get(world_id=world_id) <NEW_LINE> questions = questions.filter(section=section) <NEW_LINE> <DEDENT> serializer = CreateQuestionSerializer(questions, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> serializer = CreateQuestionSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> section = Section.objects.get(id=request.data['section']) <NEW_LINE> number_of_questions_in_section = len(Question.objects.filter(section=section)) <NEW_LINE> if section.world.is_custom_world and number_of_questions_in_section < 12: <NEW_LINE> <INDENT> serializer.save(created_by=request.user, difficulty=1, section=section) <NEW_LINE> return Response(serializer.data, status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST)
API for creating custom questions Requests handled: GET, POST
62598fc067a9b606de546214
class LongCodedValue(CodedValue, long): <NEW_LINE> <INDENT> pass
A long integer coded value.
62598fc0283ffb24f3cf3acd
class LoggingResetCommand(sublime_plugin.WindowCommand): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> reset_logging_system() <NEW_LINE> sublime.status_message("Logging system has been reset.")
command key: logging_reset
62598fc08a349b6b43686487
class Album: <NEW_LINE> <INDENT> def __init__(self, name, year, artist=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.year = year <NEW_LINE> if artist is None: <NEW_LINE> <INDENT> self.artist = Artist("Various Artists") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.artist = artist <NEW_LINE> <DEDENT> self.tracks = [] <NEW_LINE> <DEDENT> def add_song(self, song, position=None): <NEW_LINE> <INDENT> song_found = find_object(song, self.tracks) <NEW_LINE> if song_found is None: <NEW_LINE> <INDENT> song_found = Song(song, self.name) <NEW_LINE> <DEDENT> if position is None: <NEW_LINE> <INDENT> self.tracks.append(song_found) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tracks.insert(position, song_found)
Methods: add_song: used to add a new song to the album's track list.
62598fc076e4537e8c3ef7ef
class Ping(object): <NEW_LINE> <INDENT> openapi_types = { 'result': 'str' } <NEW_LINE> attribute_map = { 'result': 'result' } <NEW_LINE> def __init__(self, result=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._result = None <NEW_LINE> self.discriminator = None <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> self.result = result <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def result(self): <NEW_LINE> <INDENT> return self._result <NEW_LINE> <DEDENT> @result.setter <NEW_LINE> def result(self, result): <NEW_LINE> <INDENT> self._result = result <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ping): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Ping): <NEW_LINE> <INDENT> return True 
<NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fc0d486a94d0ba2c21a
class NodeUnavailableEvent(NotificationEvent): <NEW_LINE> <INDENT> pass
Event sent by a node to the manager to indicate that it is inaccessible
62598fc03d592f4c4edbb107
class Enum(String, SchemaType): <NEW_LINE> <INDENT> __visit_name__ = 'enum' <NEW_LINE> def __init__(self, *enums, **kw): <NEW_LINE> <INDENT> self.enums = enums <NEW_LINE> self.native_enum = kw.pop('native_enum', True) <NEW_LINE> convert_unicode = kw.pop('convert_unicode', None) <NEW_LINE> if convert_unicode is None: <NEW_LINE> <INDENT> for e in enums: <NEW_LINE> <INDENT> if isinstance(e, util.text_type): <NEW_LINE> <INDENT> convert_unicode = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> convert_unicode = False <NEW_LINE> <DEDENT> <DEDENT> if self.enums: <NEW_LINE> <INDENT> length = max(len(x) for x in self.enums) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> length = 0 <NEW_LINE> <DEDENT> String.__init__(self, length=length, convert_unicode=convert_unicode, ) <NEW_LINE> SchemaType.__init__(self, **kw) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return util.generic_repr(self, [ ("native_enum", True), ("name", None) ]) <NEW_LINE> <DEDENT> def _should_create_constraint(self, compiler): <NEW_LINE> <INDENT> return not self.native_enum or not compiler.dialect.supports_native_enum <NEW_LINE> <DEDENT> @util.dependencies("sqlalchemy.sql.schema") <NEW_LINE> def _set_table(self, schema, column, table): <NEW_LINE> <INDENT> if self.native_enum: <NEW_LINE> <INDENT> SchemaType._set_table(self, column, table) <NEW_LINE> <DEDENT> e = schema.CheckConstraint( type_coerce(column, self).in_(self.enums), name=self.name, _create_rule=util.portable_instancemethod( self._should_create_constraint) ) <NEW_LINE> table.append_constraint(e) <NEW_LINE> <DEDENT> def adapt(self, impltype, **kw): <NEW_LINE> <INDENT> schema = kw.pop('schema', self.schema) <NEW_LINE> metadata = kw.pop('metadata', self.metadata) <NEW_LINE> if issubclass(impltype, Enum): <NEW_LINE> <INDENT> return impltype(name=self.name, schema=schema, metadata=metadata, convert_unicode=self.convert_unicode, native_enum=self.native_enum, inherit_schema=self.inherit_schema, *self.enums, 
**kw ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(Enum, self).adapt(impltype, **kw)
Generic Enum Type. The Enum type provides a set of possible string values which the column is constrained towards. By default, uses the backend's native ENUM type if available, else uses VARCHAR + a CHECK constraint. .. seealso:: :class:`~.postgresql.ENUM` - PostgreSQL-specific type, which has additional functionality.
62598fc03346ee7daa33776d
class TestWordAnalogy(TestCase): <NEW_LINE> <INDENT> maxDiff = None <NEW_LINE> vectors = [ "boy -0.9 -0.5 0.2 -0.5 0.3", "girl -0.6 -0.6 0.1 0.4 0.1", "brother -0.1 -0.9 0.3 -0.6 0.8", "sister -0.6 -0.9 0.2 0.1 0.3", "dad -0.3 0.7 -0.6 -0.6 0.9", "mom -0.2 0.6 -0.1 0.8 0.5" ] <NEW_LINE> input_words = [ "boy girl brother sister", "boy girl dad mom" ] <NEW_LINE> def test_get_analogy_normalize_euclid(self): <NEW_LINE> <INDENT> thesaurus = WordAnalogy(self.vectors, True) <NEW_LINE> output = [] <NEW_LINE> for line in self.input_words: <NEW_LINE> <INDENT> output.append(thesaurus.get_analogy(line, True)) <NEW_LINE> <DEDENT> self.assertEquals(output, self.input_words) <NEW_LINE> <DEDENT> def test_get_analogy_no_normalize_euclid(self): <NEW_LINE> <INDENT> thesaurus = WordAnalogy(self.vectors, False) <NEW_LINE> output = [] <NEW_LINE> for line in self.input_words: <NEW_LINE> <INDENT> output.append(thesaurus.get_analogy(line, True)) <NEW_LINE> <DEDENT> self.assertEquals(output, self.input_words) <NEW_LINE> <DEDENT> def test_get_analogy_normalize_cosine(self): <NEW_LINE> <INDENT> thesaurus = WordAnalogy(self.vectors, True) <NEW_LINE> output = [] <NEW_LINE> for line in self.input_words: <NEW_LINE> <INDENT> output.append(thesaurus.get_analogy(line, False)) <NEW_LINE> <DEDENT> self.assertEquals(output, self.input_words) <NEW_LINE> <DEDENT> def test_get_analogy_no_normalize_cosine(self): <NEW_LINE> <INDENT> thesaurus = WordAnalogy(self.vectors, False) <NEW_LINE> output = [] <NEW_LINE> for line in self.input_words: <NEW_LINE> <INDENT> output.append(thesaurus.get_analogy(line, False)) <NEW_LINE> <DEDENT> self.assertEquals(output, self.input_words)
This class contains tests for the WordAnalogy class
62598fc060cbc95b06364586
class LongThrower(ThrowerAnt): <NEW_LINE> <INDENT> name = 'Long' <NEW_LINE> implemented = True <NEW_LINE> food_cost = 3 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Ant.__init__(self, armor=1) <NEW_LINE> self.min_range = 3 <NEW_LINE> self.max_range = 10
A ThrowerAnt that only throws leaves at Bees at least 4 places away.
62598fc07b180e01f3e49174
class BaseWrk2BenchmarkSuite(BaseWrkBenchmarkSuite): <NEW_LINE> <INDENT> def rules(self, out, benchmarks, bmSuiteArgs): <NEW_LINE> <INDENT> return [ mx_benchmark.StdOutRule( r"^\s*(?P<percentile>\d*[.,]?\d*)%\s+(?P<latency>\d*[.,]?\d*)ms$", { "benchmark": benchmarks[0], "bench-suite": self.benchSuiteName(), "metric.name": "sample-time", "metric.value": ("<latency>", float), "metric.unit": "ms", "metric.better": "lower", "metric.percentile": ("<percentile>", float), } ) ] + super(BaseWrk2BenchmarkSuite, self).rules(out, benchmarks, bmSuiteArgs) <NEW_LINE> <DEDENT> def getLibraryDirectory(self): <NEW_LINE> <INDENT> return mx.library("WRK2", True).get_path(True) <NEW_LINE> <DEDENT> def setupWrkCmd(self, config): <NEW_LINE> <INDENT> cmd = ["--latency"] <NEW_LINE> if "rate" in config: <NEW_LINE> <INDENT> cmd += ["--rate", str(config["rate"])] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mx.abort("rate not specified in Wrk2 configuration.") <NEW_LINE> <DEDENT> return super(BaseWrk2BenchmarkSuite, self).setupWrkCmd(config, cmd)
Base class for Wrk2 based benchmark suites. Wrk2 (https://github.com/giltene/wrk2) is a tool that can be used to measure the latency of applications offering HTTP services.
62598fc0be7bc26dc9251f81
class EIOOXM(object): <NEW_LINE> <INDENT> def __init__(self, handle, rl, bm): <NEW_LINE> <INDENT> self.handle = handle <NEW_LINE> self.ready = False <NEW_LINE> self.rl = rl <NEW_LINE> self.bm = bm <NEW_LINE> self.returned_messages = [] <NEW_LINE> <DEDENT> def on_order_failed(self, order): <NEW_LINE> <INDENT> self.returned_messages.append(order) <NEW_LINE> self.bm.on_interesting_shit() <NEW_LINE> <DEDENT> def order_passback(self, order): <NEW_LINE> <INDENT> self.returned_messages.append(order) <NEW_LINE> <DEDENT> def on_fail_status(self, failstatus): <NEW_LINE> <INDENT> self.bm.oxm[self.handle].faulty = failstatus <NEW_LINE> self.bm.on_interesting_shit() <NEW_LINE> <DEDENT> def on_sent_successfully(self, order): <NEW_LINE> <INDENT> self.bm.rlayer.osm.log_as_sent(order) <NEW_LINE> <DEDENT> def on_ready(self): <NEW_LINE> <INDENT> self.ready = True <NEW_LINE> self.bm.on_interesting_shit() <NEW_LINE> <DEDENT> def pop_messages(self): <NEW_LINE> <INDENT> msge = self.returned_messages <NEW_LINE> self.returned_messages = [] <NEW_LINE> return msge
Eunike Interface Object/Order Execution Module An interface instance given to OXMs.
62598fc0adb09d7d5dc0a7c8
class FCOSModule(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, cfg, in_channels): <NEW_LINE> <INDENT> super(FCOSModule, self).__init__() <NEW_LINE> head = FCOSHead(cfg, in_channels) <NEW_LINE> box_selector_test = make_fcos_postprocessor(cfg) <NEW_LINE> loss_evaluator = make_fcos_loss_evaluator(cfg) <NEW_LINE> self.head = head <NEW_LINE> self.box_selector_test = box_selector_test <NEW_LINE> self.loss_evaluator = loss_evaluator <NEW_LINE> self.fpn_strides = cfg.MODEL.FCOS.FPN_STRIDES <NEW_LINE> <DEDENT> def forward(self, images, features, targets=None): <NEW_LINE> <INDENT> box_cls, box_regression, centerness = self.head(features) <NEW_LINE> locations = self.compute_locations(features) <NEW_LINE> if self.training: <NEW_LINE> <INDENT> return self._forward_train( locations, box_cls, box_regression, centerness, targets ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._forward_test( locations, box_cls, box_regression, centerness, images.image_sizes ) <NEW_LINE> <DEDENT> <DEDENT> def _forward_train(self, locations, box_cls, box_regression, centerness, targets): <NEW_LINE> <INDENT> loss_box_cls, loss_box_reg, loss_centerness = self.loss_evaluator( locations, box_cls, box_regression, centerness, targets ) <NEW_LINE> losses = { "loss_cls": loss_box_cls, "loss_reg": loss_box_reg, "loss_centerness": loss_centerness } <NEW_LINE> return None, losses <NEW_LINE> <DEDENT> def _forward_test(self, locations, box_cls, box_regression, centerness, image_sizes): <NEW_LINE> <INDENT> boxes = self.box_selector_test( locations, box_cls, box_regression, centerness, image_sizes ) <NEW_LINE> return boxes, {} <NEW_LINE> <DEDENT> def compute_locations(self, features): <NEW_LINE> <INDENT> locations = [] <NEW_LINE> for level, feature in enumerate(features): <NEW_LINE> <INDENT> h, w = feature.size()[-2:] <NEW_LINE> locations_per_level = self.compute_locations_per_level( h, w, self.fpn_strides[level], feature.device ) <NEW_LINE> locations.append(locations_per_level) <NEW_LINE> 
<DEDENT> return locations <NEW_LINE> <DEDENT> def compute_locations_per_level(self, h, w, stride, device): <NEW_LINE> <INDENT> shifts_x = torch.arange( 0, w * stride, step=stride, dtype=torch.float32, device=device ) <NEW_LINE> shifts_y = torch.arange( 0, h * stride, step=stride, dtype=torch.float32, device=device ) <NEW_LINE> shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) <NEW_LINE> shift_x = shift_x.reshape(-1) <NEW_LINE> shift_y = shift_y.reshape(-1) <NEW_LINE> locations = torch.stack((shift_x, shift_y), dim=1) + stride // 2 <NEW_LINE> return locations
Module for FCOS computation. Takes feature maps from the face_backbone and returns FCOS outputs and losses. Only tested on FPN for now.
62598fc08a349b6b43686489
class ForwardingRuleMeta(object): <NEW_LINE> <INDENT> swagger_types = { 'id': 'str', 'alias': 'str' } <NEW_LINE> attribute_map = { 'id': 'id', 'alias': 'alias' } <NEW_LINE> def __init__(self, id=None, alias=None): <NEW_LINE> <INDENT> self._id = None <NEW_LINE> self._alias = None <NEW_LINE> self.discriminator = None <NEW_LINE> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> if alias is not None: <NEW_LINE> <INDENT> self.alias = alias <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> @property <NEW_LINE> def alias(self): <NEW_LINE> <INDENT> return self._alias <NEW_LINE> <DEDENT> @alias.setter <NEW_LINE> def alias(self, alias): <NEW_LINE> <INDENT> self._alias = alias <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ForwardingRuleMeta, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> 
<INDENT> if not isinstance(other, ForwardingRuleMeta): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fc0796e427e5384e9e0
class Feature(_messages.Message): <NEW_LINE> <INDENT> class TypeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> TYPE_UNSPECIFIED = 0 <NEW_LINE> FACE_DETECTION = 1 <NEW_LINE> LANDMARK_DETECTION = 2 <NEW_LINE> LOGO_DETECTION = 3 <NEW_LINE> LABEL_DETECTION = 4 <NEW_LINE> TEXT_DETECTION = 5 <NEW_LINE> DOCUMENT_TEXT_DETECTION = 6 <NEW_LINE> SAFE_SEARCH_DETECTION = 7 <NEW_LINE> IMAGE_PROPERTIES = 8 <NEW_LINE> CROP_HINTS = 9 <NEW_LINE> WEB_DETECTION = 10 <NEW_LINE> <DEDENT> maxResults = _messages.IntegerField(1, variant=_messages.Variant.INT32) <NEW_LINE> model = _messages.StringField(2) <NEW_LINE> type = _messages.EnumField('TypeValueValuesEnum', 3)
The type of Google Cloud Vision API detection to perform, and the maximum number of results to return for that type. Multiple `Feature` objects can be specified in the `features` list. Enums: TypeValueValuesEnum: The feature type. Fields: maxResults: Maximum number of results of this type. Does not apply to `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`. model: Model to use for the feature. Supported values: "builtin/stable" (the default if unset) and "builtin/latest". type: The feature type.
62598fc07cff6e4e811b5c6f
class ActivationRequestFailed(Exception): <NEW_LINE> <INDENT> def __init__(self, msgs): <NEW_LINE> <INDENT> self.msgs = msgs
Raised when activation request fails, e.g. if email could not be sent
62598fc0d8ef3951e32c7f83
@add_start_docstrings( ROBERTA_START_DOCSTRING, ROBERTA_INPUTS_DOCSTRING, ) <NEW_LINE> class RobertaForSequenceClassification(BertPreTrainedModel): <NEW_LINE> <INDENT> config_class = RobertaConfig <NEW_LINE> pretrained_model_archive_map = ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP <NEW_LINE> base_model_prefix = "roberta" <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> super().__init__(config) <NEW_LINE> self.num_labels = config.num_labels <NEW_LINE> self.roberta = RobertaModel(config) <NEW_LINE> self.classifier = RobertaClassificationHead(config) <NEW_LINE> <DEDENT> def forward( self, input_ids=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, inputs_embeds=None, labels=None, ): <NEW_LINE> <INDENT> outputs = self.roberta( input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, ) <NEW_LINE> sequence_output = outputs[0] <NEW_LINE> logits = self.classifier(sequence_output) <NEW_LINE> outputs = (logits,) + outputs[2:] <NEW_LINE> if labels is not None: <NEW_LINE> <INDENT> if self.num_labels == 1: <NEW_LINE> <INDENT> loss_fct = MSELoss() <NEW_LINE> loss = loss_fct(logits.view(-1), labels.view(-1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loss_fct = CrossEntropyLoss() <NEW_LINE> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <NEW_LINE> <DEDENT> outputs = (loss,) + outputs <NEW_LINE> <DEDENT> return outputs
**labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: Labels for computing the sequence classification/regression loss. Indices should be in ``[0, ..., config.num_labels]``. If ``config.num_labels == 1`` a regression loss is computed (Mean-Square loss), If ``config.num_labels > 1`` a classification loss is computed (Cross-Entropy). Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: Classification (or regression if config.num_labels==1) loss. **logits**: ``torch.FloatTensor`` of shape ``(batch_size, config.num_labels)`` Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) of shape ``(batch_size, sequence_length, hidden_size)``: Hidden-states of the model at the output of each layer plus the initial embedding outputs. **attentions**: (`optional`, returned when ``config.output_attentions=True``) list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. Examples:: tokenizer = RobertaTokenizer.from_pretrained('roberta-base') model = RobertaForSequenceClassification.from_pretrained('roberta-base') input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute", add_special_tokens=True)).unsqueeze(0) # Batch size 1 labels = torch.tensor([1]).unsqueeze(0) # Batch size 1 outputs = model(input_ids, labels=labels) loss, logits = outputs[:2]
62598fc0956e5f7376df57a4
class TestLineageElement(TestCase): <NEW_LINE> <INDENT> def testTaxid(self): <NEW_LINE> <INDENT> element = LE(245, 'no name', 'species') <NEW_LINE> self.assertEqual(245, element.taxid) <NEW_LINE> self.assertEqual(245, element[0]) <NEW_LINE> <DEDENT> def testName(self): <NEW_LINE> <INDENT> element = LE(245, 'no name', 'species') <NEW_LINE> self.assertEqual('no name', element.name) <NEW_LINE> self.assertEqual('no name', element[1]) <NEW_LINE> <DEDENT> def testRank(self): <NEW_LINE> <INDENT> element = LE(245, 'no name', 'species') <NEW_LINE> self.assertEqual('species', element.rank) <NEW_LINE> self.assertEqual('species', element[2])
Test the LineageElement named tuple.
62598fc0851cf427c66b8501
@export <NEW_LINE> class InterpolateAndExtrapolate: <NEW_LINE> <INDENT> def __init__(self, points, values, neighbours_to_use=None, array_valued=False): <NEW_LINE> <INDENT> self.kdtree = cKDTree(points) <NEW_LINE> self.values = values.astype(np.float) <NEW_LINE> if neighbours_to_use is None: <NEW_LINE> <INDENT> neighbours_to_use = points.shape[1] * 2 <NEW_LINE> <DEDENT> self.neighbours_to_use = neighbours_to_use <NEW_LINE> self.array_valued = array_valued <NEW_LINE> if array_valued: <NEW_LINE> <INDENT> self.n_dim = self.values.shape[-1] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, points): <NEW_LINE> <INDENT> distances, indices = self.kdtree.query(points, self.neighbours_to_use) <NEW_LINE> result = np.ones(len(points)) * float('nan') <NEW_LINE> if self.array_valued: <NEW_LINE> <INDENT> result = np.repeat(result.reshape(-1, 1), self.n_dim, axis=1) <NEW_LINE> <DEDENT> valid = (distances < float('inf')).max(axis=-1) <NEW_LINE> values = self.values[indices[valid]] <NEW_LINE> weights = 1 / np.clip(distances[valid], 1e-6, float('inf')) <NEW_LINE> if self.array_valued: <NEW_LINE> <INDENT> weights = np.repeat(weights, self.n_dim).reshape(values.shape) <NEW_LINE> <DEDENT> result[valid] = np.average(values, weights=weights, axis=-2 if self.array_valued else -1) <NEW_LINE> return result
Linearly interpolate- and extrapolate using inverse-distance weighted averaging between nearby points.
62598fc03d592f4c4edbb108
class Skill: <NEW_LINE> <INDENT> def __init__(self, name, description, magic_points_cost): <NEW_LINE> <INDENT> self.name: str = name <NEW_LINE> self.description: str = description <NEW_LINE> self.magic_points_cost: mpf = magic_points_cost <NEW_LINE> self.level: int = 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> res: str = "" <NEW_LINE> res += "Name: " + str(self.name) + "\n" <NEW_LINE> res += "Description: " + str(self.description) + "\n" <NEW_LINE> res += "Magic Points Cost: " + str(self.magic_points_cost) + "\n" <NEW_LINE> res += "Level: " + str(self.level) + "\n" <NEW_LINE> return res <NEW_LINE> <DEDENT> def level_up(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return copy.deepcopy(self)
This class contains attributes of a skill legendary creatures have.
62598fc092d797404e388c88
class spectrometer_display(gr.sync_block): <NEW_LINE> <INDENT> def __init__(self, vec_length, integration, collect): <NEW_LINE> <INDENT> gr.sync_block.__init__(self, name="spectrometer_display", in_sig=[(np.float32, int(vec_length))], out_sig=[(np.float32, int(vec_length))]) <NEW_LINE> self.vec_length = int(vec_length) <NEW_LINE> self.integration = integration <NEW_LINE> self.collect = collect <NEW_LINE> self.hot = 1 <NEW_LINE> self.cold = 0 <NEW_LINE> <DEDENT> def work(self, input_items, output_items): <NEW_LINE> <INDENT> in0 = input_items[0] <NEW_LINE> out = output_items[0] <NEW_LINE> print(np.shape(in0)) <NEW_LINE> if self.collect == "cal": <NEW_LINE> <INDENT> out[:] = in0*(self.hot)/(self.cold+5) <NEW_LINE> <DEDENT> elif self.collect == "hot": <NEW_LINE> <INDENT> out[:] = 5 <NEW_LINE> self.hot = 5 <NEW_LINE> <DEDENT> elif self.collect == "cold": <NEW_LINE> <INDENT> out[:] = 2 <NEW_LINE> self.cold = 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out[:] = in0 <NEW_LINE> <DEDENT> return len(output_items[0]) <NEW_LINE> <DEDENT> def set_parameters(self, integration, collect): <NEW_LINE> <INDENT> self.integration = integration <NEW_LINE> self.collect = collect
Spectrometer Display. A single vector stream comes into the block from the spectrometer. In: Data stream of spectra Several vectors are output: Out: Latest Spectrum - either raw or with calibration, depending on user's choice. The output streams have different possible calibration units????. Counts (linear) Counts (db) Kelvins Parameters are 1) Vector length in Channels 2) Notes about observation 3) Integration Time 4) 9) Brightness units; one of [Counts (linear), Counts(dB), Kelvins]
62598fc00fa83653e46f5131
class TestReadWriteExample(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.filename = 'export_test_0x123' <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> os.remove(self.filename) <NEW_LINE> <DEDENT> def test_read_and_write_file(self): <NEW_LINE> <INDENT> this_path = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> mm_path = os.path.join(this_path, '../docs/input.mm') <NEW_LINE> mind_map = pymm.read(mm_path) <NEW_LINE> self.assertTrue(mind_map) <NEW_LINE> self.assertTrue(mind_map.root) <NEW_LINE> pymm.write(self.filename, mind_map) <NEW_LINE> self.verify_conversion_traces_match() <NEW_LINE> <DEDENT> def verify_conversion_traces_match(self): <NEW_LINE> <INDENT> encode_trace = pymm.factory.ConversionHandler.last_encode <NEW_LINE> decode_trace = pymm.factory.ConversionHandler.last_decode <NEW_LINE> encoded = (factory.decoding_element for factory in encode_trace) <NEW_LINE> decoded = (factory.decoding_element for factory in decode_trace) <NEW_LINE> encode_count = collections.Counter(encoded) <NEW_LINE> decode_count = collections.Counter(decoded) <NEW_LINE> self.assertTrue(encode_count == decode_count) <NEW_LINE> <DEDENT> def test_write_file(self): <NEW_LINE> <INDENT> mind_map = Mindmap() <NEW_LINE> pymm.write(self.filename, mind_map)
Test full import and export functionality
62598fc05fdd1c0f98e5e1de
class LinksForm(FlaskForm): <NEW_LINE> <INDENT> links = TextAreaField(label="Links to download images from:", validators=[DataRequired()]) <NEW_LINE> submit = SubmitField(label="Submit")
Form for image downloader page
62598fc0442bda511e95c6ab
class Elipsoide(Page): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Page.__init__(self, u"Elipsoide<br><br>x<sup>2</sup>/a<sup>2</sup> + y<sup>2</sup>/b<sup>2</sup> + z<sup>2</sup>/c<sup>2</sup> = 1") <NEW_LINE> param = lambda u,v: (cos(u)*cos(v), 1.5*cos(v)*sin(u), 2*sin(v)) <NEW_LINE> elipsoide = ParametricPlot3D(param, (-pi, pi), (-pi/2,pi/2)) <NEW_LINE> col = _1(84,129,121) <NEW_LINE> elipsoide.setAmbientColor(col).setDiffuseColor(col).setSpecularColor(col) <NEW_LINE> par1 = lambda u,v: Vec3(-sin(u)*cos(v), 1.5*cos(u)*cos(v), 0) <NEW_LINE> par2 = lambda u,v: Vec3(-cos(u)*sin(v), -1.5*sin(u)*sin(v), 2*cos(v)) <NEW_LINE> tp = TangentPlane2(param,par1,par2,(0,0),_1(252,250,225)) <NEW_LINE> self.addChild(elipsoide) <NEW_LINE> self.addChild(tp) <NEW_LINE> Slider(rangep=('u', -pi,pi,0,20),func=tp.setU, duration=8000, parent=self) <NEW_LINE> Slider(rangep=('v', -pi/2,pi/2,0,20),func=tp.setV, duration=8000, parent=self)
El <b>plano tangente</b> a una superficie diferenciable <b>M<sup>2</sup></b> en uno de sus puntos <b>p</b>, <b>T<sub>P</sub>M</b>, consta de <b>TODOS</b> los vectores tangentes en <b>p</b> a curvas en la superficie que pasan por <b>p</b>.<br><br> En un elipsoide, el plano <b>T<sub>p</sub>M</b> deja a toda una vecindad del punto <b>p</b> en uno de los semiespacios que separa. <b>T<sub>p</sub>M</b> sólo toca al elipsoide en <b>p</b> y por eso los puntos del elipsoide se llaman <b>elípticos</b>. En la posición inicial del punto <b>p</b>, la interacción mueve <b>T<sub>p</sub>M</b> a lo largo de curvas en las <b>direcciones principales</b> (ver el capítulo "Curvatura y secciones normales").
62598fc04a966d76dd5ef121
class CollapseExtension(markdown.Extension): <NEW_LINE> <INDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> md.registerExtension(self) <NEW_LINE> md.parser.blockprocessors.add( 'collapse', CollapseProcessor(md.parser), '_begin')
Collapse extension for Python-Markdown.
62598fc0099cdd3c63675508
class AccTypeField(Field): <NEW_LINE> <INDENT> def to_internal_value(self, data: str) -> AccTypeEnum: <NEW_LINE> <INDENT> if not isinstance(data, str): <NEW_LINE> <INDENT> raise ValidationError("Expected a string") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return AccTypeEnum(data.lower().capitalize()) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValidationError(f"Unkown account type {data}") <NEW_LINE> <DEDENT> <DEDENT> def to_representation(self, value: AccTypeEnum) -> str: <NEW_LINE> <INDENT> return value.value
Transforms a string into a value in AccTypeEnum
62598fc0ec188e330fdf8ae0
class ServiceCommandFailedException(ShellException): <NEW_LINE> <INDENT> @property <NEW_LINE> def status_code(self): <NEW_LINE> <INDENT> return 500 <NEW_LINE> <DEDENT> def __init__(self, status): <NEW_LINE> <INDENT> self.status = status <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Service command failed with status {0}".format(self.status)
Should be raised when one of the service command fails
62598fc0cc40096d6161a2ff
class Aligner(object): <NEW_LINE> <INDENT> def __init__(self, model_path): <NEW_LINE> <INDENT> if model_path is None: <NEW_LINE> <INDENT> model_path = '../model/seeta_fa_v1.1.bin' <NEW_LINE> <DEDENT> assert os.path.isfile(model_path) is True, 'No such file' <NEW_LINE> byte_model_path = model_path.encode('utf-8') <NEW_LINE> self.aligner = align_lib.get_face_aligner(byte_model_path) <NEW_LINE> <DEDENT> def align(self, image=None, face=None): <NEW_LINE> <INDENT> assert image is not None, 'Image cannot be None' <NEW_LINE> assert face is not None, 'Face cannot be None' <NEW_LINE> if not isinstance(image, np.ndarray): <NEW_LINE> <INDENT> image = np.array(image) <NEW_LINE> <DEDENT> assert len(image.shape) is 2, 'Input is not a gray scale image!' <NEW_LINE> image_data = _Image() <NEW_LINE> image_data.height, image_data.width = image.shape <NEW_LINE> image_data.channels = 1 <NEW_LINE> image_data.data = image.ctypes.data <NEW_LINE> face_data = _Face() <NEW_LINE> face_data.left = face.left <NEW_LINE> face_data.top = face.top <NEW_LINE> face_data.right = face.right <NEW_LINE> face_data.bottom = face.bottom <NEW_LINE> face_data.score = face.score <NEW_LINE> marks_data = align_lib.align(self.aligner, byref(image_data), byref(face_data)) <NEW_LINE> landmarks = [(marks_data.contents.x[i], marks_data.contents.y[i]) for i in range(5)] <NEW_LINE> align_lib.free_landmarks(marks_data) <NEW_LINE> return landmarks <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> align_lib.free_aligner(self.aligner)
Class for Face Alignment
62598fc0656771135c4898bc
class CommandTask(Task): <NEW_LINE> <INDENT> def __init__(self, command, data=None, *args, **kwargs): <NEW_LINE> <INDENT> Task.__init__(self, *args, **kwargs) <NEW_LINE> self.command = command <NEW_LINE> self.data = data <NEW_LINE> <DEDENT> def handle_worker(self, worker): <NEW_LINE> <INDENT> logging.debug("SEND_COMMAND: ", self.command, self.data) <NEW_LINE> worker.send_command(self.command, self.data)
A task to run the given command
62598fc099fddb7c1ca62f13
class StocksWatchers(models.Model): <NEW_LINE> <INDENT> pass
Design for watching specific stocks
62598fc0aad79263cf42ea22
class VersionConfig(TemplateView): <NEW_LINE> <INDENT> permission_required = 'config.can_manage_config' <NEW_LINE> template_name = 'config/version.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(VersionConfig, self).get_context_data(**kwargs) <NEW_LINE> context['versions'] = [('OpenSlides', get_version())] <NEW_LINE> for plugin in settings.INSTALLED_PLUGINS: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mod = import_module(plugin) <NEW_LINE> plugin_version = get_version(mod.VERSION) <NEW_LINE> <DEDENT> except (ImportError, AttributeError, AssertionError): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> plugin_name = mod.NAME <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> plugin_name = mod.__name__.split('.')[0] <NEW_LINE> <DEDENT> context['versions'].append((plugin_name, plugin_version)) <NEW_LINE> <DEDENT> return context
Show version infos.
62598fc023849d37ff851301
class FriendlyLink(AbstractModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = _(u"系统-友情链接列表") <NEW_LINE> verbose_name = _(u"友情链接") <NEW_LINE> <DEDENT> name = models.CharField(_(u'名称'), max_length=100) <NEW_LINE> web_site = models.URLField(_(u'网站地址'), max_length=200) <NEW_LINE> is_active = models.BooleanField(_(u'是否激活'), default=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Friend Link Model
62598fc07d847024c075c60a
class GenreRetrieveUpdate(generics.RetrieveUpdateAPIView): <NEW_LINE> <INDENT> queryset = Genre.objects.all() <NEW_LINE> serializer_class = GenreSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated, )
Retrieve and update genre information
62598fc05fdd1c0f98e5e1e1
class ClouderBase(models.Model): <NEW_LINE> <INDENT> _inherit = 'clouder.base' <NEW_LINE> @property <NEW_LINE> def magento_port(self): <NEW_LINE> <INDENT> return self.service_id.childs['exec'] and self.service_id.childs['exec'].ports['web']['hostport'] <NEW_LINE> <DEDENT> @api.multi <NEW_LINE> def deploy_database(self): <NEW_LINE> <INDENT> if self.application_id.type_id.name == 'magento': <NEW_LINE> <INDENT> if self.build == 'build': <NEW_LINE> <INDENT> dbname = self.service_id.name.replace('-', '_') <NEW_LINE> self.service_id.database.execute([ "mysql -u root -p'" + self.service_id.database.root_password + "' -se \"create database " + dbname + ";\"" ]) <NEW_LINE> self.service_id.database.execute([ "mysql -u root -p'" + self.service_id.database.root_password + "' -se \"create user '" + self.service_id.db_user + "'@'%' IDENTIFIED BY '" + self.service_id.childs['data'] .options['db_password']['value'] + "';\"" ]) <NEW_LINE> self.service_id.database.execute([ "mysql -u root -p'" + self.service_id.database.root_password + "' -se \"grant all on " + dbname + ".* to '" + self.service_id.db_user + "';\"" ]) <NEW_LINE> self.service_id.database.execute([ "mysql -u root -p'" + self.service_id.database.root_password + "' -se \"FLUSH PRIVILEGES;\"" ]) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return super(ClouderBase, self).deploy_database()
Add methods to manage the magento base specificities.
62598fc0656771135c4898be
class ImageClass(object): <NEW_LINE> <INDENT> def __init__(self, name, image_paths): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.image_paths = image_paths <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name + ', ' + str(len(self.image_paths)) + ' images' <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.image_paths)
Stores the paths to images for a given class
62598fc0a219f33f346c6a56
class HistogramPitchSpace(Histogram): <NEW_LINE> <INDENT> axesClasses = _mergeDicts(Histogram.axesClasses, {'x': axis.PitchSpaceAxis}) <NEW_LINE> def __init__(self, streamObj=None, *args, **keywords): <NEW_LINE> <INDENT> super().__init__(streamObj, *args, **keywords) <NEW_LINE> self.axisX.showEnharmonic = False <NEW_LINE> if 'figureSize' not in keywords: <NEW_LINE> <INDENT> self.figureSize = (10, 6) <NEW_LINE> <DEDENT> if 'title' not in keywords: <NEW_LINE> <INDENT> self.title = 'Pitch Histogram'
A histogram of pitch space. >>> s = corpus.parse('bach/bwv324.xml') #_DOCS_HIDE >>> p = graph.plot.HistogramPitchSpace(s, doneAction=None) #_DOCS_HIDE >>> #_DOCS_SHOW s = corpus.parse('bach/bwv57.8') >>> #_DOCS_SHOW p = graph.plot.HistogramPitchSpace(s) >>> p.id 'histogram-pitchSpace-count' >>> p.run() # with defaults and proper configuration, will open graph .. image:: images/HistogramPitchSpace.* :width: 600
62598fc07cff6e4e811b5c73
class EncodedGeometryApiField(ApiField): <NEW_LINE> <INDENT> dehydrated_type = 'geometry' <NEW_LINE> help_text = 'Geometry data.' <NEW_LINE> def dehydrate(self, obj): <NEW_LINE> <INDENT> return self.convert(super(EncodedGeometryApiField, self).dehydrate(obj)) <NEW_LINE> <DEDENT> def convert(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> encoder = cgpolyencode.GPolyEncoder() <NEW_LINE> geom = GEOSGeometry(value) <NEW_LINE> geom_encoded = {} <NEW_LINE> for key, part in enumerate(geom): <NEW_LINE> <INDENT> geom_encoded[key] = encoder.encode(part[0]) <NEW_LINE> <DEDENT> return geom_encoded
Custom ApiField for dealing with data from GeometryFields (by serializing them as Encoded Geometries).
62598fc0dc8b845886d5380a
class Delete(base.DeleteCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.AddDeviceResourceFlags(parser, 'from which to delete credentials', positional=False) <NEW_LINE> flags.GetIndexFlag('credential', 'to delete').AddToParser(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> client = devices.DevicesClient() <NEW_LINE> device_ref = util.ParseDevice( args.device, registry=args.registry, region=args.region) <NEW_LINE> credentials = client.Get(device_ref).credentials <NEW_LINE> try: <NEW_LINE> <INDENT> credential = credentials.pop(args.index) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise util.BadCredentialIndexError(device_ref.Name(), credentials, args.index) <NEW_LINE> <DEDENT> console_io.PromptContinue( message='This will delete the following credential: {}'.format( credential), cancel_on_no=True) <NEW_LINE> return client.Patch(device_ref, credentials=credentials)
Delete a credential from a device.
62598fc03d592f4c4edbb10d
class RedisStore(TokenStore): <NEW_LINE> <INDENT> def __init__(self, redis_url: Optional[str] = None): <NEW_LINE> <INDENT> import redis <NEW_LINE> if redis_url: <NEW_LINE> <INDENT> self.client = redis.Redis.from_url(redis_url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.client = redis.Redis() <NEW_LINE> <DEDENT> <DEDENT> def set(self, key: str, value: str, expire: float = FEISHU_TOKEN_EXPIRE_TIME): <NEW_LINE> <INDENT> expire -= FEISHU_TOKEN_UPDATE_TIME <NEW_LINE> self.client.setex(key, value, expire) <NEW_LINE> <DEDENT> def get(self, key: str): <NEW_LINE> <INDENT> self.client.get(key)
Redis存储
62598fc03346ee7daa337770
class ProblemaNoAcotado(Exception): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "El problema es no acotado."
Esta clase sirve para menejar el caso en el que el problema es no acotado.
62598fc092d797404e388c8a
class ListAssignmentsPager: <NEW_LINE> <INDENT> def __init__( self, method: Callable[..., reservation.ListAssignmentsResponse], request: reservation.ListAssignmentsRequest, response: reservation.ListAssignmentsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self._request = reservation.ListAssignmentsRequest(request) <NEW_LINE> self._response = response <NEW_LINE> self._metadata = metadata <NEW_LINE> <DEDENT> def __getattr__(self, name: str) -> Any: <NEW_LINE> <INDENT> return getattr(self._response, name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pages(self) -> Iterator[reservation.ListAssignmentsResponse]: <NEW_LINE> <INDENT> yield self._response <NEW_LINE> while self._response.next_page_token: <NEW_LINE> <INDENT> self._request.page_token = self._response.next_page_token <NEW_LINE> self._response = self._method(self._request, metadata=self._metadata) <NEW_LINE> yield self._response <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self) -> Iterator[reservation.Assignment]: <NEW_LINE> <INDENT> for page in self.pages: <NEW_LINE> <INDENT> yield from page.assignments <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
A pager for iterating through ``list_assignments`` requests. This class thinly wraps an initial :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` object, and provides an ``__iter__`` method to iterate through its ``assignments`` field. If there are more pages, the ``__iter__`` method will make additional ``ListAssignments`` requests and continue to iterate through the ``assignments`` field on the corresponding responses. All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup.
62598fc097e22403b383b159
class Tag(QObject, object): <NEW_LINE> <INDENT> value_changed = pyqtSignal() <NEW_LINE> def __init__( self, name: str, address: int, plc_datatype: int = None, datatype: type = float ) -> None: <NEW_LINE> <INDENT> super(Tag, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.address = address <NEW_LINE> self.datatype = datatype <NEW_LINE> self.dirty = False <NEW_LINE> self.plc_datatype = plc_datatype <NEW_LINE> self._raw_value: Any = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self) -> Any: <NEW_LINE> <INDENT> if self.raw_value is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return self.datatype(self._raw_value) <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value: Any) -> None: <NEW_LINE> <INDENT> self._raw_value = value <NEW_LINE> self.dirty = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw_value(self) -> Any: <NEW_LINE> <INDENT> return self._raw_value <NEW_LINE> <DEDENT> @raw_value.setter <NEW_LINE> def raw_value(self, value: Any) -> None: <NEW_LINE> <INDENT> self._raw_value = value <NEW_LINE> self.value_changed.emit()
An instance of Tag represents a buffered connection between GUI and PLC. :type name: str :ivar name: tag name :ivar address: PLC memory address :type datatype: type :ivar datatype: python datatype :type dirty: bool :ivar dirty: is set if the tag value has been changed :ivar plc_datatype: identifier for the datatype the data is stored in the PLC :ivar raw_value: the raw PLC value
62598fc07047854f4633f624