code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Token:
    """A simple token stored in the cache."""

    _well_formed = re.compile('^[a-z0-9-]+$')

    def __init__(self, token=None, data=True):
        # Generate a random token when the caller does not supply one.
        self.token = str(uuid.uuid4()) if token is None else token
        self.data = data

    def cache_key(self):
        """Return the cache key under which this token is stored."""
        assert self.token, 'No token value set.'
        return '%s:token:%s' % (settings.CACHE_PREFIX, self.token)

    def save(self, time=60):
        """Persist the token's data in the cache for *time* seconds."""
        cache.set(self.cache_key(), self.data, time)

    def well_formed(self):
        """Match the token against the allowed character set."""
        return self._well_formed.match(self.token)

    @classmethod
    def valid(cls, key, data=True):
        """Return whether *key* exists in the cache with matching data."""
        token = cls(key)
        if not token.well_formed():
            return False
        cached = cache.get(token.cache_key())
        if cached is None:
            return False
        return cached == data

    @classmethod
    def pop(cls, key, data=True):
        """Like :meth:`valid`, but also consume the token on success."""
        token = cls(key)
        if not token.well_formed():
            return False
        cached = cache.get(token.cache_key())
        if cached is not None and cached == data:
            cache.delete(token.cache_key())
            return True
        return False
A simple token stored in the cache.
6259900dbf627c535bcb20bc
class CloserServerRaet(deeding.ParamDeed):
    """Close the server socket connection held in the ``connection`` share."""

    Ioinits = odict(
        connection=odict(ipath='connection', ival=None))

    def action(self, connection, **kwa):
        """Close the wrapped connection, if one is present."""
        conn = connection.value
        if conn:
            conn.close()
        return None
CloserServerRaet closes the server socket connection. Inherited attributes: .name is the actor name string; .store is the data store ref; .ioinits is a dict of io init data for initio; ._parametric is a flag telling initio not to create attributes.
6259900d5166f23b2e243fe2
class CreateProbe(ProbeCommand):
    """Create a probe port and interface, then plug it in."""

    log = logging.getLogger(__name__ + '.CreateProbe')

    def get_parser(self, prog_name):
        """Extend the base parser with the network id and device owner."""
        parser = super(CreateProbe, self).get_parser(prog_name)
        parser.add_argument(
            'id',
            metavar='network_id',
            help=_('ID of network to probe'))
        parser.add_argument(
            '--device-owner',
            default='network',
            choices=['network', 'compute'],
            help=_('Owner type of the device: network/compute'))
        return parser

    def run(self, parsed_args):
        """Create the probe via the debug agent and log the new port id."""
        self.log.debug('run(%s)' % parsed_args)
        agent = self.get_debug_agent()
        new_port = agent.create_probe(parsed_args.id, parsed_args.device_owner)
        self.log.info(_('Probe created : %s '), new_port.id)
Create probe port and interface, then plug it in.
6259900d507cdc57c63a59b0
class UnicodeMixin(object):
    """Python 2 and 3 string representation support."""

    if six.PY3:
        def __str__(self):
            # On Python 3, __str__ simply delegates to __unicode__.
            return self.__unicode__()
    else:
        def __str__(self):
            # On Python 2, encode the unicode form as UTF-8 bytes.
            return unicode(self).encode('utf-8')
Python 2 and 3 string representation support.
6259900dbf627c535bcb20c2
class LIP(object):
    """Label information provider implementation."""

    @staticmethod
    def nak(message=None):
        """Build a negative acknowledgement (application error) response.

        ``message`` is the parsed inbound message whose control id (MSH_10)
        is echoed back in MSA_2. It defaults to ``None`` so a NAK can be
        produced even when the inbound message could not be parsed.
        """
        response = Message("ACK")
        response.MSH.MSH_9 = "ACK"
        response.MSA.MSA_1 = "AE"
        if message is not None:
            response.MSA.MSA_2 = message.MSH.MSH_10
        response.MSA.MSA_3 = "Message type not supported"
        return response

    @staticmethod
    def reply(message):
        """Handle an inbound er7 message and return the MLLP-framed response.

        BUG FIX: the parse-failure branch previously called ``LIP.nak()``
        with no argument although ``nak`` required one, raising a TypeError
        on every malformed message; ``nak`` now accepts ``message=None``.
        """
        print("Received by LIP", repr(message))
        try:
            m = parse_message(message, find_groups=False)
        except Exception:  # malformed input: answer with a NAK
            print('parsing failed', repr(message))
            response = LIP.nak()
        else:
            print("Message type:", m.MSH.message_type.to_er7())
            print("Message content:", repr(m.to_er7()))
            if m.MSH.MSH_9.MSH_9_3.to_er7() == 'QBP_Q11':
                # Query for specimen labeling instructions: build an RSP_K11.
                response = Message("RSP_K11")
                response.MSH.MSH_9 = "RSP^K11^RSP_K11"
                response.MSA = "MSA|AA"
                response.MSA.MSA_2 = m.MSH.MSH_10
                qak = Segment("QAK")
                qak.qak_1 = m.QPD.QPD_2
                qak.qak_2 = "OK"
                qak.qak_3 = "Q22^Specimen Labeling Instructions^IHE_LABTF"
                qak.qak_4 = "1"
                response.add(qak)
                response.QPD = m.QPD
                response.PID.PID_1 = '1'
                response.PID.PID_5.PID_5_1 = 'PATIENT_SURNAME'
                response.PID.PID_5.PID_5_2 = 'PATIENT_NAME'
                response.PID.PID_6 = "19800101"
                response.PID.PID_7 = "F"
                spm = Segment("SPM")
                obr = Segment("OBR")
                spm.SPM_1 = '1'
                spm.SPM_2 = "12345"
                obr.OBR_4 = "ORDER^DESCRIPTION"
                response.add(spm)
                response.add(obr)
            else:
                # Parsed but unsupported message type.
                response = LIP.nak(m)
        return response.to_mllp()
Label information provider implementation
6259900d925a0f43d25e8c53
class Gauge(AbstractMetric):
    """A Gauge metric that can be set to a value.

    Refer to default_stats_collector._GaugeMetric and DefaultStatsCollector
    to see how StatsCollector handles the field definitions and values.
    """

    def __init__(self, name, value_type, fields=(), docstring=None, units=None):
        metadata = rdf_stats.MetricMetadata(
            varname=name,
            metric_type=rdf_stats.MetricMetadata.MetricType.GAUGE,
            value_type=stats_utils.MetricValueTypeFromPythonType(value_type),
            fields_defs=stats_utils.FieldDefinitionProtosFromTuples(fields),
            docstring=docstring,
            units=units)
        super().__init__(metadata)

    def SetValue(self, value, fields=None):
        """Set the gauge to a concrete value."""
        collector = stats_collector_instance.Get()
        collector.SetGaugeValue(self.name, value, fields=fields)

    def SetCallback(self, callback, fields=None):
        """Register a callable that supplies the gauge's value on demand."""
        collector = stats_collector_instance.Get()
        collector.SetGaugeCallback(self.name, callback, fields=fields)
A Gauge metric that can be set to a value. Refer to default_stats_collector._GaugeMetric and DefaultStatsCollector to see how StatsCollector handles the field definitions and values.
6259900d21a7993f00c66b90
class Article2Tag(models.Model):
    """Many-to-many join table between articles and tags."""

    nid = models.AutoField(primary_key=True)
    article = models.ForeignKey(
        verbose_name='文章', to='Article', to_field='nid',
        on_delete=models.CASCADE)
    tag = models.ForeignKey(
        verbose_name='标签', to='Tag', to_field='nid',
        on_delete=models.CASCADE)

    class Meta:
        # Each (article, tag) pairing may appear only once.
        unique_together = [
            ('article', 'tag'),
        ]

    def __str__(self):
        return '---'.join((self.article.title, self.tag.title))
文章和标签多对多表
6259900d21a7993f00c66b92
class BatchDeleteVideosRequest(JDCloudRequest):
    """Batch-delete videos and all data derived from them (transcoding task
    info, transcoded streams, and the related files in cloud storage).
    """

    def __init__(self, parameters, header=None, version="v1"):
        # POST /videos:batchDelete; `parameters` carries the video id list.
        super(BatchDeleteVideosRequest, self).__init__(
            '/videos:batchDelete', 'POST', header, version)
        self.parameters = parameters
批量删除视频,调用该接口会同时删除与指定视频相关的所有信息,包括转码任务信息、转码流数据等,同时清除云存储中相关文件资源。
6259900d15fb5d323ce7f959
class Block(Base):
    """A named section of an Nginx config such as 'http', 'server' or
    'location', holding nested sections and key/value options.
    """

    def __init__(self, name, *sections, **options):
        self.name = name
        # Ordered child sections and keyword options attached to this block.
        self.sections = AttrList(self)
        self.options = AttrDict(self)
        self._set_directives(*sections, **options)

    @property
    def _directives(self):
        # All renderable children: dumped options first, then sections.
        # Filter out `self` to avoid infinite recursion in __repr__.
        dirs = self._dump_options() + list(self.sections)
        return [directive for directive in dirs if directive is not self]

    def _set_directives(self, *sections, **options):
        # Append nested blocks and set options as attributes.
        for section in sections:
            self.sections.append(section)
        for key, value in six.iteritems(options):
            setattr(self.options, key, value)

    def _build_options(self, key, value):
        """Convert one (key, value) pair into the appropriate option object."""
        if isinstance(value, Block):
            option = value
        elif isinstance(value, list):
            option = KeyMultiValueOption(key, value=value)
        elif value is None or value == '':
            # Bare flag directive with no value.
            option = KeyOption(key)
        else:
            if isinstance(value, str) and ' ' in value:
                # A space-separated string becomes a multi-value option.
                option = KeyMultiValueOption(key, value=value.split())
            else:
                option = KeyValueOption(key, value=value)
        return option

    def _dump_options(self):
        return [self._build_options(key, value)
                for key, value in six.iteritems(self.options)]

    def __repr__(self):
        directives = self._directives
        # Propagate indentation one level deeper before rendering children.
        for directive in directives:
            if directive is not self:
                directive._indent_level = self._indent_level + 1
        return '\n{indent}{name}{{{directives}\n{indent}}}'.format(
            name='{0} '.format(self.name),
            directives=''.join([repr(e) for e in directives]),
            indent=self._get_indent(),
        )
A block represent a named section of an Nginx config, such as 'http', 'server' or 'location' Using this object is as simple as providing a name and any sections or options, which can be other Block objects or option objects. Example:: >>> from nginx.config.api import Block >>> http = Block('http', option='value') >>> print(http) http { option value; }
6259900d507cdc57c63a59ba
class TChannelThriftModule(types.ModuleType):
    """Wraps the ``thriftrw``-generated module.

    Wraps service classes with ``Service`` and exposes everything else from
    the module as-is.
    """

    def __init__(self, service, module, hostport=None):
        self.service = service
        self.module = module
        self.hostport = hostport
        # Expose each generated service class wrapped in a Service proxy.
        for service_cls in self.module.services:
            spec_name = service_cls.service_spec.name
            setattr(self, spec_name, Service(service_cls, self))

    def __getattr__(self, name):
        # Anything not defined here falls through to the wrapped module.
        return getattr(self.module, name)

    def __str__(self):
        return 'TChannelThriftModule(%s, %s)' % (self.service, self.module)

    __repr__ = __str__
Wraps the ``thriftrw``-generated module. Wraps service classes with ``Service`` and exposes everything else from the module as-is.
6259900d21a7993f00c66b96
class DGTiling(BasicTilingBottomLeft):
    """Tiler for the DG tiling scheme: a lat/long subdivision of WGS84."""

    def __init__(self):
        # Level 0 is a square spanning latitude -90..270, so half of it is
        # undefined; clamp the usable bounds back to the WGS84 range.
        super(DGTiling, self).__init__(-180, -90, 180, 270)
        self.bounds = CoordsBbox(-180., -90., 180., 90.)

    def children(self, *tile):
        """Return child tiles, special-casing the half-defined level 0."""
        if len(tile) == 1:
            # Accept either children(x, y, z) or children((x, y, z)).
            tile = tile[0]
        x, y, z = tile
        if z == 0:
            # Only the two bottom tiles of level 1 fall inside real data.
            return [Tile(0, 0, 1), Tile(1, 0, 1)]
        return super(DGTiling, self).children(x, y, z)
Tiler for the DG tiling scheme. The DG tiling scheme is a subdivision of the WGS84 ellipsoid. Long/lat coordinates are directly mapped to the rectangle [-180, 180] and [-90, 90] in this scheme. In practice, level 0 is a square whose latitude goes from -90 to 270, so half of this square is undefined! Because of this, the tiling really starts at level 1, with the bottom two tiles being valid. The children method handles this oddity for you.
6259900d925a0f43d25e8c5b
class RegistrationForm(FlaskForm):
    """Form for users to create a new account."""

    email = StringField('Email', validators=[DataRequired(), Email()])
    username = StringField('Username', validators=[DataRequired()])
    first_name = StringField('First Name', validators=[DataRequired()])
    last_name = StringField('Last Name', validators=[DataRequired()])
    phone_number = StringField('Phone Number (Optional)')
    password = PasswordField(
        'Password',
        validators=[DataRequired(), EqualTo('confirm_password')])
    confirm_password = PasswordField('Confirm Password')
    submit = SubmitField('Register')

    def validate_email(self, field):
        # Reject addresses already registered.
        if User.query.filter_by(email=field.data).first() is not None:
            raise ValidationError('Email is already in use.')

    def validate_username(self, field):
        # Reject usernames already registered.
        if User.query.filter_by(username=field.data).first() is not None:
            raise ValidationError('Username is already in use.')
Form for users to create new account
6259900d3cc13d1c6d466365
class Vault():
    """Read and write data using the Ansible vault."""

    def __init__(self, password):
        self.password = password
        pass_bytes = to_bytes(password, encoding='utf-8', errors='strict')
        secrets = [('password', VaultSecret(_bytes=pass_bytes))]
        self.vault = VaultLib(secrets=secrets)

    def dump(self, data, stream=None):
        """Encrypt data; write to *stream* if given, else return ciphertext."""
        encrypted = self.vault.encrypt(data)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted

    def load(self, stream):
        """Decrypt previously vaulted data."""
        return self.vault.decrypt(stream)

    def dump_as_yaml(self, obj, stream=None):
        """Serialize *obj* as YAML, then encrypt it."""
        data = yaml.dump(obj,
                         allow_unicode=True,
                         default_flow_style=False,
                         Dumper=AnsibleDumper)
        return self.dump(data, stream)

    def dump_as_yaml_to_tempfile(self, obj):
        """Encrypt *obj* as YAML into a named temp file; return its path."""
        with tempfile.NamedTemporaryFile(delete=False, suffix='.yaml') as data_temp:
            self.dump_as_yaml(obj, data_temp)
        data_temp.close()
        return data_temp.name
Read and write data using the Ansible vault.
6259900d5166f23b2e243ff0
class Update(LoginRequiredMixin, StaticContextMixin, generic.UpdateView):
    """Edit a Company."""

    form_class = forms.Company
    model = models.Company
    template_name = 'inventory/form.html'
    static_context = {
        'page_title': 'Edit company:',
    }

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['href_cancel'] = self.object.get_absolute_url()
        return ctx

    def get_queryset(self):
        # Restrict edits to companies belonging to the requesting user.
        return self.request.user.setting_set.get().companies

    def form_valid(self, form):
        messages.success(self.request, 'Changes Saved!')
        return super().form_valid(form)
Edit a Company.
6259900d507cdc57c63a59c0
class PythonScript(Expression):
    """Expression node representing a Python script."""
    pass
Represents a Python script.
6259900d627d3e7fe0e07abc
class AddDomainTypeAssignmentRuleRequest(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.

    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    # None means unknown additional properties are not type-checked.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        # Maps attribute name -> tuple of accepted types.
        return {
            'domain_type_id': (str,),
            'community_id': (str,),
        }

    @cached_property
    def discriminator():
        # This model is not polymorphic.
        return None

    # Maps python attribute name -> JSON key in the API definition.
    attribute_map = {
        'domain_type_id': 'domainTypeId',
        'community_id': 'communityId',
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes always permitted on instances.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, domain_type_id, *args, **kwargs):
        """Create the request model.

        Args:
            domain_type_id (str): the domain type to assign (required).

        Keyword Args:
            community_id (str): optional community scope.
            _check_type / _spec_property_naming / _path_to_item /
            _configuration / _visited_composed_classes: generator-internal
            deserialization controls.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            # Positional arguments beyond domain_type_id are rejected.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.domain_type_id = domain_type_id
        for var_name, var_value in kwargs.items():
            # Optionally discard keys not declared in attribute_map.
            if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None:
                continue
            setattr(self, var_name, var_value)
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
6259900d21a7993f00c66b9e
class MatchCriteria:
    """Criteria used to lazily generate an AT-SPI collection matchRule.

    The rule itself is only built when needed; this object lets the
    StructuralNavigationObject creator specify just the few criteria that
    matter for a given search.
    """

    def __init__(self,
                 collection,
                 states=None,
                 matchStates=None,
                 objAttrs=None,
                 matchObjAttrs=None,
                 roles=None,
                 matchRoles=None,
                 interfaces=None,
                 matchInterfaces=None,
                 invert=False,
                 applyPredicate=False):
        # BUG FIX: the list parameters previously used mutable default
        # arguments ([]), which are shared across all calls; None sentinels
        # with fresh lists are the safe equivalent.
        self.collection = collection
        self.matchStates = matchStates or collection.MATCH_ANY
        self.objAttrs = [] if objAttrs is None else objAttrs
        self.matchObjAttrs = matchObjAttrs or collection.MATCH_ANY
        self.roles = [] if roles is None else roles
        self.matchRoles = matchRoles or collection.MATCH_ANY
        self.interfaces = [] if interfaces is None else interfaces
        self.matchInterfaces = matchInterfaces or collection.MATCH_ALL
        self.invert = invert
        self.applyPredicate = applyPredicate
        self.states = pyatspi.StateSet()
        for state in ([] if states is None else states):
            self.states.add(state)
Contains the criteria which will be used to generate a collection matchRule. We don't want to create the rule until we need it and are ready to use it. In addition, the creation of an AT-SPI match rule requires you specify quite a few things (see the __init__), most of which are irrelevant to the search at hand. This class makes it possible for the StructuralNavigationObject creator to just specify the few criteria that actually matter.
6259900d462c4b4f79dbc62b
class EsphomeEnumMapper:
    """Helper class to convert between hass and esphome enum values."""

    def __init__(self, func: Callable[[], dict[int, str]]) -> None:
        # func lazily supplies the esphome-int -> hass-string mapping.
        self._func = func

    def from_esphome(self, value: int) -> str:
        """Map an esphome integer value to its hass string."""
        mapping = self._func()
        return mapping[value]

    def from_hass(self, value: str) -> int:
        """Map a hass string back to the esphome integer value."""
        inverted = {name: code for code, name in self._func().items()}
        return inverted[value]
Helper class to convert between hass and esphome enum values.
6259900d21a7993f00c66ba0
class PingFailed(PingError, AssertionError):
    """Raised when the ping timeout expires before reaching the expected
    message count."""
    # Message template; presumably formatted with the observed counts by the
    # PingError base class — confirm against its implementation.
    message = ("timeout of {timeout} seconds expired after counting only "
               "{count} out of expected {expected_count} ICMP messages of "
               "type {message_type!r}")
Raised when ping timeout expires before reaching expected message count
6259900d56b00c62f0fb34e3
class Gradient_Descent(BaseAlgorithm):
    """Implement a gradient descent algorithm over the search space."""

    def __init__(self, space, learning_rate=1., dx_tolerance=1e-7):
        super(Gradient_Descent, self).__init__(
            space, learning_rate=learning_rate, dx_tolerance=dx_tolerance)
        self.has_observed_once = False
        self.current_point = None
        # Infinite gradient until the first observation keeps is_done False.
        self.gradient = numpy.array([numpy.inf])

    def suggest(self, num=1):
        """Suggest the next point to evaluate; only num == 1 is supported."""
        assert num == 1
        if not self.has_observed_once:
            # No gradient information yet: start from a random sample.
            return self.space.sample(1)
        # Take one in-place gradient step from the last observed point.
        self.current_point -= self.learning_rate * self.gradient
        return [self.current_point]

    def observe(self, points, results):
        """Record the most recently evaluated point and its gradient."""
        self.current_point = numpy.asarray(points[-1])
        self.gradient = numpy.asarray(results[-1]['gradient'])
        self.has_observed_once = True

    @property
    def is_done(self):
        # Converged once the step magnitude drops below the tolerance.
        dx = self.learning_rate * numpy.sqrt(self.gradient.dot(self.gradient))
        return dx <= self.dx_tolerance
Implement a gradient descent algorithm.
6259900d15fb5d323ce7f965
class GP17iBack(PrintedForm):
    """Set up and print the back of a GP17(1) form."""

    NAME = "GP17(1) Back"
    data = None
    _bg_pixmap = None

    def __init__(self, parent=None):
        PrintedForm.__init__(self, parent)
        self.rects = RECTS

    @classmethod
    def is_active(cls):
        return False

    def set_data(self, data):
        self.data = data

    @property
    def BACKGROUND_IMAGE(self):
        # Lazily load and cache the scanned form background.
        if self._bg_pixmap is None:
            image_path = os.path.join(
                localsettings.RESOURCE_DIR, "gp17-1", "back.png")
            self._bg_pixmap = QtGui.QPixmap(image_path)
        return self._bg_pixmap

    def print_(self):
        """Apply the configured offsets/scaling, print, then fill fields."""
        self.set_offset(
            gp17config.GP17iback_OFFSET_LEFT,
            gp17config.GP17iback_OFFSET_TOP)
        self.set_scaling(
            gp17config.GP17iback_SCALE_X,
            gp17config.GP17iback_SCALE_Y)
        painter = PrintedForm.print_(self)
        self._fill(painter)

    def _fill(self, painter):
        # Nothing to draw without form data.
        if self.data is None:
            return
a class to set up and print a GP17 (tooth specific version)
6259900d5166f23b2e243ff8
class Cronometro:
    """A simple stopwatch tracking a start time, an end time and a
    stopped/running status; elapsed time is their difference.

    BUG FIX: the original used ``time.clock()``, which was removed in
    Python 3.8; ``time.perf_counter()`` is the documented replacement for
    interval measurement.
    """

    def __init__(self):
        self.__tempoInicial = 0
        self.__tempoFinal = 0
        self.__parado = True

    def iniciar(self):
        """Start (or restart) the stopwatch."""
        self.__tempoInicial = time.perf_counter()
        self.__parado = False

    def parar(self):
        """Stop the stopwatch, freezing the elapsed time."""
        self.__tempoFinal = time.perf_counter()
        self.__parado = True

    def zerar(self):
        """Reset both timestamps to zero and stop."""
        self.__tempoInicial = 0
        self.__tempoFinal = 0
        self.__parado = True

    def exibir(self):
        """Return elapsed seconds; updates live while running."""
        if not self.__parado:
            self.__tempoFinal = time.perf_counter()
        return self.__tempoFinal - self.__tempoInicial

    def __str__(self):
        return str(self.exibir())

    def __repr__(self):
        if self.__parado:
            status = " Está parado"
        else:
            self.__tempoFinal = time.perf_counter()
            status = " Está rodando"
        return str(self.__tempoFinal - self.__tempoInicial) + status

    def getTempoFinal(self):
        return self.__tempoFinal

    def getTempoInicial(self):
        return self.__tempoInicial

    def getStatus(self):
        return self.__parado
Essa classe inicializa três atributos: tempo inicial, tempo final e status do cronometro. A diferença entre os atributos para o tempo resultam no tempo atual.
6259900d627d3e7fe0e07ac2
@urls.register
class ReachabilityTest(generic.View):
    """API for BSN Neutron Reachability Tests."""

    url_regex = r'neutron/reachabilitytests/(?P<test_id>[^/]+|default)/$'

    @rest_utils.ajax()
    def patch(self, request, test_id):
        """Re-run the reachability test identified by test_id."""
        return bsnneutron.reachabilitytest_update(
            request, test_id, run_test=True)

    @rest_utils.ajax()
    def delete(self, request, test_id):
        """Delete the reachability test identified by test_id."""
        return bsnneutron.reachabilitytest_delete(request, test_id)
API for BSN Neutron Reachability Tests
6259900d56b00c62f0fb34e7
class SubmodWWUB(SubmodBase):
    """Command for creating WW-U-B extended formulations."""

    def _evalcmd(self, arg1, s, r, y, d, NT, Tk=None):
        # This command takes no leading argument.
        assert arg1 is None
        prefix = self._new_prefix()
        # s may optionally include an initial-stock entry (length NT+1);
        # r, y and d are per-period lists of length NT.
        assert isinstance(s, list) and len(s) in (NT, NT+1)
        assert isinstance(r, list) and len(r) == NT
        assert isinstance(y, list) and len(y) == NT
        assert isinstance(d, list) and len(d) == NT
        # Collect the variable names that must be declared in the model.
        varl = filter_strings(s + r + y)
        # Re-index as dicts: stocks from period 0, the rest from period 1.
        s = list2dict(s, 0, NT)
        r = list2dict(r, 1, NT)
        y = list2dict(y, 1, NT)
        d = list2dict(d, 1, NT)
        if Tk is None:
            Tk = NT
        model = Model()
        for var in varl:
            model.add_var(name=var)
        # Build the WW-U-B extended formulation into the model.
        XFormWWUB(model, s, r, y, d, NT, Tk, prefix)
        # Namespace the generated constraints to avoid collisions.
        model.rename_cons(lambda name: prefix+name)
        declared_vars = set(varl)
        self._pyvars["_model"] += writemod.model2ampl(model, declared_vars)
Command for creating WW-U-B extended formulations.
6259900d462c4b4f79dbc631
class Cancer(object):
    """The Cancer data can be found here:
    http://www.stata-press.com/data/r10/rmain.html
    """

    def __init__(self):
        filename = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "stata_cancer_glm.csv")
        # BUG FIX: the file handle was opened inline and never closed;
        # use a context manager so it is released deterministically.
        with open(filename, 'rb') as csv_file:
            data = np.recfromcsv(csv_file)
        self.endog = data.studytime
        design = np.column_stack((data.age, data.drug))
        design = categorical(design, col=1, drop=True)
        # Drop one dummy column (index 1) from the expanded drug factor.
        design = np.delete(design, 1, axis=1)
        self.exog = add_constant(design, prepend=False)
The Cancer data can be found here http://www.stata-press.com/data/r10/rmain.html
6259900d0a366e3fb87dd61d
@python_2_unicode_compatible
class Purchase(QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin):
    """QBO Purchase entity: an expense paid by Cash, Check or Credit Card.

    An AccountRef must be specified for all purchases, and the TotalAmt
    attribute must equal the sum of the Line.Amount attributes.
    """

    class_dict = {
        "AccountRef": Ref,
        "EntityRef": Ref,
        "DepartmentRef": Ref,
        "CurrencyRef": Ref,
        "PaymentMethodRef": Ref,
        "RemitToAddr": Address,
        "TxnTaxDetail": TxnTaxDetail,
    }

    list_dict = {
        "Line": PurchaseLine,
        "LinkedTxn": LinkedTxn,
    }

    qbo_object_name = "Purchase"

    def __init__(self):
        super(Purchase, self).__init__()
        self.DocNumber = ""
        self.TxnDate = ""
        self.ExchangeRate = 1
        self.PrivateNote = ""
        self.PaymentType = ""
        self.Credit = False
        self.TotalAmt = 0
        self.PrintStatus = "NeedToPrint"
        self.PurchaseEx = None
        self.TxnSource = None
        self.GlobalTaxCalculation = "TaxExcluded"
        self.TxnTaxDetail = None
        self.DepartmentRef = None
        self.AccountRef = None
        # BUG FIX: class_dict declares "EntityRef", but the attribute was
        # misspelled "EnitityRef", so EntityRef was never initialised.
        self.EntityRef = None
        self.EnitityRef = None  # kept for backward compatibility with old callers
        self.CurrencyRef = None
        self.PaymentMethodRef = None
        self.RemitToAddr = None
        self.Line = []
        self.LinkedTxn = []

    def __str__(self):
        return str(self.TotalAmt)
QBO definition: This entity represents expenses, such as a purchase made from a vendor. There are three types of Purchases: Cash, Check, and Credit Card. - Cash Purchase contains information regarding a payment made in cash. - Check Purchase contains information regarding a payment made by check. - Credit Card Purchase contains information regarding a payment made by credit card or refunded/credited back to a credit card. For example, to create a transaction that sends a check to a vendor, create a Purchase object with PaymentType set to Check. To query Purchase transactions of a certain type, for example Check, submit the following to the query endpoint: SELECT * from Purchase where PaymentType='Check' You must specify an AccountRef for all purchases. The TotalAmt attribute must add up to the sum of the Line.Amount attributes.
6259900d56b00c62f0fb34e9
class Admin(commands.Cog):
    """Admin-only commands."""

    def __init__(self, bot):
        self.bot = bot

    async def cog_check(self, ctx):
        # Gate every command in this cog behind the admin check.
        return await utils.discord.is_admin(ctx)

    @commands.command(aliases=['die', 'quit'])
    async def shutdown(self, ctx):
        """Shut the bot down after an interactive confirmation."""
        await self.shutdown_(ctx)

    @commands.command(name='shutdown!', aliases=['die!', 'q!', 'quit!'], hidden=True)
    async def shutdown_noconfirm(self, ctx):
        """Shut the bot down immediately, skipping confirmation."""
        await self.shutdown_(ctx, True)

    async def shutdown_(self, ctx, noconfirm=False):
        """Shared shutdown flow; `noconfirm` skips the confirmation embed."""
        if noconfirm:
            response = 'y'
        else:
            # NOTE(review): "phsyical" typo lives in this user-facing string;
            # left untouched because this change only adds comments.
            m, response = await utils.discord.get_confirm_embed(
                ctx,
                title="Shutdown?",
                description="This action may be difficult to undo without phsyical or remote access to the host machine. Are you sure?",
            )
        if response == 'y':
            title = "Shutting down\N{HORIZONTAL ELLIPSIS}"
        else:
            title = f"Shutdown {strings.YESNO[response]}"
        if noconfirm:
            color = colors.INFO
        else:
            color = colors.YESNO[response]
        # Send a fresh message in the no-confirm path, otherwise edit the
        # confirmation prompt in place.
        await (ctx.send if noconfirm else m.edit)(embed=discord.Embed(
            color=color,
            title=title
        ))
        if response == 'y':
            l.info(f"Shutting down at the command of {utils.discord.fake_mention(ctx.author)}...")
            await self.bot.logout()

    @commands.command()
    async def update(self, ctx):
        """Run `git pull`, report its output, then reload all extensions."""
        subproc = await asyncio.create_subprocess_exec('git', 'pull', stdout=PIPE)
        embed = discord.Embed(
            color=colors.INFO,
            title="Running `git pull`"
        )
        m = await ctx.send(embed=embed)
        returncode = await subproc.wait()
        embed.color = colors.ERROR if returncode else colors.SUCCESS
        stdout, stderr = await subproc.communicate()
        if stdout:
            embed.add_field(
                name="Stdout",
                value=f"```\n{stdout.decode('utf-8')}\n```",
                inline=False,
            )
        if stderr:
            embed.add_field(
                name="Stderr",
                value=f"```\n{stderr.decode('utf-8')}\n```",
                inline=False,
            )
        if not (stdout or stderr):
            embed.description = "`git pull` completed."
        await m.edit(embed=embed)
        # Reload extensions so the pulled code takes effect immediately.
        await utils.discord.invoke_command(ctx, 'reload *')

    @commands.command(aliases=['r'])
    async def reload(self, ctx, *, extensions: str = '*'):
        """Reload the given extensions ('*' reloads everything)."""
        await reload_extensions(ctx, *extensions.split())
Admin-only commands.
6259900e15fb5d323ce7f96c
class BespokeOptimizationSchema(BaseOptimizationSchema): <NEW_LINE> <INDENT> type: Literal["bespoke"] = "bespoke" <NEW_LINE> smiles: str = Field( ..., description="The SMILES representation of the molecule to generate bespoke " "parameters for.", ) <NEW_LINE> initial_force_field_hash: str = Field( ..., description="The hash values of the initial input force field with " "no bespokefit modifications. Used for internal hashing", ) <NEW_LINE> fragmentation_engine: Optional[FragmentationEngine] = Field( WBOFragmenter(), description="The fragmentation engine that should be used to fragment the " "molecule. If no engine is provided the molecules will not be fragmented.", ) <NEW_LINE> target_torsion_smirks: Optional[List[str]] = Field( ..., description="A list of SMARTS patterns that should be used to identify the " "**bonds** within the target molecule to generate bespoke torsions around. Each " "SMARTS pattern should include **two** indexed atoms that correspond to the " "two atoms involved in the central bond." "\n" "By default bespoke torsion parameters (if requested) will be constructed for " "all non-terminal 'rotatable bonds'", ) <NEW_LINE> smirk_settings: SMIRKSettings = Field( SMIRKSettings(), description="The settings that should be used when generating SMIRKS patterns for this optimization stage.", ) <NEW_LINE> @property <NEW_LINE> def molecule(self) -> Molecule: <NEW_LINE> <INDENT> return Molecule.from_mapped_smiles(self.smiles) <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_smirks(self) -> List[SMIRKSType]: <NEW_LINE> <INDENT> return list( { SMIRKSType(parameter.type) for stage in self.stages for parameter in stage.parameter_hyperparameters } )
A schema which encodes how a bespoke force field should be created for a specific molecule.
6259900e627d3e7fe0e07ac6
class FieldNotFoundException(Exception): <NEW_LINE> <INDENT> pass
Field not found exception. This class defines an exception to capture the error when an invalid field is used for operation. **DEPRECATED**
6259900e56b00c62f0fb34eb
class Model_Config(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.is_supervised = FLAGS.supervised <NEW_LINE> self.embed_dim = 300 <NEW_LINE> self.lstm_dim = 1000 <NEW_LINE> self.L = 20 <NEW_LINE> self.num_vocab = 72704 <NEW_LINE> self.vis_dim = 4096 <NEW_LINE> self.spa_dim = 5 <NEW_LINE> self.enc_dim = 512 <NEW_LINE> self.dec_dim = 512 <NEW_LINE> self.reg_dim = 512 <NEW_LINE> self.lstm_dropout = False <NEW_LINE> self.start_lr = 0.01 <NEW_LINE> self.lr_decay_step = FLAGS.lr_decay_step <NEW_LINE> self.lr_decay_rate = 0.1 <NEW_LINE> self.momentum = 0.95 <NEW_LINE> self.max_iter = FLAGS.max_iter <NEW_LINE> self.clip_gradients = 10.0 <NEW_LINE> self.weight_decay = 0.0005 <NEW_LINE> self.avg_decay = 0.99
Wrapper class for model hyperparameters.
6259900e3cc13d1c6d466375
class Normalize(object): <NEW_LINE> <INDENT> def __init__(self, mean, std): <NEW_LINE> <INDENT> if not isinstance(mean, list): <NEW_LINE> <INDENT> mean = [mean] <NEW_LINE> <DEDENT> if not isinstance(std, list): <NEW_LINE> <INDENT> std = [std] <NEW_LINE> <DEDENT> self.mean = torch.FloatTensor(mean).unsqueeze(1).unsqueeze(2) <NEW_LINE> self.std = torch.FloatTensor(std).unsqueeze(1).unsqueeze(2) <NEW_LINE> <DEDENT> def __call__(self, tensor): <NEW_LINE> <INDENT> return tensor.sub_(self.mean).div_(self.std)
Given mean and std, will normalize each channel of the torch.*Tensor, i.e. channel = (channel - mean) / std
6259900e15fb5d323ce7f96e
class NSNitroNserrInternalPiError(NSNitroPolErrors): <NEW_LINE> <INDENT> pass
Nitro error code 2103 Internal policy error
6259900ebf627c535bcb20de
class LogFormatter(logging.Formatter): <NEW_LINE> <INDENT> def format(self, record): <NEW_LINE> <INDENT> time = datetime.fromtimestamp(record.created).strftime("%Y-%m-%d %H:%M:%S") <NEW_LINE> header = "{} [{:s}]::{:s}".format(time, record.module, record.funcName) <NEW_LINE> return "{:60s} - {}".format(header, record.getMessage())
Custom Log Formatting
6259900e21a7993f00c66baa
class BaseThreadedModule(BaseModule.BaseModule, threading.Thread): <NEW_LINE> <INDENT> def __init__(self, lumbermill): <NEW_LINE> <INDENT> BaseModule.BaseModule.__init__(self, lumbermill) <NEW_LINE> threading.Thread.__init__(self) <NEW_LINE> self.input_queue = False <NEW_LINE> self.alive = True <NEW_LINE> self.daemon = True <NEW_LINE> <DEDENT> def setInputQueue(self, queue): <NEW_LINE> <INDENT> self.input_queue = queue <NEW_LINE> <DEDENT> def getInputQueue(self): <NEW_LINE> <INDENT> return self.input_queue <NEW_LINE> <DEDENT> def pollQueue(self, block=True, timeout=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for event in self.input_queue.get(block, timeout): <NEW_LINE> <INDENT> yield event <NEW_LINE> <DEDENT> <DEDENT> except (KeyboardInterrupt, SystemExit, ValueError, OSError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> if not self.input_queue: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.receivers: <NEW_LINE> <INDENT> if self.module_type not in ['stand_alone', 'output']: <NEW_LINE> <INDENT> self.logger.error("Shutting down module %s since no receivers are set." % (self.__class__.__name__)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> while self.alive: <NEW_LINE> <INDENT> for event in self.pollQueue(): <NEW_LINE> <INDENT> if not event: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.receiveEvent(event)
Base class for all lumbermill modules. In most cases this is the class to inherit from when implementing a new module. It will only be started as thread when necessary. This depends on the configuration and how the modules are combined. If you happen to override one of the methods defined here, be sure to know what you are doing ;) You have been warned... Configuration template: - module: SomeModuleName id: # <default: ""; type: string; is: optional> filter: # <default: None; type: None||string; is: optional> add_fields: # <default: {}; type: dict; is: optional> delete_fields: # <default: []; type: list; is: optional> event_type: # <default: None; type: None||string; is: optional> set_internal: # <default: {}; type: dict; is: optional> log_level: # <default: 'info'; type: string; values: ['info', 'warn', 'error', 'critical', 'fatal', 'debug']; is: optional> queue_size: # <default: 20; type: integer; is: optional> receivers: - ModuleName - ModuleAlias
6259900e15fb5d323ce7f970
class JoinMessage(BaseMessage): <NEW_LINE> <INDENT> __slots__ = "nick", "ident", "channel" <NEW_LINE> def __init__(self, messaged_at, nick, ident, channel): <NEW_LINE> <INDENT> BaseMessage.__init__(self, messaged_at) <NEW_LINE> self.nick = unicode(nick) <NEW_LINE> self.ident = unicode(ident) <NEW_LINE> self.channel = unicode(channel)
Join message type. :param messaged_at: a :class:`datetime.datetime` logged :type messaged_at: :class:`datetime.datetime` :param nick: a nickname :type nick: :class:`basestring` :param ident: an ident :type ident: :class:`basestring` :param channel: a channel name :type channel: :class:`basestring` .. attribute:: nick The nickname. .. attribute:: ident The ident. .. attribute:: channel The channel name.
6259900ed164cc6175821bac
class TestIsProductPage(unittest.TestCase): <NEW_LINE> <INDENT> def test_is_product_page(self): <NEW_LINE> <INDENT> self.assertTrue(crawler.is_product_page( 'http://www.epocacosmeticos.com.br/lady-million-eau-my-gold-eau-de-toilette-paco-rabanne-perfume-feminino/p', 'http://www.epocacosmeticos.com.br/lady-million-eau-my-gold-eau-de-toilette-paco-rabanne-perfume-feminino/p')) <NEW_LINE> <DEDENT> def test_is_not_product_page(self): <NEW_LINE> <INDENT> self.assertFalse(crawler.is_product_page( 'http://www.epocacosmeticos.com.br/lady-million-eau-my-gold-eau-de-toilette-paco-rabanne-perfume-feminino', 'http://www.epocacosmeticos.com.br/lady-million-eau-my-gold-eau-de-toilette-paco-rabanne-perfume-feminino')) <NEW_LINE> <DEDENT> def test_response_error_url(self): <NEW_LINE> <INDENT> self.assertFalse(crawler.is_product_page('http://www.epocacosmeticos.com.br/fake-product/p', 'http://www.epocacosmeticos.com.br/?ProductLinkNotFound=fake-product/p')) <NEW_LINE> <DEDENT> def test_mock_url(self): <NEW_LINE> <INDENT> self.assertTrue(crawler.is_product_page('http://www.epocacosmeticos.com.br/fake-product/p', 'http://www.epocacosmeticos.com.br/fake-product/p')) <NEW_LINE> <DEDENT> def test_invalid_value(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, crawler.is_product_page, 123, 256)
Tests for checking if a page is a prodcutd page
6259900e627d3e7fe0e07aca
class IndexTemplate(TemplateView): <NEW_LINE> <INDENT> template_name = "index.html"
! Clase para cargar el index.html @author Rodrigo Boet (rudmanmrrod at gmail.com) @date 19-11-2018 @version 1.0.0
6259900e507cdc57c63a59d0
class TaggedItem(models.Model): <NEW_LINE> <INDENT> tag = models.ForeignKey(Tag, verbose_name=_('tag'), related_name='items') <NEW_LINE> content_type = models.ForeignKey(ContentType, verbose_name=_('content type')) <NEW_LINE> object_id = models.PositiveIntegerField(_('object id'), db_index=True) <NEW_LINE> object = generic.GenericForeignKey('content_type', 'object_id') <NEW_LINE> objects = TaggedItemManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = (('tag', 'content_type', 'object_id'),) <NEW_LINE> verbose_name = _('objeto etiqueta') <NEW_LINE> verbose_name_plural = _('objetos etiqueta') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s [%s]' % (self.object, self.tag)
Holds the relationship between a tag and the item being tagged.
6259900ebf627c535bcb20e0
class Product(object): <NEW_LINE> <INDENT> def __init__(self, title, price): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.price = price <NEW_LINE> <DEDENT> def __float__(self): <NEW_LINE> <INDENT> return self.price <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Класс: "{}"\nПродукт "{}" стоимостью "{}"'.format( self.__class__, self.title, self.price)
Здесь будет документация к классу!
6259900e925a0f43d25e8c70
class IflaVfMac(ctypes.Structure): <NEW_LINE> <INDENT> _fields_ = [("vf", ctypes.c_uint32), ("mac", ctypes.c_uint8 * 32)]
struct ifla_vf_mac
6259900e15fb5d323ce7f972
class BubbleWidget(PyGlassWidget): <NEW_LINE> <INDENT> def __init__(self, parent, **kwargs): <NEW_LINE> <INDENT> super(BubbleWidget, self).__init__(parent, **kwargs) <NEW_LINE> self.exampleBtn.clicked.connect(self._handleExampleButton) <NEW_LINE> self.homeBtn.clicked.connect(self._handleReturnHome) <NEW_LINE> <DEDENT> def _handleExampleButton(self): <NEW_LINE> <INDENT> x = 0 <NEW_LINE> y = 0 <NEW_LINE> z = 0 <NEW_LINE> sx = 0.1 <NEW_LINE> sy = 0.1 <NEW_LINE> sz = 0.1 <NEW_LINE> so = 0.1 <NEW_LINE> maxY = 30 <NEW_LINE> maxX = 5 <NEW_LINE> maxZ = 5 <NEW_LINE> keyStep = 3 <NEW_LINE> totalTime = 10 <NEW_LINE> maxKey = totalTime * keyStep * 10 <NEW_LINE> minHdev = -2.0 <NEW_LINE> maxHdev = 2.0 <NEW_LINE> scaleRate = 0.2 <NEW_LINE> time = 0 <NEW_LINE> bubbleToTop = 1 <NEW_LINE> bubbleShader = mayaShader('bubble_mat', (0.0,0.8,1.0), (0.9,0.9,0.9), (0.8,0.8,0.8), 'blinn') <NEW_LINE> bubbleShader.create() <NEW_LINE> r = 1 <NEW_LINE> yUp = (0, 1, 0) <NEW_LINE> p = (0,0,0) <NEW_LINE> d = 3 <NEW_LINE> bNum = 1 <NEW_LINE> c = cmds.sphere(p=p, ax=yUp, ssw=0, esw=360, r=r, d=d, ut=0, tol=0.01, s=8, nsp=4, ch=1, n='bubble' + str(bNum))[0] <NEW_LINE> cmds.select(c) <NEW_LINE> cmds.hyperShade(a="bubble_mat") <NEW_LINE> for i in xrange(1, maxKey, keyStep): <NEW_LINE> <INDENT> cmds.currentTime( i ) <NEW_LINE> cmds.setKeyframe( v = y, at='translateY' ) <NEW_LINE> cmds.setKeyframe( v = x, at='translateX' ) <NEW_LINE> cmds.setKeyframe( v = z, at='translateZ' ) <NEW_LINE> x = x + uniform(minHdev, maxHdev) <NEW_LINE> z = z + uniform(minHdev, maxHdev) <NEW_LINE> y = y + (keyStep / bubbleToTop) <NEW_LINE> if x >= maxX: <NEW_LINE> <INDENT> x = maxX <NEW_LINE> <DEDENT> if z >= maxZ: <NEW_LINE> <INDENT> z = maxZ <NEW_LINE> <DEDENT> sx = so * ((1 + scaleRate)**time) <NEW_LINE> sz = so * ((1 + scaleRate)**time) <NEW_LINE> sy = so * ((1 + scaleRate)**time) <NEW_LINE> cmds.setKeyframe( v = sy, at='scaleY' ) <NEW_LINE> cmds.setKeyframe( v = sx, at='scaleX' ) <NEW_LINE> cmds.setKeyframe( v = sz, 
at='scaleZ' ) <NEW_LINE> time = time + 1 <NEW_LINE> if y >maxY: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> response = nimble.createRemoteResponse(globals()) <NEW_LINE> response.put('name', c) <NEW_LINE> <DEDENT> def _handleReturnHome(self): <NEW_LINE> <INDENT> self.mainWindow.setActiveWidget('home')
A class for Assignment 1
6259900e5166f23b2e244004
class GUIStatus(QtWidgets.QGroupBox) : <NEW_LINE> <INDENT> def __init__(self, parent=None, msg='No message in GUIStatus...') : <NEW_LINE> <INDENT> QtWidgets.QGroupBox.__init__(self, 'State', parent) <NEW_LINE> self.setGeometry(100, 100, 300, 60) <NEW_LINE> self.setWindowTitle('GUI Status') <NEW_LINE> self.instr_name = cp.instr_name <NEW_LINE> self.exp_name = cp.exp_name <NEW_LINE> self.det_name = cp.det_name <NEW_LINE> self.det_but_title = cp.det_but_title <NEW_LINE> self.calib_dir = cp.calib_dir <NEW_LINE> self.current_tab = cp.current_tab <NEW_LINE> self.box_txt = QtWidgets.QTextEdit(self) <NEW_LINE> self.vbox = QtWidgets.QVBoxLayout() <NEW_LINE> self.vbox.addWidget(self.box_txt) <NEW_LINE> self.setLayout(self.vbox) <NEW_LINE> self.setStatusMessage(msg) <NEW_LINE> self.showToolTips() <NEW_LINE> self.setStyle() <NEW_LINE> cp.guistatus = self <NEW_LINE> <DEDENT> def showToolTips(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setStyle(self): <NEW_LINE> <INDENT> self. setStyleSheet (cp.styleBkgd) <NEW_LINE> self.box_txt .setReadOnly (True) <NEW_LINE> self.box_txt .setStyleSheet (cp.styleWhiteFixed) <NEW_LINE> self.layout().setContentsMargins(2,4,2,2) <NEW_LINE> self.setMinimumSize(300,60) <NEW_LINE> <DEDENT> def setParent(self,parent) : <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> <DEDENT> def closeEvent(self, event): <NEW_LINE> <INDENT> logger.debug('closeEvent', __name__) <NEW_LINE> self.box_txt.close() <NEW_LINE> try : del cp.guistatus <NEW_LINE> except : pass <NEW_LINE> cp.guistatus = None <NEW_LINE> <DEDENT> def onClose(self): <NEW_LINE> <INDENT> logger.debug('onClose', __name__) <NEW_LINE> self.close() <NEW_LINE> <DEDENT> def setStatusMessage(self, msg='msg is empty...') : <NEW_LINE> <INDENT> logger.debug('Set status message',__name__) <NEW_LINE> self.box_txt.setText(msg) <NEW_LINE> <DEDENT> def updateStatusInfo(self) : <NEW_LINE> <INDENT> msg = '' <NEW_LINE> if self.instr_name.value() == 'Select' : <NEW_LINE> <INDENT> msg += 'Select instrument 
now!' <NEW_LINE> <DEDENT> elif self.exp_name.value() == 'Select' : <NEW_LINE> <INDENT> msg += 'Select experiment now!' <NEW_LINE> <DEDENT> elif self.det_but_title.value() == 'Select' : <NEW_LINE> <INDENT> msg += 'Select detector(s) now!' <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> msg += 'Selected list of detector(s): %s' % self.det_name.value() <NEW_LINE> ctype = 'pedestals' <NEW_LINE> if self.current_tab.value() == 'Dark' : ctype = 'pedestals' <NEW_LINE> if self.current_tab.value() == 'File Manager' : ctype = None <NEW_LINE> for det_name in cp.list_of_dets_selected() : <NEW_LINE> <INDENT> calib_subdir = cp.dict_of_det_calib_types[det_name] <NEW_LINE> msg += '\n' + gu.get_text_content_of_calib_dir_for_detector(path=self.calib_dir.value(), subdir=calib_subdir, det=det_name, calib_type=ctype) <NEW_LINE> <DEDENT> <DEDENT> self.setStatusMessage(msg)
GUI State
6259900e56b00c62f0fb34f1
class QuestionDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Question.objects.all() <NEW_LINE> serializer_class = QuestionSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated,)
Returns the specific Question object with its corresponding id
6259900e3cc13d1c6d46637f
class DisableMigrations(object): <NEW_LINE> <INDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return None
Django-cms disables all migrations when they run their tests. It would be better to not do it. Right now we are forced to disable our migrations because we inherit one of our models from django-cms. The error in question is due to an incompability of sqlite3 and with atomic transactions.
6259900ed164cc6175821bb4
class PyLint(ShellCommand): <NEW_LINE> <INDENT> name = "pylint" <NEW_LINE> description = ["running", "pylint"] <NEW_LINE> descriptionDone = ["pylint"] <NEW_LINE> RC_OK = 0 <NEW_LINE> RC_FATAL = 1 <NEW_LINE> RC_ERROR = 2 <NEW_LINE> RC_WARNING = 4 <NEW_LINE> RC_REFACTOR = 8 <NEW_LINE> RC_CONVENTION = 16 <NEW_LINE> RC_USAGE = 32 <NEW_LINE> _MESSAGES = { 'C': "convention", 'R': "refactor", 'W': "warning", 'E': "error", 'F': "fatal", 'I': "info", } <NEW_LINE> _flunkingIssues = ("F", "E") <NEW_LINE> _re_groupname = 'errtype' <NEW_LINE> _msgtypes_re_str = '(?P<%s>[%s])' % ( _re_groupname, ''.join(_MESSAGES.keys())) <NEW_LINE> _default_line_re = re.compile( r'^%s(\d{4})?: *\d+(, *\d+)?:.+' % _msgtypes_re_str) <NEW_LINE> _parseable_line_re = re.compile( r'[^:]+:\d+: \[%s(\d{4})?(\([a-z-]+\))?[,\]] .+' % _msgtypes_re_str) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> ShellCommand.__init__(self, **kwargs) <NEW_LINE> self.counts = {} <NEW_LINE> self.summaries = {} <NEW_LINE> self.addLogObserver( 'stdio', logobserver.LineConsumerLogObserver(self.logConsumer)) <NEW_LINE> <DEDENT> def logConsumer(self): <NEW_LINE> <INDENT> for m in self._MESSAGES: <NEW_LINE> <INDENT> self.counts[m] = 0 <NEW_LINE> self.summaries[m] = [] <NEW_LINE> <DEDENT> line_re = None <NEW_LINE> while True: <NEW_LINE> <INDENT> stream, line = yield <NEW_LINE> if not line_re: <NEW_LINE> <INDENT> if self._parseable_line_re.match(line): <NEW_LINE> <INDENT> line_re = self._parseable_line_re <NEW_LINE> <DEDENT> elif self._default_line_re.match(line): <NEW_LINE> <INDENT> line_re = self._default_line_re <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> mo = line_re.match(line) <NEW_LINE> if mo: <NEW_LINE> <INDENT> msgtype = mo.group(self._re_groupname) <NEW_LINE> assert msgtype in self._MESSAGES <NEW_LINE> self.summaries[msgtype].append(line) <NEW_LINE> self.counts[msgtype] += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def createSummary(self, log): <NEW_LINE> <INDENT> 
counts, summaries = self.counts, self.summaries <NEW_LINE> self.descriptionDone = self.descriptionDone[:] <NEW_LINE> for msg, fullmsg in self._MESSAGES.items(): <NEW_LINE> <INDENT> if counts[msg]: <NEW_LINE> <INDENT> self.descriptionDone.append("%s=%d" % (fullmsg, counts[msg])) <NEW_LINE> self.addCompleteLog(fullmsg, "\n".join(summaries[msg])) <NEW_LINE> <DEDENT> self.setProperty("pylint-%s" % fullmsg, counts[msg], 'Pylint') <NEW_LINE> <DEDENT> self.setProperty("pylint-total", sum(counts.values()), 'Pylint') <NEW_LINE> <DEDENT> def evaluateCommand(self, cmd): <NEW_LINE> <INDENT> if cmd.rc & (self.RC_FATAL | self.RC_ERROR | self.RC_USAGE): <NEW_LINE> <INDENT> return FAILURE <NEW_LINE> <DEDENT> for msg in self._flunkingIssues: <NEW_LINE> <INDENT> if self.getProperty("pylint-%s" % self._MESSAGES[msg]): <NEW_LINE> <INDENT> return FAILURE <NEW_LINE> <DEDENT> <DEDENT> if self.getProperty("pylint-total"): <NEW_LINE> <INDENT> return WARNINGS <NEW_LINE> <DEDENT> return SUCCESS
A command that knows about pylint output. It is a good idea to add --output-format=parseable to your command, since it includes the filename in the message.
6259900ebf627c535bcb20ea
class ProcessingInfo(pew.Model): <NEW_LINE> <INDENT> night = pew.IntegerField() <NEW_LINE> runId = pew.SmallIntegerField() <NEW_LINE> extension = pew.CharField(6) <NEW_LINE> status = pew.SmallIntegerField() <NEW_LINE> isdc = pew.BooleanField(default=False) <NEW_LINE> fhgfs = pew.BooleanField(default=False) <NEW_LINE> bigtank = pew.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> database = processing_db <NEW_LINE> db_table = "File_Processing_Info" <NEW_LINE> indexes = ( (('night', 'runId'), True), ) <NEW_LINE> <DEDENT> def getFileSystems(): <NEW_LINE> <INDENT> return ['isdc', 'fhgfs', 'bigtank'] <NEW_LINE> <DEDENT> def isSupported(fs): <NEW_LINE> <INDENT> return fs in ProcessingInfo.getFileSystems()
ProcessingInfo Database Model
6259900e925a0f43d25e8c7c
class Component(VapiInterface): <NEW_LINE> <INDENT> RESOURCE_TYPE = "com.vmware.vapi.component" <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> VapiInterface.__init__(self, config, _ComponentStub) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> return self._invoke('list', None) <NEW_LINE> <DEDENT> def get(self, component_id, ): <NEW_LINE> <INDENT> return self._invoke('get', { 'component_id': component_id, }) <NEW_LINE> <DEDENT> def fingerprint(self, component_id, ): <NEW_LINE> <INDENT> return self._invoke('fingerprint', { 'component_id': component_id, })
The ``Component`` class provides methods to retrieve authentication information of a component element. A component element is said to contain authentication information if any one of package elements contained in it has authentication information.
6259900ebf627c535bcb20ec
class Algorithm(object): <NEW_LINE> <INDENT> def __init__(self, corpi, config_file): <NEW_LINE> <INDENT> self.corpi = corpi <NEW_LINE> self.config = utilities.get_config(config_file) <NEW_LINE> self.results = None <NEW_LINE> self.doc_ids = [] <NEW_LINE> for corpus in corpi.corpus_list: <NEW_LINE> <INDENT> for item in corpus: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.doc_ids.extend(corpus.doc_ids) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> print('\n\n\n\nRunning the following algorithms:\n\n') <NEW_LINE> print(self.config) <NEW_LINE> result_dict = {} <NEW_LINE> if self.config['named_entities']: <NEW_LINE> <INDENT> ner = Named_Entity_Recognition(self.corpi) <NEW_LINE> ner.run() <NEW_LINE> result_dict['named_entities'] = ner.output <NEW_LINE> <DEDENT> if not self.config['latent_semantic_analysis'] and self.config['LSA_Concepts']: <NEW_LINE> <INDENT> warnings.warn("NEED LATENT SEMANTIC ANALYSIS TO RUN LSA CONCEPTS") <NEW_LINE> <DEDENT> if not self.config['latent_semantic_analysis'] and self.config['kmeans']: <NEW_LINE> <INDENT> warnings.warn("NEED LATENT SEMANTIC ANALYSIS TO RUN KMEANS") <NEW_LINE> <DEDENT> if self.config['latent_semantic_analysis']: <NEW_LINE> <INDENT> l = LatentSemanticAnalysis(self.corpi, self.doc_ids) <NEW_LINE> l.run() <NEW_LINE> result_dict['latent_semantic_analysis'] = l.output <NEW_LINE> if self.config['LSA_Concepts']: <NEW_LINE> <INDENT> c = LSA_Concepts(self.corpi, l.dtm_lsa, l.lsa, l.vectorizer) <NEW_LINE> c.run() <NEW_LINE> result_dict['LSA_Concepts'] = c.output <NEW_LINE> <DEDENT> if self.config['kmeans']: <NEW_LINE> <INDENT> k = kmeans(self.corpi, l.dtm_lsa, self.config['kmeans']) <NEW_LINE> k.run() <NEW_LINE> result_dict['kmeans'] = k.output <NEW_LINE> <DEDENT> <DEDENT> if not self.config['bag_of_words'] and self.config['word_frequency_table']: <NEW_LINE> <INDENT> warnings.warn("NEED BAG OF WORDS TO RUN WORD FREQUENCY TABLE") <NEW_LINE> <DEDENT> if self.config['bag_of_words']: <NEW_LINE> <INDENT> b = 
BagOfWords(self.corpi) <NEW_LINE> b.run() <NEW_LINE> result_dict['bag_of_words'] = b.output <NEW_LINE> if self.config['word_frequency_table']: <NEW_LINE> <INDENT> self.w = WordFreq(self.corpi, b.output) <NEW_LINE> self.w.run() <NEW_LINE> result_dict['word_frequency'] = self.w.output <NEW_LINE> <DEDENT> <DEDENT> if self.config['tf_idf']: <NEW_LINE> <INDENT> t = Tf_Idf(self.corpi) <NEW_LINE> t.run() <NEW_LINE> result_dict['tf_idf'] = t.output <NEW_LINE> <DEDENT> if self.config['LDA']: <NEW_LINE> <INDENT> lda = LDA(self.corpi, self.config['LDA']) <NEW_LINE> lda.run() <NEW_LINE> result_dict['LDA'] = lda.output <NEW_LINE> result_dict['LDA_Topics'] = lda.topics <NEW_LINE> <DEDENT> output_text = "" <NEW_LINE> self.results = result_dict <NEW_LINE> for alg,result in result_dict.items(): <NEW_LINE> <INDENT> output_text += "\n\nalgorithm: {}\n\nresult:\n\n {}\n\n".format(alg,result) <NEW_LINE> <DEDENT> print(output_text) <NEW_LINE> return result_dict
Reads the algorithm config file to see the selected algorithm(s).
6259900e627d3e7fe0e07ad8
class EmailFileWriter: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.output_name = name <NEW_LINE> <DEDENT> def write(self, emails, output_file_name): <NEW_LINE> <INDENT> raise NotImplementedError("Each writer must be able to write!")
The "interface" for a writer object
6259900e21a7993f00c66bba
class DBLoader(Loader): <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> Loader.__init__(self, source) <NEW_LINE> self.manager = dbm.DBManager(source) <NEW_LINE> BASE.metadata.create_all(self.manager.engine()) <NEW_LINE> self.session = self.manager.session() <NEW_LINE> <DEDENT> def load(self, number=-1): <NEW_LINE> <INDENT> photos = self.session.query(Picture) <NEW_LINE> counter = 0 <NEW_LINE> for pic in photos: <NEW_LINE> <INDENT> yield __dbpic_to_dicpic__(pic) <NEW_LINE> counter += 1 <NEW_LINE> if number > 0 and counter >= number: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def photo_number(self): <NEW_LINE> <INDENT> return self.session.query(func.count(Picture.id))[0][0]
sqlite loader.
6259900e15fb5d323ce7f980
class Operator(object): <NEW_LINE> <INDENT> def apply(self, obj, op): <NEW_LINE> <INDENT> coeffs = self._poly.all_coeffs() <NEW_LINE> coeffs.reverse() <NEW_LINE> diffs = [obj] <NEW_LINE> for c in coeffs[1:]: <NEW_LINE> <INDENT> diffs.append(op(diffs[-1])) <NEW_LINE> <DEDENT> r = coeffs[0]*diffs[0] <NEW_LINE> for c, d in zip(coeffs[1:], diffs[1:]): <NEW_LINE> <INDENT> r += c*d <NEW_LINE> <DEDENT> return r
Base class for operators to be applied to our functions. These operators are differential operators. They are by convention expressed in the variable D = z*d/dz (although this base class does not actually care). Note that when the operator is applied to an object, we typically do *not* blindly differentiate but instead use a different representation of the z*d/dz operator (see make_derivative_operator). To subclass from this, define a __init__ method that initializes a self._poly variable. This variable stores a polynomial. By convention the generator is z*d/dz, and acts to the right of all coefficients. Thus this poly x**2 + 2*z*x + 1 represents the differential operator (z*d/dz)**2 + 2*z**2*d/dz. This class is used only in the implementation of the hypergeometric function expansion algorithm.
6259900e5166f23b2e244012
class Meta: <NEW_LINE> <INDENT> model = AGSResult
Factory metadata.
6259900ebf627c535bcb20f0
class mrp_operation_consumed(models.Model): <NEW_LINE> <INDENT> _name = 'mrp.operation.consumed' <NEW_LINE> _description = 'Operations consumed' <NEW_LINE> _rec_name = 'routing_id' <NEW_LINE> routing_id = fields.Many2one('mrp.routing', string='Routing', required=False, ondelete='cascade') <NEW_LINE> operation_id = fields.Many2one('mrp.routing.line', string='Operation', required=False, ondelete='cascade',help="Define the operation") <NEW_LINE> operation_consumed_id = fields.Many2one('mrp.routing.line', string='Operation consumed', required=False, ondelete='cascade', help="Define the operation consumed")
Operations consumed
6259900e0a366e3fb87dd634
class VoteForm(forms.Form): <NEW_LINE> <INDENT> object_id = forms.IntegerField(widget=forms.HiddenInput)
User input to vote upon something
6259900e0a366e3fb87dd636
class TestTimeStripperDoNotArchiveUntil(TestTimeStripperCase): <NEW_LINE> <INDENT> family = 'wikisource' <NEW_LINE> code = 'en' <NEW_LINE> username = '[[User:DoNotArchiveUntil]]' <NEW_LINE> date = '06:57 06 June 2015 (UTC)' <NEW_LINE> user_and_date = username + ' ' + date <NEW_LINE> tzone = tzoneFixedOffset(0, 'UTC') <NEW_LINE> def test_timestripper_match(self): <NEW_LINE> <INDENT> ts = self.ts <NEW_LINE> txt_match = '<!-- [[User:Do___ArchiveUntil]] ' + self.date + ' -->' <NEW_LINE> res = datetime.datetime(2015, 6, 6, 6, 57, tzinfo=self.tzone) <NEW_LINE> self.assertEqual(ts.timestripper(txt_match), res) <NEW_LINE> txt_match = '<!-- --> <!-- ' + self.user_and_date + ' <!-- -->' <NEW_LINE> res = datetime.datetime(2015, 6, 6, 6, 57, tzinfo=self.tzone) <NEW_LINE> self.assertEqual(ts.timestripper(txt_match), res) <NEW_LINE> txt_match = '<!-- ' + self.user_and_date + ' -->' <NEW_LINE> res = datetime.datetime(2015, 6, 6, 6, 57, tzinfo=self.tzone) <NEW_LINE> self.assertEqual(ts.timestripper(txt_match), res) <NEW_LINE> <DEDENT> def test_timestripper_match_only(self): <NEW_LINE> <INDENT> ts = self.ts <NEW_LINE> later_date = '10:57 06 June 2015 (UTC)' <NEW_LINE> txt_match = '<!-- --> ' + self.user_and_date + ' <!-- -->' + later_date <NEW_LINE> res = datetime.datetime(2015, 6, 6, 10, 57, tzinfo=self.tzone) <NEW_LINE> self.assertEqual(ts.timestripper(txt_match), res) <NEW_LINE> earlier_date = '02:57 06 June 2015 (UTC)' <NEW_LINE> txt_match = '<!-- ' + self.user_and_date + ' --> ' + earlier_date <NEW_LINE> res = datetime.datetime(2015, 6, 6, 6, 57, tzinfo=self.tzone) <NEW_LINE> self.assertEqual(ts.timestripper(txt_match), res)
Test cases for Do Not Archive Until templates. See https://commons.wikimedia.org/wiki/Template:DNAU and https://en.wikipedia.org/wiki/Template:Do_not_archive_until.
6259900ed164cc6175821bc2
class PayOperation(Enum): <NEW_LINE> <INDENT> PAYMENT = 'payment' <NEW_LINE> ONE_CLICK_PAYMENT = 'oneclickPayment'
Type of payment operation.
6259900e56b00c62f0fb3507
class EnumEvaluationNoteTypes: <NEW_LINE> <INDENT> def __int__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> initial_ctg = 'initial_ctg' <NEW_LINE> level_of_concern = 'concern' <NEW_LINE> intervention = 'intervention' <NEW_LINE> level_ph = 'ph' <NEW_LINE> level_neurology = 'neurology'
Types of evaluations
6259900e3cc13d1c6d466391
class SystemResetTest(BaseHostTest): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SystemResetTest, self).__init__() <NEW_LINE> self.reset = False <NEW_LINE> cycle_s = self.get_config_item('program_cycle_s') <NEW_LINE> self.program_cycle_s = cycle_s if cycle_s is not None else DEFAULT_CYCLE_PERIOD <NEW_LINE> self.test_steps_sequence = self.test_steps() <NEW_LINE> self.test_steps_sequence.send(None) <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> self.register_callback(MSG_KEY_DEVICE_READY, self.cb_device_ready) <NEW_LINE> <DEDENT> def cb_device_ready(self, key, value, timestamp): <NEW_LINE> <INDENT> self.reset = True <NEW_LINE> try: <NEW_LINE> <INDENT> if self.test_steps_sequence.send(value): <NEW_LINE> <INDENT> self.notify_complete(True) <NEW_LINE> <DEDENT> <DEDENT> except (StopIteration, RuntimeError) as exc: <NEW_LINE> <INDENT> self.notify_complete(False) <NEW_LINE> <DEDENT> <DEDENT> def test_steps(self): <NEW_LINE> <INDENT> system_reset = yield <NEW_LINE> self.reset = False <NEW_LINE> self.send_kv(MSG_KEY_DEVICE_RESET, MSG_VALUE_DUMMY) <NEW_LINE> time.sleep(self.program_cycle_s) <NEW_LINE> self.send_kv(MSG_KEY_SYNC, MSG_VALUE_DUMMY) <NEW_LINE> system_reset = yield <NEW_LINE> if self.reset == False: <NEW_LINE> <INDENT> raise RuntimeError('Platform did not reset as expected.') <NEW_LINE> <DEDENT> yield True
Test for the system_reset API. Given a device running code When the device is restarted using @a system_reset() Then the device is restarted
6259900ed164cc6175821bc6
class Motor(object): <NEW_LINE> <INDENT> pi = pigpio.pi("192.168.200.1") <NEW_LINE> """Constructor""" <NEW_LINE> def __init__(self, pwm_pin=20, dir_pin=26, freq=1000, duty=0, dir=1): <NEW_LINE> <INDENT> self.pwm_pin = pwm_pin <NEW_LINE> self.dir_pin = dir_pin <NEW_LINE> self.pi.set_mode(self.pwm_pin, pigpio.OUTPUT) <NEW_LINE> self.pi.set_mode(self.dir_pin, pigpio.OUTPUT) <NEW_LINE> self.set_frequency(freq) <NEW_LINE> self.set_duty(duty) <NEW_LINE> self.set_direction(dir) <NEW_LINE> <DEDENT> """Setter""" <NEW_LINE> def set_pwm_pin(self, pin): <NEW_LINE> <INDENT> self.pwm_pin = pin <NEW_LINE> <DEDENT> def set_dir_pin(self, pin): <NEW_LINE> <INDENT> self.dir_pin = pin <NEW_LINE> <DEDENT> def set_frequency(self, freq=1000): <NEW_LINE> <INDENT> self.freq = freq <NEW_LINE> self.pi.set_PWM_frequency(self.pwm_pin, freq) <NEW_LINE> <DEDENT> def set_duty(self, duty=30): <NEW_LINE> <INDENT> self.duty = duty <NEW_LINE> self.pi.set_PWM_dutycycle(self.pwm_pin, duty) <NEW_LINE> <DEDENT> def set_direction(self, direction=1): <NEW_LINE> <INDENT> self.pi.write(self.dir_pin, direction) <NEW_LINE> self.direction = direction <NEW_LINE> <DEDENT> """Getter""" <NEW_LINE> def get_pwm_pin(self): <NEW_LINE> <INDENT> return self.pwm_pin <NEW_LINE> <DEDENT> def get_dir_pin(self): <NEW_LINE> <INDENT> return self.dir_pin <NEW_LINE> <DEDENT> def get_frequency(self): <NEW_LINE> <INDENT> return self.freq <NEW_LINE> <DEDENT> def get_duty(self): <NEW_LINE> <INDENT> return self.duty <NEW_LINE> <DEDENT> def get_direction(self): <NEW_LINE> <INDENT> return self.direction
Motor Class
6259900f5166f23b2e24401e
class NTAG203(tt2.Type2Tag): <NEW_LINE> <INDENT> def __init__(self, clf, target): <NEW_LINE> <INDENT> super(NTAG203, self).__init__(clf, target) <NEW_LINE> self._product = "NXP NTAG203" <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> oprint = lambda o: ' '.join(['??' if x < 0 else '%02x'%x for x in o]) <NEW_LINE> s = super(NTAG203, self)._dump(40) <NEW_LINE> footer = dict(zip(range(40, 42), ("LOCK2-LOCK3", "CNTR0-CNTR1"))) <NEW_LINE> for i in sorted(footer.keys()): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = self.read(i)[0:4] <NEW_LINE> <DEDENT> except tt2.Type2TagCommandError: <NEW_LINE> <INDENT> data = [None, None, None, None] <NEW_LINE> <DEDENT> s.append("{0:3}: {1} ({2})".format(i, oprint(data), footer[i])) <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def protect(self, password=None, read_protect=False, protect_from=0): <NEW_LINE> <INDENT> return super(NTAG203, self).protect( password, read_protect, protect_from) <NEW_LINE> <DEDENT> def _protect(self, password, read_protect, protect_from): <NEW_LINE> <INDENT> if password is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ndef_cc = self.read(3)[0:4] <NEW_LINE> if ndef_cc[0] == 0xE1 and ndef_cc[1] >> 4 == 1: <NEW_LINE> <INDENT> ndef_cc[3] = 0x0F <NEW_LINE> self.write(3, ndef_cc) <NEW_LINE> <DEDENT> self.write(2, "\x00\x00\xFF\xFF") <NEW_LINE> self.write(40, "\xFF\x01\x00\x00") <NEW_LINE> return True <NEW_LINE> <DEDENT> except tt2.Type2TagCommandError: pass <NEW_LINE> <DEDENT> return False
The NTAG203 is a plain memory Tag with 144 bytes user data memory plus a 16-bit one-way counter. It does not have any security features beyond the standard lock bit mechanism that permanently disables write access.
6259900f925a0f43d25e8c8c
class SetSupportedParameters(TestMixins.UnsupportedSetMixin, ResponderTestFixture): <NEW_LINE> <INDENT> PID = 'SUPPORTED_PARAMETERS'
Attempt to SET supported parameters.
6259900f507cdc57c63a59ed
class DataRoute(object): <NEW_LINE> <INDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> return 'data' <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> return 'data' <NEW_LINE> <DEDENT> def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> if obj1._state.db in 'data' and obj2._state.db in 'data': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def allow_migrate(self, db, app_label, model_name=None, **hints): <NEW_LINE> <INDENT> return True
A router to control all database operations on models in the pghm application.
6259900f56b00c62f0fb350d
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self,email,name,password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('Users must have an email address') <NEW_LINE> <DEDENT> email=self.normalize_email(email) <NEW_LINE> user=self.model(email=email,name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self,email,name,password): <NEW_LINE> <INDENT> user=self.create_user(email,name,password) <NEW_LINE> user.is_superuser=True <NEW_LINE> user.is_staff=True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return self
django can create user and superuser from userprofile model
6259900f462c4b4f79dbc657
class PersonIngestor(Ingestor): <NEW_LINE> <INDENT> grok.context(IPersonFolder) <NEW_LINE> def getContainedObjectInterface(self): <NEW_LINE> <INDENT> return IPerson <NEW_LINE> <DEDENT> def getTitles(self, predicates): <NEW_LINE> <INDENT> first = last = None <NEW_LINE> lasts = predicates.get(URIRef(FOAF_SURNAME)) <NEW_LINE> firsts = predicates.get(URIRef(FOAF_GIVENNAME)) <NEW_LINE> if lasts and lasts[0]: <NEW_LINE> <INDENT> last = unicode(lasts[0]) <NEW_LINE> <DEDENT> if firsts and firsts[0]: <NEW_LINE> <INDENT> first = unicode(firsts[0]) <NEW_LINE> <DEDENT> if first and last: <NEW_LINE> <INDENT> return [u'{}, {}'.format(last, first)] <NEW_LINE> <DEDENT> name = [i for i in (last, first) if i] <NEW_LINE> if name: <NEW_LINE> <INDENT> return name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
RDF ingestor for people.
6259900fbf627c535bcb2100
@dataclass <NEW_LINE> class Input: <NEW_LINE> <INDENT> name: Name <NEW_LINE> email: EmailAddress
Input.
6259900f56b00c62f0fb350f
class Solution: <NEW_LINE> <INDENT> def majorityNumber(self, nums): <NEW_LINE> <INDENT> ele, count = nums[0], 1 <NEW_LINE> for i in range(1, len(nums)): <NEW_LINE> <INDENT> if count == 0: <NEW_LINE> <INDENT> ele = nums[i] <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if nums[i] == ele: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> count -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ele
@param: nums: a list of integers @return: find a majority number
6259900f3cc13d1c6d466399
class Checkpointer(object): <NEW_LINE> <INDENT> def __init__(self, base_directory, checkpoint_file_prefix='ckpt', sentinel_file_identifier='checkpoint', checkpoint_frequency=1): <NEW_LINE> <INDENT> if not base_directory: <NEW_LINE> <INDENT> raise ValueError('No path provided to Checkpointer.') <NEW_LINE> <DEDENT> self._checkpoint_file_prefix = checkpoint_file_prefix <NEW_LINE> self._sentinel_file_prefix = 'sentinel_{}_complete'.format( sentinel_file_identifier) <NEW_LINE> self._checkpoint_frequency = checkpoint_frequency <NEW_LINE> self._base_directory = base_directory <NEW_LINE> try: <NEW_LINE> <INDENT> tf.gfile.MakeDirs(base_directory) <NEW_LINE> <DEDENT> except tf.errors.PermissionDeniedError: <NEW_LINE> <INDENT> raise ValueError('Unable to create checkpoint path: {}.'.format( base_directory)) <NEW_LINE> <DEDENT> <DEDENT> def _generate_filename(self, file_prefix, iteration_number): <NEW_LINE> <INDENT> filename = '{}.{}'.format(file_prefix, iteration_number) <NEW_LINE> return os.path.join(self._base_directory, filename) <NEW_LINE> <DEDENT> def _save_data_to_file(self, data, filename): <NEW_LINE> <INDENT> with tf.gfile.GFile(filename, 'w') as fout: <NEW_LINE> <INDENT> pickle.dump(data, fout) <NEW_LINE> <DEDENT> <DEDENT> def save_checkpoint(self, iteration_number, data): <NEW_LINE> <INDENT> if iteration_number % self._checkpoint_frequency != 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> filename = self._generate_filename(self._checkpoint_file_prefix, iteration_number) <NEW_LINE> self._save_data_to_file(data, filename) <NEW_LINE> filename = self._generate_filename(self._sentinel_file_prefix, iteration_number) <NEW_LINE> with tf.gfile.GFile(filename, 'wb') as fout: <NEW_LINE> <INDENT> fout.write('done') <NEW_LINE> <DEDENT> self._clean_up_old_checkpoints(iteration_number) <NEW_LINE> <DEDENT> def _clean_up_old_checkpoints(self, iteration_number): <NEW_LINE> <INDENT> stale_iteration_number = iteration_number - (self._checkpoint_frequency * CHECKPOINT_DURATION) 
<NEW_LINE> if stale_iteration_number >= 0: <NEW_LINE> <INDENT> stale_file = self._generate_filename(self._checkpoint_file_prefix, stale_iteration_number) <NEW_LINE> stale_sentinel = self._generate_filename(self._sentinel_file_prefix, stale_iteration_number) <NEW_LINE> try: <NEW_LINE> <INDENT> tf.gfile.Remove(stale_file) <NEW_LINE> tf.gfile.Remove(stale_sentinel) <NEW_LINE> <DEDENT> except tf.errors.NotFoundError: <NEW_LINE> <INDENT> tf.logging.info('Unable to remove {} or {}.'.format(stale_file, stale_sentinel)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _load_data_from_file(self, filename): <NEW_LINE> <INDENT> if not tf.gfile.Exists(filename): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> with tf.gfile.GFile(filename, 'rb') as fin: <NEW_LINE> <INDENT> return pickle.load(fin) <NEW_LINE> <DEDENT> <DEDENT> def load_checkpoint(self, iteration_number): <NEW_LINE> <INDENT> checkpoint_file = self._generate_filename(self._checkpoint_file_prefix, iteration_number) <NEW_LINE> return self._load_data_from_file(checkpoint_file)
Class for managing checkpoints for Dopamine agents.
6259900f0a366e3fb87dd644
class FakeImageResource: <NEW_LINE> <INDENT> def __init__(self, io, arg): <NEW_LINE> <INDENT> self.url = f"$({io}s.resources.{k8s.safe_name(arg)}.url)"
Used in dry-run a Task function, which might call res_param.url
6259900fbf627c535bcb2102
class ResourceConfigurationV1(BaseService): <NEW_LINE> <INDENT> default_url = 'https://config.cloud-object-storage.cloud.ibm.com/v1' <NEW_LINE> def __init__(self, url=default_url, iam_apikey=None, iam_access_token=None, iam_url=None, iam_client_id=None, iam_client_secret=None, ): <NEW_LINE> <INDENT> BaseService.__init__(self, vcap_services_name='resource_configuration', url=url, iam_apikey=iam_apikey, iam_access_token=iam_access_token, iam_url=iam_url, iam_client_id=iam_client_id, iam_client_secret=iam_client_secret, use_vcap_services=True, display_name='ResourceConfiguration') <NEW_LINE> <DEDENT> def get_bucket_config(self, bucket, **kwargs): <NEW_LINE> <INDENT> if bucket is None: <NEW_LINE> <INDENT> raise ValueError('bucket must be provided') <NEW_LINE> <DEDENT> headers = { } <NEW_LINE> if 'headers' in kwargs: <NEW_LINE> <INDENT> headers.update(kwargs.get('headers')) <NEW_LINE> <DEDENT> sdk_headers = get_sdk_headers('resource_configuration', 'V1', 'get_bucket_config') <NEW_LINE> headers.update(sdk_headers) <NEW_LINE> url = '/b/{0}'.format(*self._encode_path_vars(bucket)) <NEW_LINE> response = self.request(method='GET', url=url, headers=headers, accept_json=True) <NEW_LINE> return response <NEW_LINE> <DEDENT> def update_bucket_config(self, bucket, firewall=None, activity_tracking=None, metrics_monitoring=None, hard_quota=None, if_match=None, **kwargs): <NEW_LINE> <INDENT> if bucket is None: <NEW_LINE> <INDENT> raise ValueError('bucket must be provided') <NEW_LINE> <DEDENT> if firewall is not None: <NEW_LINE> <INDENT> firewall = self._convert_model(firewall, Firewall) <NEW_LINE> <DEDENT> if activity_tracking is not None: <NEW_LINE> <INDENT> activity_tracking = self._convert_model(activity_tracking, ActivityTracking) <NEW_LINE> <DEDENT> if metrics_monitoring is not None: <NEW_LINE> <INDENT> metrics_monitoring = self._convert_model(metrics_monitoring, MetricsMonitoring) <NEW_LINE> <DEDENT> headers = { 'if-match': if_match } <NEW_LINE> if 'headers' in kwargs: 
<NEW_LINE> <INDENT> headers.update(kwargs.get('headers')) <NEW_LINE> <DEDENT> sdk_headers = get_sdk_headers('resource_configuration', 'V1', 'update_bucket_config') <NEW_LINE> headers.update(sdk_headers) <NEW_LINE> data = { 'firewall': firewall, 'activity_tracking': activity_tracking, 'metrics_monitoring': metrics_monitoring, 'hard_quota': hard_quota } <NEW_LINE> url = '/b/{0}'.format(*self._encode_path_vars(bucket)) <NEW_LINE> response = self.request(method='PATCH', url=url, headers=headers, json=data, accept_json=False) <NEW_LINE> return response
The ResourceConfiguration V1 service.
6259900f21a7993f00c66bd0
class FakeEvent: <NEW_LINE> <INDENT> def __init__(self, string): <NEW_LINE> <INDENT> self.data = string
make a holder for passing a string to an event handler
6259900f56b00c62f0fb3513
class Solution2: <NEW_LINE> <INDENT> def isValidBST(self, root: TreeNode) -> bool: <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.onLeft(root.left, root.val) and self.onRight(root.right, root.val) and self.isValidBST(root.left) and self.isValidBST(root.right) <NEW_LINE> <DEDENT> def onLeft(self, left: TreeNode, pVal: int)-> bool: <NEW_LINE> <INDENT> if left is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return left.val < pVal and self.onLeft(left.left, pVal) and self.onLeft(left.right, pVal) <NEW_LINE> <DEDENT> def onRight(self, right: TreeNode, pVal: int) -> bool: <NEW_LINE> <INDENT> if right is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return pVal < right.val and self.onRight(right.left, pVal) and self.onRight(right.right, pVal)
仅需验证 BST 的有序性即可,即验证当前节点与其左右子节点的关系
6259900f3cc13d1c6d46639d
class BracketMatcher(QtCore.QObject): <NEW_LINE> <INDENT> _opening_map = { '(':')', '{':'}', '[':']' } <NEW_LINE> _closing_map = { ')':'(', '}':'{', ']':'[' } <NEW_LINE> def __init__(self, text_edit): <NEW_LINE> <INDENT> assert isinstance(text_edit, (QtGui.QTextEdit, QtGui.QPlainTextEdit)) <NEW_LINE> super(BracketMatcher, self).__init__() <NEW_LINE> self.format = QtGui.QTextCharFormat() <NEW_LINE> self.format.setBackground(QtGui.QColor('silver')) <NEW_LINE> self._text_edit = text_edit <NEW_LINE> text_edit.cursorPositionChanged.connect(self._cursor_position_changed) <NEW_LINE> <DEDENT> def _find_match(self, position): <NEW_LINE> <INDENT> document = self._text_edit.document() <NEW_LINE> qchar = document.characterAt(position) <NEW_LINE> start_char = qchar.toAscii() <NEW_LINE> search_char = self._opening_map.get(start_char) <NEW_LINE> if search_char: <NEW_LINE> <INDENT> increment = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> search_char = self._closing_map.get(start_char) <NEW_LINE> if search_char: <NEW_LINE> <INDENT> increment = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> depth = 0 <NEW_LINE> while position >= 0 and position < document.characterCount(): <NEW_LINE> <INDENT> char = qchar.toAscii() <NEW_LINE> if char == start_char: <NEW_LINE> <INDENT> depth += 1 <NEW_LINE> <DEDENT> elif char == search_char: <NEW_LINE> <INDENT> depth -= 1 <NEW_LINE> <DEDENT> if depth == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> position += increment <NEW_LINE> qchar = document.characterAt(position) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position = -1 <NEW_LINE> <DEDENT> return position <NEW_LINE> <DEDENT> def _selection_for_character(self, position): <NEW_LINE> <INDENT> selection = QtGui.QTextEdit.ExtraSelection() <NEW_LINE> cursor = self._text_edit.textCursor() <NEW_LINE> cursor.setPosition(position) <NEW_LINE> cursor.movePosition(QtGui.QTextCursor.NextCharacter, QtGui.QTextCursor.KeepAnchor) <NEW_LINE> selection.cursor = 
cursor <NEW_LINE> selection.format = self.format <NEW_LINE> return selection <NEW_LINE> <DEDENT> def _cursor_position_changed(self): <NEW_LINE> <INDENT> self._text_edit.setExtraSelections([]) <NEW_LINE> cursor = self._text_edit.textCursor() <NEW_LINE> if not cursor.hasSelection(): <NEW_LINE> <INDENT> position = cursor.position() - 1 <NEW_LINE> match_position = self._find_match(position) <NEW_LINE> if match_position != -1: <NEW_LINE> <INDENT> extra_selections = [ self._selection_for_character(pos) for pos in (position, match_position) ] <NEW_LINE> self._text_edit.setExtraSelections(extra_selections)
Matches square brackets, braces, and parentheses based on cursor position.
6259900f462c4b4f79dbc65d
class Wrapper(object): <NEW_LINE> <INDENT> origin = None <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<{0.__class__.__name__}(Wrapping {1})>".format(self, repr(self.origin)) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Base class for policy object wrappers.
6259900f56b00c62f0fb3515
class Discover: <NEW_LINE> <INDENT> def __init(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def scan(self, stop_on_first=True, base_ip=0): <NEW_LINE> <INDENT> tvs = [] <NEW_LINE> if base_ip == 0: <NEW_LINE> <INDENT> sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) <NEW_LINE> sock.connect(("8.8.8.8", 80)) <NEW_LINE> ip_address = sock.getsockname()[0] <NEW_LINE> sock.close() <NEW_LINE> ip_parts = ip_address.split('.') <NEW_LINE> base_ip = ip_parts[0] + '.' + ip_parts[1] + '.' + ip_parts[2] <NEW_LINE> <DEDENT> for ip_suffix in range(2, 256): <NEW_LINE> <INDENT> ip_check = '{}.{}'.format(base_ip, ip_suffix) <NEW_LINE> if self.check_ip(ip_check): <NEW_LINE> <INDENT> tvs.append(ip_check) <NEW_LINE> if stop_on_first: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return tvs <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def check_ip(ip_address, log=False, request_timeout=0.1): <NEW_LINE> <INDENT> if log: <NEW_LINE> <INDENT> print('Checking ip: {}...'.format(ip_address)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> tv_url = 'http://{}:6095/request?action=isalive'.format(ip_address) <NEW_LINE> request = requests.get(tv_url, timeout=request_timeout) <NEW_LINE> <DEDENT> except (requests.exceptions.ConnectTimeout, requests.exceptions.ConnectionError): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return request.status_code == 200
This class handles discovery and checking of local Xiaomi TVs
6259900f0a366e3fb87dd64a
class CentererB(NodeB): <NEW_LINE> <INDENT> type = QlibsNodeTypes.CENTERER <NEW_LINE> def __init__(self, sep_x, sep_y, child=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.sep_x = sep_x <NEW_LINE> self.sep_y = sep_y <NEW_LINE> if child is not None: <NEW_LINE> <INDENT> self.add_child(child) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def child(self): <NEW_LINE> <INDENT> return self.children[0] <NEW_LINE> <DEDENT> @child.setter <NEW_LINE> def child(self, val): <NEW_LINE> <INDENT> if self.children: <NEW_LINE> <INDENT> self.children[0] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.children.append(val) <NEW_LINE> <DEDENT> self.recalc_size() <NEW_LINE> <DEDENT> def recalc_size(self): <NEW_LINE> <INDENT> tpos = (self.position.x + self.sep_x, self.position.y + self.sep_y) <NEW_LINE> tsize = (max(self.size.x - self.sep_x*2, 0), max(self.size.y - self.sep_y*2, 0)) <NEW_LINE> for child in self.children: <NEW_LINE> <INDENT> child.position = tpos <NEW_LINE> child.size = tsize <NEW_LINE> <DEDENT> super().recalc_size()
Makes it's children smaller by **sep_x** and **sep_y** from each side
6259900fd164cc6175821bd4
class FilesParser(object): <NEW_LINE> <INDENT> def __init__(self, pathes = []): <NEW_LINE> <INDENT> self.pathes = pathes if type(pathes) is list else [pathes] <NEW_LINE> self.__abbrev_to_full = {} <NEW_LINE> self.__full_to_abbrev = {} <NEW_LINE> self.files = None <NEW_LINE> self.files_full = None <NEW_LINE> self.__parse_files() <NEW_LINE> <DEDENT> def __parse_files(self): <NEW_LINE> <INDENT> args = " ".join(["\'%s\'" % f for f in self.pathes]) <NEW_LINE> self.files = git("ls-files "+args).split("\n")[:-1] <NEW_LINE> self.files_full = git("ls-files --full-name "+args).split("\n")[:-1] <NEW_LINE> for i, f in enumerate(self.files): <NEW_LINE> <INDENT> full = self.files_full[i] <NEW_LINE> self.__abbrev_to_full[f] = full <NEW_LINE> self.__abbrev_to_full[full] = full <NEW_LINE> self.__full_to_abbrev[full] = f <NEW_LINE> self.__full_to_abbrev[f] = f <NEW_LINE> <DEDENT> <DEDENT> def get_full(self, file): <NEW_LINE> <INDENT> return self.__abbrev_to_full.get(file) <NEW_LINE> <DEDENT> def get_abbrev(self, full_path): <NEW_LINE> <INDENT> return self.__full_to_abbrev.get(full_path)
FilesParser run 'git ls-files' and parse.
6259900fbf627c535bcb2108
class BlatPslIndexer(SearchIndexer): <NEW_LINE> <INDENT> _parser = BlatPslParser <NEW_LINE> def __init__(self, filename, pslx=False): <NEW_LINE> <INDENT> SearchIndexer.__init__(self, filename, pslx=pslx) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> handle = self._handle <NEW_LINE> handle.seek(0) <NEW_LINE> query_id_idx = 9 <NEW_LINE> qresult_key = None <NEW_LINE> tab_char = _as_bytes('\t') <NEW_LINE> start_offset = handle.tell() <NEW_LINE> line = handle.readline() <NEW_LINE> while not re.search(_RE_ROW_CHECK_IDX, line.strip()): <NEW_LINE> <INDENT> start_offset = handle.tell() <NEW_LINE> line = handle.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> <DEDENT> while True: <NEW_LINE> <INDENT> end_offset = handle.tell() <NEW_LINE> cols = [x for x in line.strip().split(tab_char) if x] <NEW_LINE> if qresult_key is None: <NEW_LINE> <INDENT> qresult_key = cols[query_id_idx] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> curr_key = cols[query_id_idx] <NEW_LINE> if curr_key != qresult_key: <NEW_LINE> <INDENT> yield _bytes_to_string(qresult_key), start_offset, end_offset - start_offset <NEW_LINE> qresult_key = curr_key <NEW_LINE> start_offset = end_offset - len(line) <NEW_LINE> <DEDENT> <DEDENT> line = handle.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> yield _bytes_to_string(qresult_key), start_offset, end_offset - start_offset <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_raw(self, offset): <NEW_LINE> <INDENT> handle = self._handle <NEW_LINE> handle.seek(offset) <NEW_LINE> query_id_idx = 9 <NEW_LINE> qresult_key = None <NEW_LINE> qresult_raw = _as_bytes('') <NEW_LINE> tab_char = _as_bytes('\t') <NEW_LINE> while True: <NEW_LINE> <INDENT> line = handle.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> cols = [x for x in line.strip().split(tab_char) if x] <NEW_LINE> if qresult_key is None: <NEW_LINE> <INDENT> qresult_key = cols[query_id_idx] <NEW_LINE> <DEDENT> 
else: <NEW_LINE> <INDENT> curr_key = cols[query_id_idx] <NEW_LINE> if curr_key != qresult_key: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> qresult_raw += line <NEW_LINE> <DEDENT> return qresult_raw
Indexer class for BLAT PSL output.
6259900f56b00c62f0fb3517
class FunctionDefinition(FunctionPrototype): <NEW_LINE> <INDENT> __slots__ = FunctionPrototype.__slots__[:-1] + ('body', 'attrs') <NEW_LINE> @classmethod <NEW_LINE> def _construct_body(cls, itr): <NEW_LINE> <INDENT> if isinstance(itr, CodeBlock): <NEW_LINE> <INDENT> return itr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CodeBlock(*itr) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_FunctionPrototype(cls, func_proto, body): <NEW_LINE> <INDENT> if not isinstance(func_proto, FunctionPrototype): <NEW_LINE> <INDENT> raise TypeError("func_proto is not an instance of FunctionPrototype") <NEW_LINE> <DEDENT> return cls(body=body, **func_proto.kwargs())
Represents a function definition in the code. Parameters ========== return_type : Type name : str parameters: iterable of Variable instances body : CodeBlock or iterable attrs : iterable of Attribute instances Examples ======== >>> from sympy import ccode, symbols >>> from sympy.codegen.ast import real, FunctionPrototype >>> x, y = symbols('x y', real=True) >>> fp = FunctionPrototype(real, 'foo', [x, y]) >>> ccode(fp) 'double foo(double x, double y)' >>> from sympy.codegen.ast import FunctionDefinition, Return >>> body = [Return(x*y)] >>> fd = FunctionDefinition.from_FunctionPrototype(fp, body) >>> print(ccode(fd)) double foo(double x, double y){ return x*y; }
6259900f3cc13d1c6d4663a1
class TimeSensor(object): <NEW_LINE> <INDENT> def plot_Ex(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Ex, logplot) <NEW_LINE> <DEDENT> def plot_Ey(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Ey, logplot) <NEW_LINE> <DEDENT> def plot_Ez(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Ez, logplot) <NEW_LINE> <DEDENT> def plot_Hx(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Hx, logplot) <NEW_LINE> <DEDENT> def plot_Hy(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Hy, logplot) <NEW_LINE> <DEDENT> def plot_Hz(self, logplot=False): <NEW_LINE> <INDENT> self.__plot_field(self.Hz, logplot) <NEW_LINE> <DEDENT> def __plot_field(self, field, logplot=False): <NEW_LINE> <INDENT> if logplot: <NEW_LINE> <INDENT> data = 20 * numpy.log10(1e-20 + numpy.abs(field)) <NEW_LINE> pylab.plot(self.t, data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = field <NEW_LINE> pylab.plot(self.t, data) <NEW_LINE> <DEDENT> pylab.show()
Data structure to handle the time sensor's data.
6259900fd18da76e235b777a
class DeleteVPNTunnelResponseSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = {}
DeleteVPNTunnel - 删除VPN隧道
6259900f56b00c62f0fb3519
class Courier: <NEW_LINE> <INDENT> def __init__(self, models: dict, scaler, alpha: float = 0.5, beta: float = 0.5, batches=[64, 128, 256, 512]): <NEW_LINE> <INDENT> self.batches = np.array(batches).reshape(len(batches), 1) <NEW_LINE> if alpha + beta != 1: <NEW_LINE> <INDENT> raise ValueError('The hyperparameters need to add up to 1') <NEW_LINE> <DEDENT> self.alpha = alpha <NEW_LINE> self.beta = beta <NEW_LINE> self.scaler = scaler <NEW_LINE> self.acc_model = models['accuracy'] <NEW_LINE> self.time_model = models['time'] <NEW_LINE> <DEDENT> def optimize(self, job: Job, latency=None): <NEW_LINE> <INDENT> acc, t = self._predict(job) <NEW_LINE> b = self.batches <NEW_LINE> if latency: <NEW_LINE> <INDENT> fit = t[t < latency] <NEW_LINE> if len(fit) == 0: <NEW_LINE> <INDENT> print('Not a single value fulfills the ' 'time requirements, selecting minimum time') <NEW_LINE> min_t_idx = np.where(t == t.min())[0][0] <NEW_LINE> return self.batches[min_t_idx], (acc[min_t_idx], t[min_t_idx]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> acc = acc[t < latency] <NEW_LINE> b = self.batches[t < latency] <NEW_LINE> t = t[t < latency] <NEW_LINE> idx = np.where(acc == acc.max())[0][0] <NEW_LINE> return b[idx], (acc[idx], t[idx]) <NEW_LINE> <DEDENT> <DEDENT> sc_a = acc / np.max(acc) <NEW_LINE> sc_t = 1 - (t / np.max(t)) <NEW_LINE> sc = self.alpha * sc_a + self.beta * sc_t <NEW_LINE> max_sc_idx = np.where(sc == sc.max())[0][0] <NEW_LINE> return b[max_sc_idx], (acc[max_sc_idx], t[max_sc_idx]) <NEW_LINE> <DEDENT> def _fit_model(self, labels): <NEW_LINE> <INDENT> reg = self.model <NEW_LINE> reg.fit(self.X, labels) <NEW_LINE> return reg <NEW_LINE> <DEDENT> def _preprocess_data(self, X): <NEW_LINE> <INDENT> scaler = StandardScaler() <NEW_LINE> return scaler.fit_transform(X) <NEW_LINE> <DEDENT> def _predict(self, job: Job): <NEW_LINE> <INDENT> acc = [] <NEW_LINE> t = [] <NEW_LINE> for b in self.batches: <NEW_LINE> <INDENT> data_point = self.scaler.transform([[job.cpu * job.njobs, job.cpu, 
job.njobs, b, job.network]]) <NEW_LINE> _acc = self.acc_model.predict(data_point) <NEW_LINE> _t = self.time_model.predict(data_point) <NEW_LINE> acc.append(_acc) <NEW_LINE> t.append(_t) <NEW_LINE> <DEDENT> return np.array(acc), np.array(t)
Courier has 3 parameters to account for utilization, accuracy and response time, based on which, and their weights, it chooses the optimal batch size for the task labels are in format dict accuracy -> labels time -> labels utilization -> labels
6259900f507cdc57c63a59fa
class MAC(Digest): <NEW_LINE> <INDENT> def __init__(self,algorithm,key,digest=None,**kwargs): <NEW_LINE> <INDENT> if isinstance(algorithm,str): <NEW_LINE> <INDENT> self.algorithm=Oid(algorithm) <NEW_LINE> <DEDENT> elif isinstance(algorithm,Oid): <NEW_LINE> <INDENT> self.algorithm=algorithm <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Algorthm must be string or Oid") <NEW_LINE> <DEDENT> if self.algorithm==Oid('hmac') and digest is None: <NEW_LINE> <INDENT> digest='md5' <NEW_LINE> <DEDENT> self.name=self.algorithm.shortname().lower() <NEW_LINE> if digest is not None: <NEW_LINE> <INDENT> self.digest_type=DigestType(digest) <NEW_LINE> self.name+='-'+self.digest_type.digest_name <NEW_LINE> d=self.digest_type.digest <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.digest_type=None <NEW_LINE> d=None <NEW_LINE> <DEDENT> self.key=libcrypto.EVP_PKEY_new_mac_key(self.algorithm.nid,None,key,len(key)) <NEW_LINE> if self.key is None: <NEW_LINE> <INDENT> raise DigestError("EVP_PKEY_new_mac_key") <NEW_LINE> <DEDENT> pctx=c_void_p() <NEW_LINE> self.ctx = self.newctx() <NEW_LINE> if self.ctx == 0: <NEW_LINE> <INDENT> raise DigestError("Unable to create digest context") <NEW_LINE> <DEDENT> if libcrypto.EVP_DigestSignInit(self.ctx,pointer(pctx),d,None,self.key) <= 0: <NEW_LINE> <INDENT> raise DigestError("Unable to intialize digest context") <NEW_LINE> <DEDENT> self.digest_finalized=False <NEW_LINE> if self.digest_type is None: <NEW_LINE> <INDENT> self.digest_type=DigestType(Oid(libcrypto.EVP_MD_type(libcrypto.EVP_MD_CTX_md(self.ctx)))) <NEW_LINE> <DEDENT> for (name,val) in kwargs.items(): <NEW_LINE> <INDENT> if libcrypto.EVP_PKEY_CTX_ctrl_str(pctx,name,val)<=0: <NEW_LINE> <INDENT> raise DigestError("Unable to set mac parameter") <NEW_LINE> <DEDENT> <DEDENT> self.digest_size = self.digest_type.digest_size <NEW_LINE> self.block_size = self.digest_type.block_size <NEW_LINE> <DEDENT> def digest(self,data=None): <NEW_LINE> <INDENT> if data is not None: <NEW_LINE> 
<INDENT> self.update(data) <NEW_LINE> <DEDENT> b=create_string_buffer(256) <NEW_LINE> size=c_size_t(256) <NEW_LINE> if libcrypto.EVP_DigestSignFinal(self.ctx,b,pointer(size))<=0: <NEW_LINE> <INDENT> raise DigestError('SignFinal') <NEW_LINE> <DEDENT> self.digest_finalized=True <NEW_LINE> return b.raw[:size.value]
This object represents MAC context. It is quite simular to digest algorithm. It is simular to hmac objects provided by standard library
6259900fd18da76e235b777b
class ViewComplexDouble(ctypes.Structure): <NEW_LINE> <INDENT> _fields_ = [('height', catamari_int), ('width', catamari_int), ('leading_dim', catamari_int), ('data', POINTER(ComplexDouble))]
An equivalent of CatamariBlasMatrixViewComplexDouble.
6259900f627d3e7fe0e07af6
class VGG3DModel(BaseModel): <NEW_LINE> <INDENT> def __init__(self, sequence_size, img_size=321, batch_size=1, weight_file=None): <NEW_LINE> <INDENT> BaseModel.__init__(self, "C3DModel", batch_size) <NEW_LINE> self.sequence_size = sequence_size <NEW_LINE> self.img_size = img_size <NEW_LINE> self.build_model() <NEW_LINE> self.output_size = self.model.get_output_shape_at(-1)[-1] <NEW_LINE> if weight_file: <NEW_LINE> <INDENT> self.model.load_weights(weight_file) <NEW_LINE> <DEDENT> <DEDENT> def _build_model(self): <NEW_LINE> <INDENT> inputs = Input(shape=(3, self.sequence_size, self.img_size, self.img_size)) <NEW_LINE> x = Convolution3D(64, 3, 3, 3, activation=Relu, border_mode="same")(inputs) <NEW_LINE> x = BatchNormalization(axis=2)(x) <NEW_LINE> x = MaxPooling3D((1, 2, 2), strides=(1, 2, 2), border_mode='valid')(x) <NEW_LINE> x = Convolution3D(128, 3, 3, 3, activation=Relu, border_mode="same")(x) <NEW_LINE> x = BatchNormalization(axis=2)(x) <NEW_LINE> x = MaxPooling3D((2, 2, 2), strides=(2, 2, 2), border_mode='valid')(x) <NEW_LINE> x = Convolution3D(256, 3, 3, 3, activation=Relu, border_mode="same")(x) <NEW_LINE> x = Dropout(0.4)(x) <NEW_LINE> x = Convolution3D(256, 3, 3, 3, activation=Relu, border_mode="same")(x) <NEW_LINE> x = BatchNormalization(axis=2)(x) <NEW_LINE> x = Reshape([(self.sequence_size // 2) * 256, 80, 80])(x) <NEW_LINE> x = Deconvolution2D((self.sequence_size // 2) * 256, 3, 3, [self.batch_size, (self.sequence_size // 2) * 256, 159, 159], border_mode='same', subsample=(2, 2), activation=Relu)(x) <NEW_LINE> x = Convolution2D(512, 3, 3, activation=Relu, border_mode="same")(x) <NEW_LINE> x = BatchNormalization(axis=1)(x) <NEW_LINE> x = Convolution2D(64, 3, 3, activation=Relu, border_mode="same")(x) <NEW_LINE> x = Dropout(0.4)(x) <NEW_LINE> x = BatchNormalization(axis=1)(x) <NEW_LINE> x = Convolution2D(1, 3, 3, activation="sigmoid", border_mode="same")(x) <NEW_LINE> model = Model(input=inputs, output=x) <NEW_LINE> return model
This model uses the first 5 blocks of VGG19 with the Conv2D layers replaced by Conv3D. This model is our baseline.
6259900f5166f23b2e244030
class TMSettings(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> dict.__init__(self, *args, **kwds) <NEW_LINE> self.name = "tweenMachineSettings" <NEW_LINE> if mc.optionVar(exists=self.name): <NEW_LINE> <INDENT> data = eval(mc.optionVar(q=self.name)) <NEW_LINE> for key in data: <NEW_LINE> <INDENT> self[key] = data[key] <NEW_LINE> <DEDENT> <DEDENT> if "use_special_tick" not in self: <NEW_LINE> <INDENT> self["special_tick"] = False <NEW_LINE> <DEDENT> if "slider_width" not in self: <NEW_LINE> <INDENT> self["slider_width"] = 200 <NEW_LINE> <DEDENT> if "docked" not in self: <NEW_LINE> <INDENT> self["docked"] = False <NEW_LINE> <DEDENT> if "show_mode" not in self: <NEW_LINE> <INDENT> self["show_mode"] = "both" <NEW_LINE> <DEDENT> if "use_overshoot" not in self: <NEW_LINE> <INDENT> self["use_overshoot"] = False <NEW_LINE> <DEDENT> if "use_special_tick" not in self: <NEW_LINE> <INDENT> self["use_special_tick"] = False <NEW_LINE> <DEDENT> if "default_button_data" not in self: <NEW_LINE> <INDENT> self["default_button_data"] = ((-75, (0.6, 0.6, 0.6)), (-60, (0.6, 0.6, 0.6)), (-33, (0.6, 0.6, 0.6)), (0, (0.6, 0.6, 0.6)), (33, (0.6, 0.6, 0.6)), (60, (0.6, 0.6, 0.6)), (75, (0.6, 0.6, 0.6))) <NEW_LINE> <DEDENT> if "button_height" not in self: <NEW_LINE> <INDENT> self["button_height"] = 8 <NEW_LINE> <DEDENT> if "show_label" not in self: <NEW_LINE> <INDENT> self["show_label"] = True <NEW_LINE> <DEDENT> if "show_menu_bar" not in self: <NEW_LINE> <INDENT> self["show_menu_bar"] = True <NEW_LINE> <DEDENT> if "update_check" not in self: <NEW_LINE> <INDENT> self["update_check"] = False <NEW_LINE> <DEDENT> if "ui_mode" not in self: <NEW_LINE> <INDENT> self["ui_mode"] = "window" <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> dict.__setitem__(self, key, value) <NEW_LINE> mc.optionVar(stringValue=(self.name, str(self)))
Convenience class to get/set global settings via an option variable
6259900fd164cc6175821bda
class Delete(base.SilentCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> _AddDeleteArgs(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> return operations_util.Delete(operations.OperationsClient(), args.operation)
Delete a Cloud ML Engine operation.
6259900f925a0f43d25e8c9e
class ReconstructedFile(GroupUtils):
    """A reference to a reconstructed volume saved on disk.

    When reconstructing and saving to disk, this leaf node stands in for
    the in-memory ReconstructedVolume. Calling load_array() reads the
    file back and morphs this node into a ReconstructedVolume in place.
    """

    def __init__(self, filepath: str, parent: object, populate: dict = None, *args, **kwargs):
        """Create the node, drop the parent's in-memory data and read metadata.

        @param filepath: path to the saved .npy file.
        @param parent: tree node owning this leaf.
        @param populate: optional pre-loaded metadata dict; when omitted the
            metadata is read from *filepath*.
        """
        super().__init__(None, *args, **kwargs)
        self.filepath = filepath
        self.populate = populate
        self._parent = parent
        self.remove_parent_data()
        self._digest()

    def __getitem__(self, name: str):
        return getattr(self, name)

    def __setitem__(self, name: str, value):
        setattr(self, name, value)

    def __str__(self):
        return ' [ 1 file path to volume ]'

    def _digest(self):
        """Populate metadata from the `populate` dict or from the file on disk."""
        if self.populate:
            self.name = self.populate['name']
            self.dims = self.populate['dims']
            self.ImgAugmentations = ImgAugmentations().from_dict(self.populate['augmentations'])
            self.header = self.populate['header']
            self.DateTime = DicomDateTime().from_dict(self.populate['DateTime'])
        else:
            # mmap_mode avoids pulling the full arrays into memory here.
            ds = np.load(self.filepath, allow_pickle=True, mmap_mode='r').item()
            self.name = ds['name']
            self.dims = ds['dims']
            augs = ImgAugmentations().from_dict(ds['augmentations'])
            self.ImgAugmentations = augs
            self.header = ds['header']
            self.DateTime = DicomDateTime().from_dict(ds['DateTime'])
        # Mirror every header field onto this object as attributes.
        # Fixed: iterate self.header (set in both branches above) instead of
        # self.populate['header'], which raised TypeError whenever populate
        # was None (the load-from-disk path).
        for name, value in self.header.items():
            self[name] = value

    def is_file(self):
        return True

    def convert_to_file(self):
        # Already a file on disk; nothing to do.
        pass

    def remove_parent_data(self) -> None:
        """Free the parent's in-memory dicoms/volumes now that data lives on disk."""
        if self._parent.has_dicoms():
            self._parent.clear_dicoms()
        if self._parent.has_volumes():
            self._parent.clear_volumes()

    def load_array(self):
        """Load the saved arrays and morph this node into a ReconstructedVolume."""
        ds = np.load(self.filepath, allow_pickle=True).item()
        ds_header = utils.dict_to_dataclass(ds['header'])
        # In-place class swap: this leaf becomes a full in-memory volume node.
        self.__class__ = ReconstructedVolume
        self.__init__(ds_header, self.dims, self._parent)
        self.volumes = ds['volumes']
        self.DateTime = DicomDateTime().from_dict(ds['DateTime'])
        self.ImgAugmentations = ImgAugmentations().from_dict(ds['augmentations'])
        self.remove_parent_data()
A file to the saved file, as opposed to the file in memory (ReconstructedVolume). When reconstructing and saving to disk, we will generate the file, write to disk and add this leaf type within the tree. If we want to load that into memory, we will simply replace this node type with the type of ReconstructedVolume
6259900f0a366e3fb87dd652
class RebootSingleVM(CookbookBase): <NEW_LINE> <INDENT> def get_runner(self, args): <NEW_LINE> <INDENT> return RebootSingleVMRunner(args, self.spicerack) <NEW_LINE> <DEDENT> def argument_parser(self): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description=self.__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) <NEW_LINE> parser.add_argument('vm', help='A single VM to reboot (specified in Cumin query syntax)') <NEW_LINE> parser.add_argument('-r', '--reason', required=False, help=('The reason for the reboot. The current username and originating' 'Cumin host are automatically added.')) <NEW_LINE> parser.add_argument('-t', '--task-id', help='An optional task ID to refer in the downtime message.') <NEW_LINE> parser.add_argument('--depool', help='Whether to run depool/pool on the VM around reboots.', action='store_true') <NEW_LINE> return parser
Downtime a single Ganeti VM and reboot it on the Ganeti level This is different from a normal reboot triggered on the OS level, it can be compared to powercycling a server. This kind of reboot is e.g. needed if KVM/QEMU machine settings have been modified. - Set Icinga/Alertmanager downtime - Reboot with optional depool - Wait for VM to come back online - Remove the Icinga/Alertmanager downtime after the VM has been rebooted, the first Puppet run is complete and all Icinga checks have recovered. Usage example: cookbook sre.ganeti.reboot-vm failoid1002.eqiad.wmnet
6259900fd18da76e235b777d
class TestTetriminos(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_i(self): <NEW_LINE> <INDENT> t = program.flat_board.TetriminoFactory.make('I') <NEW_LINE> self.assertEqual(t.title, 'I') <NEW_LINE> self.assertEquals(t.length, 4) <NEW_LINE> arr = [ [ (0,0), (0,1), (0,2), (0,3), ], [ (0,0), (1,0), (2,0), (3,0), ], ] <NEW_LINE> self.assertEqual(t.arr, arr) <NEW_LINE> <DEDENT> def test_l(self): <NEW_LINE> <INDENT> l = program.flat_board.TetriminoFactory.make('L') <NEW_LINE> self.assertEqual(l.title, 'L') <NEW_LINE> self.assertEquals(l.length, 5) <NEW_LINE> arr = [ [ (0,0), (1,0), (2,0), (3,0), (3,1), ], [ (0,0), (1,0), (2,0), (3,0), (3,-1), ], [ (0,0), (0,1), (0,2), (0,3), (1,0), ], [ (0,3), (0,2), (0,1), (0,0), (1,3), ], [ (0,0), (0,1), (1,1), (2,1), (3,1), ], [ (0,1), (0,0), (1,0), (2,0), (3,0), ], [ (0,0), (1,-3), (1,-2), (1,-1), (1,0), ], [ (0,0), (1,3), (1,2), (1,1), (1,0), ], ] <NEW_LINE> self.assertEqual(l.arr, arr) <NEW_LINE> <DEDENT> def test_o(self): <NEW_LINE> <INDENT> o = program.flat_board.TetriminoFactory.make('O') <NEW_LINE> self.assertEqual(o.title, 'O') <NEW_LINE> self.assertEquals(o.length, 4) <NEW_LINE> arr = [ [ (0,0), (0,1), (1,0), (1,1), ], ] <NEW_LINE> self.assertEqual(o.arr, arr) <NEW_LINE> <DEDENT> def test_eq_arr(self): <NEW_LINE> <INDENT> t_func = program.flat_board.Tetrimino.eq_arr <NEW_LINE> arr = [1,2] <NEW_LINE> rarr = [2,1] <NEW_LINE> self.assertTrue(t_func(arr, rarr)) <NEW_LINE> arr = [1,2,2] <NEW_LINE> rarr = [2,1,2] <NEW_LINE> self.assertTrue(t_func(arr, rarr)) <NEW_LINE> arr = [1,2] <NEW_LINE> rarr = [2] <NEW_LINE> self.assertFalse(t_func(arr, rarr)) <NEW_LINE> arr = [1,2] <NEW_LINE> rarr = [2] <NEW_LINE> self.assertFalse(t_func(arr, rarr)) <NEW_LINE> arr = [2] <NEW_LINE> rarr = [2,1] <NEW_LINE> self.assertFalse(t_func(arr, rarr))
Test Tetriminos.
6259900fd164cc6175821bdc
class Bucketlist(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'bucketlist' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(255)) <NEW_LINE> date_created = db.Column(db.DateTime, default=db.func.current_timestamp()) <NEW_LINE> date_modified = db.Column( db.DateTime, default=db.func.current_timestamp(), onupdate=db.func.current_timestamp() ) <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> db.session.add(self) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_all(): <NEW_LINE> <INDENT> return Bucketlist.query.all() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> db.session.delete(self) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Bucketlist {}>".format(self.name)
This class represents a Bucketlist table
6259900fbf627c535bcb2110
class CancelTask(Trigger): <NEW_LINE> <INDENT> def _on_complete_hook(self, my_task): <NEW_LINE> <INDENT> for task_name in self.context: <NEW_LINE> <INDENT> cancel_tasks = my_task.workflow.get_task_spec_from_name(task_name) <NEW_LINE> for cancel_task in my_task._get_root()._find_any(cancel_tasks): <NEW_LINE> <INDENT> cancel_task.cancel() <NEW_LINE> <DEDENT> <DEDENT> return TaskSpec._on_complete_hook(self, my_task) <NEW_LINE> <DEDENT> def serialize(self, serializer): <NEW_LINE> <INDENT> return serializer._serialize_cancel_task(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize(self, serializer, wf_spec, s_state): <NEW_LINE> <INDENT> return serializer._deserialize_cancel_task(wf_spec, s_state)
This class implements a trigger that cancels another task (branch). If more than one input is connected, the task performs an implicit multi merge. If more than one output is connected, the task performs an implicit parallel split.
6259900f21a7993f00c66bdc
class maxima_kernel(kernel): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.kernel = asam.maxima_kernel(np.vstack([self.Q_ref, self.Q_ref + .5]), self.rho_D) <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> assert self.kernel.TOL == 1e-8 <NEW_LINE> assert self.kernel.increase == 2.0 <NEW_LINE> assert self.kernel.decrease == 0.5 <NEW_LINE> nptest.assert_equal(self.kernel.MAXIMA, np.vstack([self.Q_ref, self.Q_ref + .5])) <NEW_LINE> assert self.kernel.num_maxima == 2 <NEW_LINE> nptest.assert_equal(self.kernel.rho_max, self.rho_D(np.vstack([self.Q_ref, self.Q_ref + .5]))) <NEW_LINE> assert self.kernel.sort_ascending == True <NEW_LINE> <DEDENT> def test_delta_step(self): <NEW_LINE> <INDENT> output_old = np.vstack( [self.Q_ref + 3.0, self.Q_ref, self.Q_ref - 3.0]) <NEW_LINE> kern_old, proposal = self.kernel.delta_step(output_old) <NEW_LINE> assert proposal is None <NEW_LINE> output_new = np.vstack( [self.Q_ref, self.Q_ref + 3.0, self.Q_ref - 3.0]) <NEW_LINE> kern_new, proposal = self.kernel.delta_step(output_new, kern_old) <NEW_LINE> nptest.assert_array_equal(proposal, [0.5, 2.0, 1.0])
Test :class:`bet.sampling.adaptiveSampling.maxima_kernel`
6259900fd18da76e235b777e
class Immediate(_Immediate): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> @classmethod <NEW_LINE> def from_list(cls, l): <NEW_LINE> <INDENT> return cls(l.pop(0),) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_string(cls, s): <NEW_LINE> <INDENT> if not s.isdigit(): <NEW_LINE> <INDENT> raise ValueError('%s is not a valid immediate.' % s) <NEW_LINE> <DEDENT> return Immediate(int(s)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.value)
An immediate value.
6259900f925a0f43d25e8ca2
class SolveurTest(unittest.TestCase):
    """Unit tests for the methods of the solveur.Solveur class."""

    def setUp(self):
        # Fresh solver at position (0, 0) for every test.
        self.solveur = solveur.Solveur(0, 0)

    def test_change_message(self):
        # The message must only change once the counter reaches the threshold.
        message = self.solveur.message
        self.solveur.change_message()
        self.assertEqual(self.solveur.message, message)
        self.solveur.compteur_changement_message = VAL_CHANGEMENT_MESSAGE - 1
        self.solveur.change_message()
        self.assertTrue(self.solveur.message != message)

    def test_correction_grille(self):
        # correction_grille() should turn every empty cell into a black cell.
        self.solveur._fenetre = pygame.display.set_mode(TAILLE_FENETRE_TEST)
        self.solveur._grille = grille.Grille()
        self.solveur._grille.generer_grille_vide()
        for (i, j) in self.solveur._grille.keys():
            self.assertEqual(type(self.solveur._grille[i, j]), cases.CaseVide)
        self.solveur.correction_grille()
        for (i, j) in self.solveur._grille.keys():
            self.assertEqual(type(self.solveur._grille[i, j]), cases.CaseNoire)

    def test_has_solution(self):
        # has_solution() must commit singleton domains as the entered value,
        # and raise NoSolutionException when a cell's domain is empty.
        self.solveur._fenetre = pygame.display.set_mode(TAILLE_FENETRE_TEST)
        self.solveur._grille = grille.Grille()
        self.solveur._grille.generer_grille_vide()
        compteur = 0
        for (i, j) in self.solveur._grille.keys():
            self.solveur._grille[i, j].domaine = {compteur}
            compteur += 1
        self.solveur.has_solution()
        compteur = 0
        for (i, j) in self.solveur._grille.keys():
            self.assertTrue(self.solveur._grille[i, j].valeur_saisie == compteur and self.solveur._grille[i, j].domaine == {compteur})
            # Empty every domain so a second call has no possible solution.
            self.solveur._grille[i, j].domaine = set()
            self.solveur._grille[i, j].valeur_saisie = -1
            compteur += 1
        with self.assertRaises(NoSolutionException):
            self.solveur.has_solution()
Classe permettant de tester le fonctionnement des méthodes de la classe Solveur
6259900f507cdc57c63a5a02
class BusinessCardInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Name = None <NEW_LINE> self.Value = None <NEW_LINE> self.ItemCoord = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Name = params.get("Name") <NEW_LINE> self.Value = params.get("Value") <NEW_LINE> if params.get("ItemCoord") is not None: <NEW_LINE> <INDENT> self.ItemCoord = ItemCoord() <NEW_LINE> self.ItemCoord._deserialize(params.get("ItemCoord")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
名片识别结果
6259900f56b00c62f0fb3521
class peek_iter: <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> self._iterable = iter(*args) <NEW_LINE> self._cache = collections.deque() <NEW_LINE> if len(args) == 2: <NEW_LINE> <INDENT> self.sentinel = args[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sentinel = object() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self, n=None): <NEW_LINE> <INDENT> return getattr(self, 'next')(n) <NEW_LINE> <DEDENT> def _fillcache(self, n): <NEW_LINE> <INDENT> if not n: <NEW_LINE> <INDENT> n = 1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> while len(self._cache) < n: <NEW_LINE> <INDENT> self._cache.append(next(self._iterable)) <NEW_LINE> <DEDENT> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> while len(self._cache) < n: <NEW_LINE> <INDENT> self._cache.append(self.sentinel) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def has_next(self): <NEW_LINE> <INDENT> return self.peek() != self.sentinel <NEW_LINE> <DEDENT> def next(self, n=None): <NEW_LINE> <INDENT> self._fillcache(n) <NEW_LINE> if not n: <NEW_LINE> <INDENT> if self._cache[0] == self.sentinel: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> if n is None: <NEW_LINE> <INDENT> result = self._cache.popleft() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self._cache[n - 1] == self.sentinel: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> result = [self._cache.popleft() for i in range(n)] <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def peek(self, n=None): <NEW_LINE> <INDENT> self._fillcache(n) <NEW_LINE> if n is None: <NEW_LINE> <INDENT> result = self._cache[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [self._cache[i] for i in range(n)] <NEW_LINE> <DEDENT> return result
An iterator object that supports peeking ahead. Parameters ---------- o : iterable or callable `o` is interpreted very differently depending on the presence of `sentinel`. If `sentinel` is not given, then `o` must be a collection object which supports either the iteration protocol or the sequence protocol. If `sentinel` is given, then `o` must be a callable object. sentinel : any value, optional If given, the iterator will call `o` with no arguments for each call to its `next` method; if the value returned is equal to `sentinel`, :exc:`StopIteration` will be raised, otherwise the value will be returned. See Also -------- `peek_iter` can operate as a drop in replacement for the built-in `iter <https://docs.python.org/3/library/functions.html#iter>`_ function. Attributes ---------- sentinel The value used to indicate the iterator is exhausted. If `sentinel` was not given when the `peek_iter` was instantiated, then it will be set to a new object instance: ``object()``.
6259900f15fb5d323ce7f9a4
class PyKustoClient(PyKustoClientBase):
    """Handle to a Kusto cluster backed by an azure-kusto-data KustoClient.

    Schema fetching/caching is inherited from PyKustoClientBase.
    """

    # The underlying azure-kusto-data client.
    __client: KustoClient
    # Factory turning a cluster name into a connection string builder.
    __auth_method: Callable[[str], KustoConnectionStringBuilder]
    # Process-wide cache of KustoClient instances, keyed by cluster name.
    __global_client_cache: Dict[str, KustoClient] = {}
    # Guards __global_client_cache against concurrent mutation.
    __global_cache_lock: Lock = Lock()

    def __init__(
            self, client_or_cluster: Union[str, KustoClient], fetch_by_default: bool = True, use_global_cache: bool = False,
            retry_config: RetryConfig = NO_RETRIES,
            auth_method: Optional[Callable[[str], KustoConnectionStringBuilder]] = KustoConnectionStringBuilder.with_az_cli_authentication,
    ) -> None:
        """Create a client from a cluster name or an existing KustoClient."""
        self.__auth_method = auth_method
        client_resolved = False
        if isinstance(client_or_cluster, KustoClient):
            self.__client = client_or_cluster
            client_resolved = True
            # Derive the cluster name from the client's query endpoint URL.
            cluster_name = urlparse(client_or_cluster._query_endpoint).netloc
            assert not use_global_cache, "Global cache not supported when providing your own client instance"
        else:
            cluster_name = client_or_cluster
        super().__init__(cluster_name, fetch_by_default, retry_config)
        if not client_resolved:
            self.__client = (self._cached_get_client_for_cluster if use_global_cache else self._get_client_for_cluster)()

    def __repr__(self) -> str:
        return f"PyKustoClient('{self._cluster_name}')"

    def _internal_execute(self, database: str, query: KQL, properties: ClientRequestProperties = None, retry_config: RetryConfig = None) -> KustoResponse:
        """Run *query* against *database*, retrying on KustoServiceError."""
        # Per-call retry config overrides the instance-level one.
        resolved_retry_config = self._retry_config if retry_config is None else retry_config
        if resolved_retry_config is not None:
            resolved_retry_config = resolved_retry_config.retry_on(KustoServiceError)
        return KustoResponse(resolved_retry_config.retry(lambda: self.__client.execute(database, query, properties)))

    def _get_client_for_cluster(self) -> KustoClient:
        """Build a fresh KustoClient using the configured auth method."""
        return KustoClient(self.__auth_method(self._cluster_name))

    def _cached_get_client_for_cluster(self) -> KustoClient:
        """Return a KustoClient from the process-wide cache, creating it if needed."""
        with PyKustoClient.__global_cache_lock:
            client = PyKustoClient.__global_client_cache.get(self._cluster_name)
            if client is None:
                client = self._get_client_for_cluster()
                PyKustoClient.__global_client_cache[self._cluster_name] = client
                # Hard cap to keep an unbounded cluster set from leaking clients.
                assert len(PyKustoClient.__global_client_cache) <= 1024, "Global client cache cannot exceed size of 1024"
        return client
Handle to a Kusto cluster. Uses :class:`ItemFetcher` to fetch and cache the full cluster schema, including all databases, tables, columns and their types.
6259900f21a7993f00c66be0
class DummyPlugin(FCConditionBasePlugin): <NEW_LINE> <INDENT> def __init__(self, conditions): <NEW_LINE> <INDENT> super(DummyPlugin, self).__init__(conditions) <NEW_LINE> <DEDENT> def eval(self, **kwargs): <NEW_LINE> <INDENT> return eval(self.conditions)
This plugin is to be used to simply return True or False
6259900f3cc13d1c6d4663ad
class HealthDrawer: <NEW_LINE> <INDENT> def __init__(self, actor=None): <NEW_LINE> <INDENT> self._actor = actor <NEW_LINE> self._background = ResourceManager.load_image( ResourceClass.UI, 'health-bar-background.png') <NEW_LINE> self._progress = ProgressBarDrawer(ResourceManager.load_image( ResourceClass.UI, 'health-bar.png')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def actor(self): <NEW_LINE> <INDENT> return self._actor <NEW_LINE> <DEDENT> @actor.setter <NEW_LINE> def actor(self, value): <NEW_LINE> <INDENT> self._actor = value <NEW_LINE> <DEDENT> def draw(self, surface): <NEW_LINE> <INDENT> if self._actor is not None: <NEW_LINE> <INDENT> statistics = self.actor.statistics <NEW_LINE> percentage = self.actor.hp / statistics.max_health <NEW_LINE> rect = self._background.get_rect() <NEW_LINE> rect.center = Camera.to_screen_position(self.actor.position) - half_size_of_rect(self.actor.rect) + half_size_of_rect(self._background.get_rect()) - (0, 10) <NEW_LINE> surface.blit(self._background, rect) <NEW_LINE> self._progress.draw(surface, rect, percentage)
Draws health of actor, on top of actor
6259900fd18da76e235b7780
class TestMAMLVPG: <NEW_LINE> <INDENT> def setup_method(self): <NEW_LINE> <INDENT> self.env = MetaRLEnv( normalize(HalfCheetahDirEnv(), expected_action_scale=10.)) <NEW_LINE> self.policy = GaussianMLPPolicy( env_spec=self.env.spec, hidden_sizes=(64, 64), hidden_nonlinearity=torch.tanh, output_nonlinearity=None, ) <NEW_LINE> self.baseline = LinearFeatureBaseline(env_spec=self.env.spec) <NEW_LINE> <DEDENT> def teardown_method(self): <NEW_LINE> <INDENT> self.env.close() <NEW_LINE> <DEDENT> def test_ppo_pendulum(self): <NEW_LINE> <INDENT> deterministic.set_seed(0) <NEW_LINE> rollouts_per_task = 5 <NEW_LINE> max_path_length = 100 <NEW_LINE> runner = LocalRunner(snapshot_config) <NEW_LINE> algo = MAMLVPG(env=self.env, policy=self.policy, baseline=self.baseline, max_path_length=max_path_length, meta_batch_size=5, discount=0.99, gae_lambda=1., inner_lr=0.1, num_grad_updates=1) <NEW_LINE> runner.setup(algo, self.env) <NEW_LINE> last_avg_ret = runner.train(n_epochs=10, batch_size=rollouts_per_task * max_path_length) <NEW_LINE> assert last_avg_ret > -5
Test class for MAML-VPG.
6259900f627d3e7fe0e07b00