code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class Imagewang(tfds.core.GeneratorBasedBuilder): <NEW_LINE> <INDENT> BUILDER_CONFIGS = _make_builder_configs() <NEW_LINE> def _info(self): <NEW_LINE> <INDENT> names_file = tfds.core.get_tfds_path(_LABELS_FNAME) <NEW_LINE> return tfds.core.DatasetInfo( builder=self, description=_DESCRIPTION, features=tfds.features.FeaturesDict({ "image": tfds.features.Image(), "label": tfds.features.ClassLabel(names_file=names_file) }), supervised_keys=("image", "label"), homepage="https://github.com/fastai/imagenette", citation=_CITATION, ) <NEW_LINE> <DEDENT> def _split_generators(self, dl_manager): <NEW_LINE> <INDENT> size = self.builder_config.size <NEW_LINE> if size in _SIZES: <NEW_LINE> <INDENT> size_str = "" if size == "full-size" else "-" + size[:-2] <NEW_LINE> url = "/".join([_URL_PREFIX, "imagewang%s.tgz" % size_str]) <NEW_LINE> path = dl_manager.download_and_extract(url) <NEW_LINE> train_path = os.path.join(path, _SIZE_TO_DIRNAME[size], "train") <NEW_LINE> val_path = os.path.join(path, _SIZE_TO_DIRNAME[size], "val") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("size must be one of %s" % _SIZES) <NEW_LINE> <DEDENT> return [ tfds.core.SplitGenerator( name=tfds.Split.TRAIN, gen_kwargs={ "datapath": train_path, }, ), tfds.core.SplitGenerator( name=tfds.Split.VALIDATION, gen_kwargs={ "datapath": val_path, }, ), ] <NEW_LINE> <DEDENT> def _generate_examples(self, datapath): <NEW_LINE> <INDENT> for label in tf.io.gfile.listdir(datapath): <NEW_LINE> <INDENT> for fpath in tf.io.gfile.glob(os.path.join(datapath, label, "*.JPEG")): <NEW_LINE> <INDENT> fname = os.path.basename(fpath) <NEW_LINE> record = { "image": fpath, "label": label, } <NEW_LINE> yield fname, record | Imagewang contains Imagenette and Imagewoof combined. | 62598fb07c178a314d78d4e0 |
class TecnicoListCreate(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = Tecnico.objects.all() <NEW_LINE> serializer_class = TecnicoSerializer | Lista todos os técnicos ou cria um novo técnico | 62598fb0f548e778e596b5e7 |
class PhEDExInjectorPassableError(WMException): <NEW_LINE> <INDENT> pass | _PassableError_
Raised in cases where the error is sufficiently severe to terminate
the loop, but not severe enough to force us to crash the code.
Built to use with PhEDEx injection failures - if PhEDEx fails we should
terminate the loop, but continue to retry without terminating the entire
component. | 62598fb02c8b7c6e89bd3809 |
class LargestLastIndependentSet3: <NEW_LINE> <INDENT> def __init__(self, graph): <NEW_LINE> <INDENT> if graph.is_directed(): <NEW_LINE> <INDENT> raise ValueError("the graph is directed") <NEW_LINE> <DEDENT> self.graph = graph <NEW_LINE> for edge in self.graph.iteredges(): <NEW_LINE> <INDENT> if edge.source == edge.target: <NEW_LINE> <INDENT> raise ValueError("a loop detected") <NEW_LINE> <DEDENT> <DEDENT> self.independent_set = set(self.graph.iternodes()) <NEW_LINE> self.cardinality = self.graph.v() <NEW_LINE> self.source = None <NEW_LINE> <DEDENT> def run(self, source=None): <NEW_LINE> <INDENT> if source is not None: <NEW_LINE> <INDENT> self.source = source <NEW_LINE> <DEDENT> degree_dict = dict((node, self.graph.degree(node)) for node in self.graph.iternodes()) <NEW_LINE> while not self._is_independent(): <NEW_LINE> <INDENT> source = max((node for node in self.independent_set if node != self.source), key=degree_dict.__getitem__) <NEW_LINE> self.independent_set.remove(source) <NEW_LINE> for target in self.graph.iteradjacent(source): <NEW_LINE> <INDENT> degree_dict[target] -= 1 <NEW_LINE> <DEDENT> <DEDENT> self.cardinality = len(self.independent_set) <NEW_LINE> <DEDENT> def _is_independent(self): <NEW_LINE> <INDENT> for edge in self.graph.iteredges(): <NEW_LINE> <INDENT> if (edge.source in self.independent_set and edge.target in self.independent_set): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Find a maximal independent set. | 62598fb0adb09d7d5dc0a5ce |
class DummyRandomizer(object): <NEW_LINE> <INDENT> def _do_nothing(value, *args, **kwargs): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> relative = _do_nothing <NEW_LINE> absolute = _do_nothing <NEW_LINE> def factor(self, *args, **kwargs): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def term(self, *args, **kwargs): <NEW_LINE> <INDENT> return 0 | docstring for DummyRandomizer | 62598fb001c39578d7f12dc3 |
class ConnectionLostEventTestCase(StatusEventTestCase): <NEW_LINE> <INDENT> CLASS = aggregator.ConnectionLostStatus <NEW_LINE> def test_many_message_built_correctly(self): <NEW_LINE> <INDENT> if self.status: <NEW_LINE> <INDENT> count = 99 <NEW_LINE> test_events = [FakeStatus(88)] * count + [self.CLASS()] <NEW_LINE> expected = self.CLASS.MESSAGE_ONE <NEW_LINE> self.assertEqual(self.status.many(test_events), expected) | Test the event when the connection is lost. | 62598fb0a05bb46b3848a8af |
class PreEvent(AutoUnload): <NEW_LINE> <INDENT> def __init__(self, *event_names): <NEW_LINE> <INDENT> self._event_names = event_names <NEW_LINE> self._callback = None <NEW_LINE> <DEDENT> def __call__(self, callback): <NEW_LINE> <INDENT> self._callback = callback <NEW_LINE> for event_name in self._event_names: <NEW_LINE> <INDENT> pre_event_manager.register_for_event(event_name, self._callback) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def _unload_instance(self): <NEW_LINE> <INDENT> for event_name in self._event_names: <NEW_LINE> <INDENT> pre_event_manager.unregister_for_event(event_name, self._callback) | Pre-Event decorator class. | 62598fb0be383301e025383e |
class Cliente(models.Model): <NEW_LINE> <INDENT> domicilio = models.CharField(max_length=128) <NEW_LINE> email = models.CharField(max_length=50) <NEW_LINE> fechaalta = models.DateField(default=timezone.now) <NEW_LINE> nombre = models.CharField(max_length=128) <NEW_LINE> poblacion = models.CharField(max_length=128) <NEW_LINE> telefono = models.CharField(max_length=15) <NEW_LINE> @classmethod <NEW_LINE> def create(cls, nombre, domicilio, poblacion, telefono, email): <NEW_LINE> <INDENT> return Cliente(nombre=nombre, domicilio=domicilio, poblacion=poblacion, telefono=telefono, email=email) <NEW_LINE> <DEDENT> def __str__(self,): <NEW_LINE> <INDENT> return self.nombre | Esta clase incluye los datos de un cliente | 62598fb0cc40096d6161a1fb |
class Divination(SubClass): <NEW_LINE> <INDENT> name = "School of Divination" <NEW_LINE> features_by_level = defaultdict(list) <NEW_LINE> features_by_level[2] = [features.DivinationSavant, features.Portent] <NEW_LINE> features_by_level[6] = [features.ExpertDivination] <NEW_LINE> features_by_level[10] = [features.TheThirdEye] <NEW_LINE> features_by_level[14] = [features.GreaterPortent] | The counsel of a diviner is sought by royalty and commoners alike, for all
seek a clearer understanding of the past, present, and future. As a
diviner, you strive to part the veils of space, time, and consciousness so
that you can see clearly. You work to master spells of discernment, remote
viewing, supernatural knowledge, and foresight. | 62598fb04e4d56256637246b |
class MultiLayerStatefulLSTMEncoder(ChainList): <NEW_LINE> <INDENT> def __init__(self, embed_size, hidden_size, num_layers): <NEW_LINE> <INDENT> super(MultiLayerStatefulLSTMEncoder, self).__init__() <NEW_LINE> self.add_link(links.LSTM(embed_size,hidden_size)) <NEW_LINE> for i in range(1, num_layers): <NEW_LINE> <INDENT> self.add_link(links.LSTM(hidden_size, hidden_size)) <NEW_LINE> <DEDENT> self.num_layers = num_layers <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> h_list = [] <NEW_LINE> h_curr = self[0](x) <NEW_LINE> h_list.append(h_curr) <NEW_LINE> for i in range(1,self.num_layers): <NEW_LINE> <INDENT> h_curr = self[1](h_curr) <NEW_LINE> h_list.append(h_curr) <NEW_LINE> <DEDENT> return h_list <NEW_LINE> <DEDENT> def get_states(): <NEW_LINE> <INDENT> c_list = [] <NEW_LINE> for i in range(self.num_layers): <NEW_LINE> <INDENT> c_list.append(self[i].c) <NEW_LINE> <DEDENT> return c_list | This is an implementation of a Multilayered Stateful LSTM.
The underlying idea is to simply stack multiple LSTMs where the LSTM at the bottom takes the regular input,
and the LSTMs after that simply take the outputs (represented by h) of the previous LSMTs as inputs.
This is simply an analogous version of the Multilayered Stateless LSTM Encoder where the LSTM states are kept hidden.
This LSTM is to be called only by passing the input (x).
To access the cell states you must call the "get_states" function with parameter "num_layers" indicating the number of layers.
Although the cell outputs for each layer are returned, typically only the one of the topmost layer is used for various purposes like attention.
Note that in Tensorflow the concept of "number of attention heads" is used which probably points to attention using the output of each layer.
Args:
embed_size - The size of embeddings of the inputs
hidden_size - The size of the hidden layer representation of the RNN
num_layers - The number of layers of the RNN (Indicates the number of RNNS stacked on top of each other)
Attributes:
num_layers: Indicates the number of layers in the RNN
User Defined Methods:
get_states: This simply returns the latest cell states (c) as an array for all layers. | 62598fb05fcc89381b26616e |
class TestBlackwhite(unittest.TestCase): <NEW_LINE> <INDENT> def test_saved_output(self): <NEW_LINE> <INDENT> execute_and_test_output_images(self, CliRunner(), 3, 3, "save_", ["save"]) | Tests for `save` subcommand. | 62598fb0aad79263cf42e818 |
class Site(ptforum.Site): <NEW_LINE> <INDENT> def get_forum_page(self, forum): <NEW_LINE> <INDENT> xml = self.get_page('/forum/%s' % forum.forumId) <NEW_LINE> return xml <NEW_LINE> <DEDENT> def forum_page_posts(self, forum, page_xml): <NEW_LINE> <INDENT> xfeed = xml.etree.ElementTree.XML(page_xml) <NEW_LINE> if xfeed.tag != atom+'feed': <NEW_LINE> <INDENT> raise ValueError('Root element is %s, not atom:feed' % xfeed.tag) <NEW_LINE> <DEDENT> posts = [] <NEW_LINE> for xentry in xfeed.findall(atom+'entry'): <NEW_LINE> <INDENT> pid = name = datetime = subject = body = None <NEW_LINE> xauthor = xentry.find(atom+'author') <NEW_LINE> xname = xauthor.find(atom+'name') <NEW_LINE> name = xname.text <NEW_LINE> xpublished = xentry.find(atom+'published') <NEW_LINE> pubdate = xpublished.text <NEW_LINE> url = xentry.find(atom+'id').text <NEW_LINE> pid = url_params(url)['p'][0] <NEW_LINE> tid = url_params(url)['t'][0] <NEW_LINE> topic = forum.topic_find(tid) <NEW_LINE> xtitle = xentry.find(atom+'title') <NEW_LINE> ttype = xtitle.attrib['type'] <NEW_LINE> subject = xtitle.text <NEW_LINE> if subject.startswith(forum.subjectRemove): <NEW_LINE> <INDENT> subject = subject[len(forum.subjectRemove):] <NEW_LINE> <DEDENT> xcontent = xentry.find(atom+'content') <NEW_LINE> ctype = xcontent.attrib['type'] <NEW_LINE> body = xcontent.text <NEW_LINE> post = ptforum.Post(pid=pid, topic=topic, author=name, datetime=convert_time(pubdate), subject=subject, body=body) <NEW_LINE> posts.append(post) <NEW_LINE> <DEDENT> posts.sort(key=lambda p: p.datetime) <NEW_LINE> for p in posts: <NEW_LINE> <INDENT> if not p.topic.firstpost: <NEW_LINE> <INDENT> p.topic.firstpost = p.pid <NEW_LINE> <DEDENT> if not p.topic.title: <NEW_LINE> <INDENT> p.topic.title = p.subject <NEW_LINE> <DEDENT> <DEDENT> return posts | Atom feed | 62598fb0f7d966606f74802a |
class CompiledConstant(): <NEW_LINE> <INDENT> def __init__(self, constVal): <NEW_LINE> <INDENT> self.constantValue = constVal <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "CompiledConstant '{}'".format(self.constantValue) <NEW_LINE> <DEDENT> def execute(self, engine, caller): <NEW_LINE> <INDENT> engine.stack.append(self.constantValue) | Compiled Constants is a primitive that will push a constant on the stack | 62598fb0fff4ab517ebcd82a |
class CourseStop(Base): <NEW_LINE> <INDENT> _din_file = "route.din" <NEW_LINE> version_id: Column[int] = Column("VERSION", Integer(), ForeignKey(Version.id), primary_key=True) <NEW_LINE> line: Column[int] = Column("LINE_NR", Integer(), primary_key=True) <NEW_LINE> course_id: Column[str] = Column("STR_LINE_VAR", String(length=4), primary_key=True) <NEW_LINE> line_dir: Column[int] = Column("LINE_DIR_NR", Integer(), CheckConstraint("LINE_DIR_NR in (1, 2)"), primary_key=True) <NEW_LINE> consec_stop_nr: Column[int] = Column("LINE_CONSEC_NR", Integer(), primary_key=True) <NEW_LINE> stop_id: Column[int] = Column("STOP_NR", Integer(), nullable=False) <NEW_LINE> stop_point_id: Column[int] = Column("STOPPING_POINT_NR", Integer(), nullable=False) <NEW_LINE> stop_point_type: Column[StopPointType] = Column("STOPPING_POINT_TYPE", IntEnum(StopPointType), nullable=False, info={'keep_minus_1': True}) <NEW_LINE> length: Column[Optional[int]] = Column("LENGTH", Integer()) <NEW_LINE> stop: RelationshipProperty[Stop] = relationship("Stop", viewonly=True) <NEW_LINE> stop_point: RelationshipProperty[StopPoint] = relationship("StopPoint", viewonly=True) <NEW_LINE> course: RelationshipProperty[Course] = relationship(Course, back_populates="stops") <NEW_LINE> timings: RelationshipProperty[Sequence[CourseStopTiming]] = relationship("CourseStopTiming", back_populates="course_stop") <NEW_LINE> trip_vdt_changes: RelationshipProperty[Sequence[TripVDT]] = relationship("TripVDT", order_by="asc(TripVDT.consec_stop_nr)", viewonly=True) <NEW_LINE> version: RelationshipProperty[Version] = relationship("Version", viewonly=True) <NEW_LINE> __table_args__ = ( ForeignKeyConstraint([version_id, line, course_id, line_dir], [Course.version_id, Course.line, Course.id, Course.line_dir]), ForeignKeyConstraint([version_id, stop_id], ["stop.VERSION", "stop.STOP_NR"]), ForeignKeyConstraint([version_id, stop_id, stop_point_id], ["stop_point.VERSION", "stop_point.STOP_NR", "stop_point.STOPPING_POINT_NR"]) ) 
<NEW_LINE> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"<CourseStop(version_id={self.version_id}, course={self.course}, consec_stop_nr={self.consec_stop_nr}, stop_point={self.stop_point})>" <NEW_LINE> <DEDENT> __abstract__ = False | Course stop
A single stop on a `Course`.
Primary key: `version_id` & `line` & `course_id` & `line_dir` & `consec_stop_nr` | 62598fb07047854f4633f41f |
class PairCompose(object): <NEW_LINE> <INDENT> def __init__(self, transforms): <NEW_LINE> <INDENT> self.transforms = transforms <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> for t in self.transforms: <NEW_LINE> <INDENT> args = t(*args) <NEW_LINE> <DEDENT> return args | Composes several transforms together.
Args:
transforms (list of ``Transform`` objects): list of transforms to compose.
Example:
>>> transforms.Compose([
>>> transforms.CenterCrop(10),
>>> transforms.ToTensor(),
>>> ]) | 62598fb023849d37ff8510f8 |
class WithTradingSessions(WithTradingCalendars, WithDefaultDateBounds): <NEW_LINE> <INDENT> DATA_MIN_DAY = alias('START_DATE') <NEW_LINE> DATA_MAX_DAY = alias('END_DATE') <NEW_LINE> trading_days = alias('nyse_sessions') <NEW_LINE> @classmethod <NEW_LINE> def init_class_fixtures(cls): <NEW_LINE> <INDENT> super(WithTradingSessions, cls).init_class_fixtures() <NEW_LINE> cls.trading_sessions = {} <NEW_LINE> for cal_str in cls.TRADING_CALENDAR_STRS: <NEW_LINE> <INDENT> trading_calendar = cls.trading_calendars[cal_str] <NEW_LINE> sessions = trading_calendar.sessions_in_range( cls.DATA_MIN_DAY, cls.DATA_MAX_DAY) <NEW_LINE> setattr(cls, '{0}_sessions'.format(cal_str.lower()), sessions) <NEW_LINE> cls.trading_sessions[cal_str] = sessions <NEW_LINE> <DEDENT> for exchange, cal_str in iteritems(cls.TRADING_CALENDAR_FOR_EXCHANGE): <NEW_LINE> <INDENT> trading_calendar = cls.trading_calendars[cal_str] <NEW_LINE> sessions = trading_calendar.sessions_in_range( cls.DATA_MIN_DAY, cls.DATA_MAX_DAY) <NEW_LINE> cls.trading_sessions[exchange] = sessions | ZiplineTestCase mixin providing cls.trading_days, cls.all_trading_sessions
as a class-level fixture.
After init_class_fixtures has been called, `cls.all_trading_sessions`
is populated with a dictionary of calendar name to the DatetimeIndex
containing the calendar trading days ranging from:
(DATA_MAX_DAY - (cls.TRADING_DAY_COUNT) -> DATA_MAX_DAY)
`cls.trading_days`, for compatibility with existing tests which make the
assumption that trading days are equity only, defaults to the nyse trading
sessions.
Attributes
----------
DATA_MAX_DAY : datetime
The most recent trading day in the calendar.
TRADING_DAY_COUNT : int
The number of days to put in the calendar. The default value of
``TRADING_DAY_COUNT`` is 126 (half a trading-year). Inheritors can
override TRADING_DAY_COUNT to request more or less data. | 62598fb026068e7796d4c99a |
class ListComputeHomes(command.Lister): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + '.ListComputeHomes') <NEW_LINE> def take_action(self, parsed_args): <NEW_LINE> <INDENT> self.log.debug('take_action(%s)', parsed_args) <NEW_LINE> client = self.app.client_manager.allocation <NEW_LINE> zones = client.zones.compute_homes() <NEW_LINE> columns = ['Allocation Home', 'Zones'] <NEW_LINE> return ( columns, zones.items() ) | List zones available to a allocation home | 62598fb02c8b7c6e89bd380a |
class TestCustomComplianceControlsApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = esp_sdk.apis.custom_compliance_controls_api.CustomComplianceControlsApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_custom_signature(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_signature(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_list_custom_signatures(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_list_signatures(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_remove_custom_signature(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_remove_signature(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_show(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update(self): <NEW_LINE> <INDENT> pass | CustomComplianceControlsApi unit test stubs | 62598fb063b5f9789fe851ad |
class BytesUntil(Parser): <NEW_LINE> <INDENT> def __init__(self, terminal): <NEW_LINE> <INDENT> self.buffer = UnsizedParserBuffer(terminal) <NEW_LINE> <DEDENT> def parser(self, data): <NEW_LINE> <INDENT> result = '' <NEW_LINE> if (self.buffer.add_data(data)): <NEW_LINE> <INDENT> result = self.buffer.result <NEW_LINE> self.buffer.reset() <NEW_LINE> return result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Uncomplete() | paresr multi bytes until terminal and The terminus is NOT included in
the returned value | 62598fb056ac1b37e630222f |
class TokenDetailView(LoginRequiredMixin, DetailView): <NEW_LINE> <INDENT> model = Token <NEW_LINE> page_title = "Token Detail" <NEW_LINE> template_name = 'common/token_detail.html' <NEW_LINE> def get_context_data(self, *args, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(*args, **kwargs) <NEW_LINE> if self.object.application: <NEW_LINE> <INDENT> context['page_title'] = '%s (%s)' % (self.page_title, self.object.application) <NEW_LINE> <DEDENT> bucket = [] <NEW_LINE> host_summary = self.object.statistics().by_host() <NEW_LINE> for obj in host_summary: <NEW_LINE> <INDENT> bucket.append({ 'host': obj['host'], 'num_events': obj['num_events'], 'num_bytes': obj['num_bytes'], 'trendline': Host(obj['host']).trendline() }) <NEW_LINE> <DEDENT> context['host_summary'] = bucket <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_queryset(self, **kwargs): <NEW_LINE> <INDENT> return self.request.user.tokens.filter(enabled=True) | Displays detailed information about a particular token. | 62598fb0097d151d1a2c1070 |
class ProjectTest(RepoTestCase): <NEW_LINE> <INDENT> def test_create(self): <NEW_LINE> <INDENT> project = self.create_project() <NEW_LINE> self.assertTrue(os.path.exists(project.full_path)) <NEW_LINE> self.assertTrue(project.slug in project.full_path) <NEW_LINE> <DEDENT> def test_rename(self): <NEW_LINE> <INDENT> component = self.create_link() <NEW_LINE> self.assertTrue( Component.objects.filter(repo='weblate://test/test').exists() ) <NEW_LINE> project = component.project <NEW_LINE> old_path = project.full_path <NEW_LINE> self.assertTrue(os.path.exists(old_path)) <NEW_LINE> project.slug = 'changed' <NEW_LINE> project.save() <NEW_LINE> new_path = project.full_path <NEW_LINE> self.addCleanup(shutil.rmtree, new_path, True) <NEW_LINE> self.assertFalse(os.path.exists(old_path)) <NEW_LINE> self.assertTrue(os.path.exists(new_path)) <NEW_LINE> self.assertTrue( Component.objects.filter(repo='weblate://changed/test').exists() ) <NEW_LINE> self.assertFalse( Component.objects.filter(repo='weblate://test/test').exists() ) <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> project = self.create_project() <NEW_LINE> self.assertTrue(os.path.exists(project.full_path)) <NEW_LINE> project.delete() <NEW_LINE> self.assertFalse(os.path.exists(project.full_path)) <NEW_LINE> <DEDENT> def test_delete_all(self): <NEW_LINE> <INDENT> project = self.create_project() <NEW_LINE> self.assertTrue(os.path.exists(project.full_path)) <NEW_LINE> Project.objects.all().delete() <NEW_LINE> self.assertFalse(os.path.exists(project.full_path)) <NEW_LINE> <DEDENT> def test_wrong_path(self): <NEW_LINE> <INDENT> project = self.create_project() <NEW_LINE> with override_settings(DATA_DIR='/weblate-nonexisting:path'): <NEW_LINE> <INDENT> project.invalidate_path_cache() <NEW_LINE> self.assertRaisesMessage( ValidationError, 'Could not create project directory', project.full_clean ) <NEW_LINE> <DEDENT> <DEDENT> def test_acl(self): <NEW_LINE> <INDENT> user = create_test_user() <NEW_LINE> project = 
self.create_project() <NEW_LINE> project.access_control = Project.ACCESS_PRIVATE <NEW_LINE> project.save() <NEW_LINE> self.assertFalse(user.can_access_project(project)) <NEW_LINE> user.groups.add(Group.objects.get(name='Test@Translate')) <NEW_LINE> user = User.objects.get(username='testuser') <NEW_LINE> self.assertTrue(user.can_access_project(project)) | Project object testing. | 62598fb08da39b475be0322b |
class AssertionSession(Session): <NEW_LINE> <INDENT> JWT_BEARER_GRANT_TYPE = JWTBearerGrant.GRANT_TYPE <NEW_LINE> ASSERTION_METHODS = { JWT_BEARER_GRANT_TYPE: JWTBearerGrant.sign, } <NEW_LINE> def __init__(self, token_url, issuer, subject, audience, grant_type, claims=None, token_placement='header', scope=None, **kwargs): <NEW_LINE> <INDENT> super(AssertionSession, self).__init__() <NEW_LINE> self.token_url = token_url <NEW_LINE> self.grant_type = grant_type <NEW_LINE> self.issuer = issuer <NEW_LINE> self.subject = subject <NEW_LINE> self.audience = audience <NEW_LINE> self.claims = claims <NEW_LINE> self.scope = scope <NEW_LINE> self._token_auth = OAuth2Auth(None, token_placement) <NEW_LINE> self._kwargs = kwargs <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self._token_auth.token <NEW_LINE> <DEDENT> @token.setter <NEW_LINE> def token(self, token): <NEW_LINE> <INDENT> self._token_auth.token = OAuth2Token.from_dict(token) <NEW_LINE> <DEDENT> def auto_refresh_token(self): <NEW_LINE> <INDENT> if not self.token or self.token.is_expired(): <NEW_LINE> <INDENT> self.refresh_token() <NEW_LINE> <DEDENT> <DEDENT> def refresh_token(self): <NEW_LINE> <INDENT> generate_assertion = self.ASSERTION_METHODS[self.grant_type] <NEW_LINE> assertion = generate_assertion( issuer=self.issuer, subject=self.subject, audience=self.audience, claims=self.claims, **self._kwargs ) <NEW_LINE> data = {'assertion': assertion, 'grant_type': self.grant_type} <NEW_LINE> if self.scope: <NEW_LINE> <INDENT> data['scope'] = self.scope <NEW_LINE> <DEDENT> resp = self.request('POST', self.token_url, data=data, withhold_token=True) <NEW_LINE> self.token = resp.json() <NEW_LINE> return self.token <NEW_LINE> <DEDENT> def request(self, method, url, data=None, headers=None, withhold_token=False, auth=None, **kwargs): <NEW_LINE> <INDENT> if not withhold_token: <NEW_LINE> <INDENT> self.auto_refresh_token() <NEW_LINE> if auth is None: <NEW_LINE> <INDENT> auth = 
self._token_auth <NEW_LINE> <DEDENT> <DEDENT> return super(AssertionSession, self).request( method, url, headers=headers, data=data, auth=auth, **kwargs) | Constructs a new Assertion Framework for OAuth 2.0 Authorization Grants
per RFC7521_.
.. _RFC7521: https://tools.ietf.org/html/rfc7521 | 62598fb03d592f4c4edbaf06 |
class BooleanMetric(Metric): <NEW_LINE> <INDENT> def _populate_value(self, metric, value, start_time): <NEW_LINE> <INDENT> metric.boolean_value = value <NEW_LINE> <DEDENT> def _populate_value_new(self, data, value): <NEW_LINE> <INDENT> data.bool_value = value <NEW_LINE> <DEDENT> def _populate_value_type(self, data_set): <NEW_LINE> <INDENT> data_set.value_type = new_metrics_pb2.BOOL <NEW_LINE> <DEDENT> def set(self, value, fields=None, target_fields=None): <NEW_LINE> <INDENT> if not isinstance(value, bool): <NEW_LINE> <INDENT> raise errors.MonitoringInvalidValueTypeError(self._name, value) <NEW_LINE> <DEDENT> self._set(fields, target_fields, value) <NEW_LINE> <DEDENT> def is_cumulative(self): <NEW_LINE> <INDENT> return False | A metric whose value type is a boolean. | 62598fb097e22403b383af53 |
class LazyPlugInFlowable(Flowable): <NEW_LINE> <INDENT> def __init__(self, dirname, modulename, functionname, content): <NEW_LINE> <INDENT> self.dirname = dirname <NEW_LINE> self.modulename = modulename <NEW_LINE> self.functionname = functionname <NEW_LINE> self.content = content <NEW_LINE> self.flowable = None <NEW_LINE> <DEDENT> def initializeFlowable(self): <NEW_LINE> <INDENT> m = recursiveImport(self.modulename, _rml2pdf_locations(self.dirname)) <NEW_LINE> f = getattr(m, self.functionname) <NEW_LINE> if self.content is None: <NEW_LINE> <INDENT> self.flowable = f() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = "".join(map(asUnicodeEx, self.content)) <NEW_LINE> kw = {} <NEW_LINE> try: <NEW_LINE> <INDENT> args = eval(data) <NEW_LINE> if type(args) is not type(()): args = (args,) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> args, kw = eval('_args_kw(%s)' % data) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> args = (data,) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> self.flowable = f(*args,**kw) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('flowable %s, args=%s kw=%s' % (f, args, kw)) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wrap(self, w, h): <NEW_LINE> <INDENT> if self.flowable is None: <NEW_LINE> <INDENT> self.initializeFlowable() <NEW_LINE> <DEDENT> return self.flowable.wrapOn(self.canv,w,h) <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> if self.flowable is None: <NEW_LINE> <INDENT> self.initializeFlowable() <NEW_LINE> <DEDENT> self.flowable._drawOn(self.canv) <NEW_LINE> <DEDENT> def split(self,aW,aH): <NEW_LINE> <INDENT> if self.flowable is None: <NEW_LINE> <INDENT> self.initializeFlowable() <NEW_LINE> <DEDENT> return self.flowable.splitOn(self.canv,aW,aH) | defer conversion of content until wrap time (to allow, eg, page numbering) | 62598fb01f5feb6acb162c64 |
class RayTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_ray_basic(self): <NEW_LINE> <INDENT> ox_axis = Ray(Vec3(), Vec3.versor(0)) <NEW_LINE> self.assertEqual(ox_axis.point_at(4), Vec3(4, 0, 0)) <NEW_LINE> direction = Vec3(1, -1, 0).normalised() <NEW_LINE> ray1 = Ray(Vec3(0, 2, 0), direction) <NEW_LINE> ray2 = Ray.from_points(Vec3(0, 2, 0), Vec3(2, 0, 0)) <NEW_LINE> self.assertEqual(ray1.direction, direction) <NEW_LINE> self.assertEqual(ray2.direction, direction) <NEW_LINE> for i in range(10): <NEW_LINE> <INDENT> self.assertEqual(ray1.point_at(i), ray2.point_at(i)) <NEW_LINE> <DEDENT> self.assertEqual(ray1.point_at(0), ray1.origin) <NEW_LINE> self.assertEqual(ray2.point_at(0), ray2.origin) | Tests for Ray class. | 62598fb0bd1bec0571e150e5 |
class OFPPortMod(MsgBase): <NEW_LINE> <INDENT> _TYPE = { 'ascii': [ 'hw_addr', ] } <NEW_LINE> version = ofproto.OFP_VERSION <NEW_LINE> msg_type = ofproto.OFPT_PORT_MOD <NEW_LINE> def __init__(self, port_no=0, hw_addr='00:00:00:00:00:00', config=0, mask=0, advertise=0): <NEW_LINE> <INDENT> super(OFPPortMod, self).__init__(ofproto.OFP_VERSION, OFPPortMod.msg_type) <NEW_LINE> self.port_no = port_no <NEW_LINE> self.hw_addr = hw_addr <NEW_LINE> self.config = config <NEW_LINE> self.mask = mask <NEW_LINE> self.advertise = advertise <NEW_LINE> <DEDENT> def _serialize_body(self): <NEW_LINE> <INDENT> msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.port_no, addrconv.mac.text_to_bin(self.hw_addr), self.config, self.mask, self.advertise) | Port modification message
The controller sneds this message to modify the behavior of the port.
================ ======================================================
Attribute Description
================ ======================================================
port_no Port number to modify
hw_addr The hardware address that must be the same as hw_addr
of ``OFPPort`` of ``OFPSwitchFeatures``
config Bitmap of configuration flags.
| OFPPC_PORT_DOWN
| OFPPC_NO_RECV
| OFPPC_NO_FWD
| OFPPC_NO_PACKET_IN
mask Bitmap of configuration flags above to be changed
advertise Bitmap of the following flags.
| OFPPF_10MB_HD
| OFPPF_10MB_FD
| OFPPF_100MB_HD
| OFPPF_100MB_FD
| OFPPF_1GB_HD
| OFPPF_1GB_FD
| OFPPF_10GB_FD
| OFPPF_40GB_FD
| OFPPF_100GB_FD
| OFPPF_1TB_FD
| OFPPF_OTHER
| OFPPF_COPPER
| OFPPF_FIBER
| OFPPF_AUTONEG
| OFPPF_PAUSE
| OFPPF_PAUSE_ASYM
================ ======================================================
Example::
def send_port_mod(self):
ofp = datapath.ofproto
ofp_parser = datapath.ofproto_parser
port_no = 3
hw_addr = 'fa:c8:e8:76:1d:7e'
config = 0
mask = (ofp.OFPPC_PORT_DOWN | ofp.OFPPC_NO_RECV |
ofp.OFPPC_NO_FWD | ofp.OFPPC_NO_PACKET_IN)
advertise = (ofp.OFPPF_10MB_HD | ofp.OFPPF_100MB_FD |
ofp.OFPPF_1GB_FD | ofp.OFPPF_COPPER |
ofp.OFPPF_AUTONEG | ofp.OFPPF_PAUSE |
ofp.OFPPF_PAUSE_ASYM)
req = ofp_parser.OFPPortMod(datapath, port_no, hw_addr, config,
mask, advertise)
datapath.send_msg(req) | 62598fb0b7558d5895463670 |
class MainProduct(BTreeContainer): <NEW_LINE> <INDENT> implements(IMainProduct, IMainProductContained) <NEW_LINE> name = u"" <NEW_LINE> description = u"" | Implementation of a IMainProduct using B-Tree Container
Make sure that the ``MainProduct`` implements the ``IMainProduct``
interface:
>>> from zope.interface.verify import verifyClass
>>> verifyClass(IMainProduct, MainProduct)
True
Make sure that the ``MainProduct`` implements the ``IMainProductContained``
interface:
>>> from zope.interface.verify import verifyClass
>>> verifyClass(IMainProductContained, MainProduct)
True
An example of checking the name of Main Product:
>>> mp = MainProduct()
>>> mp.name
u''
>>> mp.name = u'MyMainProduct'
>>> mp.name
u'MyMainProduct'
An example of checking the description of Main Product:
>>> mp = MainProduct()
>>> mp.description
u''
>>> mp.description = u'MyMainProduct'
>>> mp.description
u'MyMainProduct' | 62598fb057b8e32f5250813e |
class DbServiceConnect: <NEW_LINE> <INDENT> def __init__(self, postgres_config): <NEW_LINE> <INDENT> self.conn = psycopg2.connect(**postgres_config) | Class to separate connect to Postgre DB.
To initialize requires path to config in dictionary with keys 'user','password','host','dbname' ,'port'. | 62598fb071ff763f4b5e77b7 |
class Battery(): <NEW_LINE> <INDENT> def __init__(self, size=70): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.charge_level = 0 <NEW_LINE> <DEDENT> def get_range(self): <NEW_LINE> <INDENT> if self.size == 70: <NEW_LINE> <INDENT> return 240 <NEW_LINE> <DEDENT> elif self.size == 85: <NEW_LINE> <INDENT> return 270 | A battery for an electric car. | 62598fb07d847024c075c408 |
class IncompatibleScopeError(Exception): <NEW_LINE> <INDENT> def __init__(self, scope, other_scope): <NEW_LINE> <INDENT> msg = f"Scope {scope} is not compatible with {other_scope}" <NEW_LINE> super().__init__(msg) | Raised when trying to align two factors' index with unequal scope. | 62598fb02c8b7c6e89bd380b |
class CachedObject(object): <NEW_LINE> <INDENT> name: str = "unnamed" <NEW_LINE> hashlist = () <NEW_LINE> cached_properties = [] <NEW_LINE> def __hash__(self): <NEW_LINE> <INDENT> return hash_attributes(self, self.hashlist) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> for prop in self.cached_properties: <NEW_LINE> <INDENT> if id(self) in prop.cache: <NEW_LINE> <INDENT> prop.cache.pop(id(self)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> rep = super(CachedObject, self).__repr__() <NEW_LINE> if hasattr(self, "name"): <NEW_LINE> <INDENT> rep = rep[:-1] + ': "{}">'.format(self.name) <NEW_LINE> <DEDENT> return rep | An object to provide cached properties and functions.
Provide a list of attributes to hash down for tracking changes | 62598fb076e4537e8c3ef5ed |
class SomeGraph: <NEW_LINE> <INDENT> def __init__( self, some_datetime: datetime.datetime, formatless_datetime: datetime.datetime) -> None: <NEW_LINE> <INDENT> self.some_datetime = some_datetime <NEW_LINE> self.formatless_datetime = formatless_datetime | defines some object graph. | 62598fb06e29344779b006a2 |
class AjaxStringLookupWidget(forms.Widget): <NEW_LINE> <INDENT> class Media: <NEW_LINE> <INDENT> css = { 'all' : ("css/autocomplete.css",) } <NEW_LINE> js = ("js/jquery.autocomplete.min.js", "js/setup_ajax.js", "js/ajax_string_lookup.js",) <NEW_LINE> <DEDENT> def render(self, name, value, *args, **kwargs): <NEW_LINE> <INDENT> div_id = 'ajax_text_field_' + kwargs['attrs']['id'] <NEW_LINE> input_name = name <NEW_LINE> input_id = kwargs['attrs']['id'] <NEW_LINE> if value is None: <NEW_LINE> <INDENT> value = '' <NEW_LINE> <DEDENT> render_html = "<input type='text' size=\"40\" name='"+str(input_name)+"' id='"+str(input_id)+"' value='"+value+"' />\n" <NEW_LINE> render_html += '<script type="text/javascript">\n' <NEW_LINE> render_html += "attach_ajax_string_listener(\""+self.ajax_url+"\", \""+str(input_id)+"\")\n" <NEW_LINE> render_html += "</script>\n" <NEW_LINE> return render_html <NEW_LINE> <DEDENT> def __init__(self, ajax_url, *args, **kwargs): <NEW_LINE> <INDENT> self.ajax_url = ajax_url <NEW_LINE> super(AjaxStringLookupWidget, self).__init__(*args, **kwargs) | Widget for a string lookup with suggestions | 62598fb0cc0a2c111447b058 |
class MessageForm(forms.ModelForm): <NEW_LINE> <INDENT> user_to = AutoCompleteField("usernames", required=False, help_text=None, label="To") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Message <NEW_LINE> fields = ["title", "message"] <NEW_LINE> <DEDENT> def clean_user_to(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = self.cleaned_data["user_to"] <NEW_LINE> return User.objects.get(username=data) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> raise forms.ValidationError("Please enter an existing username") | The form a user fills in when creating a new message | 62598fb030dc7b766599f893 |
class Camera(object): <NEW_LINE> <INDENT> def __init__(self, width, height): <NEW_LINE> <INDENT> self.state = pygame.Rect(0, 0, width, height) <NEW_LINE> <DEDENT> def apply(self, target): <NEW_LINE> <INDENT> return target.rect.move(self.state.topleft) <NEW_LINE> <DEDENT> def update(self, target): <NEW_LINE> <INDENT> self.state = self.playerCamera(self.state, target.rect) <NEW_LINE> <DEDENT> def playerCamera(self, level, target_rect): <NEW_LINE> <INDENT> xcoord = target_rect[0] <NEW_LINE> ycoord = target_rect[1] <NEW_LINE> xlength = level[2] <NEW_LINE> ylength = level[3] <NEW_LINE> xcoord = -xcoord + (windowWidth/2) <NEW_LINE> ycoord = -ycoord + (windowHeight/2) <NEW_LINE> if xcoord > -16: <NEW_LINE> <INDENT> xcoord = -16 <NEW_LINE> <DEDENT> if xcoord < -(level.width-windowWidth)+16: <NEW_LINE> <INDENT> xcoord = -(level.width-windowWidth)+16 <NEW_LINE> <DEDENT> if ycoord > 0: <NEW_LINE> <INDENT> ycoord = 0 <NEW_LINE> <DEDENT> if ycoord < -(level.height-windowHeight): <NEW_LINE> <INDENT> ycoord = -(level.height-windowHeight) <NEW_LINE> <DEDENT> return pygame.Rect(xcoord, ycoord, xlength, ylength) | classe qui gere l'affichage du niveau du jeu sur l'ecran qui est plus petit
| 62598fb0f548e778e596b5ea |
class HigherOrderFunctionTests(unittest.TestCase): <NEW_LINE> <INDENT> PATH = './sample/asm/functions/higher_order' <NEW_LINE> def testApply(self): <NEW_LINE> <INDENT> runTest(self, 'apply.asm', '25') <NEW_LINE> <DEDENT> def testApplyByMove(self): <NEW_LINE> <INDENT> runTest(self, 'apply_by_move.asm', '25') <NEW_LINE> <DEDENT> def testInvoke(self): <NEW_LINE> <INDENT> runTestSplitlines(self, 'invoke.asm', ['42', '42']) <NEW_LINE> <DEDENT> def testMap(self): <NEW_LINE> <INDENT> runTest(self, 'map.asm', [[1, 2, 3, 4, 5], [1, 4, 9, 16, 25]], 0, lambda o: [json.loads(i) for i in o.splitlines()]) <NEW_LINE> <DEDENT> def testMapVectorByMove(self): <NEW_LINE> <INDENT> runTest(self, 'map_vector_by_move.asm', [[1, 2, 3, 4, 5], [1, 4, 9, 16, 25]], 0, lambda o: [json.loads(i) for i in o.splitlines()]) <NEW_LINE> <DEDENT> def testFilter(self): <NEW_LINE> <INDENT> runTest(self, 'filter.asm', [[1, 2, 3, 4, 5], [2, 4]], 0, lambda o: [json.loads(i) for i in o.splitlines()]) <NEW_LINE> <DEDENT> def testFilterVectorByMove(self): <NEW_LINE> <INDENT> runTest(self, 'filter_vector_by_move.asm', [[1, 2, 3, 4, 5], [2, 4]], 0, lambda o: [json.loads(i) for i in o.splitlines()]) <NEW_LINE> <DEDENT> def testFilterByClosure(self): <NEW_LINE> <INDENT> runTest(self, 'filter_closure.asm', [[1, 2, 3, 4, 5], [2, 4]], 0, lambda o: [json.loads(i) for i in o.splitlines()], assembly_opts=('--no-sa',)) <NEW_LINE> <DEDENT> def testFilterByClosureVectorByMove(self): <NEW_LINE> <INDENT> runTest(self, 'filter_closure_vector_by_move.asm', [[1, 2, 3, 4, 5], [2, 4]], 0, lambda o: [json.loads(i) for i in o.splitlines()], assembly_opts=('--no-sa',)) <NEW_LINE> <DEDENT> def testTailcallOfObject(self): <NEW_LINE> <INDENT> runTestThrowsExceptionJSON(self, 'tailcall_of_object.asm', {'frame': {}, 'trace': ['main/0/0()', 'foo/0/0()',], 'uncaught': {'type': 'Integer', 'value': '42',}}, output_processing_function=lambda s: json.loads(s.strip())) <NEW_LINE> <DEDENT> def testTailcallOfClosure(self): <NEW_LINE> <INDENT> 
runTestThrowsExceptionJSON(self, 'tailcall_of_closure.asm', {'frame': {}, 'trace': ['main/0/0()', 'test/0/0()',], 'uncaught': {'type': 'Integer', 'value': '42',}}, assembly_opts=('--no-sa',), output_processing_function=lambda s: json.loads(s.strip())) | Tests for higher-order function support.
| 62598fb066673b3332c30412 |
class VraFactory(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def factory(object_type, customization_func=None, **kwargs): <NEW_LINE> <INDENT> config = VraConfig().config_file <NEW_LINE> if object_type == 'payload': <NEW_LINE> <INDENT> if all(k in kwargs for k in ("payload_version", "payload_type")): <NEW_LINE> <INDENT> str_version = str(kwargs['payload_version']) <NEW_LINE> payload_type = kwargs['payload_type'] <NEW_LINE> object_path = f'vra_sdk.models.vra_payload_{str_version}.{payload_type}' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise VraSdkFactoryException( "Error creating payload object. Missing required parameters") <NEW_LINE> <DEDENT> <DEDENT> elif object_type in config['business_models']: <NEW_LINE> <INDENT> object_path = config.get( 'business_models').get(object_type).get('path') <NEW_LINE> if not object_path: <NEW_LINE> <INDENT> raise VraSdkFactoryException( f"Error retrieving module_class for {object_type} object type.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise VraSdkConfigException( 'Error building vraObject, unknown type') <NEW_LINE> <DEDENT> _, object_class = get_module_class(object_path) <NEW_LINE> id_cards = inspect.signature(object_class).parameters <NEW_LINE> cleaned_kwargs = clean_kwargs_key(**kwargs) <NEW_LINE> if object_type != 'payload': <NEW_LINE> <INDENT> for kwarg in cleaned_kwargs: <NEW_LINE> <INDENT> if kwarg not in id_cards: <NEW_LINE> <INDENT> raise VraSdkConfigException( f"Error creating vraObject {object_type}, {kwarg} not authorized by the id card of {object_path}") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if object_type == 'payload': <NEW_LINE> <INDENT> return object_class(customization_func, **cleaned_kwargs) <NEW_LINE> <DEDENT> return object_class(**cleaned_kwargs) | Factory to create specific object class
| 62598fb05fcc89381b26616f |
class Tinder: <NEW_LINE> <INDENT> def __init__(self, facebook_id: AnyStr, facebook_token: AnyStr) -> None: <NEW_LINE> <INDENT> self.facebook_id = facebook_id <NEW_LINE> self.facebook_token = facebook_token <NEW_LINE> self.tinder = Api(facebook_id, facebook_token) <NEW_LINE> <DEDENT> def __repr__(self) -> AnyStr: <NEW_LINE> <INDENT> return "<Tinder(facebook_id: {0})>".format(self.facebook_id) <NEW_LINE> <DEDENT> async def prospective_matches(self, locale: AnyStr = "en-US") -> List[User]: <NEW_LINE> <INDENT> response = await self.tinder.prospective(locale) <NEW_LINE> res = [] <NEW_LINE> for result in response["results"]: <NEW_LINE> <INDENT> if result.get("type") == "user": <NEW_LINE> <INDENT> res.append(User(**result.get("user"))) <NEW_LINE> <DEDENT> <DEDENT> return res | Tinder API response handler.
| 62598fb016aa5153ce400549 |
class Port(object): <NEW_LINE> <INDENT> def __init__(self, device, name): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.name = name.replace(" ","_") <NEW_LINE> self.speed = None <NEW_LINE> self.type = None <NEW_LINE> self.l2adjacency = None <NEW_LINE> self.l3adjacency = None <NEW_LINE> self.address = None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{n} {s} {c} {a}".format(n=self.name, s=self.speed, c=self.l2adjacency, a=self.address) | switch or router port | 62598fb07b25080760ed74f6 |
class AnnotationJSONPresenter(AnnotationBasePresenter): <NEW_LINE> <INDENT> def __init__(self, annotation_resource, formatters=None): <NEW_LINE> <INDENT> super(AnnotationJSONPresenter, self).__init__(annotation_resource) <NEW_LINE> self._formatters = [] <NEW_LINE> if formatters is not None: <NEW_LINE> <INDENT> for formatter in formatters: <NEW_LINE> <INDENT> self._add_formatter(formatter) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _add_formatter(self, formatter): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> verifyObject(IAnnotationFormatter, formatter) <NEW_LINE> <DEDENT> except DoesNotImplement: <NEW_LINE> <INDENT> raise ValueError('formatter is not implementing IAnnotationFormatter interface') <NEW_LINE> <DEDENT> self._formatters.append(formatter) <NEW_LINE> <DEDENT> def asdict(self): <NEW_LINE> <INDENT> docpresenter = DocumentJSONPresenter(self.annotation.document) <NEW_LINE> base = { 'id': self.annotation.id, 'created': self.created, 'updated': self.updated, 'user': self.annotation.userid, 'uri': self.annotation.target_uri, 'text': self.text, 'tags': self.tags, 'group': self.annotation.groupid, 'permissions': self.permissions, 'target': self.target, 'document': docpresenter.asdict(), 'links': self.links, } <NEW_LINE> if self.annotation.references: <NEW_LINE> <INDENT> base['references'] = self.annotation.references <NEW_LINE> <DEDENT> annotation = copy.copy(self.annotation.extra) or {} <NEW_LINE> annotation.update(base) <NEW_LINE> for formatter in self._formatters: <NEW_LINE> <INDENT> annotation.update(formatter.format(self.annotation_resource)) <NEW_LINE> <DEDENT> return annotation <NEW_LINE> <DEDENT> @property <NEW_LINE> def permissions(self): <NEW_LINE> <INDENT> read = self.annotation.userid <NEW_LINE> if self.annotation.shared: <NEW_LINE> <INDENT> read = 'group:{}'.format(self.annotation.groupid) <NEW_LINE> principals = security.principals_allowed_by_permission( self.annotation_resource, 'read') <NEW_LINE> if security.Everyone in principals: <NEW_LINE> 
<INDENT> read = 'group:__world__' <NEW_LINE> <DEDENT> <DEDENT> return {'read': [read], 'admin': [self.annotation.userid], 'update': [self.annotation.userid], 'delete': [self.annotation.userid]} | Present an annotation in the JSON format returned by API requests. | 62598fb07d43ff2487427425 |
class TonalCertainty(featuresModule.FeatureExtractor): <NEW_LINE> <INDENT> id = 'K1' <NEW_LINE> def __init__(self, dataOrStream=None, *arguments, **keywords): <NEW_LINE> <INDENT> featuresModule.FeatureExtractor.__init__(self, dataOrStream=dataOrStream, *arguments, **keywords) <NEW_LINE> self.name = 'Tonal Certainty' <NEW_LINE> self.description = 'A floating point magnitude value that suggest tonal certainty based on automatic key analysis.' <NEW_LINE> self.dimensions = 1 <NEW_LINE> self.discrete = False <NEW_LINE> <DEDENT> def _process(self): <NEW_LINE> <INDENT> self._feature.vector[0] = self.data['flat.tonalCertainty'] | >>> s = corpus.parse('bwv66.6')
>>> fe = features.native.TonalCertainty(s)
>>> f = fe.extract()
>>> f.vector
[1.26...]
>>> pitches = [56, 55, 56, 57, 58, 57, 58, 59, 60, 59, 60, 61, 62, 61, 62, 63, 64, 63, 64, 65, 66, 65, 66, 67]
>>> s = stream.Stream()
>>> for pitch in pitches:
... s.append(note.Note(pitch))
>>> features.native.TonalCertainty(s).extract().vector
[0.0] | 62598fb066656f66f7d5a436 |
class ScriptMinimalDummy(Script): <NEW_LINE> <INDENT> _DEFAULT_SETTINGS = [ Parameter('execution_time', 0.1, float, 'execution time of script (s)') ] <NEW_LINE> _INSTRUMENTS = {} <NEW_LINE> _SCRIPTS = {} <NEW_LINE> def __init__(self, name=None, settings=None, log_function = None, data_path = None): <NEW_LINE> <INDENT> Script.__init__(self, name, settings, log_function= log_function, data_path = data_path) <NEW_LINE> <DEDENT> def _function(self): <NEW_LINE> <INDENT> import time <NEW_LINE> time.sleep(self.settings['execution_time']) | Minimal Example Script that has only a single parameter (execution time)
| 62598fb0dd821e528d6d8f7b |
class CGetRSPMessage(DIMSEResponseMessage, StatusMixin): <NEW_LINE> <INDENT> command_field = 0x8010 <NEW_LINE> command_fields = ['CommandGroupLength', 'AffectedSOPClassUID', 'MessageIDBeingRespondedTo', 'Status', 'NumberOfRemainingSuboperations', 'NumberOfCompletedSuboperations', 'NumberOfFailedSuboperations', 'NumberOfWarningSuboperations'] <NEW_LINE> num_of_remaining_sub_ops = dimse_property((0x0000, 0x1020)) <NEW_LINE> num_of_completed_sub_ops = dimse_property((0x0000, 0x1021)) <NEW_LINE> num_of_failed_sub_ops = dimse_property((0x0000, 0x1022)) <NEW_LINE> num_of_warning_sub_ops = dimse_property((0x0000, 0x1023)) | C-GET-RSP Message.
Complete definition can be found in DICOM PS3.7, 9.3.3.2 C-GET-RSP | 62598fb0fff4ab517ebcd82c |
class EquivalentModel(Model): <NEW_LINE> <INDENT> number_of_no_data_constraints = None <NEW_LINE> def setup(self, model, dependent_uncertainties, setting): <NEW_LINE> <INDENT> data_constraints, no_data_constraints = [], [] <NEW_LINE> for i, p in enumerate(model.unsubbed): <NEW_LINE> <INDENT> equivalent_p = EquivalentPosynomials(p, i, setting.get('simpleModel'), dependent_uncertainties) <NEW_LINE> no_data, data = equivalent_p.no_data_constraints, equivalent_p.data_constraints <NEW_LINE> data_constraints += data <NEW_LINE> no_data_constraints += no_data <NEW_LINE> <DEDENT> self.number_of_no_data_constraints = len(no_data_constraints) <NEW_LINE> self.cost = model.cost <NEW_LINE> return [no_data_constraints, data_constraints] <NEW_LINE> <DEDENT> def get_number_of_no_data_constraints(self): <NEW_LINE> <INDENT> return self.number_of_no_data_constraints | A class that generates models that are equivalent to the original models and ready to be
robustified. | 62598fb026068e7796d4c99c |
class is_standard_module_tc(ModutilsTestCase): <NEW_LINE> <INDENT> def test_knownValues_is_standard_module_builtins(self): <NEW_LINE> <INDENT> if sys.version_info < (3, 0): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('__builtin__'), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('builtins'), False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('__builtin__'), False) <NEW_LINE> self.assertEqual(modutils.is_standard_module('builtins'), True) <NEW_LINE> <DEDENT> <DEDENT> def test_knownValues_is_standard_module_builtin(self): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('sys'), True) <NEW_LINE> <DEDENT> def test_knownValues_is_standard_module_nonstandard(self): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('logilab'), False) <NEW_LINE> <DEDENT> def test_knownValues_is_standard_module_unknown(self): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('unknown'), False) <NEW_LINE> <DEDENT> def test_knownValues_is_standard_module_4(self): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('marshal'), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('hashlib'), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('pickle'), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('email'), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('io'), sys.version_info >= (2, 6)) <NEW_LINE> self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) <NEW_LINE> <DEDENT> def test_knownValues_is_standard_module_custom_path(self): <NEW_LINE> <INDENT> self.assertEqual(modutils.is_standard_module('data.module', (DATADIR,)), True) <NEW_LINE> self.assertEqual(modutils.is_standard_module('data.module', (path.abspath(DATADIR),)), True) | return true if the module may be considered as a module from the standard
library | 62598fb05166f23b2e243420 |
class UpdateListings(OpsActor): <NEW_LINE> <INDENT> public = True <NEW_LINE> class Schema(mm.Schema): <NEW_LINE> <INDENT> query = mmf.Dict(missing=dict, title='Listing query') <NEW_LINE> <DEDENT> def perform(self, query=None): <NEW_LINE> <INDENT> vendors = filter_with_json(Vendor.query, {'listings': query}) <NEW_LINE> for vendor in vendors: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> message = vendor.extension.message('UpdateListings') <NEW_LINE> <DEDENT> except (ValueError, AttributeError): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.context.bind(message) | Update all listings in the given query, if their vendor extension provides an UpdateListings action. | 62598fb0be7bc26dc9251e80 |
class AlreadyUnlocked(UnlockError): <NEW_LINE> <INDENT> pass | Raised when an attempt is made to unlock an unlocked file. | 62598fb08da39b475be0322d |
class pageo(pageol): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() | Store and unpack O page data.
- Output power
@memo とりあえずscalingsはpending
@todo OK
@memo format
[0]: 'O'
[1]: internal time
[2-5]: time
[6-8]: ch0
[9-11]: ch1
[12-14]: ch2
[15-17]: ch3
[18-20]: ch0
[21-23]: ch1
[24-26]: ch2
[27-29]: ch3
[30-31]: LQI | 62598fb04428ac0f6e65856d |
class AsynchronousWrapper(Wrapper): <NEW_LINE> <INDENT> def __init__(self, env: Env) -> None: <NEW_LINE> <INDENT> super().__init__(env) <NEW_LINE> self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) <NEW_LINE> self._futures = [] <NEW_LINE> <DEDENT> def _wait(self): <NEW_LINE> <INDENT> if len(self._futures) > 0: <NEW_LINE> <INDENT> for future in self._futures: <NEW_LINE> <INDENT> future.result() <NEW_LINE> <DEDENT> self._futures = [] <NEW_LINE> <DEDENT> <DEDENT> def observe(self) -> Tuple[Any, Any, Any]: <NEW_LINE> <INDENT> self._wait() <NEW_LINE> return self.env.observe() <NEW_LINE> <DEDENT> def get_info(self) -> List[Dict]: <NEW_LINE> <INDENT> self._wait() <NEW_LINE> return self.env.get_info() <NEW_LINE> <DEDENT> def act(self, ac: Any) -> None: <NEW_LINE> <INDENT> future = self._executor.submit(self.env.act, ac) <NEW_LINE> self._futures.append(future) | For environments with a synchronous act() function, run act() asynchronously on a
separate thread.
:param env: environment to wrap | 62598fb0498bea3a75a57b67 |
class Handler(webapp2.RequestHandler): <NEW_LINE> <INDENT> def renderError(self, error_code): <NEW_LINE> <INDENT> self.error(error_code) <NEW_LINE> self.response.write("Oops! Something went wrong.") <NEW_LINE> <DEDENT> def login_user(self, user): <NEW_LINE> <INDENT> user_id = user.key().id() <NEW_LINE> self.set_secure_cookie('user_id', str(user_id)) <NEW_LINE> <DEDENT> def logout_user(self): <NEW_LINE> <INDENT> self.set_secure_cookie('user_id', '') <NEW_LINE> <DEDENT> def read_secure_cookie(self, name): <NEW_LINE> <INDENT> cookie_val = self.request.cookies.get(name) <NEW_LINE> if cookie_val: <NEW_LINE> <INDENT> return hashutils.check_secure_val(cookie_val) <NEW_LINE> <DEDENT> <DEDENT> def set_secure_cookie(self, name, val): <NEW_LINE> <INDENT> cookie_val = hashutils.make_secure_val(val) <NEW_LINE> self.response.headers.add_header('Set-Cookie', '%s=%s; Path=/' % (name, cookie_val)) <NEW_LINE> <DEDENT> def initialize(self, *a, **kw): <NEW_LINE> <INDENT> webapp2.RequestHandler.initialize(self, *a, **kw) <NEW_LINE> uid = self.read_secure_cookie('user_id') <NEW_LINE> self.user = uid and User.get_by_id(int(uid)) <NEW_LINE> if not self.user and self.request.path not in allowed_routes: <NEW_LINE> <INDENT> self.redirect('/login') <NEW_LINE> <DEDENT> <DEDENT> def get_user_by_name(self, username): <NEW_LINE> <INDENT> user = db.GqlQuery("SELECT * from User WHERE username = '%s'" % username) <NEW_LINE> if user: <NEW_LINE> <INDENT> return user.get() | A base RequestHandler class for our app.
The other handlers inherit form this one. | 62598fb0e1aae11d1e7ce847 |
class ResourceError(testpool.core.exceptions.TestpoolError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> super(testpool.core.exceptions.TestpoolError, self).__init__(message) | Thrown when a resource is not available. | 62598fb0e5267d203ee6b950 |
class ListOfExpressions(Subparser): <NEW_LINE> <INDENT> def parse(self, parser, tokens): <NEW_LINE> <INDENT> items = [] <NEW_LINE> while not tokens.is_end(): <NEW_LINE> <INDENT> exp = Expression().parse(parser, tokens) <NEW_LINE> if exp != None: <NEW_LINE> <INDENT> items.append(exp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if tokens.current().name == 'COMMA': <NEW_LINE> <INDENT> tokens.consume_expected('COMMA') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return items | list_of_expr: (expr COMMA)* | 62598fb0009cb60464d01568 |
class InvalidRubricSelection(Exception): <NEW_LINE> <INDENT> pass | The specified criterion/option do not exist in the rubric. | 62598fb0b7558d5895463672 |
class Transformation(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def transform(cls, transformation, vector): <NEW_LINE> <INDENT> assert isinstance(transformation, np.ndarray) <NEW_LINE> assert isinstance(vector, PVector) <NEW_LINE> assert all([len(vector) == dimension for dimension in transformation.shape]) <NEW_LINE> response = np.matmul(transformation, vector) <NEW_LINE> return PVector(*response) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rotate_x(cls, theta, vector): <NEW_LINE> <INDENT> assert isinstance(vector, PVector) <NEW_LINE> transformation = np.array([[1, 0, 0 ], [0, cos(theta), -sin(theta) ], [0, sin(theta), cos(theta) ]]) <NEW_LINE> return cls.transform(transformation, vector) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rotate_y(cls, theta, vector): <NEW_LINE> <INDENT> assert isinstance(vector, PVector) <NEW_LINE> transformation = np.array([[cos(theta), 0, sin(theta) ], [0, 1, 0 ], [-sin(theta), 0, cos(theta) ]]) <NEW_LINE> return cls.transform(transformation, vector) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rotate_z(cls, theta, vector): <NEW_LINE> <INDENT> assert isinstance(vector, PVector) <NEW_LINE> transformation = np.array([[cos(theta), -sin(theta), 0 ], [sin(theta), cos(theta), 0 ], [0, 0, 1 ]]) <NEW_LINE> return cls.transform(transformation, vector) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def transpose(cls, offset, vector): <NEW_LINE> <INDENT> return PVector(*[sum(elems) for elems in zip(offset, vector)]) | Classes for transforming PVectors | 62598fb0796e427e5384e7dc |
class Redis(StrictRedis): <NEW_LINE> <INDENT> RESPONSE_CALLBACKS = dict_merge( StrictRedis.RESPONSE_CALLBACKS, { 'TTL': lambda r: r >= 0 and r or None, 'PTTL': lambda r: r >= 0 and r or None, } ) <NEW_LINE> def pipeline(self, transaction=True, shard_hint=None): <NEW_LINE> <INDENT> return Pipeline( self.db, self.response_callbacks, transaction, shard_hint) <NEW_LINE> <DEDENT> def setex(self, name, value, time): <NEW_LINE> <INDENT> if isinstance(time, datetime.timedelta): <NEW_LINE> <INDENT> time = time.seconds + time.days * 24 * 3600 <NEW_LINE> <DEDENT> return self.execute_command('SETEX', name, time, value) <NEW_LINE> <DEDENT> def lrem(self, name, value, num=0): <NEW_LINE> <INDENT> return self.execute_command('LREM', name, num, value) <NEW_LINE> <DEDENT> def zadd(self, name, *args, **kwargs): <NEW_LINE> <INDENT> pieces = [] <NEW_LINE> if args: <NEW_LINE> <INDENT> if len(args) % 2 != 0: <NEW_LINE> <INDENT> raise RedisError("ZADD requires an equal number of " "values and scores") <NEW_LINE> <DEDENT> pieces.extend(reversed(args)) <NEW_LINE> <DEDENT> for pair in iteritems(kwargs): <NEW_LINE> <INDENT> pieces.append(pair[1]) <NEW_LINE> pieces.append(pair[0]) <NEW_LINE> <DEDENT> return self.execute_command('ZADD', name, *pieces) | Provides backwards compatibility with older versions of redis-py that
changed arguments to some commands to be more Pythonic, sane, or by
accident. | 62598fb0be8e80087fbbf0ac |
class CoattentionNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, vocab_size, embedding_dim, max_len,answer_vocab=1000): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.word_embeddings = nn.Embedding(vocab_size, embedding_dim, padding_idx=0) <NEW_LINE> self.unigram = nn.Conv1d(embedding_dim,embedding_dim,1) <NEW_LINE> self.bigram = nn.Conv1d(embedding_dim,embedding_dim,2) <NEW_LINE> self.trigram = nn.Conv1d(embedding_dim,embedding_dim,3) <NEW_LINE> self.maxp = nn.MaxPool2d(kernel_size=(3,1)) <NEW_LINE> self.lstm = nn.LSTM(embedding_dim, embedding_dim) <NEW_LINE> k = 512 <NEW_LINE> self.word_parallel = ParallelCoattention(D=embedding_dim, k=k) <NEW_LINE> self.phrase_parallel = ParallelCoattention(D=embedding_dim, k=k) <NEW_LINE> self.sentence_parallel = ParallelCoattention(D=embedding_dim, k=k) <NEW_LINE> self.dp = nn.Dropout(p=0.5) <NEW_LINE> self.tanh = nn.Tanh() <NEW_LINE> self.softm = nn.Softmax() <NEW_LINE> self.lin_w = nn.Linear(512,512) <NEW_LINE> self.lin_p = nn.Linear(1024,512) <NEW_LINE> self.lin_s = nn.Linear(1024,1024) <NEW_LINE> self.lin_h = nn.Linear(1024,answer_vocab) <NEW_LINE> <DEDENT> def forward(self, image, question_encoding): <NEW_LINE> <INDENT> image = image.view(image.shape[0],image.shape[1],-1) <NEW_LINE> word_embeddings = self.word_embeddings(question_encoding) <NEW_LINE> word_embeddings = word_embeddings.transpose(2,1) <NEW_LINE> unigram = self.tanh(self.unigram(word_embeddings)) <NEW_LINE> bigram = self.tanh(self.bigram(torch.cat((word_embeddings, torch.zeros(word_embeddings.shape[0],word_embeddings.shape[1],1).cuda()), dim=2))) <NEW_LINE> trigram = self.tanh(self.trigram(torch.cat((word_embeddings, torch.zeros(word_embeddings.shape[0],word_embeddings.shape[1],2).cuda()), dim=2))) <NEW_LINE> kilogram = torch.cat(( unigram.view(unigram.shape[0],unigram.shape[1],1,unigram.shape[2]), bigram.view(bigram.shape[0],bigram.shape[1],1,bigram.shape[2]), trigram.view(trigram.shape[0],trigram.shape[1],1,trigram.shape[2]) ), dim=2) 
<NEW_LINE> kilogram = self.maxp(kilogram) <NEW_LINE> kilogram = kilogram.view(kilogram.shape[0],kilogram.shape[1],kilogram.shape[3]) <NEW_LINE> q_s, (h_n,c_n) = self.lstm(kilogram.permute(2,0,1)) <NEW_LINE> q_s = q_s.permute(1,2,0) <NEW_LINE> f_w = self.word_parallel(image, word_embeddings) <NEW_LINE> f_p = self.phrase_parallel(image, kilogram) <NEW_LINE> f_s = self.sentence_parallel(image, q_s) <NEW_LINE> h_w = self.tanh(self.lin_w(f_w)) <NEW_LINE> h_p = self.tanh(self.lin_p(torch.cat((f_p,h_w),dim=1))) <NEW_LINE> h_s = self.tanh(self.lin_s(torch.cat((f_s,h_p),dim=1))) <NEW_LINE> p = self.lin_h(h_s) <NEW_LINE> return p | Predicts an answer to a question about an image using the Hierarchical Question-Image Co-Attention
for Visual Question Answering (Lu et al, 2017) paper. | 62598fb0adb09d7d5dc0a5d2 |
class Groups(generics.ListAPIView): <NEW_LINE> <INDENT> queryset = Group.objects.all() <NEW_LINE> pagination_class = PageNumberPagination <NEW_LINE> serializer_class = GroupSerializer | API фотоальбомов. | 62598fb0fff4ab517ebcd82d |
class isDirectory_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'path', None, None, ), ) <NEW_LINE> def __init__(self, path=None,): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.path = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('isDirectory_args') <NEW_LINE> if self.path is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('path', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.path) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) 
<NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- path | 62598fb0aad79263cf42e81b |
class IMessageSendingTest(Interface): <NEW_LINE> <INDENT> email = schema.Email( title=_(u'Email', default='Email'), description=_( u'email_sendingtest_description', default=u'Email to send the test message', ), required=True, ) | define field for sending test of message | 62598fb0a219f33f346c685d |
class PS3JoystickOld(Joystick): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PS3JoystickOld, self).__init__(*args, **kwargs) <NEW_LINE> self.axis_names = { 0x00: 'left_stick_horz', 0x01: 'left_stick_vert', 0x02: 'right_stick_horz', 0x05: 'right_stick_vert', 0x1a: 'tilt_x', 0x1b: 'tilt_y', 0x3d: 'tilt_a', 0x3c: 'tilt_b', 0x32: 'L1_pressure', 0x33: 'R1_pressure', 0x31: 'R2_pressure', 0x30: 'L2_pressure', 0x36: 'cross_pressure', 0x35: 'circle_pressure', 0x37: 'square_pressure', 0x34: 'triangle_pressure', 0x2d: 'dpad_r_pressure', 0x2e: 'dpad_d_pressure', 0x2c: 'dpad_u_pressure', } <NEW_LINE> self.button_names = { 0x120: 'select', 0x123: 'start', 0x2c0: 'PS', 0x12a: 'L1', 0x12b: 'R1', 0x128: 'L2', 0x129: 'R2', 0x121: 'L3', 0x122: 'R3', 0x12c: "triangle", 0x12d: "circle", 0x12e: "cross", 0x12f: 'square', 0x124: 'dpad_up', 0x126: 'dpad_down', 0x127: 'dpad_left', 0x125: 'dpad_right', } | An interface to a physical PS3 joystick available at /dev/input/js0
Contains mapping that worked for Raspian Jessie drivers | 62598fb044b2445a339b6995 |
class AnimalDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Animal.objects.all() <NEW_LINE> serializer_class = AnimalSerializer <NEW_LINE> name = 'animal-details' | Class that inherits from:
RetrieveUpdateDestroyAPIView: GET / PUT / DELETE
on a single object (detail) | 62598fb0091ae35668704c68 |
class Board: <NEW_LINE> <INDENT> def __init__(self, secret): <NEW_LINE> <INDENT> self.board = ['_'] * len(secret) <NEW_LINE> self.guessed = [] <NEW_LINE> self._secret = secret <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '< ' + " ".join(self.word()) + " : " + ",".join(self.guesses()) + ' >' <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.word_len() <NEW_LINE> <DEDENT> def word_len(self): <NEW_LINE> <INDENT> return len(self.board) <NEW_LINE> <DEDENT> def guess(self, char): <NEW_LINE> <INDENT> self.guessed += char <NEW_LINE> char_positions = self._secret.match(char) <NEW_LINE> for pos in char_positions: <NEW_LINE> <INDENT> self.board[pos] = char <NEW_LINE> <DEDENT> return len(char_positions) <NEW_LINE> <DEDENT> def word(self): <NEW_LINE> <INDENT> return self.board <NEW_LINE> <DEDENT> def guesses(self): <NEW_LINE> <INDENT> return self.guessed <NEW_LINE> <DEDENT> def hits(self): <NEW_LINE> <INDENT> correct_chars = [correct for correct in self.board if correct != '_'] <NEW_LINE> return list(set(correct_chars)) <NEW_LINE> <DEDENT> def misses(self): <NEW_LINE> <INDENT> return [wrong for wrong in self.guesses() if wrong not in self.board] <NEW_LINE> <DEDENT> def done(self): <NEW_LINE> <INDENT> if '_' not in self.board: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif len(self.misses()) == Board.max_miss: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> max_miss = 11 <NEW_LINE> def miss_man(missed): <NEW_LINE> <INDENT> missed = min(missed, Board.max_miss) <NEW_LINE> return "assets/man{0}.txt".format(missed) <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> missed = len(self.misses()) <NEW_LINE> path = Board.miss_man(missed) <NEW_LINE> with open(path) as fp: <NEW_LINE> <INDENT> symbol = fp.read() <NEW_LINE> <DEDENT> print(symbol) <NEW_LINE> print(self.word()) <NEW_LINE> print("Guessed chars: ", self.guesses()) | Board for hangman with 
attributes board and guessed.
Attributes:
board - list of correct characters or "_" in the secret word
guessed - list of characters guessed so far
>>> from secret import SecretWord
>>> b = Board(SecretWord("bookkeeper"))
>>> len(b)
10
>>> b.guess('o')
2
>>> b
< _ o o _ _ _ _ _ _ _ : o >
>>> b.done()
False
>>> b.guess('k')
2
>>> b
< _ o o k k _ _ _ _ _ : o,k >
>>> b.guess('j')
0
>>> b
< _ o o k k _ _ _ _ _ : o,k,j >
>>> b.word()
['_', 'o', 'o', 'k', 'k', '_', '_', '_', '_', '_']
>>> b.guesses()
['o', 'k', 'j'] | 62598fb05fcc89381b266170 |
class DuplicateRegistrationError(Exception): <NEW_LINE> <INDENT> pass | A Node already has a registration.
| 62598fb07d847024c075c40b |
class Equal(_EqualityOperator, SympyComparison): <NEW_LINE> <INDENT> operator = '==' <NEW_LINE> grouping = 'None' <NEW_LINE> sympy_name = 'Eq' <NEW_LINE> @staticmethod <NEW_LINE> def _op(x): <NEW_LINE> <INDENT> return x | <dl>
<dt>'Equal[$x$, $y$]'
<dt>'$x$ == $y$'
<dd>yields 'True' if $x$ and $y$ are known to be equal, or
'False' if $x$ and $y$ are known to be unequal.
<dt>'$lhs$ == $rhs$'
<dd>represents the equation $lhs$ = $rhs$.
</dl>
>> a==a
= True
>> a==b
= a == b
>> 1==1.
= True
Lists are compared based on their elements:
>> {{1}, {2}} == {{1}, {2}}
= True
>> {1, 2} == {1, 2, 3}
= False
Real values are considered equal if they only differ in their last digits:
>> 0.739085133215160642 == 0.739085133215160641
= True
>> 0.73908513321516064200000000 == 0.73908513321516064100000000
= False
## TODO Needs power precision tracking
## >> 0.1 ^ 10000 == 0.1 ^ 10000 + 0.1 ^ 10012
## = False
## >> 0.1 ^ 10000 == 0.1 ^ 10000 + 0.1 ^ 10013
## = True
#> 0.1111111111111111 == 0.1111111111111126
= True
#> 0.1111111111111111 == 0.1111111111111127
= False
## TODO needs better precision tracking
## #> 2^^1.000000000000000000000000000000000000000000000000000000000000 == 2^^1.000000000000000000000000000000000000000000000000000001111111
## = True
## #> 2^^1.000000000000000000000000000000000000000000000000000000000000 == 2^^1.000000000000000000000000000000000000000000000000000010000000
## = False
Comparisons are done using the lower precision:
>> N[E, 100] == N[E, 150]
= True
Symbolic constants are compared numerically:
>> E > 1
= True
>> Pi == 3.14
= False
#> Pi ^ E == E ^ Pi
= False
#> N[E, 3] == N[E]
= True
#> {1, 2, 3} < {1, 2, 3}
= {1, 2, 3} < {1, 2, 3}
#> E == N[E]
= True
## Issue260
#> {Equal[Equal[0, 0], True], Equal[0, 0] == True}
= {True, True}
#> {Mod[6, 2] == 0, Mod[6, 4] == 0, (Mod[6, 2] == 0) == (Mod[6, 4] == 0), (Mod[6, 2] == 0) != (Mod[6, 4] == 0)}
= {True, False, False, True}
#> a == a == a
= True
#> {Equal[], Equal[x], Equal[1]}
= {True, True, True} | 62598fb032920d7e50bc609c |
class ContactUsView(FormView): <NEW_LINE> <INDENT> template_name = 'app/2_contact_us.html' <NEW_LINE> form_class = ContactUsForm <NEW_LINE> success_url = "/" <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.contact_us_limit = False <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> if self.contact_us_limit: <NEW_LINE> <INDENT> context['cu_limit'] = True <NEW_LINE> self.contact_us_limit = False <NEW_LINE> <DEDENT> return context <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if 'send' in request.POST: <NEW_LINE> <INDENT> if request.session.get('cu_left_count', False): <NEW_LINE> <INDENT> cu_left_count = int(request.session.get('cu_left_count')) <NEW_LINE> if datetime.strptime(request.session['cu_last_date'], '%d/%m/%Y').date() != date.today(): <NEW_LINE> <INDENT> cu_left_count = 0 <NEW_LINE> <DEDENT> if cu_left_count >= 5: <NEW_LINE> <INDENT> self.contact_us_limit = True <NEW_LINE> return self.get(request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.session['cu_left_count'] = cu_left_count + 1 <NEW_LINE> request.session['cu_last_date'] = datetime.now().strftime('%d/%m/%Y') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> request.session['cu_left_count'] = 1 <NEW_LINE> request.session['cu_last_date'] = datetime.now().strftime('%d/%m/%Y') <NEW_LINE> <DEDENT> data = request.POST <NEW_LINE> q = ContactUsModel() <NEW_LINE> q.author_email = data['author_email'] <NEW_LINE> q.author_name = data['author_name'] <NEW_LINE> q.message = data['message'] <NEW_LINE> q.save(q) <NEW_LINE> return redirect('app:contact_us_success') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('app:index_page')) | View that is responsible for feedback page | 62598fb0851cf427c66b8304 |
class WAR_Game(object): <NEW_LINE> <INDENT> def __init__(self, names): <NEW_LINE> <INDENT> self.players = [] <NEW_LINE> for name in names: <NEW_LINE> <INDENT> player = WAR_Player(name) <NEW_LINE> self.players.append(player) <NEW_LINE> <DEDENT> self.dealer = WAR_Dealer("Дилер") <NEW_LINE> self.deck = WAR_Deck() <NEW_LINE> self.deck.populate() <NEW_LINE> self.deck.shuffle() <NEW_LINE> <DEDENT> @property <NEW_LINE> def still_playing(self): <NEW_LINE> <INDENT> sp = [] <NEW_LINE> for player in self.players: <NEW_LINE> <INDENT> if not player.is_busted(): <NEW_LINE> <INDENT> sp.append(player) <NEW_LINE> <DEDENT> <DEDENT> return sp <NEW_LINE> <DEDENT> def __additional_cards(self, player): <NEW_LINE> <INDENT> while not player.is_busted() and player.is_hitting(): <NEW_LINE> <INDENT> self.deck.deal([player]) <NEW_LINE> print(player) <NEW_LINE> if player.is_busted(): <NEW_LINE> <INDENT> player.bust() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def play(self): <NEW_LINE> <INDENT> self.deck.deal(self.players + [self.dealer], per_hand = 2) <NEW_LINE> self.dealer.flip_first_card() <NEW_LINE> for player in self.players: <NEW_LINE> <INDENT> print(player) <NEW_LINE> <DEDENT> print(self.dealer) <NEW_LINE> for player in self.players: <NEW_LINE> <INDENT> self.__additional_cards(player) <NEW_LINE> <DEDENT> self.dealer.flip_first_card() <NEW_LINE> if not self.still_playing: <NEW_LINE> <INDENT> print(self.dealer) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(self.dealer) <NEW_LINE> self.__additional_cards(self.dealer) <NEW_LINE> if self.dealer.is_busted(): <NEW_LINE> <INDENT> for player in self.still_playing: <NEW_LINE> <INDENT> player.win() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for player in self.still_playing: <NEW_LINE> <INDENT> if player.total > self.dealer.total: <NEW_LINE> <INDENT> player.win() <NEW_LINE> <DEDENT> elif player.total < self.dealer.total: <NEW_LINE> <INDENT> player.lose() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> player.push() <NEW_LINE> <DEDENT> 
<DEDENT> <DEDENT> <DEDENT> for player in self.players: <NEW_LINE> <INDENT> player.clear() <NEW_LINE> <DEDENT> self.dealer.clear() | Игра в "Очко". | 62598fb03d592f4c4edbaf09 |
@pytest.mark.skipif(openmm_missing, reason='OpenMM and openmmtools are not installed') <NEW_LINE> class TestArgonTemperingSampler(object): <NEW_LINE> <INDENT> def test_initialization(self): <NEW_LINE> <INDENT> nparticles = 1000 <NEW_LINE> temperature_ladder = np.linspace(300.0, 500.0, 20) <NEW_LINE> biases = np.arange(len(temperature_ladder)) <NEW_LINE> sampler = ArgonTemperingSampler(nparticles, temperature_ladder, biases) <NEW_LINE> <DEDENT> def test_sampler(self): <NEW_LINE> <INDENT> sampler = ArgonTemperingSampler(100, np.linspace(300.0, 400.0, 20)) <NEW_LINE> sampler.sample(nsteps=10, niterations=5, save_freq=1) <NEW_LINE> <DEDENT> def test_current_state(self): <NEW_LINE> <INDENT> sampler = ArgonTemperingSampler(100, np.linspace(300.0, 400.0, 20)) <NEW_LINE> current_state = sampler.sample(nsteps=1, niterations=1, save_freq=1) <NEW_LINE> assert np.where(current_state == 1)[0] == sampler.state <NEW_LINE> <DEDENT> def test_histogram_counts(self): <NEW_LINE> <INDENT> niterations = 5 <NEW_LINE> sampler = ArgonTemperingSampler(100, np.linspace(300.0, 400.0, 20)) <NEW_LINE> sampler.sample(nsteps=1, niterations=niterations, save_freq=1) <NEW_LINE> assert np.sum(sampler.histogram) == niterations <NEW_LINE> <DEDENT> def test_sample_state_coverage(self): <NEW_LINE> <INDENT> nstates = 5 <NEW_LINE> sampler = ArgonTemperingSampler(100, np.repeat(300.0, nstates)) <NEW_LINE> sampler.sample(nsteps=1, niterations=100, save_freq=1) <NEW_LINE> assert np.sum(sampler.histogram > 1) == len(sampler.histogram) <NEW_LINE> <DEDENT> def test_reset_statistics(self): <NEW_LINE> <INDENT> sampler = ArgonTemperingSampler(100, np.linspace(300.0, 400.0, 20)) <NEW_LINE> sampler.sample(nsteps=1, niterations=5, save_freq=1) <NEW_LINE> sampler.reset_statistics() <NEW_LINE> assert np.sum(sampler.histogram) == 0 and sampler.nmoves == 0 <NEW_LINE> <DEDENT> def test_reduced_potential_scaling(self): <NEW_LINE> <INDENT> sampler = ArgonTemperingSampler(100, np.linspace(300.0, 400.0, 20)) <NEW_LINE> u = 
sampler.reduced_potential() <NEW_LINE> assert np.all(np.diff(u) < 0) | A set of tests for the simulated tempering example of an argon gas. | 62598fb02c8b7c6e89bd380e |
class ShelterInspectionRepresent(S3Represent): <NEW_LINE> <INDENT> def __init__(self, show_link=False): <NEW_LINE> <INDENT> super(ShelterInspectionRepresent, self).__init__(lookup = "cr_shelter_inspection", show_link = show_link, ) <NEW_LINE> <DEDENT> def link(self, k, v, row=None): <NEW_LINE> <INDENT> if row: <NEW_LINE> <INDENT> inspection_id = row.cr_shelter_inspection.id <NEW_LINE> if inspection_id: <NEW_LINE> <INDENT> return A(v, _href=URL(c = "cr", f = "shelter_inspection", args = [inspection_id], ), ) <NEW_LINE> <DEDENT> <DEDENT> return v <NEW_LINE> <DEDENT> def represent_row(self, row): <NEW_LINE> <INDENT> return "%(date)s: %(unit)s" % {"unit": row.cr_shelter_unit.name, "date": row.cr_shelter_inspection.date, } <NEW_LINE> <DEDENT> def lookup_rows(self, key, values, fields=None): <NEW_LINE> <INDENT> s3db = current.s3db <NEW_LINE> table = self.table <NEW_LINE> utable = s3db.cr_shelter_unit <NEW_LINE> left = utable.on(utable.id == table.shelter_unit_id) <NEW_LINE> count = len(values) <NEW_LINE> if count == 1: <NEW_LINE> <INDENT> query = (table.id == values[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> query = (table.id.belongs(values)) <NEW_LINE> <DEDENT> limitby = (0, count) <NEW_LINE> rows = current.db(query).select(table.id, table.date, utable.name, left = left, limitby = limitby, ) <NEW_LINE> return rows | Representations of Shelter Inspections | 62598fb04527f215b58e9f1e |
class _Texture: <NEW_LINE> <INDENT> __tex = None <NEW_LINE> def __init__(self, win, target): <NEW_LINE> <INDENT> self.__win = win <NEW_LINE> self.__target = target <NEW_LINE> self.__tex = gl.createTexture() <NEW_LINE> self.update = True <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.__tex: <NEW_LINE> <INDENT> self.__win._del_textures.append(self.__tex) <NEW_LINE> <DEDENT> <DEDENT> def bind(self): <NEW_LINE> <INDENT> gl = self.__win.gl <NEW_LINE> gl.bindTexture(self.__target, self.__tex) <NEW_LINE> <DEDENT> def texImage2D(self, target, w, h, fmt, data): <NEW_LINE> <INDENT> gl = self.__win.gl <NEW_LINE> self.bind() <NEW_LINE> gl.texImage2D(target, 0, fmt, w, h, 0, fmt, GL_UNSIGNED_BYTE, data) | Internal OpenGL texture handle | 62598fb092d797404e388b88 |
class ConfigParser(dict): <NEW_LINE> <INDENT> VARIABLE = re.compile(r'(?P<replace>\$(\{)?(?P<name>[a-zA-Z0-9_-]+)(?(2)\}|))') <NEW_LINE> UNICODE_CHAR = re.compile(r'(?P<replace>\\x(?P<char>[0-9a-f]{2}))', re.I) <NEW_LINE> def __init__(self, path): <NEW_LINE> <INDENT> with open(path, 'r') as f: <NEW_LINE> <INDENT> lexer = shlex.shlex(f.read()) <NEW_LINE> <DEDENT> lexer.wordchars += '.' <NEW_LINE> lexer.source = 'source' <NEW_LINE> tokens = list(lexer) <NEW_LINE> for i in [i for i,t in enumerate(tokens) if t == '=']: <NEW_LINE> <INDENT> val = tokens[i+1] <NEW_LINE> if val == '$' and i+2 < len(tokens): <NEW_LINE> <INDENT> val = tokens[i+2] <NEW_LINE> <DEDENT> val = val.strip().strip('\'').strip('"') <NEW_LINE> val = self.replace_variables(val) <NEW_LINE> val = self.replace_unicode(val) <NEW_LINE> self[tokens[i-1]] = val <NEW_LINE> <DEDENT> <DEDENT> def replace_variables(self, s, missing_value=''): <NEW_LINE> <INDENT> ret = s <NEW_LINE> for match in ConfigParser.VARIABLE.finditer(s): <NEW_LINE> <INDENT> d = match.groupdict() <NEW_LINE> if d['name'] in self or missing_value is not None: <NEW_LINE> <INDENT> ret = ret.replace(d['replace'], self.get(d['name'], missing_value)) <NEW_LINE> <DEDENT> <DEDENT> return ret <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def replace_unicode(s): <NEW_LINE> <INDENT> ret = s <NEW_LINE> for match in ConfigParser.UNICODE_CHAR.finditer(s): <NEW_LINE> <INDENT> d = match.groupdict() <NEW_LINE> ret = ret.replace(d['replace'], chr(int(d['char'], 16))) <NEW_LINE> <DEDENT> return ret | Parse bash scripts to extract variables | 62598fb067a9b606de546016 |
class SkuRestriction(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'type': {'readonly': True}, 'values': {'readonly': True}, 'reason_code': {'readonly': True}, 'restriction_info': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'values': {'key': 'values', 'type': '[str]'}, 'reason_code': {'key': 'reasonCode', 'type': 'str'}, 'restriction_info': {'key': 'restrictionInfo', 'type': 'SkuRestrictionInfo'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SkuRestriction, self).__init__(**kwargs) <NEW_LINE> self.type = None <NEW_LINE> self.values = None <NEW_LINE> self.reason_code = None <NEW_LINE> self.restriction_info = None | The restrictions because of which SKU cannot be used.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of the restriction.
:vartype type: str
:ivar values: The locations where sku is restricted.
:vartype values: list[str]
:ivar reason_code: The SKU restriction reason. Possible values include:
"NotAvailableForSubscription", "QuotaId".
:vartype reason_code: str or
~azure.mgmt.databoxedge.v2019_08_01.models.SkuRestrictionReasonCode
:ivar restriction_info: Restriction of the SKU for the location/zone.
:vartype restriction_info: ~azure.mgmt.databoxedge.v2019_08_01.models.SkuRestrictionInfo | 62598fb04f88993c371f052f |
class HashKey(BaseSchemaField): <NEW_LINE> <INDENT> attr_type = 'HASH' | An field representing a hash key.
Example::
>>> from txboto.dynamodb2.types import NUMBER
>>> HashKey('username')
>>> HashKey('date_joined', data_type=NUMBER) | 62598fb0dd821e528d6d8f7e |
class Pupil(Person): <NEW_LINE> <INDENT> classes: List[Class] | Pupils attend classes to learn things. | 62598fb04428ac0f6e65856f |
class TestRunRequest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.httpClient = makeHttpClient() <NEW_LINE> <DEDENT> def testRunSearchRequest(self): <NEW_LINE> <INDENT> mockPost = mock.Mock() <NEW_LINE> with mock.patch('requests.request', mockPost): <NEW_LINE> <INDENT> mockPost.side_effect = [DummyResponse(), DummyResponse('{}')] <NEW_LINE> protocolRequest = DummyRequest() <NEW_LINE> objectName = "referencesets" <NEW_LINE> protocolResponseClass = protocol.GASearchReferenceSetsResponse <NEW_LINE> listAttr = "referenceSets" <NEW_LINE> result = [refSet for refSet in self.httpClient.runSearchRequest( protocolRequest, objectName, protocolResponseClass, listAttr)] <NEW_LINE> self.assertEqual(len(result), 2) <NEW_LINE> self.assertEqual(result[0].id, "refA") <NEW_LINE> self.assertEqual(result[0].md5checksum, "abc") <NEW_LINE> self.assertEqual(result[1].id, "refB") <NEW_LINE> url = "http://example.com/referencesets/search" <NEW_LINE> jsonString = protocolRequest.toJsonString() <NEW_LINE> headers = {"Content-type": "application/json"} <NEW_LINE> httpMethod = 'POST' <NEW_LINE> mockPost.assert_called_twice_with( httpMethod, url, jsonString, headers=headers, verify=False) <NEW_LINE> <DEDENT> <DEDENT> def testRunGetRequest(self): <NEW_LINE> <INDENT> mockGet = mock.Mock() <NEW_LINE> with mock.patch('requests.request', mockGet): <NEW_LINE> <INDENT> text = { "id": "gaid", "md5checksum": "def", } <NEW_LINE> mockGet.side_effect = [DummyResponse(json.dumps(text))] <NEW_LINE> objectName = "reference" <NEW_LINE> protocolResponseClass = protocol.GAReference <NEW_LINE> id_ = 'anId' <NEW_LINE> result = self.httpClient.runGetRequest( objectName, protocolResponseClass, id_) <NEW_LINE> self.assertEqual(result.id, "gaid") <NEW_LINE> self.assertEqual(result.md5checksum, "def") <NEW_LINE> url = "http://example.com/reference/anId" <NEW_LINE> params = {} <NEW_LINE> httpMethod = 'GET' <NEW_LINE> headers = {} <NEW_LINE> data = None <NEW_LINE> 
mockGet.assert_called_once_with( httpMethod, url, params=params, data=data, headers=headers) <NEW_LINE> <DEDENT> <DEDENT> def testRunListRequest(self): <NEW_LINE> <INDENT> mockGet = mock.Mock() <NEW_LINE> with mock.patch('requests.request', mockGet): <NEW_LINE> <INDENT> text = { "offset": 123, "sequence": "sequence", "nextPageToken": "pageTok", } <NEW_LINE> mockGet.side_effect = [ DummyResponse(json.dumps(text)), DummyResponse('{}')] <NEW_LINE> protocolRequest = protocol.GAListReferenceBasesRequest() <NEW_LINE> protocolRequest.start = 1 <NEW_LINE> protocolRequest.end = 5 <NEW_LINE> url = "references/{id}/bases" <NEW_LINE> protocolResponseClass = protocol.GAListReferenceBasesResponse <NEW_LINE> id_ = 'myId' <NEW_LINE> result = [base for base in self.httpClient.runListRequest( protocolRequest, url, protocolResponseClass, id_)] <NEW_LINE> self.assertEqual(len(result), 2) <NEW_LINE> self.assertEqual(result[0].offset, 123) <NEW_LINE> self.assertEqual(result[0].sequence, "sequence") <NEW_LINE> url = "http://example.com/references/myId/bases" <NEW_LINE> params = {"start": 1, "end": 5} <NEW_LINE> httpMethod = 'GET' <NEW_LINE> mockGet.assert_called_twice_with(httpMethod, url, params=params) | Test the logic of the run*Request methods | 62598fb0bf627c535bcb14e8 |
class RandomEvaluator(Evaluator): <NEW_LINE> <INDENT> ap = ArgumentParser() <NEW_LINE> ap.add_argument('--seed', dest='rand_seed', type=int, default=None) <NEW_LINE> arg_parsers = (ap,) <NEW_LINE> def __init__(self, argparser, args): <NEW_LINE> <INDENT> super(RandomEvaluator, self).__init__(argparser, args) <NEW_LINE> import random <NEW_LINE> self.gen_random = random.Random(self.args.rand_seed).random <NEW_LINE> <DEDENT> def __call__(self, doc, ind): <NEW_LINE> <INDENT> return self.gen_random() | Shuffle the input randomly | 62598fb099fddb7c1ca62e0e |
class Unique: <NEW_LINE> <INDENT> def __call__(self, value): <NEW_LINE> <INDENT> for i in range(len(value) - 1): <NEW_LINE> <INDENT> for j in range(i+1, len(value)): <NEW_LINE> <INDENT> if value[i] == value[j]: <NEW_LINE> <INDENT> raise vol.Invalid('duplicate value: {}'.format(value[i])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Unique' | Validates that elements of all different, works with unhashable types | 62598fb08a43f66fc4bf21c4 |
class EntityNotFoundException(VcdException): <NEW_LINE> <INDENT> pass | Raised when an entity is not found in vcd. | 62598fb0d7e4931a7ef3c0de |
class UsageWarning(UserWarning): <NEW_LINE> <INDENT> pass | Something unsafe was requested and carried out.
| 62598fb0d268445f26639ba8 |
class ItemCallback(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def call(self, zin: ZipFile, zout: ZipFile, item: ZipInfo) -> bool: <NEW_LINE> <INDENT> pass | Called on each item of the source. Use a ItemCallback to ignore a script. | 62598fb0fff4ab517ebcd82f |
class BARewardMolecule(molecules_mdp.Molecule): <NEW_LINE> <INDENT> def __init__(self, discount_factor, **kwargs): <NEW_LINE> <INDENT> super(BARewardMolecule, self).__init__(**kwargs) <NEW_LINE> self.discount_factor = discount_factor <NEW_LINE> <DEDENT> def _reward(self): <NEW_LINE> <INDENT> molecule = Chem.MolFromSmiles(self._state) <NEW_LINE> if molecule is None: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> sa = calculateScore(molecule) <NEW_LINE> sa_norm = round((10-sa)/9,2) <NEW_LINE> qed = round(QED.qed(molecule),2) <NEW_LINE> print("SA score and QED: {}, {} : {}".format(sa_norm, qed, self._state)) <NEW_LINE> if self._counter < self.max_steps: <NEW_LINE> <INDENT> return round((sa_norm+qed)*self.discount_factor ** (self.max_steps - self.num_steps_taken),2) <NEW_LINE> <DEDENT> if self._counter >= self.max_steps: <NEW_LINE> <INDENT> with open('ligand.smi','w') as f: <NEW_LINE> <INDENT> f.write(self._state) <NEW_LINE> <DEDENT> cvt_cmd = "obabel ligand.smi -O ligand.pdbqt --gen3D -p > cvt_log.txt" <NEW_LINE> os.system(cvt_cmd) <NEW_LINE> docking_cmd ="qvina2.1 --config config.txt --num_modes=1 > log_docking.txt" <NEW_LINE> os.system(docking_cmd) <NEW_LINE> try: <NEW_LINE> <INDENT> data = pd.read_csv('log_docking.txt', sep= "\t",header=None) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> docking_score = round(float(data.values[-2][0].split()[1]),2) <NEW_LINE> print("binding energy value: "+str(round(docking_score,2))+'\t'+self._state) <NEW_LINE> <DEDENT> with open('./optimized_result_total.txt', 'a') as f2: <NEW_LINE> <INDENT> f2.write(self._state+'\t'+str(docking_score)+'\t'+str(sa_norm)+'\t'+str(qed)+'\n') <NEW_LINE> <DEDENT> return round(-docking_score, 2) | The molecule whose reward is the Bingding affinity. | 62598fb0a8370b77170f0426 |
class FREQuency(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "FREQuency" <NEW_LINE> args = ["1"] <NEW_LINE> class STEP(SCPINode): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "STEP" <NEW_LINE> args = [] <NEW_LINE> class INCRement(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "INCRement" <NEW_LINE> args = ["1"] <NEW_LINE> <DEDENT> INCRement = INCRement() <NEW_LINE> <DEDENT> STEP = STEP() | SOURce:PULSe:INTernal:FREQuency
Arguments: 1 | 62598fb07047854f4633f424 |
class ELBResponse(AWSGenericResponse): <NEW_LINE> <INDENT> namespace = NS <NEW_LINE> exceptions = {} <NEW_LINE> xpath = 'Error' | Amazon ELB response class. | 62598fb05fc7496912d482a5 |
class LinkedList(object): <NEW_LINE> <INDENT> def __init__(self, data=None): <NEW_LINE> <INDENT> self._length = 0 <NEW_LINE> self.head = None <NEW_LINE> try: <NEW_LINE> <INDENT> for val in data: <NEW_LINE> <INDENT> self.push(val) <NEW_LINE> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> if data: <NEW_LINE> <INDENT> self.push(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def push(self, val): <NEW_LINE> <INDENT> old_head = self.head <NEW_LINE> self.head = Node(val, old_head) <NEW_LINE> self._length += 1 <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if not self.head: <NEW_LINE> <INDENT> raise IndexError('Cannot pop from an empty list') <NEW_LINE> <DEDENT> to_return = self.head <NEW_LINE> self.head = self.head.next <NEW_LINE> self._length -= 1 <NEW_LINE> return to_return.data <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self._length <NEW_LINE> <DEDENT> def search(self, val): <NEW_LINE> <INDENT> curr = self.head <NEW_LINE> while curr: <NEW_LINE> <INDENT> if curr.data == val: <NEW_LINE> <INDENT> return curr <NEW_LINE> <DEDENT> curr = curr.next <NEW_LINE> <DEDENT> <DEDENT> def remove(self, val): <NEW_LINE> <INDENT> curr = self.head <NEW_LINE> if curr and val is self.head.data: <NEW_LINE> <INDENT> self.head = self.head.next <NEW_LINE> self._length -= 1 <NEW_LINE> <DEDENT> while curr: <NEW_LINE> <INDENT> if (curr.next and curr.next.data == val): <NEW_LINE> <INDENT> curr.next = curr.next.next <NEW_LINE> self._length -= 1 <NEW_LINE> <DEDENT> curr = curr.next <NEW_LINE> <DEDENT> <DEDENT> def display(self): <NEW_LINE> <INDENT> curr = self.head <NEW_LINE> display = '(' <NEW_LINE> while curr: <NEW_LINE> <INDENT> display += str(curr.data) + ', ' <NEW_LINE> curr = curr.next <NEW_LINE> <DEDENT> return display[:-2] + ')' | Method for linked list.
push(val) - will insert the value at the head of the list.
pop() - remove the first value off the head and return it.
size() - will return the length of the list.
search(val) - will return the node containing val in the list, if
present, else None
remove(node) - will remove the given node from the list, wherever
it might be (node must be an item in the list)
display() - will return a unicode string representing the list as
if it were a Python tuple literal: "(12, 'sam', 37, 'tango')" | 62598fb0aad79263cf42e81d |
class UserMessage(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=20, verbose_name=u'用户名') <NEW_LINE> email = models.EmailField(verbose_name=u'邮箱') <NEW_LINE> address = models.CharField(max_length=100, verbose_name=u'联系地址') <NEW_LINE> message = models.CharField(max_length=500, verbose_name=u'用户留言信息') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u'用户留言信息' <NEW_LINE> verbose_name_plural = verbose_name | 用户留言信息 | 62598fb0a05bb46b3848a8b5 |
class _CardAccessor(object): <NEW_LINE> <INDENT> def __init__(self, header): <NEW_LINE> <INDENT> self._header = header <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '\n'.join(repr(c) for c in self._header._cards) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._header._cards) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._header._cards) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isiterable(other): <NEW_LINE> <INDENT> for a, b in zip(self, other): <NEW_LINE> <INDENT> if a != b: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if isinstance(item, slice) or self._header._haswildcard(item): <NEW_LINE> <INDENT> return self.__class__(self._header[item]) <NEW_LINE> <DEDENT> idx = self._header._cardindex(item) <NEW_LINE> return self._header._cards[idx] <NEW_LINE> <DEDENT> def _setslice(self, item, value): <NEW_LINE> <INDENT> if isinstance(item, slice) or self._header._haswildcard(item): <NEW_LINE> <INDENT> if isinstance(item, slice): <NEW_LINE> <INDENT> indices = range(*item.indices(len(self))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indices = self._header._wildcardmatch(item) <NEW_LINE> <DEDENT> if isinstance(value, string_types) or not isiterable(value): <NEW_LINE> <INDENT> value = itertools.repeat(value, len(indices)) <NEW_LINE> <DEDENT> for idx, val in zip(indices, value): <NEW_LINE> <INDENT> self[idx] = val <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False | This is a generic class for wrapping a Header in such a way that you can
use the header's slice/filtering capabilities to return a subset of cards
and do something with them.
This is sort of the opposite notion of the old CardList class--whereas
Header used to use CardList to get lists of cards, this uses Header to get
lists of cards. | 62598fb0442bda511e95c4a2 |
class ComputeNetworksRemovePeeringRequest(_messages.Message): <NEW_LINE> <INDENT> network = _messages.StringField(1, required=True) <NEW_LINE> networksRemovePeeringRequest = _messages.MessageField('NetworksRemovePeeringRequest', 2) <NEW_LINE> project = _messages.StringField(3, required=True) <NEW_LINE> requestId = _messages.StringField(4) | A ComputeNetworksRemovePeeringRequest object.
Fields:
network: Name of the network resource to remove peering from.
networksRemovePeeringRequest: A NetworksRemovePeeringRequest resource to
be passed as the request body.
project: Project ID for this request.
requestId: An optional request ID to identify requests. Specify a unique
request ID so that if you must retry your request, the server will know
to ignore the request if it has already been completed. For example,
consider a situation where you make an initial request and then the
request times out. If you make the request again with the same request
ID, the server can check if original operation with the same request ID
was received, and if so, will ignore the second request. This prevents
clients from accidentally creating duplicate commitments. | 62598fb0cc40096d6161a1fe |
class RelationalExpressionValue(BinaryExpressionValue): <NEW_LINE> <INDENT> VALID_OPS = {'>', '<', '>=', '<=', '==', '!='} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(RelationalExpressionValue, self).__init__() <NEW_LINE> <DEDENT> def _validateOperator(self, value): <NEW_LINE> <INDENT> return value in self.VALID_OPS | Relational expression - comparison | 62598fb0091ae35668704c6a |
class MessageSync: <NEW_LINE> <INDENT> def __init__( self, messages: typing.Iterable[Message], timestamps: bool = True, gap: float = 0.0001, skip: float = 60.0, ) -> None: <NEW_LINE> <INDENT> self.raw_messages = messages <NEW_LINE> self.timestamps = timestamps <NEW_LINE> self.gap = gap <NEW_LINE> self.skip = skip <NEW_LINE> <DEDENT> def __iter__(self) -> typing.Generator[Message, None, None]: <NEW_LINE> <INDENT> playback_start_time = time() <NEW_LINE> recorded_start_time = None <NEW_LINE> for message in self.raw_messages: <NEW_LINE> <INDENT> if self.timestamps: <NEW_LINE> <INDENT> if recorded_start_time is None: <NEW_LINE> <INDENT> recorded_start_time = message.timestamp <NEW_LINE> <DEDENT> now = time() <NEW_LINE> current_offset = now - playback_start_time <NEW_LINE> recorded_offset_from_start = message.timestamp - recorded_start_time <NEW_LINE> remaining_gap = max(0.0, recorded_offset_from_start - current_offset) <NEW_LINE> sleep_period = max(self.gap, min(self.skip, remaining_gap)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sleep_period = self.gap <NEW_LINE> <DEDENT> sleep(sleep_period) <NEW_LINE> yield message | Used to iterate over some given messages in the recorded time. | 62598fb0f9cc0f698b1c52ef |
class RefB(SKABaseDevice): <NEW_LINE> <INDENT> __metaclass__ = DeviceMeta <NEW_LINE> attr1 = attribute( dtype='str', doc="Attribute 1 for DevB", ) <NEW_LINE> attr2 = attribute( dtype='str', doc="Attribute 2 for DevB", ) <NEW_LINE> importantState = attribute( dtype='DevEnum', access=AttrWriteType.READ_WRITE, enum_labels=["OK", "GOOD", "BAD", "VERY-BAD", ], ) <NEW_LINE> def init_device(self): <NEW_LINE> <INDENT> SKABaseDevice.init_device(self) <NEW_LINE> <DEDENT> def always_executed_hook(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def delete_device(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def read_attr1(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def read_attr2(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def read_importantState(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def write_importantState(self, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @command( ) <NEW_LINE> @DebugIt() <NEW_LINE> def Reset(self): <NEW_LINE> <INDENT> pass | Ref (Reference Element) device of type B. | 62598fb060cbc95b0636439a |
class ArticleListView(CustomListRendererMixin, generics.ListAPIView):
    """List of articles.

    Read-only DRF endpoint exposing every Article through
    ArticleSerializer; CustomListRendererMixin presumably customises the
    list rendering -- confirm against the mixin's definition.
    """

    queryset = Article.objects.all()
    serializer_class = ArticleSerializer
class Predlogenie(models.Model):
    """A credit offer ("Предложение") from a credit organisation.

    Records the rotation window during which the offer applies and the
    scoring range of clients it targets.
    """

    # Offer type choices: consumer loan / mortgage / car loan.
    TYPE_OF = (
        ('P', 'Потребительский'),
        ('I', 'Ипотека'),
        ('A', 'Автокредит'),
    )

    create_dt = models.DateTimeField(auto_now_add=True)  # set once on creation
    update_dt = models.DateTimeField(auto_now=True)      # refreshed on every save
    # Rotation window (presumably when the offer is shown -- confirm).
    start_rotate = models.DateTimeField()
    end_rotate = models.DateTimeField()
    name = models.CharField(max_length=254)
    type_of = models.CharField(max_length=1, choices=TYPE_OF)
    # Scoring range of clients this offer targets.
    min_scoring = models.FloatField()
    max_scoring = models.FloatField()
    # on_delete made explicit: CASCADE was the implicit default in old
    # Django, and the argument is mandatory in Django >= 2.0.
    credit_org = models.ForeignKey(CreditOrg, on_delete=models.CASCADE)

    class Meta:
        verbose_name = 'Предложения'

    def __str__(self):
        return '%s' % (self.name,)

    def __unicode__(self):
        # Python 2 string representation (kept for legacy Django support).
        return '%s' % (self.name,)
class Solution:
    """@param num: a non negative integer number
    @return: an array represent the number of 1's in their binary
    """

    def countBits(self, num):
        # bits[i] holds popcount(i).  Each entry reuses an earlier one:
        # dropping i's lowest bit gives i >> 1, and the dropped bit itself
        # contributes i & 1.  Equivalent to the even/odd recurrence
        # (even i -> bits[i//2], odd i -> bits[i-1] + 1).
        bits = [0]
        for value in range(1, num + 1):
            bits.append(bits[value >> 1] + (value & 1))
        return bits
class WorkerProcessTest(EventLoopTest):
    """Test our worker process.

    Exercises the feedparser worker subprocess: successful parses, error
    propagation, crash recovery, and queueing tasks before startup.
    """

    def setUp(self):
        EventLoopTest.setUp(self)
        # Swap in the unittest handler so subprocess behaviour is observable.
        workerprocess._subprocess_manager.handler_class = (
            UnittestWorkerProcessHandler)
        self.result = self.error = None

    def callback(self, result):
        # Success path: stash the result and stop the event loop normally.
        self.result = result
        self.stopEventLoop(abnormal=False)

    def errback(self, error):
        # Failure path: stash the error and stop the event loop normally.
        self.error = error
        self.stopEventLoop(abnormal=False)

    def send_feedparser_task(self):
        # Queue a feedparser task using a canned feed from the test data.
        path = os.path.join(resources.path("testdata/feedparsertests/feeds"),
                            "http___feeds_miroguide_com_miroguide_featured.xml")
        html = open(path).read()
        workerprocess.run_feedparser(html, self.callback, self.errback)

    def check_successful_result(self):
        self.assertNotEquals(self.result, None)
        self.assertEquals(self.error, None)
        if self.result['bozo']:
            # feedparser sets "bozo" when the feed was malformed.
            # BUGFIX: the original passed two arguments to AssertionError
            # ("msg %s", exc), which yields an unformatted tuple message;
            # format the message with % instead.
            raise AssertionError("Feedparser parse error: %s"
                                 % (self.result['bozo_exception'],))

    def test_feedparser_success(self):
        workerprocess.startup()
        self.send_feedparser_task()
        self.runEventLoop(4.0)
        self.check_successful_result()

    def test_feedparser_error(self):
        workerprocess.startup()
        # UnittestWorkerProcessHandler presumably raises on this sentinel
        # input -- the errback should receive a ValueError.
        workerprocess.run_feedparser('FORCE EXCEPTION', self.callback,
                                     self.errback)
        self.runEventLoop(4.0)
        self.assertEquals(self.result, None)
        self.assert_(isinstance(self.error, ValueError))

    def test_crash(self):
        workerprocess.startup()
        original_pid = workerprocess._subprocess_manager.process.pid
        self.send_feedparser_task()
        # Kill the worker mid-task; the manager should restart it and
        # re-run the queued task.
        workerprocess._subprocess_manager.process.terminate()
        self.runEventLoop(4.0)
        # A new pid proves the subprocess was restarted after the crash.
        self.assertNotEqual(original_pid,
                            workerprocess._subprocess_manager.process.pid)
        self.check_successful_result()

    def test_queue_before_start(self):
        # Tasks queued before startup() should run once the process starts.
        self.send_feedparser_task()
        workerprocess.startup()
        self.runEventLoop(4.0)
        self.check_successful_result()
class ULAcoin(Bitcoin):
    """ULAcoin (ULA) network parameters, built on the Bitcoin base class.

    Values based on
    https://github.com/UlaTechGroup/UlatechGroup/blob/master/src/net.cpp
    (date of access: 02/17/2018).
    """

    name = 'ulacoin'
    symbols = ('ULA', )
    # Seed hosts (presumably DNS seeds for initial peer discovery -- confirm
    # against the Bitcoin base class usage).
    seeds = ('ulacoin.com', 'node.walletbuilders.com', )
    # Default P2P port.
    port = 21659
    # Presumably the network magic bytes prefixed to protocol messages;
    # confirm against net.cpp.
    message_start = b'\x8b\xa3\x36\x9a'
    # Base58Check version bytes for addresses and WIF private keys.
    base58_prefixes = {
        'PUBKEY_ADDR': 68,
        'SCRIPT_ADDR': 5,
        'SECRET_KEY': 196
    }
@_display_as_base
class _ArrayMemoryError(MemoryError):
    """Thrown when an array cannot be allocated.

    Carries the requested shape and dtype so ``__str__`` can report the
    total allocation size in a human-readable binary-prefixed form.
    """

    def __init__(self, shape, dtype):
        self.shape = shape
        self.dtype = dtype

    @property
    def _total_size(self):
        """Total number of bytes the requested array would need."""
        num_bytes = self.dtype.itemsize
        for dim in self.shape:
            num_bytes *= dim
        return num_bytes

    @staticmethod
    def _size_to_string(num_bytes):
        """Convert a byte count into a binary-prefixed size string.

        Examples: 1023 -> '1023 bytes', 1024 -> '1.00 KiB'.
        """
        # (The original imported `math` here but never used it; removed.)
        LOG2_STEP = 10
        STEP = 1024
        units = ['bytes', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']

        # Pick the largest unit such that 1 <= n_units < 1024 (bit_length
        # gives floor(log2) without floating point).
        unit_i = max(num_bytes.bit_length() - 1, 1) // LOG2_STEP
        unit_val = 1 << (unit_i * LOG2_STEP)
        n_units = num_bytes / unit_val
        del unit_val

        if round(n_units) == STEP:
            # Rounding would display e.g. '1024 KiB'; bump to the next unit.
            unit_i += 1
            n_units /= STEP

        if unit_i >= len(units):
            # Beyond EiB: express everything in the largest known unit.
            new_unit_i = len(units) - 1
            n_units *= 1 << ((unit_i - new_unit_i) * LOG2_STEP)
            unit_i = new_unit_i

        unit_name = units[unit_i]
        if unit_i == 0:
            # 'bytes' is an exact count; never show decimals.
            return '{:.0f} {}'.format(n_units, unit_name)
        elif round(n_units) < 1000:
            # 3 significant figures; '#' keeps the trailing point/zeros.
            return '{:#.3g} {}'.format(n_units, unit_name)
        else:
            return '{:#.0f} {}'.format(n_units, unit_name)

    def __str__(self):
        size_str = self._size_to_string(self._total_size)
        return (
            "Unable to allocate {} for an array with shape {} and data type {}"
            .format(size_str, self.shape, self.dtype)
        )
class HMISUser (object):
    """A user object that adds a few convenience methods to the Django User.

    Wraps a ``django.contrib.auth`` user and proxies unknown attribute
    access to it via ``__getattr__``.
    """

    def __init__(self, user):
        self.user = user
        # NOTE(review): never read within this class -- confirm external use.
        self.groups = None

    def group_names(self):
        # Generator of the names of every group the wrapped user belongs to.
        return (g.name for g in self.user.groups.all())

    def is_intake_staff(self):
        return 'intake-staff' in self.group_names()

    def is_project_staff(self):
        return 'project-staff' in self.group_names()

    def can_refer_household(self):
        user = self.user
        return user.is_superuser or user.has_perm('simplehmis.refer_household')

    def can_enroll_household(self):
        user = self.user
        return user.is_superuser or user.has_perm('simplehmis.enroll_household')

    def login_url(self, secure=False, host=None):
        """Build an absolute URL for the site root / login page.

        Falls back to ``settings.SERVER_URL`` and then ``'example.com'``
        when no host is given.
        """
        # NOTE(review): reverse_lazy is imported but never used here.
        from django.core.urlresolvers import reverse_lazy
        host = host or getattr(settings, 'SERVER_URL', None) or 'example.com'
        view = '/'
        # NOTE(review): only view[0] (i.e. '/') is used -- looks like a
        # reverse() lookup was intended here; confirm.
        return '%s://%s%s' % (
            'https' if secure else 'http',
            host,
            view[0]
        )

    def send_onboarding_email(self, secure=False, host=None):
        """Send the welcome email to the user, if they have an email address."""
        if not self.email:
            return

        from django.core.mail import send_mail
        from django.template.loader import render_to_string

        subject = _('Welcome to SimpleHMIS')
        to_email = [self.email]
        from_email = getattr(settings, 'DEFAULT_FROM_EMAIL', 'root@example.com')

        # Describe the user's role for the email body; superuser wins over
        # project staff, which wins over intake staff.
        if self.is_superuser:
            staff_type = 'a site-wide administrator'
        elif self.is_project_staff():
            staff_type = 'a project staff member for {}'.format(
                ', '.join(p.name for p in self.projects.all()))
        elif self.is_intake_staff():
            staff_type = 'an intake staff member'
        else:
            staff_type = 'a site staff member'

        context = {
            'login_url': self.login_url(secure=secure, host=host),
            'username': self.username,
            'staff_type': staff_type,
            'help_email': getattr(settings, 'HELP_EMAIL', 'help@example.com'),
        }
        text_content = render_to_string('onboarding_email.txt', context)
        html_content = render_to_string('onboarding_email.html', context)
        send_mail(subject, text_content, from_email, to_email,
                  html_message=html_content)

    def __getattr__(self, key):
        # Proxy any attribute not found on HMISUser to the wrapped user.
        return getattr(self.user, key)

    def __dir__(self):
        # Expose the wrapped user's attributes for introspection as well.
        return dir(self.user) + super().__dir__()
class CheckReplDBHashInBackground(jsfile.JSHook):
    """A hook for comparing the dbhashes of all replica set members while a test is running."""

    def __init__(self, hook_logger, fixture, shell_options=None):
        description = "Check dbhashes of all replica set members while a test is running"
        js_filename = os.path.join("jstests", "hooks",
                                   "run_check_repl_dbhash_background.js")
        jsfile.JSHook.__init__(self, hook_logger, fixture, js_filename,
                               description, shell_options=shell_options)
        # Background job thread; stays None when the storage engine cannot
        # support the hook (see before_suite).
        self._background_job = None

    def before_suite(self, test_report):
        """Start the background thread, if the storage engine supports it."""
        client = self.fixture.mongo_client()
        server_status = client.admin.command("serverStatus")
        if not server_status["storageEngine"].get("supportsSnapshotReadConcern",
                                                  False):
            self.logger.info(
                "Not enabling the background thread because '%s' storage engine"
                " doesn't support snapshot reads.",
                server_status["storageEngine"]["name"])
            return
        if not server_status["storageEngine"].get("persistent", False):
            self.logger.info(
                "Not enabling the background thread because '%s' storage engine"
                " is not persistent.",
                server_status["storageEngine"]["name"])
            return

        self._background_job = _BackgroundJob()
        self.logger.info("Starting the background thread.")
        self._background_job.start()

    def after_suite(self, test_report):
        """Stop the background thread at the end of the suite."""
        if self._background_job is None:
            return

        self.logger.info("Stopping the background thread.")
        self._background_job.stop()

    def before_test(self, test, test_report):
        """Resume the background dbhash check while 'test' runs."""
        if self._background_job is None:
            return

        hook_test_case = _ContinuousDynamicJSTestCase.create_before_test(
            self.logger.test_case_logger, test, self, self._js_filename,
            self._shell_options)
        hook_test_case.configure(self.fixture)

        self.logger.info("Resuming the background thread.")
        self._background_job.resume(hook_test_case, test_report)

    def after_test(self, test, test_report):
        """Pause the background check and surface any error it recorded."""
        if self._background_job is None:
            return

        self.logger.info("Pausing the background thread.")
        self._background_job.pause()

        if self._background_job.exc_info is not None:
            if isinstance(self._background_job.exc_info[1],
                          errors.TestFailure):
                # Re-raise a background TestFailure as ServerFailure
                # (presumably to abort the suite -- confirm resmoke
                # error-handling semantics).
                raise errors.ServerFailure(
                    self._background_job.exc_info[1].args[0])
            else:
                self.logger.error(
                    "Encountered an error inside the background thread.",
                    exc_info=self._background_job.exc_info)
                raise self._background_job.exc_info[1]
class MultiHeadAttention(Module):
    """Neural module wrapping multihead scaled dot-product attention."""

    def __init__(self, num_heads: int, d_q_in: int, d_k_in: int, d_v_in: int,
                 d_atn: int, d_v: int, d_out: int,
                 dropout_rate: float = 0.1) -> None:
        """
        :param num_heads: number of attention heads.
        :param d_q_in: input feature size of the queries.
        :param d_k_in: input feature size of the keys.
        :param d_v_in: input feature size of the values.
        :param d_atn: per-head dimensionality of queries/keys.
        :param d_v: per-head dimensionality of values.
        :param d_out: output feature size after the final projection.
        :param dropout_rate: dropout applied to the attention output.
        """
        super(MultiHeadAttention, self).__init__()
        self.num_heads = num_heads
        # Per-head projections fused into single linear layers of width
        # d_atn * num_heads (queries/keys) and d_v * num_heads (values).
        self.q_transformation = Linear(in_features=d_q_in,
                                       out_features=d_atn*num_heads,
                                       bias=False)
        self.k_transformation = Linear(in_features=d_k_in,
                                       out_features=d_atn*num_heads,
                                       bias=False)
        self.v_transformation = Linear(in_features=d_v_in,
                                       out_features=d_v*num_heads,
                                       bias=False)
        # Output projection applied after the heads are recombined.
        self.wo = Linear(in_features=num_heads * d_v, out_features=d_out,
                         bias=False)
        self.dropout = Dropout(dropout_rate)

    def forward(self, queries: Tensor, keys: Tensor, values: Tensor,
                mask: Optional[LongTensor] = None) -> Tensor:
        # Project and split into heads.  The reshape assumes inputs are
        # (batch, seq, features) and places the head axis LAST:
        # (batch, seq, d_atn, num_heads).  multihead_atn_fn is presumably
        # written for that layout -- confirm against its definition.
        qs = self.q_transformation(queries).view(queries.shape[0],
                                                 queries.shape[1], -1,
                                                 self.num_heads)
        ks = self.k_transformation(keys).view(keys.shape[0], keys.shape[1],
                                              -1, self.num_heads)
        vs = self.v_transformation(values).view(values.shape[0],
                                                values.shape[1], -1,
                                                self.num_heads)
        mha = multihead_atn_fn(qs, ks, vs, mask)
        # NOTE(review): dropout is applied to the attention output before
        # the final projection (not to the attention weights).
        mha = self.dropout(mha)
        return self.wo(mha)
class Dialect(object):
    """CSV dialect

    The Dialect type records CSV parsing and generation options.
    """

    __slots__ = ["_delimiter", "_doublequote", "_escapechar",
                 "_lineterminator", "_quotechar", "_quoting",
                 "_skipinitialspace", "_strict"]

    def __new__(cls, dialect, **kwargs):
        """Build a Dialect.

        `dialect` may be None, a Dialect (or duck-typed dialect object), or
        the name of a registered dialect.  Keyword arguments that are not
        None override the corresponding attributes.
        """
        # Reject unknown option names up front.
        for name in kwargs:
            if '_' + name not in Dialect.__slots__:
                raise TypeError("unexpected keyword argument '%s'" % (name,))

        if dialect is not None:
            if isinstance(dialect, str):
                dialect = get_dialect(dialect)

            # Fast path: an actual Dialect with no overrides can be
            # returned as-is, since Dialect instances are read-only.
            if (isinstance(dialect, Dialect)
                    and all(value is None for value in kwargs.values())):
                return dialect

        self = object.__new__(cls)

        # NOTE: the validators below read `name` from the enclosing scope.
        # They are only invoked inside the attribute loop further down, so
        # `name` refers to the attribute currently being validated.
        def set_char(x):
            # Optional single-character option.
            if x is None:
                return None
            if isinstance(x, str) and len(x) <= 1:
                return x
            raise TypeError("%r must be a 1-character string" % (name,))

        def set_str(x):
            if isinstance(x, str):
                return x
            raise TypeError("%r must be a string" % (name,))

        def set_quoting(x):
            # Must be one of QUOTE_MINIMAL/ALL/NONNUMERIC/NONE (0..3).
            if x in range(4):
                return x
            raise TypeError("bad 'quoting' value")

        # name -> (default value, validator or falsy for "store as-is")
        attributes = {"delimiter": (',', set_char),
                      "doublequote": (True, bool),
                      "escapechar": (None, set_char),
                      "lineterminator": ("\r\n", set_str),
                      "quotechar": ('"', set_char),
                      "quoting": (QUOTE_MINIMAL, set_quoting),
                      "skipinitialspace": (False, bool),
                      "strict": (False, bool),
                      }

        # Sentinel distinguishing "not provided" from an explicit None.
        notset = object()
        for name in Dialect.__slots__:
            name = name[1:]
            value = notset
            if name in kwargs:
                value = kwargs[name]
            elif dialect is not None:
                value = getattr(dialect, name, notset)

            if value is notset:
                value = attributes[name][0]
                # `quotechar` is assigned before `quoting` (the __slots__
                # order guarantees it), so it can be consulted here.
                if name == 'quoting' and not self.quotechar:
                    value = QUOTE_NONE
            else:
                converter = attributes[name][1]
                if converter:
                    value = converter(value)

            # BUGFIX: __slots__ classes have no instance __dict__, so the
            # original `self.__dict__['_' + name] = value` raises
            # AttributeError on a plain slotted instance; store through the
            # slot descriptor instead (identical behavior where __dict__
            # did exist).
            setattr(self, '_' + name, value)

        # Cross-field consistency checks.
        if not self.delimiter:
            raise TypeError("delimiter must be set")

        if self.quoting != QUOTE_NONE and not self.quotechar:
            raise TypeError("quotechar must be set if quoting enabled")

        if not self.lineterminator:
            raise TypeError("lineterminator must be set")

        return self

    # Read-only views over the underlying slots.
    delimiter = property(lambda self: self._delimiter)
    doublequote = property(lambda self: self._doublequote)
    escapechar = property(lambda self: self._escapechar)
    lineterminator = property(lambda self: self._lineterminator)
    quotechar = property(lambda self: self._quotechar)
    quoting = property(lambda self: self._quoting)
    skipinitialspace = property(lambda self: self._skipinitialspace)
    strict = property(lambda self: self._strict)
class VCDValueHistoryEntry(VCDObject): <NEW_LINE> <INDENT> def __init__(self, scope: VCDScope, signal: str, time: int): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._scope = scope <NEW_LINE> self._signal = signal <NEW_LINE> self._time = int(time) <NEW_LINE> <DEDENT> @property <NEW_LINE> def scope(self) -> VCDScope: <NEW_LINE> <INDENT> return self._scope <NEW_LINE> <DEDENT> @property <NEW_LINE> def signal(self) -> str: <NEW_LINE> <INDENT> return self._signal <NEW_LINE> <DEDENT> @property <NEW_LINE> def time(self) -> int: <NEW_LINE> <INDENT> return self._time <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{{{}::{} @{}}}".format(str(self.scope), self.signal, self.time) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, VCDValueHistoryEntry): <NEW_LINE> <INDENT> raise TypeError("other must be a VCDValueHistoryEntry object") <NEW_LINE> <DEDENT> if other.scope != self.scope: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if other.signal != self.signal: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if other.time != self.time: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(tuple([self.scope, self.signal, self.time])) | Value history entry. | 62598fb12ae34c7f260ab12e |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.