code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class UserProfile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> copy_tags = models.BooleanField( default=True, verbose_name="Copy tags when adding someone else's recipe") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.user.username | mostly for preferences | 62598fc57b180e01f3e491c0 |
class ArgumentUndefine(ArgumentError): <NEW_LINE> <INDENT> pass | 参数未定义错误 | 62598fc5167d2b6e312b7258 |
class RouterTestIdFailCtrlChar(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(RouterTestIdFailCtrlChar, cls).setUpClass() <NEW_LINE> cls.name = "test-router-ctrl-char" <NEW_LINE> <DEDENT> def __init__(self, test_method): <NEW_LINE> <INDENT> TestCase.__init__(self, test_method) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> super(RouterTestIdFailCtrlChar, cls).tearDownClass() <NEW_LINE> <DEDENT> def test_verify_reject_id_with_ctrl_char(self): <NEW_LINE> <INDENT> conf_path = "../setUpClass/test-router-ctrl-char.conf" <NEW_LINE> with open(conf_path, 'w') as router_conf: <NEW_LINE> <INDENT> router_conf.write("router { \n") <NEW_LINE> router_conf.write(" id: abc\\bdef \n") <NEW_LINE> router_conf.write("}") <NEW_LINE> <DEDENT> p = self.popen( ['qdrouterd', '-c', conf_path], stdin=PIPE, stdout=PIPE, stderr=STDOUT, expect=Process.EXIT_FAIL, universal_newlines=True) <NEW_LINE> out = p.communicate(timeout=5)[0] <NEW_LINE> try: <NEW_LINE> <INDENT> p.teardown() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise Exception("%s\n%s" % (e, out)) <NEW_LINE> <DEDENT> self.assertTrue("AttributeError" in out) | This test case sets up a router using a configuration router id
that is illegal (control character). The router should not start. | 62598fc5f9cc0f698b1c5442 |
class NotEnoughSamplesError(Exception): <NEW_LINE> <INDENT> pass | Not Enough Samples | 62598fc53617ad0b5ee06428 |
class RegistrationForm(forms.Form): <NEW_LINE> <INDENT> username = forms.RegexField(regex=r'^[\w.@+-]+$', max_length=30, widget=forms.TextInput(attrs=attrs_dict), label=_("Username"), error_messages={ 'invalid': _("This value must contain " "only letters, numbers and " "underscores.") }) <NEW_LINE> email1 = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)), label=_("E-mail")) <NEW_LINE> email2 = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)), label=_("E-mail (again)")) <NEW_LINE> def clean_username(self): <NEW_LINE> <INDENT> User = get_user_model() <NEW_LINE> try: <NEW_LINE> <INDENT> User.objects.get(username__iexact=self.cleaned_data['username']) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return self.cleaned_data['username'] <NEW_LINE> <DEDENT> raise forms.ValidationError(_( "A user with that username already exists.")) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> if 'email1' in self.cleaned_data and 'email2' in self.cleaned_data: <NEW_LINE> <INDENT> if self.cleaned_data['email1'] != self.cleaned_data['email2']: <NEW_LINE> <INDENT> raise forms.ValidationError(_( "The two email fields didn't match.")) <NEW_LINE> <DEDENT> <DEDENT> return self.cleaned_data | Form for registration a user account.
Validates that the requested username is not already in use, and requires
the email to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they need, but
should avoid defining a ``save()`` method -- the actual saving of collected
user data is delegated to the active registration backend. | 62598fc560cbc95b0636461f |
class ConnectionViewSet(ListModelMixin, GenericViewSet): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> def get_serializer_context(self): <NEW_LINE> <INDENT> return {'auth_user': self.request.user} <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> user = self.request.query_params.get('user') <NEW_LINE> if user: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(pk=user) <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> raise NotFound('User {} not found'.format(request.user.id)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> <DEDENT> status = self.request.query_params.get('status') <NEW_LINE> if status == Connection.STATUS.pending: <NEW_LINE> <INDENT> return user.get_pending_connections() <NEW_LINE> <DEDENT> if status == 'requested': <NEW_LINE> <INDENT> return user.get_requested_connections() <NEW_LINE> <DEDENT> elif status == Connection.STATUS.accepted: <NEW_LINE> <INDENT> return user.get_accepted_connections() <NEW_LINE> <DEDENT> elif status == Connection.STATUS.blocked: <NEW_LINE> <INDENT> return user.get_blocked_connections() <NEW_LINE> <DEDENT> return user.get_connections() | Retrieves the connections of the authenticated user, or a specified user.
- Filter by: `user`, `status`
Valid options when filtering by `status` are:
- `pending`: awaiting the user's acceptance.
- `requested`: awaiting another user's acceptance.
- `accepted`: both users have accepted the connection.
- `blocked`: users the user has blocked. | 62598fc54c3428357761a59e |
class Identity: <NEW_LINE> <INDENT> def __call__(self, x): <NEW_LINE> <INDENT> g = np.maximum(0, x) <NEW_LINE> g = np.minimum(g, 1) <NEW_LINE> return g <NEW_LINE> <DEDENT> def d(self, g): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def inv(self, g): <NEW_LINE> <INDENT> return g | Activation function to be used with MatrixModels.
Clip the evaluation output to interval [0, 1]. | 62598fc5ec188e330fdf8b76 |
class BoardfarmTestConfig(): <NEW_LINE> <INDENT> def __init__(self, name='results'): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.output_dir = os.path.join( os.path.abspath(os.path.join(os.getcwd(), name, '')), '') <NEW_LINE> self.EXTRA_TESTS = [] <NEW_LINE> self.BOARD_NAMES = [] <NEW_LINE> self.boardfarm_config_location = None <NEW_LINE> self.boardfarm_config = None <NEW_LINE> self.UBOOT = None <NEW_LINE> self.KERNEL = None <NEW_LINE> self.ROOTFS = None <NEW_LINE> self.NFSROOT = None <NEW_LINE> self.META_BUILD = None <NEW_LINE> self.WAN_PROTO = 'dhcp' <NEW_LINE> self.setup_device_networking = True <NEW_LINE> self.bootargs = None <NEW_LINE> self.golden = [] <NEW_LINE> self.golden_master_results = {} <NEW_LINE> self.features = [] <NEW_LINE> self.TEST_SUITE_NOSTRICT = False <NEW_LINE> self.regex_config = [] <NEW_LINE> self.retry = 0 <NEW_LINE> self.test_args_location = os.environ.get('BFT_ARGS', None) <NEW_LINE> self.test_args = None <NEW_LINE> self.err_injection_dict = {} | This class defines the location or values of high-level objects
used to run tests. Such as: url of the inventory server,
environment files, etc... | 62598fc5956e5f7376df57ef |
class BadMigrationCommand(MigrationCommand): <NEW_LINE> <INDENT> def run(self): raise RuntimeError("Something went wrong.") | Created for demonstrative purposes only (to test a failure). | 62598fc55fcc89381b2662be |
class ChannelAdmin(FhAdmin): <NEW_LINE> <INDENT> list_display = ('title', 'category', 'is_enabled', 'is_favorite', 'created_at') <NEW_LINE> list_display_links = ('title',) <NEW_LINE> list_filter = ('category', 'is_enabled', 'created_at') <NEW_LINE> fieldsets = ( ('General', { 'fields': ('title', 'category', 'code'), }), ('Settings', { 'classes': ('collapse',), 'fields': ['alternative_code', 'is_favorite', 'is_enabled', 'created_by'], }), ) <NEW_LINE> ordering = ('title',) <NEW_LINE> search_fields = ('id', 'title', 'category') <NEW_LINE> class Media: <NEW_LINE> <INDENT> css = { 'all': ('admin/css/tv.css',), } | Task admin class | 62598fc5a8370b77170f06bd |
class Router(object): <NEW_LINE> <INDENT> def get_db(self, model, **hints): <NEW_LINE> <INDENT> if hasattr(model, 'owner_name'): <NEW_LINE> <INDENT> return model.owner_name <NEW_LINE> <DEDENT> elif hints.get('instance') and hasattr(hints['instance'], 'owner_name'): <NEW_LINE> <INDENT> return hints['instance'].owner_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'default' <NEW_LINE> <DEDENT> <DEDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> return self.get_db(model, **hints) <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> return self.get_db(model, **hints) | A router to control all database operations on models | 62598fc55fdd1c0f98e5e275 |
class CustomRequiredModelForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> required = {} <NEW_LINE> disabled = {} <NEW_LINE> <DEDENT> def set_field_required(self, field, is_required): <NEW_LINE> <INDENT> self.fields[field].required = is_required <NEW_LINE> <DEDENT> def set_field_disabled(self, field, is_disabled): <NEW_LINE> <INDENT> self.fields[field].disabled = is_disabled <NEW_LINE> <DEDENT> def _check_meta(self, field): <NEW_LINE> <INDENT> req = {} <NEW_LINE> if hasattr(self, 'Meta'): <NEW_LINE> <INDENT> if hasattr(self.Meta, field): <NEW_LINE> <INDENT> req = getattr(self.Meta, field) <NEW_LINE> <DEDENT> <DEDENT> return req <NEW_LINE> <DEDENT> def get_fields_required(self): <NEW_LINE> <INDENT> return self._check_meta('required') <NEW_LINE> <DEDENT> def get_fields_disabled(self): <NEW_LINE> <INDENT> return self._check_meta('disabled') <NEW_LINE> <DEDENT> def set_fields_required(self, required_obj=None): <NEW_LINE> <INDENT> if required_obj is None: <NEW_LINE> <INDENT> required_obj = self.get_fields_required() <NEW_LINE> <DEDENT> for field in required_obj: <NEW_LINE> <INDENT> self.set_field_required(field, required_obj[field]) <NEW_LINE> <DEDENT> <DEDENT> def set_fields_disabled(self, disabled_obj=None): <NEW_LINE> <INDENT> if disabled_obj is None: <NEW_LINE> <INDENT> disabled_obj = self.get_fields_disabled() <NEW_LINE> <DEDENT> for field in disabled_obj: <NEW_LINE> <INDENT> self.set_field_disabled(field, disabled_obj[field]) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(forms.ModelForm, self).__init__(*args, **kwargs) <NEW_LINE> self.set_fields_required() <NEW_LINE> self.set_fields_disabled() | This class allows us to create form fields corresponding to models but with the
optional overriding of the required attribute of fields. Other implementations
would allow NULL values in the database tables due to the way Django handles
optional fields, i.e. Django requires null=True, blank=True Django models which
implies that optional values would be entered as NULL, even when NULL is
inappropriate.
Usage:
- Extend this class and provide a Meta class as an attribute of the class itself.
- The Meta.required field should specify what required attributes should change,
as a dictionary of <Field_Name : String, Required_Value : Boolean>. | 62598fc55fdd1c0f98e5e276 |
class FastqSolexaWriter(SequenceWriter): <NEW_LINE> <INDENT> def write_record(self, record): <NEW_LINE> <INDENT> assert self._header_written <NEW_LINE> assert not self._footer_written <NEW_LINE> self._record_written = True <NEW_LINE> if record.seq is None: <NEW_LINE> <INDENT> raise ValueError("No sequence for record %s" % record.id) <NEW_LINE> <DEDENT> seq_str = str(record.seq) <NEW_LINE> qualities_str = _get_solexa_quality_str(record) <NEW_LINE> if len(qualities_str) != len(seq_str): <NEW_LINE> <INDENT> raise ValueError( "Record %s has sequence length %i but %i quality scores" % (record.id, len(seq_str), len(qualities_str)) ) <NEW_LINE> <DEDENT> id = self.clean(record.id) <NEW_LINE> description = self.clean(record.description) <NEW_LINE> if description and description.split(None, 1)[0] == id: <NEW_LINE> <INDENT> title = description <NEW_LINE> <DEDENT> elif description: <NEW_LINE> <INDENT> title = "%s %s" % (id, description) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = id <NEW_LINE> <DEDENT> self.handle.write("@%s\n%s\n+\n%s\n" % (title, seq_str, qualities_str)) | Write old style Solexa/Illumina FASTQ format files (with Solexa qualities) (OBSOLETE).
This outputs FASTQ files like those from the early Solexa/Illumina
pipeline, using Solexa scores and an ASCII offset of 64. These are
NOT compatible with the standard Sanger style PHRED FASTQ files.
If your records contain a "solexa_quality" entry under letter_annotations,
this is used, otherwise any "phred_quality" entry will be used after
conversion using the solexa_quality_from_phred function. If neither style
of quality scores are present, an exception is raised.
Although you can use this class directly, you are strongly encouraged
to use the ``as_fastq_solexa`` function, or top-level ``Bio.SeqIO.write()``
function instead. For example, this code reads in a FASTQ file and re-saves
it as another FASTQ file:
>>> from Bio import SeqIO
>>> record_iterator = SeqIO.parse("Quality/solexa_example.fastq", "fastq-solexa")
>>> with open("Quality/temp.fastq", "w") as out_handle:
... SeqIO.write(record_iterator, out_handle, "fastq-solexa")
5
You might want to do this if the original file included extra line breaks,
which (while valid) may not be supported by all tools. The output file
from Biopython will have each sequence on a single line, and each quality
string on a single line (which is considered desirable for maximum
compatibility).
This code is also called if you use the .format("fastq-solexa") method of
a SeqRecord. For example,
>>> record = SeqIO.read("Quality/sanger_faked.fastq", "fastq-sanger")
>>> print(record.format("fastq-solexa"))
@Test PHRED qualities from 40 to 0 inclusive
ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTN
+
hgfedcba`_^]\[ZYXWVUTSRQPONMLKJHGFECB@>;;
<BLANKLINE>
Note that Solexa FASTQ files have an upper limit of Solexa quality 62, which is
encoded as ASCII 126, the tilde. If your quality scores must be truncated to fit,
a warning is issued.
P.S. Don't forget to delete the temp file if you don't need it anymore:
>>> import os
>>> os.remove("Quality/temp.fastq") | 62598fc5ff9c53063f51a92f |
class State(object): <NEW_LINE> <INDENT> def __init__(self, grid, mustShuffle=True): <NEW_LINE> <INDENT> self.grid = grid <NEW_LINE> self.next_component = 0 <NEW_LINE> self.components = {} <NEW_LINE> self.cells = {} <NEW_LINE> self.initialize() <NEW_LINE> self.configure() <NEW_LINE> self.queue = list(self.components.keys()) <NEW_LINE> if mustShuffle: <NEW_LINE> <INDENT> shuffle(self.queue) <NEW_LINE> <DEDENT> self.requeue = self.queue[:] <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> for cell in self.grid.each(): <NEW_LINE> <INDENT> self.components[cell] = self.next_component <NEW_LINE> self.cells[self.next_component] = [cell] <NEW_LINE> self.next_component += 1 <NEW_LINE> <DEDENT> <DEDENT> def configure(self): <NEW_LINE> <INDENT> for cell in self.grid.each(): <NEW_LINE> <INDENT> for nbr in cell.passages(): <NEW_LINE> <INDENT> self.recolor(cell, nbr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def recolor(self, cell, nbr): <NEW_LINE> <INDENT> color1 = self.components[cell] <NEW_LINE> color2 = self.components[nbr] <NEW_LINE> if color1 > color2: <NEW_LINE> <INDENT> self.recolor(nbr, cell) <NEW_LINE> return <NEW_LINE> <DEDENT> assert color1 != color2, "recoloring error" <NEW_LINE> for u in self.cells[color2]: <NEW_LINE> <INDENT> self.cells[color1].append(u) <NEW_LINE> self.components[u] = color1 <NEW_LINE> <DEDENT> del self.cells[color2] <NEW_LINE> <DEDENT> def merge(self, cell, nbr): <NEW_LINE> <INDENT> color1 = self.components[cell] <NEW_LINE> color2 = self.components[nbr] <NEW_LINE> if color1 == color2: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.recolor(cell, nbr) <NEW_LINE> cell.makePassage(nbr) <NEW_LINE> return True <NEW_LINE> <DEDENT> def neighbors(self, cell): <NEW_LINE> <INDENT> players = [] <NEW_LINE> for nbr in cell.neighbors(): <NEW_LINE> <INDENT> color1 = self.components[cell] <NEW_LINE> color2 = self.components[nbr] <NEW_LINE> if color1 != color2: <NEW_LINE> <INDENT> players.append(nbr) <NEW_LINE> <DEDENT> <DEDENT> return players 
<NEW_LINE> <DEDENT> def play_one_round(self, cell): <NEW_LINE> <INDENT> players = self.neighbors(cell) <NEW_LINE> if players: <NEW_LINE> <INDENT> winner = choice(players) <NEW_LINE> if winner: <NEW_LINE> <INDENT> self.merge(cell, winner) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def replenish_if(self, prevcomponents): <NEW_LINE> <INDENT> curr = len(self.cells) <NEW_LINE> if curr > 1 and curr != prevcomponents: <NEW_LINE> <INDENT> self.queue = self.requeue[:] <NEW_LINE> <DEDENT> return curr | state matrix for the high-card-wins algorithm | 62598fc592d797404e388cd3 |
class UnicodeReader: <NEW_LINE> <INDENT> def __init__(self, f, delimiter=',', quotechar='\"', encoding="utf-8", **kwds): <NEW_LINE> <INDENT> f = UTF8Recoder(f, encoding) <NEW_LINE> self.reader = csv.reader(f, delimiter=delimiter, quotechar=quotechar, **kwds) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> row = self.reader.next() <NEW_LINE> return [unicode(s, "utf-8") for s in row] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self | A CSV reader which will iterate over lines in the CSV file "f",
which is encoded in the given encoding. | 62598fc5a219f33f346c6aea |
class PointCollection(FeatureCollection): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(PointCollection,self).__init__(**kwargs) <NEW_LINE> <DEDENT> def filter_by_time(self, starting, ending): <NEW_LINE> <INDENT> if not isinstance(starting, datetime) or not isinstance(ending, datetime): <NEW_LINE> <INDENT> raise ValueError("Starting and ending must both be datetime objects") <NEW_LINE> <DEDENT> return (e for e in self.elements if starting <= e.time <= ending) <NEW_LINE> <DEDENT> def filter_by_variable(self, variable, elements=None): <NEW_LINE> <INDENT> if elements is None: <NEW_LINE> <INDENT> elements = self.elements <NEW_LINE> <DEDENT> return (m for m in elements for h in m.members if h.get("name","") == variable or h.get("standard","") == variable) <NEW_LINE> <DEDENT> def calculate_bounds(self): <NEW_LINE> <INDENT> stuff = [[x.time, x.location] for x in self._elements] <NEW_LINE> self.time_range = sorted([x[0] for x in stuff]) <NEW_LINE> points = [x[1] for x in stuff] <NEW_LINE> try: <NEW_LINE> <INDENT> filtered_stuff = self.__filter_depths(stuff) <NEW_LINE> self.depth_range = sorted([x[1].z for x in filtered_stuff]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.depth_range = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.bbox = MultiPoint(points).envelope <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.bbox = None <NEW_LINE> <DEDENT> self.size = len(self._elements) <NEW_LINE> <DEDENT> def __filter_depths(self, list_to_filter): <NEW_LINE> <INDENT> retval = list() <NEW_LINE> for item in list_to_filter: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> getz = item[1].z <NEW_LINE> retval.append(item) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return retval | A collection of Points | 62598fc5fff4ab517ebcdaca |
class IYLPublication(Interface): <NEW_LINE> <INDENT> pass | Marker interface that defines a Zope 3 browser layer.
| 62598fc5099cdd3c63675553 |
class Combinator(Enum): <NEW_LINE> <INDENT> DESCENDANT = 1 <NEW_LINE> CHILD = 2 <NEW_LINE> NEXT_SIBLING = 3 <NEW_LINE> SUBSEQUENT_SIBLING = 4 | Combinator types.
Members correspond to the following combinators:
- :attr:`DESCENDANT`: ``A B``;
- :attr:`CHILD`: ``A > B``;
- :attr:`NEXT_SIBLING`: ``A + B``;
- :attr:`SUBSEQUENT_SIBLING`: ``A ~ B``. | 62598fc5f548e778e596b881 |
class TimeLagsFeatureProducer(FeatureProducerOHLC): <NEW_LINE> <INDENT> def __init__(self, lags=5, feature='close', feature_label='tlag'): <NEW_LINE> <INDENT> FeatureProducerOHLC.__init__(self, feature_label) <NEW_LINE> self.lags = lags <NEW_LINE> self.feature = feature <NEW_LINE> <DEDENT> def produce(self, df): <NEW_LINE> <INDENT> result = df.copy() <NEW_LINE> for i in range(self.lags): <NEW_LINE> <INDENT> result['%s_%s_%d' % (self.feature, self.feature_label, i + 1)] = result[self.feature].shift(1 + i) <NEW_LINE> <DEDENT> return result | Produce time lags feature.
Time lag of feature X traces the previous value of X several time points ago.
e.g.
If a stock has close prices of [1, 2, 3, 4, 5, 6, 7], then the 3 time lag of it will be:
[NaN, NaN, NaN, 1, 2, 3, 4] | 62598fc54a966d76dd5ef1b9 |
class AddRankInput(graphene.InputObjectType, RankAttribute): <NEW_LINE> <INDENT> pass | Arguments to create Rank. | 62598fc560cbc95b06364621 |
class RosOdomSubscriber(object): <NEW_LINE> <INDENT> def __init__(self, node_name, channel_name, stream_type=Odometry, anonymous=True): <NEW_LINE> <INDENT> self.data = Point() <NEW_LINE> rospy.init_node(node_name, anonymous=anonymous) <NEW_LINE> self.sub = rospy.Subscriber(channel_name, stream_type, self.on_data_recv) <NEW_LINE> <DEDENT> def on_data_recv(self, data): <NEW_LINE> <INDENT> self.data = data.pose.pose.position <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> return {'x': self.data.x, 'y': self.data.y} | A ROS node to subscribe to a data stream | 62598fc5ec188e330fdf8b78 |
class ConnectionStateSnapshot(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'hops': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'connection_state': {'key': 'connectionState', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'evaluation_state': {'key': 'evaluationState', 'type': 'str'}, 'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'}, 'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'}, 'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'}, 'probes_sent': {'key': 'probesSent', 'type': 'int'}, 'probes_failed': {'key': 'probesFailed', 'type': 'int'}, 'hops': {'key': 'hops', 'type': '[ConnectivityHop]'}, } <NEW_LINE> def __init__( self, *, connection_state: Optional[Union[str, "ConnectionState"]] = None, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, evaluation_state: Optional[Union[str, "EvaluationState"]] = None, avg_latency_in_ms: Optional[int] = None, min_latency_in_ms: Optional[int] = None, max_latency_in_ms: Optional[int] = None, probes_sent: Optional[int] = None, probes_failed: Optional[int] = None, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionStateSnapshot, self).__init__(**kwargs) <NEW_LINE> self.connection_state = connection_state <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.evaluation_state = evaluation_state <NEW_LINE> self.avg_latency_in_ms = avg_latency_in_ms <NEW_LINE> self.min_latency_in_ms = min_latency_in_ms <NEW_LINE> self.max_latency_in_ms = max_latency_in_ms <NEW_LINE> self.probes_sent = probes_sent <NEW_LINE> self.probes_failed = probes_failed <NEW_LINE> self.hops = None | Connection state snapshot.
Variables are only populated by the server, and will be ignored when sending a request.
:param connection_state: The connection state. Possible values include: "Reachable",
"Unreachable", "Unknown".
:type connection_state: str or ~azure.mgmt.network.v2020_05_01.models.ConnectionState
:param start_time: The start time of the connection snapshot.
:type start_time: ~datetime.datetime
:param end_time: The end time of the connection snapshot.
:type end_time: ~datetime.datetime
:param evaluation_state: Connectivity analysis evaluation state. Possible values include:
"NotStarted", "InProgress", "Completed".
:type evaluation_state: str or ~azure.mgmt.network.v2020_05_01.models.EvaluationState
:param avg_latency_in_ms: Average latency in ms.
:type avg_latency_in_ms: int
:param min_latency_in_ms: Minimum latency in ms.
:type min_latency_in_ms: int
:param max_latency_in_ms: Maximum latency in ms.
:type max_latency_in_ms: int
:param probes_sent: The number of sent probes.
:type probes_sent: int
:param probes_failed: The number of failed probes.
:type probes_failed: int
:ivar hops: List of hops between the source and the destination.
:vartype hops: list[~azure.mgmt.network.v2020_05_01.models.ConnectivityHop] | 62598fc563b5f9789fe85458 |
class ECProvinceSelect(Select): <NEW_LINE> <INDENT> def __init__(self, attrs=None): <NEW_LINE> <INDENT> super().__init__(attrs, choices=PROVINCE_CHOICES) | A Select widget that uses a list of Ecuador provinces as its choices. | 62598fc50fa83653e46f51cb |
class SchemaRpcHandler(mano_dts.AbstractRpcHandler): <NEW_LINE> <INDENT> def __init__(self, log, dts, loop, proxy): <NEW_LINE> <INDENT> super().__init__(log, dts, loop) <NEW_LINE> self.proxy = proxy <NEW_LINE> <DEDENT> @property <NEW_LINE> def xpath(self): <NEW_LINE> <INDENT> return "/rw-pkg-mgmt:get-package-schema" <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def callback(self, ks_path, msg): <NEW_LINE> <INDENT> package_type = msg.package_type.lower() <NEW_LINE> schema = yield from self.proxy.schema(msg.package_type) <NEW_LINE> rpc_op = RPC_SCHEMA_ENDPOINT() <NEW_LINE> for dirname in schema: <NEW_LINE> <INDENT> rpc_op.schema.append(dirname) <NEW_LINE> <DEDENT> return rpc_op | RPC handler to generate the schema for the packages.
| 62598fc55fcc89381b2662bf |
class LEP_LeoTextEdit(QtWidgets.QTextEdit): <NEW_LINE> <INDENT> lep_type = "EDITOR" <NEW_LINE> lep_name = "Leo Text Edit" <NEW_LINE> def __init__(self, c=None, lep=None, *args, **kwargs): <NEW_LINE> <INDENT> super(LEP_LeoTextEdit, self).__init__(*args, **kwargs) <NEW_LINE> self.c = c <NEW_LINE> self.lep = lep <NEW_LINE> self.textChanged.connect(self.text_changed) <NEW_LINE> self.wrapper = qt_text.QTextEditWrapper(self, name='edit_pane', c=c) <NEW_LINE> self.wrapper.widget = self <NEW_LINE> self.highlighter = JEditColorizer(c, self, self.wrapper) <NEW_LINE> <DEDENT> def focusInEvent (self, event): <NEW_LINE> <INDENT> QtWidgets.QTextEdit.focusInEvent(self, event) <NEW_LINE> DBG("focusin()") <NEW_LINE> self.lep.edit_widget_focus() <NEW_LINE> <DEDENT> def focusOutEvent (self, event): <NEW_LINE> <INDENT> QtWidgets.QTextEdit.focusOutEvent(self, event) <NEW_LINE> DBG("focusout()") <NEW_LINE> <DEDENT> def new_text(self, text): <NEW_LINE> <INDENT> self.setPlainText(text) <NEW_LINE> <DEDENT> def text_changed(self): <NEW_LINE> <INDENT> if QtWidgets.QApplication.focusWidget() == self: <NEW_LINE> <INDENT> DBG("text changed, focused") <NEW_LINE> self.lep.text_changed(self.toPlainText()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> DBG("text changed, NOT focused") <NEW_LINE> <DEDENT> <DEDENT> def update_text(self, text): <NEW_LINE> <INDENT> DBG("update editor text") <NEW_LINE> self.setPlainText(text) | LEP_LeoTextEdit - Leo LeoEditorPane editor
| 62598fc5a8370b77170f06bf |
class SponsorDetail(APIView): <NEW_LINE> <INDENT> def get_object(self, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Sponsor.objects.get(pk=pk) <NEW_LINE> <DEDENT> except Sponsor.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> <DEDENT> def get(self, request, pk, format=None): <NEW_LINE> <INDENT> snippet = self.get_object(pk) <NEW_LINE> serializer = SponsorSerializer(snippet) <NEW_LINE> return Response(serializer.data) | Retrieve, update or delete a snippet instance. | 62598fc55fdd1c0f98e5e278 |
class ForwardVoucherExpenses(models.Model): <NEW_LINE> <INDENT> forward_voucher = models.ForeignKey(ForwardVoucher, on_delete=models.CASCADE,) <NEW_LINE> additional_service = models.ForeignKey(AdditionalService, on_delete=models.CASCADE, ) <NEW_LINE> price = models.DecimalField(max_digits=16, decimal_places=9) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Forward expenses / Дополнительные затраты | 62598fc57c178a314d78d784 |
class TestMultiLinesWithBlock: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def method(): <NEW_LINE> <INDENT> return RPCMethod(multiline_doc_with_block) <NEW_LINE> <DEDENT> def test_raw_doc(self): <NEW_LINE> <INDENT> method = self.method() <NEW_LINE> assert method.raw_docstring == ( "This method has *multi-lines* **documentation**.\n\nHere is a quote block:\n\n abcdef 123456" ) <NEW_LINE> assert self.method().html_doc == ( "<p>This method has *multi-lines* **documentation**.</p>" "<p>Here is a quote block:</p>" "<p> abcdef 123456</p>" ) <NEW_LINE> <DEDENT> def test_markdown_to_html(self, settings): <NEW_LINE> <INDENT> settings.MODERNRPC_DOC_FORMAT = "md" <NEW_LINE> assert self.method().html_doc == ( "<p>This method has <em>multi-lines</em> <strong>documentation</strong>.</p>\n" "<p>Here is a quote block:</p>\n" "<pre><code>abcdef 123456\n</code></pre>" ) <NEW_LINE> <DEDENT> def test_rst_to_html(self, settings): <NEW_LINE> <INDENT> settings.MODERNRPC_DOC_FORMAT = "rst" <NEW_LINE> assert self.method().html_doc == ( "<p>This method has <em>multi-lines</em> <strong>documentation</strong>.</p>\n" "<p>Here is a quote block:</p>\n" "<blockquote>\nabcdef 123456</blockquote>\n" ) | Standard multi-line docstring with an indented block | 62598fc55fc7496912d483ed |
@implements_specification('3.2.4', 'tosca-simple-1.0') <NEW_LINE> class List(list): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _create(context, presentation, entry_schema, constraints, value, aspect): <NEW_LINE> <INDENT> if not isinstance(value, list): <NEW_LINE> <INDENT> raise ValueError('"list" data type value is not a list: %s' % safe_repr(value)) <NEW_LINE> <DEDENT> entry_schema_type = entry_schema._get_type(context) <NEW_LINE> entry_schema_constraints = entry_schema.constraints <NEW_LINE> the_list = List() <NEW_LINE> for v in value: <NEW_LINE> <INDENT> v = coerce_value(context, presentation, entry_schema_type, None, entry_schema_constraints, v, aspect) <NEW_LINE> if v is not None: <NEW_LINE> <INDENT> the_list.append(v) <NEW_LINE> <DEDENT> <DEDENT> return the_list <NEW_LINE> <DEDENT> def as_raw(self): <NEW_LINE> <INDENT> return list(self) | The list type allows for specifying multiple values for a parameter of property. For example, if
an application allows for being configured to listen on multiple ports, a list of ports could be
configured using the list data type.
See the `TOSCA Simple Profile v1.0 cos01 specification <http://docs.oasis-open.org/tosca
/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html
#TYPE_TOSCA_LIST>`__ | 62598fc5be7bc26dc9251fce |
class OutlierSolver(Enum): <NEW_LINE> <INDENT> kSigma = "k-sigma" <NEW_LINE> TukeyTest = "tukey-test" <NEW_LINE> MAD = "mad" <NEW_LINE> SMA = "sma" <NEW_LINE> GMM = "gmm" <NEW_LINE> KMeans = "kmeans" <NEW_LINE> DBSCAN = "dbscan" <NEW_LINE> LOF = "lof" <NEW_LINE> IForest = "iforest" <NEW_LINE> PCA = "pca" <NEW_LINE> AutoEncoder = "auto-encoder" <NEW_LINE> def default_params(self): <NEW_LINE> <INDENT> from sparkdq.outliers.params.AutoEncoderParams import AutoEncoderParams <NEW_LINE> from sparkdq.outliers.params.DBSCANParams import DBSCANParams <NEW_LINE> from sparkdq.outliers.params.GMMParams import GMMParams <NEW_LINE> from sparkdq.outliers.params.IForestParams import IForestParams <NEW_LINE> from sparkdq.outliers.params.KMeansParams import KMeansParams <NEW_LINE> from sparkdq.outliers.params.KSigmaParams import KSigmaParams <NEW_LINE> from sparkdq.outliers.params.LOFParams import LOFParams <NEW_LINE> from sparkdq.outliers.params.MADParams import MADParams <NEW_LINE> from sparkdq.outliers.params.SMAParams import SMAParams <NEW_LINE> from sparkdq.outliers.params.PCAParams import PCAParams <NEW_LINE> from sparkdq.outliers.params.TukeyTestParams import TukeyTestParams <NEW_LINE> if self == OutlierSolver.kSigma: <NEW_LINE> <INDENT> return KSigmaParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.TukeyTest: <NEW_LINE> <INDENT> return TukeyTestParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.MAD: <NEW_LINE> <INDENT> return MADParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.MA: <NEW_LINE> <INDENT> return SMAParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.GMM: <NEW_LINE> <INDENT> return GMMParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.KMeans: <NEW_LINE> <INDENT> return KMeansParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.DBSCAN: <NEW_LINE> <INDENT> return DBSCANParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.LOF: <NEW_LINE> <INDENT> return LOFParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.IForest: <NEW_LINE> 
<INDENT> return IForestParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.PCA: <NEW_LINE> <INDENT> return PCAParams() <NEW_LINE> <DEDENT> elif self == OutlierSolver.AutoEncoder: <NEW_LINE> <INDENT> return AutoEncoderParams() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Unknown outlier model {}!".format(self)) | All the models supplied by our system.
1. K-Sigma rule is the general form of 3-sigma rule, which means values lie within a band around the mean in a
normal distribution with a width of k standard deviations.
2. Tukey test utilizes quantiles and IQR(interquartile range) which equals to the difference between 75th and 25th
percentiles(IQR=Q3-Q1), most values lie within the range of [Q1-k*IQR, Q3+k*IQR]
3. MAD(median absolute deviation) utilizes MAD as the width of band centered on median, MAD is the median of all
absolute deviations from the data's median
4. SMA(simple moving average) calculates the average of a fixed-size window moving along the data, if the difference
between SMA and current data is too large, current data may be an outlier
5. GMM(Gaussian mixture model) based on clustering algorithm
6. KMeans based on clustering algorithm
7. LOF(local outlier factor) based on density
8. IsolationForest based on ensemble algorithm
9. PCA based on feature reduction
10. AutoEncoder based on data reconstruction | 62598fc5aad79263cf42eabb |
class Ping(web.View): <NEW_LINE> <INDENT> async def get(self) -> web.Response: <NEW_LINE> <INDENT> return web.Response(text='OK') | ping | 62598fc5ec188e330fdf8b7a |
class BaseMathMutation(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def mutation(cls, data): <NEW_LINE> <INDENT> raise NotImplementedError | Базовый класс для преобразования данных | 62598fc59f288636728189ef |
class GenericSPEC(NormalizedCut): <NEW_LINE> <INDENT> def _calc_spec_scores(self, degree, laplacian, normalised_features, normaliser): <NEW_LINE> <INDENT> normalised_cut = super()._calc_spec_scores( degree, laplacian, normalised_features, normaliser ) <NEW_LINE> trivial_eugenvector = normaliser.dot(np.ones([normalised_features.shape[0], 1])) <NEW_LINE> norm = 1 - normalised_features.transpose().dot(trivial_eugenvector).squeeze().transpose() <NEW_LINE> return normalised_cut / norm | Feature selection algorithm that represents samples as vertices of graph.
Weights of edges of this graph are equal to RBF-distances between points.
The algorithm uses Spectral Graph Theory to find features with the best separability.
To do this, the algorithm finds the trivial eigenvector of the Laplacian of the graph and uses it to
normalise scores computed using :class:`NormalizedCut`. Such normalisation helps to improve
accuracy of feature selection according to the article SPEC family is based on.
The algorithm selects the top k features according to these scores
Parameters
----------
k: int
Number of features to select | 62598fc5091ae35668704f0f |
class VoteOnThreadInputSet(InputSet): <NEW_LINE> <INDENT> def set_Forum(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('Forum', value) <NEW_LINE> <DEDENT> def set_PublicKey(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('PublicKey', value) <NEW_LINE> <DEDENT> def set_ResponseFormat(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('ResponseFormat', value) <NEW_LINE> <DEDENT> def set_ThreadID(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('ThreadID', value) <NEW_LINE> <DEDENT> def set_ThreadIdentifier(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('ThreadIdentifier', value) <NEW_LINE> <DEDENT> def set_ThreadLink(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('ThreadLink', value) <NEW_LINE> <DEDENT> def set_Vote(self, value): <NEW_LINE> <INDENT> super(VoteOnThreadInputSet, self)._set_input('Vote', value) | An InputSet with methods appropriate for specifying the inputs to the VoteOnThread
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 62598fc5dc8b845886d538a2 |
class UniformProbDist(ProbDistI): <NEW_LINE> <INDENT> def __init__(self, samples): <NEW_LINE> <INDENT> if len(samples) == 0: <NEW_LINE> <INDENT> raise ValueError('A Uniform probability distribution must '+ 'have at least one sample.') <NEW_LINE> <DEDENT> self._sampleset = set(samples) <NEW_LINE> self._prob = 1.0/len(self._sampleset) <NEW_LINE> self._samples = list(self._sampleset) <NEW_LINE> <DEDENT> def prob(self, sample): <NEW_LINE> <INDENT> return (self._prob if sample in self._sampleset else 0) <NEW_LINE> <DEDENT> def max(self): <NEW_LINE> <INDENT> return self._samples[0] <NEW_LINE> <DEDENT> def samples(self): <NEW_LINE> <INDENT> return self._samples <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<UniformProbDist with %d samples>' % len(self._sampleset) | A probability distribution that assigns equal probability to each
sample in a given set; and a zero probability to all other
samples. | 62598fc55fcc89381b2662c0 |
class HashErrors(InstallationError): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> self.errors: List["HashError"] = [] <NEW_LINE> <DEDENT> def append(self, error: "HashError") -> None: <NEW_LINE> <INDENT> self.errors.append(error) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> lines = [] <NEW_LINE> self.errors.sort(key=lambda e: e.order) <NEW_LINE> for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): <NEW_LINE> <INDENT> lines.append(cls.head) <NEW_LINE> lines.extend(e.body() for e in errors_of_cls) <NEW_LINE> <DEDENT> if lines: <NEW_LINE> <INDENT> return "\n".join(lines) <NEW_LINE> <DEDENT> return "" <NEW_LINE> <DEDENT> def __bool__(self) -> bool: <NEW_LINE> <INDENT> return bool(self.errors) | Multiple HashError instances rolled into one for reporting | 62598fc5f548e778e596b884 |
class GUID(object): <NEW_LINE> <INDENT> def __init__(self, keys=None, use_C_lib=False): <NEW_LINE> <INDENT> if keys is None: <NEW_LINE> <INDENT> if use_C_lib: <NEW_LINE> <INDENT> self.privkey = None <NEW_LINE> self.signing_key = nacl.signing.SigningKey(self.privkey) <NEW_LINE> self.verify_key = verify_key = self.signing_key.verify_key <NEW_LINE> h = nacl.hash.sha512(verify_key.encode()) <NEW_LINE> self.guid = unhexlify(h[:40]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.generate() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.signing_key, self.verify_key, self.guid = keys <NEW_LINE> <DEDENT> <DEDENT> def generate(self): <NEW_LINE> <INDENT> valid_pow = False <NEW_LINE> while not valid_pow: <NEW_LINE> <INDENT> signing_key = nacl.signing.SigningKey.generate() <NEW_LINE> verify_key = signing_key.verify_key <NEW_LINE> h = nacl.hash.sha512(verify_key.encode()) <NEW_LINE> pow_hash = h[40:] <NEW_LINE> valid_pow = _testpow(pow_hash[:6]) <NEW_LINE> <DEDENT> self.signing_key = signing_key <NEW_LINE> self.verify_key = verify_key <NEW_LINE> self.guid = unhexlify(h[:40]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_privkey(cls, privkey): <NEW_LINE> <INDENT> signing_key = nacl.signing.SigningKey(privkey, encoder=nacl.encoding.HexEncoder) <NEW_LINE> verify_key = signing_key.verify_key <NEW_LINE> h = nacl.hash.sha512(verify_key.encode()) <NEW_LINE> pow_hash = h[40:] <NEW_LINE> if _testpow(pow_hash[:6]): <NEW_LINE> <INDENT> return GUID((signing_key, verify_key, unhexlify(h[:40]))) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "privkey: %s\npubkey: %s\nguid: %s" % ( self.signing_key.encode(encoder=nacl.encoding.HexEncoder), self.verify_key.encode(encoder=nacl.encoding.HexEncoder), hexlify(self.guid)) | Class for generating the guid. It can be generated using C code for a modest
speed boost but it is currently disabled to make it easier to compile the app. | 62598fc5e1aae11d1e7ce998 |
class Interactions(enum.Enum): <NEW_LINE> <INDENT> Up = 1 <NEW_LINE> Down = 2 <NEW_LINE> Delete = 3 <NEW_LINE> Edit = 4 <NEW_LINE> Add = 5 <NEW_LINE> Count = 6 | Enumeration of possible interactions. | 62598fc5851cf427c66b859c |
class PyramidROIAlign(): <NEW_LINE> <INDENT> def __init__(self, pool_shape, image_shape): <NEW_LINE> <INDENT> self.pool_shape = tuple(pool_shape) <NEW_LINE> self.image_shape = tuple(image_shape) <NEW_LINE> <DEDENT> def run(self, inputs): <NEW_LINE> <INDENT> boxes = inputs[0] <NEW_LINE> feature_maps = inputs[1:] <NEW_LINE> y1, x1, y2, x2 = tf.split(boxes, 4, axis=2) <NEW_LINE> h = y2 - y1 <NEW_LINE> w = x2 - x1 <NEW_LINE> image_area = tf.cast( self.image_shape[0] * self.image_shape[1], tf.float32) <NEW_LINE> roi_level = log2_graph(tf.sqrt(h * w) / (224.0 / tf.sqrt(image_area))) <NEW_LINE> roi_level = tf.minimum(5, tf.maximum(2, 4 + tf.cast(tf.round(roi_level), tf.int32) ) ) <NEW_LINE> roi_level = tf.squeeze(roi_level, 2) <NEW_LINE> pooled = [] <NEW_LINE> box_to_level = [] <NEW_LINE> for i, level in enumerate(range(2, 6)): <NEW_LINE> <INDENT> ix = tf.where(tf.equal(roi_level, level)) <NEW_LINE> level_boxes = tf.gather_nd(boxes, ix) <NEW_LINE> box_indices = tf.cast(ix[:, 0], tf.int32) <NEW_LINE> box_to_level.append(ix) <NEW_LINE> level_boxes = tf.stop_gradient(level_boxes) <NEW_LINE> box_indices = tf.stop_gradient(box_indices) <NEW_LINE> pooled.append(tf.image.crop_and_resize( feature_maps[i], level_boxes, box_indices, self.pool_shape, method="bilinear")) <NEW_LINE> <DEDENT> pooled = tf.concat(pooled, axis=0) <NEW_LINE> box_to_level = tf.concat(box_to_level, axis=0) <NEW_LINE> box_range = tf.expand_dims(tf.range(tf.shape(box_to_level)[0]), 1) <NEW_LINE> box_to_level = tf.concat([tf.cast(box_to_level, tf.int32), box_range], axis=1) <NEW_LINE> sorting_tensor = box_to_level[:, 0] * 10000 + box_to_level[:, 1] <NEW_LINE> ix = tf.nn.top_k(sorting_tensor, k=tf.shape( box_to_level)[0]).indices[::-1] <NEW_LINE> ix = tf.gather(box_to_level[:, 2], ix) <NEW_LINE> pooled = tf.gather(pooled, ix) <NEW_LINE> return pooled | Implements ROI Pooling on multiple levels of the feature pyramid.
Params:
- pool_shape: [height, width] of the output pooled regions. Usually [7, 7]
- image_shape: [height, width, channels]. Shape of input image in pixels
Inputs:
- boxes: [batch, num_boxes, (y1, x1, y2, x2)] in normalized
coordinates. Possibly padded with zeros if not enough
boxes to fill the array.
- Feature maps: List of feature maps from different levels of the pyramid.
Each is [batch, height, width, channels]
Output:
Pooled regions in the shape: [batch, num_boxes, height, width, channels].
The width and height are those specified in the pool_shape in the layer
constructor. | 62598fc5a05bb46b3848ab52 |
class GlobalMenuModifier(Modifier): <NEW_LINE> <INDENT> def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb): <NEW_LINE> <INDENT> if breadcrumb or not post_cut or not request.user.is_authenticated(): <NEW_LINE> <INDENT> return nodes <NEW_LINE> <DEDENT> trim_nodes = ['/login/', '/register/'] <NEW_LINE> return filter(lambda node: node.url not in trim_nodes, nodes) | If the user is logged in, remove certain menu options.
Django cms provides a way to hide an option from a non logged in user, but no
way to hide something from someone who is logged in. This menu modifier adds
this capability. | 62598fc57b180e01f3e491c3 |
class Bot99(player.Bot): <NEW_LINE> <INDENT> def ask(self, prompt): <NEW_LINE> <INDENT> possibles = self.get_possibles() <NEW_LINE> if possibles: <NEW_LINE> <INDENT> possibles.sort() <NEW_LINE> return '{1} {0}'.format(*possibles[-1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'pass' <NEW_LINE> <DEDENT> <DEDENT> def get_possibles(self): <NEW_LINE> <INDENT> total = self.game.total <NEW_LINE> hand = self.game.hands[self.name] <NEW_LINE> possibles = [] <NEW_LINE> for card in hand.cards: <NEW_LINE> <INDENT> for value in self.game.card_values[card.rank]: <NEW_LINE> <INDENT> if total + value < 100: <NEW_LINE> <INDENT> possibles.append((total + value, card)) <NEW_LINE> <DEDENT> elif value == 99: <NEW_LINE> <INDENT> possibles.append((99, card)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return possibles <NEW_LINE> <DEDENT> def tell(self, text = ''): <NEW_LINE> <INDENT> pass | A bot for Ninety-Nine. (player.Bot)
Methods:
get_possibles: Get the possible plays. (list of tuple)
Overridden Methods:
ask
tell | 62598fc5f9cc0f698b1c5445 |
class Cell(object): <NEW_LINE> <INDENT> all_possibilities = set(range(1, 10)) <NEW_LINE> def __init__(self, pos, solution=None): <NEW_LINE> <INDENT> assert len(pos) == 2 <NEW_LINE> self.pos = pos <NEW_LINE> self.possibilities = self.all_possibilities.copy() <NEW_LINE> if solution is not None: <NEW_LINE> <INDENT> assert solution in self.all_possibilities <NEW_LINE> self.keep_possibilities(set([solution])) <NEW_LINE> <DEDENT> <DEDENT> def remove_possibilities(self, rm_set): <NEW_LINE> <INDENT> if self.possibilities.isdisjoint(rm_set): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.possibilities -= rm_set <NEW_LINE> if len(self.possibilities) < 1: <NEW_LINE> <INDENT> raise ValueError("Removing %s from Cell %s makes it empty!" % (rm_set, self.pos)) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def keep_possibilities(self, kp_set): <NEW_LINE> <INDENT> if self.possibilities.isdisjoint(kp_set): <NEW_LINE> <INDENT> raise ValueError("Keeping only %s from Cell %s makes it empty!" % (kp_set, self.pos)) <NEW_LINE> <DEDENT> if self.possibilities.issubset(kp_set): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.possibilities.intersection_update(kp_set) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> def is_solved(self): <NEW_LINE> <INDENT> return len(self.possibilities) == 1 <NEW_LINE> <DEDENT> def solution(self): <NEW_LINE> <INDENT> if self.is_solved(): <NEW_LINE> <INDENT> return tuple(self.possibilities)[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> sol = self.solution() <NEW_LINE> if sol is None: <NEW_LINE> <INDENT> return "." 
<NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return str(sol) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> sol = self.solution() <NEW_LINE> if sol is None: <NEW_LINE> <INDENT> return 'Cell((%d,%d))' % self.pos <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'Cell((%d,%d), %s)' % (self.pos + (str(sol),)) | represents a Sudoku cell | 62598fc560cbc95b06364625 |
class BashDriver(AbstractMqWorker): <NEW_LINE> <INDENT> def __init__(self, process_name): <NEW_LINE> <INDENT> super(BashDriver, self).__init__(process_name) <NEW_LINE> self.is_alive = False <NEW_LINE> self.initial_thread_count = threading.active_count() <NEW_LINE> <DEDENT> def _mq_callback(self, message): <NEW_LINE> <INDENT> while threading.active_count() > settings['bash_runnable_count'] + self.initial_thread_count: <NEW_LINE> <INDENT> time.sleep(0.01) <NEW_LINE> <DEDENT> t = BashRunnable(self.logger, message, self.consumer, self.performance_ticker) <NEW_LINE> t.daemon = True <NEW_LINE> t.start() | Process facilitates threads running local or remote bash scripts | 62598fc5ec188e330fdf8b7c |
class RemoteDigiPointDevice(RemoteXBeeDevice): <NEW_LINE> <INDENT> def __init__(self, local_xbee_device, x64bit_addr=None, node_id=None): <NEW_LINE> <INDENT> if local_xbee_device.get_protocol() != XBeeProtocol.DIGI_POINT: <NEW_LINE> <INDENT> raise XBeeException("Invalid protocol.") <NEW_LINE> <DEDENT> super().__init__(local_xbee_device, x64bit_addr, None, node_id) <NEW_LINE> <DEDENT> def get_protocol(self): <NEW_LINE> <INDENT> return XBeeProtocol.DIGI_POINT | This class represents a remote DigiPoint XBee device. | 62598fc5a219f33f346c6af0 |
class GammaFramework(equations_data.GammaData, EquationFramework): <NEW_LINE> <INDENT> pass | This class exists to add framework methods to GammaData | 62598fc57b180e01f3e491c4 |
class StaticClass(_StaticClass): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _cascade_method(cls, method, *args, **kwargs): <NEW_LINE> <INDENT> bases=[cls] <NEW_LINE> while bases: <NEW_LINE> <INDENT> cls2=bases.pop(0) <NEW_LINE> bases.extend(cls2.__bases__) <NEW_LINE> if hasattr(cls2, method): <NEW_LINE> <INDENT> r=getattr(cls2, method)(*args,**kwargs) <NEW_LINE> if r!=None: <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return None | Static class: a class not intended to have instances.
Methods are automatically bound as class methods.
_StaticClassMetaclass is the metaclass, and is not
accessible directly, whereas StaticClass is an instance
of that metaclass (and therefore a class) and should be
inherited from. | 62598fc5d486a94d0ba2c2ba |
class ColumnMismatchInPatchError(PatchError): <NEW_LINE> <INDENT> pass | Thrown when creating a patch with a list of dictionaries where the dictionary keys
don't match with the column names provided
For example, this code will throw this error because the "col1" column is being specified
in the row data of a patch but not the columns:
.. code-block:: python
pgmock.patch(pgmock.table('table'), rows=[{'col1': 'value'}], cols=['col2']) | 62598fc5f9cc0f698b1c5446 |
class ComplexJsonEncoder(JSONEncoder): <NEW_LINE> <INDENT> def default(self, o): <NEW_LINE> <INDENT> if hasattr(o, 'toJson'): <NEW_LINE> <INDENT> return o.toJson() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JSONEncoder.default(self, o) | Basic JSON encoder for 'complex (nested)' Python objects. | 62598fc53617ad0b5ee06430 |
class RPMDistributorTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_update_checksum_type(self): <NEW_LINE> <INDENT> cfg = config.get_config() <NEW_LINE> if cfg.pulp_version < version.Version('2.9'): <NEW_LINE> <INDENT> raise unittest.SkipTest('This test requires Pulp 2.9 or above.') <NEW_LINE> <DEDENT> client = api.Client(cfg, api.json_handler) <NEW_LINE> distributor = gen_distributor() <NEW_LINE> body = gen_repo() <NEW_LINE> body['distributors'] = [distributor] <NEW_LINE> repo = client.post(REPOSITORY_PATH, body) <NEW_LINE> self.addCleanup(client.delete, repo['_href']) <NEW_LINE> for checksum_type in (None, 'sha256', None): <NEW_LINE> <INDENT> client.put(repo['_href'], { 'distributor_configs': { distributor['distributor_id']: { 'checksum_type': checksum_type, } } }) <NEW_LINE> repo = client.get(repo['_href'], params={'details': True}) <NEW_LINE> self.assertEqual( repo['distributors'][0]['config'].get('checksum_type'), checksum_type ) | RPM distributor tests. | 62598fc55fc7496912d483ef |
class ObserverAttr(object): <NEW_LINE> <INDENT> def __init__(self, name, *args, **kwargs): <NEW_LINE> <INDENT> self.name, self.args, self.kwargs = name, args, kwargs <NEW_LINE> <DEDENT> def __get__(self, owner, ownertype): <NEW_LINE> <INDENT> if not owner: return self <NEW_LINE> _debug("got request for observer", self.name, "args =", str(self.args), "kwargs =", str(self.kwargs)) <NEW_LINE> if self.name not in list(owner.__dict__.keys()): <NEW_LINE> <INDENT> owner.__dict__[self.name] = Observer(*self.args, **self.kwargs) <NEW_LINE> <DEDENT> return owner.__dict__[self.name] | Wrapper for Observers within Models. Will auto-vivify an Observer
within a Model instance the first time it's called. | 62598fc54a966d76dd5ef1bf |
class Event(object): <NEW_LINE> <INDENT> def __init__(self, message, refs, started_at=None): <NEW_LINE> <INDENT> self._message = message <NEW_LINE> self._refs = refs <NEW_LINE> self.started_at = started_at if started_at else time.time() <NEW_LINE> self.id = None <NEW_LINE> self.update_duration_event() <NEW_LINE> self._time_remaining_str = "(time remaining: N/A)" <NEW_LINE> <DEDENT> def _refresh(self): <NEW_LINE> <INDENT> global _module <NEW_LINE> _module.log.debug('refreshing mgr for %s (%s) at %f' % (self.id, self._message, self.progress)) <NEW_LINE> self.update_duration_event() <NEW_LINE> self.update_time_remaining() <NEW_LINE> _module.update_progress_event( self.id, self.twoline_progress(6), self.progress) <NEW_LINE> <DEDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return self._message <NEW_LINE> <DEDENT> @property <NEW_LINE> def refs(self): <NEW_LINE> <INDENT> return self._refs <NEW_LINE> <DEDENT> @property <NEW_LINE> def progress(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def duration_str(self): <NEW_LINE> <INDENT> return self._duration_str <NEW_LINE> <DEDENT> @property <NEW_LINE> def failed(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def failure_message(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def summary(self): <NEW_LINE> <INDENT> return "{0} {1} {2}".format(self.progress, self.message, self.duration_str) <NEW_LINE> <DEDENT> def _progress_str(self, width): <NEW_LINE> <INDENT> inner_width = width - 2 <NEW_LINE> out = "[" <NEW_LINE> done_chars = int(self.progress * inner_width) <NEW_LINE> out += done_chars * '=' <NEW_LINE> out += (inner_width - done_chars) * '.' 
<NEW_LINE> out += "]" <NEW_LINE> return out <NEW_LINE> <DEDENT> def twoline_progress(self, indent=4): <NEW_LINE> <INDENT> return "{0} {1}\n{2}{3} {4}".format(self._message, self._duration_str, " " * indent, self._progress_str(30), self._time_remaining_str) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return { "id": self.id, "message": self.message, "duration": self.duration_str, "refs": self._refs, "progress": self.progress, "started_at": self.started_at, "time_remaining": self.estimated_time_remaining() } <NEW_LINE> <DEDENT> def update_duration_event(self): <NEW_LINE> <INDENT> duration = time.time() - self.started_at <NEW_LINE> self._duration_str = time.strftime("(since %Hh %Mm %Ss)", time.gmtime(duration)) <NEW_LINE> <DEDENT> def estimated_time_remaining(self): <NEW_LINE> <INDENT> elapsed = time.time() - self.started_at <NEW_LINE> progress = self.progress <NEW_LINE> if progress == 0.0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return int(elapsed * (1 - progress) / progress) <NEW_LINE> <DEDENT> def update_time_remaining(self): <NEW_LINE> <INDENT> time_remaining = self.estimated_time_remaining() <NEW_LINE> if time_remaining: <NEW_LINE> <INDENT> self._time_remaining_str = time.strftime( "(time remaining: %Hh %Mm %Ss)", time.gmtime(time_remaining)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._time_remaining_str = "(time remaining: N/A)" | A generic "event" that has a start time, completion percentage,
and a list of "refs" that are (type, id) tuples describing which
objects (osds, pools) this relates to. | 62598fc57047854f4633f6bc |
class MyMemoryTranslation(MachineTranslation): <NEW_LINE> <INDENT> name = 'MyMemory' <NEW_LINE> def convert_language(self, language): <NEW_LINE> <INDENT> return language.replace('_', '-').lower() <NEW_LINE> <DEDENT> def is_supported(self, source, language): <NEW_LINE> <INDENT> return self.lang_supported(source) and self.lang_supported(language) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def lang_supported(language): <NEW_LINE> <INDENT> if language in ('ia', 'tt', 'ug'): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return '@' not in language and len(language) == 2 <NEW_LINE> <DEDENT> def format_match(self, match): <NEW_LINE> <INDENT> if isinstance(match['quality'], int): <NEW_LINE> <INDENT> quality = match['quality'] <NEW_LINE> <DEDENT> elif match['quality'] is not None and match['quality'].isdigit(): <NEW_LINE> <INDENT> quality = int(match['quality']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> quality = 0 <NEW_LINE> <DEDENT> if match['last-updated-by'] != '': <NEW_LINE> <INDENT> source = '%s (%s)' % ( self.name, match['last-updated-by'] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> source = self.name <NEW_LINE> <DEDENT> return ( match['translation'], quality * match['match'], source, match['segment'], ) <NEW_LINE> <DEDENT> def download_translations(self, source, language, text, unit, user): <NEW_LINE> <INDENT> args = { 'q': text.split('. 
')[0][:500], 'langpair': '%s|%s' % (source, language), } <NEW_LINE> if appsettings.MT_MYMEMORY_EMAIL is not None: <NEW_LINE> <INDENT> args['de'] = appsettings.MT_MYMEMORY_EMAIL <NEW_LINE> <DEDENT> if appsettings.MT_MYMEMORY_USER is not None: <NEW_LINE> <INDENT> args['user'] = appsettings.MT_MYMEMORY_USER <NEW_LINE> <DEDENT> if appsettings.MT_MYMEMORY_KEY is not None: <NEW_LINE> <INDENT> args['key'] = appsettings.MT_MYMEMORY_KEY <NEW_LINE> <DEDENT> response = self.json_status_req( 'http://mymemory.translated.net/api/get', **args ) <NEW_LINE> return [self.format_match(match) for match in response['matches']] | MyMemory machine translation support. | 62598fc5be7bc26dc9251fd0 |
class FamilyPictures(models.Model): <NEW_LINE> <INDENT> from product.models import Product <NEW_LINE> name = models.CharField(_("Nome da imagem"), max_length=255, blank=False) <NEW_LINE> image = models.ImageField(_("Imagem"), upload_to="family/pictures/%y/%m", blank=False) <NEW_LINE> product = models.ForeignKey(Product, verbose_name=_("Produto"), blank=False, related_name="product_images") <NEW_LINE> uploaded_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Imagem da família") <NEW_LINE> verbose_name_plural = _("Imagens das famílias") <NEW_LINE> ordering = ['-uploaded_at'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | 4.2
Familia: item.10
linha: familia.fotos | 62598fc55fdd1c0f98e5e27d |
class SelfAttention(nn.Cell): <NEW_LINE> <INDENT> def __init__(self, batch_size, hidden_size, num_attention_heads=16, attention_probs_dropout_prob=0.1, use_one_hot_embeddings=False, initializer_range=0.02, hidden_dropout_prob=0.1, has_attention_mask=True, is_encdec_att=False, compute_type=mstype.float32): <NEW_LINE> <INDENT> super(SelfAttention, self).__init__() <NEW_LINE> if hidden_size % num_attention_heads != 0: <NEW_LINE> <INDENT> raise ValueError("The hidden size (%d) is not a multiple of the number " "of attention heads (%d)" % (hidden_size, num_attention_heads)) <NEW_LINE> <DEDENT> self.size_per_head = int(hidden_size / num_attention_heads) <NEW_LINE> self.is_encdec_att = is_encdec_att <NEW_LINE> self.attention = MultiheadAttention( batch_size=batch_size, from_tensor_width=hidden_size, to_tensor_width=hidden_size, out_tensor_width=hidden_size, num_attention_heads=num_attention_heads, size_per_head=self.size_per_head, attention_probs_dropout_prob=attention_probs_dropout_prob, use_one_hot_embeddings=use_one_hot_embeddings, initializer_range=initializer_range, has_attention_mask=has_attention_mask, do_return_2d_tensor=True, compute_type=compute_type) <NEW_LINE> self.preprocess = LayerPreprocess(in_channels=hidden_size) <NEW_LINE> self.postprocess = LayerPostprocess(dropout_prob=hidden_dropout_prob) <NEW_LINE> self.reshape = P.Reshape() <NEW_LINE> self.shape = (-1, hidden_size) <NEW_LINE> <DEDENT> def construct(self, input_tensor, memory_tensor, attention_mask, seq_length, enc_seq_length): <NEW_LINE> <INDENT> input_tensor = self.reshape(input_tensor, self.shape) <NEW_LINE> memory_tensor = self.reshape(memory_tensor, self.shape) <NEW_LINE> output = self.preprocess(input_tensor) <NEW_LINE> if not self.is_encdec_att: <NEW_LINE> <INDENT> memory_tensor = output <NEW_LINE> <DEDENT> attention_output = self.attention(output, memory_tensor, seq_length, enc_seq_length, attention_mask) <NEW_LINE> output = self.postprocess(attention_output, input_tensor) <NEW_LINE> return 
output | Apply self-attention.
Args:
batch_size (int): Batch size of input dataset.
from_seq_length (int): Length of query sequence.
to_seq_length (int): Length of memory sequence.
hidden_size (int): Size of attention layers.
num_attention_heads (int): Number of attention heads. Default: 16.
attention_probs_dropout_prob (float): The dropout probability for
SelfAttention. Default: 0.1.
use_one_hot_embeddings (bool): Specifies whether to use one_hot encoding form. Default: False.
initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02.
hidden_dropout_prob (float): The dropout probability for hidden outputs. Default: 0.1.
has_attention_mask (bool): Specifies whether has attention mask. Default: True.
is_encdec_att (bool): Specifies whether query sequence and memory sequence are different. Default: False.
compute_type (:class:`mindspore.dtype`): Compute type in MultiheadAttention. Default: mstype.float32. | 62598fc55166f23b2e2436cc |
class IAM(object): <NEW_LINE> <INDENT> def __init__(self, stack_name): <NEW_LINE> <INDENT> self.client = boto3.client('iam', region_name="us-east-1") <NEW_LINE> self.role_name = '{}-lambda-execution-role'.format(stack_name) <NEW_LINE> self.policy_name = '{}-api-lambda-permissions'.format(stack_name) <NEW_LINE> <DEDENT> def create_role(self): <NEW_LINE> <INDENT> with open(os.path.join(resource_filename("manage", "configs"), "attach_policy.json"), 'rt') as policy: <NEW_LINE> <INDENT> attach_policy = json.load(policy) <NEW_LINE> <DEDENT> with open(os.path.join(resource_filename("manage", "configs"), "assume_policy.json"), 'rt') as policy: <NEW_LINE> <INDENT> assume_policy = json.load(policy) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = self.client.create_role(RoleName=self.role_name, AssumeRolePolicyDocument=json.dumps(assume_policy, sort_keys=True, indent=2)) <NEW_LINE> <DEDENT> except botocore.exceptions.ClientError as e: <NEW_LINE> <INDENT> print(" Role exists. Deleting and re-creating") <NEW_LINE> self.delete_role() <NEW_LINE> response = self.client.create_role(RoleName=self.role_name, AssumeRolePolicyDocument=json.dumps(assume_policy, sort_keys=True, indent=2)) <NEW_LINE> <DEDENT> if response["ResponseMetadata"]["HTTPStatusCode"] != 200: <NEW_LINE> <INDENT> raise Exception("Failed to create Role.") <NEW_LINE> <DEDENT> response = self.client.put_role_policy(RoleName=self.role_name, PolicyName=self.policy_name, PolicyDocument=json.dumps(attach_policy, sort_keys=True, indent=2)) <NEW_LINE> if response["ResponseMetadata"]["HTTPStatusCode"] != 200: <NEW_LINE> <INDENT> raise Exception("Failed to attached policy to bucket.") <NEW_LINE> <DEDENT> <DEDENT> def delete_role(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> iam = boto3.resource('iam') <NEW_LINE> role_policy = iam.RolePolicy(self.role_name, self.policy_name) <NEW_LINE> role_policy.delete() <NEW_LINE> <DEDENT> except botocore.exceptions.ClientError as e: <NEW_LINE> <INDENT> print(" No Role found. 
Skipping") <NEW_LINE> return <NEW_LINE> <DEDENT> response = self.client.delete_role(RoleName=self.role_name) <NEW_LINE> if response["ResponseMetadata"]["HTTPStatusCode"] != 200: <NEW_LINE> <INDENT> raise Exception("Failed to delete policy.") | Class to manage IAM roles for a stack | 62598fc54527f215b58ea1ba |
class OpenSCADWorkbench ( Workbench ): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__class__.Icon = FreeCAD.getResourceDir() + "Mod/OpenSCAD/Resources/icons/OpenSCADWorkbench.svg" <NEW_LINE> self.__class__.MenuText = "OpenSCAD" <NEW_LINE> self.__class__.ToolTip = ( "OpenSCAD is an application for creating solid 3D CAD.\n" "FreeCAD utizes OpenSCAD's capability as a script-only based modeller that uses its own description language\n" "Note: the Mesh workbench heavily uses the boolean operations of this workbench because they are quite robust" ) <NEW_LINE> <DEDENT> def Initialize(self): <NEW_LINE> <INDENT> def QT_TRANSLATE_NOOP(scope, text): <NEW_LINE> <INDENT> return text <NEW_LINE> <DEDENT> import OpenSCAD_rc,OpenSCADCommands <NEW_LINE> commands = ['OpenSCAD_ReplaceObject','OpenSCAD_RemoveSubtree', 'OpenSCAD_RefineShapeFeature','OpenSCAD_MirrorMeshFeature', 'OpenSCAD_ScaleMeshFeature','OpenSCAD_ResizeMeshFeature','OpenSCAD_IncreaseToleranceFeature', 'OpenSCAD_Edgestofaces', 'OpenSCAD_ExpandPlacements','OpenSCAD_ExplodeGroup'] <NEW_LINE> toolbarcommands = ['OpenSCAD_ReplaceObject','OpenSCAD_RemoveSubtree', 'OpenSCAD_ExplodeGroup','OpenSCAD_RefineShapeFeature', 'OpenSCAD_IncreaseToleranceFeature'] <NEW_LINE> import PartGui <NEW_LINE> parttoolbarcommands = ['Part_CheckGeometry','Part_Primitives', 'Part_Builder','Part_Cut','Part_Fuse','Part_Common', 'Part_Extrude','Part_Revolve'] <NEW_LINE> import FreeCAD <NEW_LINE> param = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/OpenSCAD") <NEW_LINE> openscadfilename = param.GetString('openscadexecutable') <NEW_LINE> if not openscadfilename: <NEW_LINE> <INDENT> import OpenSCADUtils <NEW_LINE> openscadfilename = OpenSCADUtils.searchforopenscadexe() <NEW_LINE> if openscadfilename: <NEW_LINE> <INDENT> FreeCAD.addImportType("OpenSCAD Format (*.scad)","importCSG") <NEW_LINE> param.SetString('openscadexecutable',openscadfilename) <NEW_LINE> <DEDENT> <DEDENT> if openscadfilename: <NEW_LINE> <INDENT> 
commands.extend(['OpenSCAD_AddOpenSCADElement', 'OpenSCAD_MeshBoolean', 'OpenSCAD_Hull','OpenSCAD_Minkowski']) <NEW_LINE> toolbarcommands.extend(['OpenSCAD_AddOpenSCADElement', 'OpenSCAD_MeshBoolean', 'OpenSCAD_Hull','OpenSCAD_Minkowski']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> FreeCAD.Console.PrintWarning('OpenSCAD executable not found\n') <NEW_LINE> <DEDENT> self.appendToolbar(QT_TRANSLATE_NOOP('Workbench','OpenSCADTools'),toolbarcommands) <NEW_LINE> self.appendMenu('OpenSCAD',commands) <NEW_LINE> self.appendToolbar(QT_TRANSLATE_NOOP('Workbech','OpenSCAD Part tools'),parttoolbarcommands) <NEW_LINE> FreeCADGui.addIconPath(":/icons") <NEW_LINE> FreeCADGui.addLanguagePath(":/translations") <NEW_LINE> FreeCADGui.addPreferencePage(":/ui/openscadprefs-base.ui","OpenSCAD") <NEW_LINE> <DEDENT> def GetClassName(self): <NEW_LINE> <INDENT> return "Gui::PythonWorkbench" | OpenSCAD workbench object | 62598fc571ff763f4b5e7a69 |
class Solution: <NEW_LINE> <INDENT> def isSameTree(self, p: TreeNode, q: TreeNode) -> bool: <NEW_LINE> <INDENT> return self.dfs(p, q) <NEW_LINE> <DEDENT> def dfs(self, node1, node2): <NEW_LINE> <INDENT> if not node1 and not node2: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not node1 or not node2: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return (node1.val == node2.val) and self.dfs(node1.left, node2.left) and self.dfs(node1.right, node2.right) | 方法1:dfs递归 | 62598fc55fcc89381b2662c2 |
class ConstantDeclaration(FieldDeclaration): <NEW_LINE> <INDENT> attrs = () | only in interface | 62598fc57c178a314d78d78a |
class PublicSubmission(Submission): <NEW_LINE> <INDENT> __tablename__ = 'submission_public' <NEW_LINE> id = util.pk() <NEW_LINE> enumerator_user_id = sa.Column(pg.UUID, util.fk('auth_user.id')) <NEW_LINE> enumerator = relationship('User') <NEW_LINE> survey_type = sa.Column(survey_type_enum, nullable=False) <NEW_LINE> __table_args__ = ( sa.ForeignKeyConstraint( ['id', 'survey_type'], ['submission.id', 'submission.survey_type'], onupdate='CASCADE', ondelete='CASCADE' ), sa.CheckConstraint("survey_type::TEXT = 'public'"), ) <NEW_LINE> __mapper_args__ = {'polymorphic_identity': 'public_submission'} <NEW_LINE> def _asdict(self): <NEW_LINE> <INDENT> result = super()._default_asdict() <NEW_LINE> if self.enumerator_user_id is not None: <NEW_LINE> <INDENT> result['enumerator_user_id'] = self.enumerator_user_id <NEW_LINE> result['enumerator_user_name'] = self.enumerator.name <NEW_LINE> <DEDENT> return result | A PublicSubmission might have an enumerator.
Use a PublicSubmission for a Survey. | 62598fc57d847024c075c6a8 |
class Cave(object): <NEW_LINE> <INDENT> number = None <NEW_LINE> def __init__(self, number): <NEW_LINE> <INDENT> self.number = number | A Cave.
A cave has a number::
>>> hasattr(Cave, 'number')
True | 62598fc53317a56b869be6c6 |
class DataplaneChecker(object): <NEW_LINE> <INDENT> def __init__(self, event_dag, slop_buffer=10): <NEW_LINE> <INDENT> self.events = list(event_dag.events) <NEW_LINE> self.stats = DataplaneCheckerStats(self.events) <NEW_LINE> self.current_dp_fingerprints = [] <NEW_LINE> self.fingerprint_2_event_idx = {} <NEW_LINE> self.slop_buffer = slop_buffer <NEW_LINE> <DEDENT> def decide_drop(self, dp_event): <NEW_LINE> <INDENT> dp_fingerprint = (DPFingerprint.from_pkt(dp_event.packet), dp_event.node.dpid, dp_event.port.port_no) <NEW_LINE> event_fingerprint = find(lambda f: f[1:] == dp_fingerprint, self.current_dp_fingerprints) <NEW_LINE> if event_fingerprint is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.current_dp_fingerprints.remove(event_fingerprint) <NEW_LINE> event_idx = self.fingerprint_2_event_idx[event_fingerprint] <NEW_LINE> self.events.pop(event_idx) <NEW_LINE> if event_fingerprint[0] == "DataplanePermit": <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.stats.record_drop(event_fingerprint) <NEW_LINE> return True <NEW_LINE> <DEDENT> def update_window(self, current_round): <NEW_LINE> <INDENT> self.current_dp_fingerprints = [] <NEW_LINE> self.fingerprint_2_event_idx = {} <NEW_LINE> head_idx = find_index(lambda e: e.round == current_round - self.slop_buffer, self.events) <NEW_LINE> head_idx = max(head_idx, 0) <NEW_LINE> tail_idx = find_index(lambda e: e.round == current_round + self.slop_buffer, self.events) <NEW_LINE> if tail_idx is None: <NEW_LINE> <INDENT> tail_idx = len(self.events) <NEW_LINE> <DEDENT> for i in xrange(head_idx, tail_idx): <NEW_LINE> <INDENT> if (type(self.events[i]) == DataplanePermit or type(self.events[i]) == DataplaneDrop): <NEW_LINE> <INDENT> fingerprint = self.events[i].fingerprint <NEW_LINE> self.current_dp_fingerprints.append(fingerprint) <NEW_LINE> self.fingerprint_2_event_idx[fingerprint] = i <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def check_dataplane(self, current_round, simulation): <NEW_LINE> <INDENT> 
self.update_window(current_round) <NEW_LINE> for dp_event in simulation.patch_panel.queued_dataplane_events: <NEW_LINE> <INDENT> if not simulation.topology.ok_to_send(dp_event): <NEW_LINE> <INDENT> log.warn("Not valid to send dp_event %s" % str(dp_event)) <NEW_LINE> simulation.patch_panel.drop_dp_event(dp_event) <NEW_LINE> <DEDENT> elif self.decide_drop(dp_event): <NEW_LINE> <INDENT> simulation.patch_panel.drop_dp_event(dp_event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> simulation.patch_panel.permit_dp_event(dp_event) | Dataplane permits are the default, *unless* they were explicitly dropped in the
initial run. This class keeps track of whether pending dataplane events should
be dropped or forwarded during replay.
Note that whenever DataplaneChecker is in use, it should always be the case
that DataplanePermit and DataplaneDrop events are set to "passive" mode,
where their proceed() method always returns True. | 62598fc5d486a94d0ba2c2bc |
class Betais_silaServicer(object): <NEW_LINE> <INDENT> def device_identification(self, request, context): <NEW_LINE> <INDENT> context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) <NEW_LINE> <DEDENT> def supported_features(self, request, context): <NEW_LINE> <INDENT> context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) | Feature "is_sila"
Version: 3
In this version the description of features is simplified | 62598fc5656771135c48995a |
class PostManager(models.Manager): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> log.debug('Creating query set for Posts...') <NEW_LINE> ret = super(PostManager, self).get_query_set() <NEW_LINE> now = datetime.datetime.now() <NEW_LINE> log.debug('Will filter out by date: %s' % now) <NEW_LINE> ret = ret.filter(pub_date__lte=now) <NEW_LINE> ret = ret.exclude(exp_date__lte=now) <NEW_LINE> return ret | Returns published posts via their date | 62598fc5d8ef3951e32c7fd1 |
class PyKeyboardMeta(object): <NEW_LINE> <INDENT> def press_key(self, character=''): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def release_key(self, character=''): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def tap_key(self, character='', n=1, interval=0): <NEW_LINE> <INDENT> for i in range(n): <NEW_LINE> <INDENT> self.press_key(character) <NEW_LINE> self.release_key(character) <NEW_LINE> time.sleep(interval) <NEW_LINE> <DEDENT> <DEDENT> def type_string(self, char_string, interval=0): <NEW_LINE> <INDENT> shift = False <NEW_LINE> for char in char_string: <NEW_LINE> <INDENT> if self.is_char_shifted(char): <NEW_LINE> <INDENT> if not shift: <NEW_LINE> <INDENT> time.sleep(interval) <NEW_LINE> self.press_key(self.shift_key) <NEW_LINE> shift = True <NEW_LINE> <DEDENT> if char in '<>?:"{}|~!@#$%^&*()_+': <NEW_LINE> <INDENT> ch_index = '<>?:"{}|~!@#$%^&*()_+'.index(char) <NEW_LINE> unshifted_char = ",./;'[]\\`1234567890-="[ch_index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unshifted_char = char.lower() <NEW_LINE> <DEDENT> time.sleep(interval) <NEW_LINE> self.tap_key(unshifted_char) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if shift and char != ' ': <NEW_LINE> <INDENT> self.release_key(self.shift_key) <NEW_LINE> shift = False <NEW_LINE> <DEDENT> time.sleep(interval) <NEW_LINE> self.tap_key(char) <NEW_LINE> <DEDENT> <DEDENT> if shift: <NEW_LINE> <INDENT> self.release_key(self.shift_key) <NEW_LINE> <DEDENT> <DEDENT> def special_key_assignment(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def lookup_character_value(self, character): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def is_char_shifted(self, character): <NEW_LINE> <INDENT> if character.isupper(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if character in '<>?:"{}|~!@#$%^&*()_+': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | The base class for PyKeyboard. 
Represents basic operational model. | 62598fc5aad79263cf42eac0 |
class AuthorizationError(Exception): <NEW_LINE> <INDENT> def __init__(self, annotations): <NEW_LINE> <INDENT> self.annotations = annotations <NEW_LINE> super(AuthorizationError, self).__init__(', '.join(annotations)) | Authorization error. | 62598fc53d592f4c4edbb19d |
class OrderGoods(BaseModel): <NEW_LINE> <INDENT> order = models.ForeignKey(OrderInfo, verbose_name='订单', on_delete=models.CASCADE) <NEW_LINE> count = models.IntegerField(default=1, verbose_name='商品数目') <NEW_LINE> price = models.DecimalField(max_digits=10, decimal_places=2, verbose_name='商品价格') <NEW_LINE> comment = models.CharField(max_length=256, default='', verbose_name='评论') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'demo_order_goods' <NEW_LINE> verbose_name = '订单商品' | 订单商品模型类 | 62598fc5aad79263cf42eac1 |
class MessageReaderWriter(object): <NEW_LINE> <INDENT> def __init__(self, socket_stream): <NEW_LINE> <INDENT> self._stream = socket_stream <NEW_LINE> self._msg = None <NEW_LINE> <DEDENT> def _read_message(self): <NEW_LINE> <INDENT> hdr = self._stream.read(5) <NEW_LINE> msg_len, msg_type = struct.unpack("<LB", hdr) <NEW_LINE> if msg_type == 10: <NEW_LINE> <INDENT> raise ProgrammingError("The connected server does not have the " "MySQL X protocol plugin enabled") <NEW_LINE> <DEDENT> payload = self._stream.read(msg_len - 1) <NEW_LINE> msg_type_name = SERVER_MESSAGES.get(msg_type) <NEW_LINE> if not msg_type_name: <NEW_LINE> <INDENT> raise ValueError("Unknown msg_type: {0}".format(msg_type)) <NEW_LINE> <DEDENT> msg = Message.from_server_message(msg_type, payload) <NEW_LINE> return msg <NEW_LINE> <DEDENT> def read_message(self): <NEW_LINE> <INDENT> if self._msg is not None: <NEW_LINE> <INDENT> msg = self._msg <NEW_LINE> self._msg = None <NEW_LINE> return msg <NEW_LINE> <DEDENT> return self._read_message() <NEW_LINE> <DEDENT> def push_message(self, msg): <NEW_LINE> <INDENT> if self._msg is not None: <NEW_LINE> <INDENT> raise OperationalError("Message push slot is full") <NEW_LINE> <DEDENT> self._msg = msg <NEW_LINE> <DEDENT> def write_message(self, msg_id, msg): <NEW_LINE> <INDENT> msg_str = encode_to_bytes(msg.serialize_to_string()) <NEW_LINE> header = struct.pack("<LB", len(msg_str) + 1, msg_id) <NEW_LINE> self._stream.sendall(b"".join([header, msg_str])) | Implements a Message Reader/Writer.
Args:
socket_stream (mysqlx.connection.SocketStream): `SocketStream` object. | 62598fc526068e7796d4cc48 |
class TurtlePT(object): <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.angle = 0 <NEW_LINE> self._stack = [] <NEW_LINE> <DEDENT> def push(self): <NEW_LINE> <INDENT> self._stack.append( (self.x, self.y, self.angle) ) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> self.x, self.y, self.angle = self._stack.pop() <NEW_LINE> <DEDENT> def turn_left(self, by): <NEW_LINE> <INDENT> self.angle = (self.angle - by) % 360 <NEW_LINE> <DEDENT> def turn_right(self, by): <NEW_LINE> <INDENT> self.angle = (self.angle + by) % 360 <NEW_LINE> <DEDENT> def forward(self, by): <NEW_LINE> <INDENT> c_angle = math.radians( float(self.angle) ) <NEW_LINE> self.x += by * math.cos(c_angle) <NEW_LINE> self.y += by * math.sin(c_angle) | Overview: Turtle graphics object that can be moved, turned left and right and moved forward. This is the basis of an LSystem drawing so this
object is provided for use with various drawing systems.
public parameters:
x: current x coordinate (dimensionless)
y: current y coordinate (dimensionless)
angle: current angle of rotation in degrees. | 62598fc5cc40096d6161a34e |
class Fabu(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'T_Fabu_Data' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> goodsid = db.Column(db.String(50)) <NEW_LINE> premium=db.Column(db.String(50)) <NEW_LINE> invalidTime=db.Column(db.String(50)) <NEW_LINE> issueTime=db.Column(db.String(50)) <NEW_LINE> cancelTime=db.Column(db.String(50)) <NEW_LINE> username = db.Column(db.String(50)) <NEW_LINE> stockCode = db.Column(db.String(50)) <NEW_LINE> factoryname = db.Column(db.String(50)) <NEW_LINE> weight = db.Column(db.String(50)) <NEW_LINE> material = db.Column(db.String(50)) <NEW_LINE> standard_thickness = db.Column(db.String(50)) <NEW_LINE> standard_width = db.Column(db.String(50)) <NEW_LINE> format = db.Column(db.String(50)) <NEW_LINE> width = db.Column(db.String(50)) <NEW_LINE> edge = db.Column(db.String(50)) <NEW_LINE> level = db.Column(db.String(50)) <NEW_LINE> thickness = db.Column(db.String(50)) <NEW_LINE> warehouse = db.Column(db.String(50)) <NEW_LINE> applyid=db.Column(db.String(50)) <NEW_LINE> stockid=db.column(db.String(50)) <NEW_LINE> contract=db.Column(db.String(50)) <NEW_LINE> status=db.Column(db.Integer) <NEW_LINE> def __init__(self, username, goodsid, stockCode, factoryname, weight, material, standard_thickness, standard_width, format, width, edge, level, thickness, warehouse, applyid,stockid,status,premium,invalidtime,contract): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.goodsid = goodsid <NEW_LINE> self.stockCode = stockCode <NEW_LINE> self.factoryname = factoryname <NEW_LINE> self.weight = weight <NEW_LINE> self.material = material <NEW_LINE> self.standard_thickness = standard_thickness <NEW_LINE> self.standard_width = standard_width <NEW_LINE> self.format = format <NEW_LINE> self.width = width <NEW_LINE> self.edge = edge <NEW_LINE> self.level = level <NEW_LINE> self.thickness = thickness <NEW_LINE> self.warehouse = warehouse <NEW_LINE> self.applyid=applyid <NEW_LINE> self.stockid=stockid <NEW_LINE> 
self.status=status <NEW_LINE> self.premium=premium <NEW_LINE> self.invalidTime=invalidtime <NEW_LINE> self.contract=contract | 发布模型 | 62598fc5a219f33f346c6af4 |
class CellTest(GspreadTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(CellTest, self).setUp() <NEW_LINE> title = self.config.get('Spreadsheet', 'title') <NEW_LINE> self.sheet = self.gc.open(title).sheet1 <NEW_LINE> <DEDENT> def test_properties(self): <NEW_LINE> <INDENT> update_value = gen_value() <NEW_LINE> self.sheet.update_acell('A1', update_value) <NEW_LINE> cell = self.sheet.acell('A1') <NEW_LINE> self.assertEqual(cell.value, update_value) <NEW_LINE> self.assertEqual(cell.row, 1) <NEW_LINE> self.assertEqual(cell.col, 1) <NEW_LINE> <DEDENT> def test_numeric_value(self): <NEW_LINE> <INDENT> numeric_value = 1.0 / 1024 <NEW_LINE> self.sheet.update_acell('A1', '= 1 / 1024') <NEW_LINE> cell = self.sheet.acell('A1') <NEW_LINE> self.assertEqual(cell.numeric_value, numeric_value) <NEW_LINE> self.assertTrue(isinstance(cell.numeric_value, float)) <NEW_LINE> self.sheet.update_acell('A1', 'Non-numeric value') <NEW_LINE> cell = self.sheet.acell('A1') <NEW_LINE> self.assertEqual(cell.numeric_value, None) | Test for gspread.Cell. | 62598fc57c178a314d78d78c |
class StdOutListener(streaming.StreamListener): <NEW_LINE> <INDENT> def on_data(self, data): <NEW_LINE> <INDENT> data = json.loads(data) <NEW_LINE> if 'retweeted_status' not in data and data['lang'] == 'en': <NEW_LINE> <INDENT> nouns = [word for (word, pos) in nltk.pos_tag(nltk.word_tokenize(data['text'])) if pos[0] == 'N'] <NEW_LINE> for i in range(len(nouns)): <NEW_LINE> <INDENT> nouns[i] = nouns[i].lower() <NEW_LINE> nouns[i] = re.sub('[^\x00-\x7F]', '', nouns[i]) <NEW_LINE> nouns[i] = nouns[i].replace('@', '') <NEW_LINE> nouns[i] = nouns[i].replace('http', '') <NEW_LINE> nouns[i] = nouns[i].replace('https', '') <NEW_LINE> nouns[i] = nouns[i].replace('s', '') <NEW_LINE> <DEDENT> nouns = list(filter(None, nouns)) <NEW_LINE> data = {'_id': str(data['id']), 'nouns': nouns} <NEW_LINE> db['tweets'].insert_one(data) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def on_error(self, status): <NEW_LINE> <INDENT> print(status) | A listener handles tweets that are received from the stream.
This is a basic listener that just prints received tweets to stdout. | 62598fc5f9cc0f698b1c5448 |
class DomainNetworkFeature(ManagedObject): <NEW_LINE> <INDENT> consts = DomainNetworkFeatureConsts() <NEW_LINE> naming_props = set([u'name']) <NEW_LINE> mo_meta = MoMeta("DomainNetworkFeature", "domainNetworkFeature", "network-feature-[name]", VersionMeta.Version112a, "InputOutput", 0x3f, [], ["admin"], [u'computeSystem', u'domainFeatureCatalog', u'extpolDomain'], [u'domainEnvironmentParam', u'domainNetworkParam', u'domainServerParam', u'domainStorageParam'], ["Get"]) <NEW_LINE> prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version112a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []), "flt_aggr": MoPropertyMeta("flt_aggr", "fltAggr", "ulong", VersionMeta.Version112a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "functional_state": MoPropertyMeta("functional_state", "functionalState", "string", VersionMeta.Version112a, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, ["disabled", "enabled"], []), "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version112a, MoPropertyMeta.NAMING, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{1,64}""", [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version112a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "type": MoPropertyMeta("type", "type", "string", VersionMeta.Version112a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["major", "minor"], []), } <NEW_LINE> prop_map = { "childAction": "child_action", "dn": "dn", "fltAggr": "flt_aggr", "functionalState": "functional_state", "name": "name", "rn": 
"rn", "status": "status", "type": "type", } <NEW_LINE> def __init__(self, parent_mo_or_dn, name, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.name = name <NEW_LINE> self.child_action = None <NEW_LINE> self.flt_aggr = None <NEW_LINE> self.functional_state = None <NEW_LINE> self.status = None <NEW_LINE> self.type = None <NEW_LINE> ManagedObject.__init__(self, "DomainNetworkFeature", parent_mo_or_dn, **kwargs) | This is DomainNetworkFeature class. | 62598fc5ad47b63b2c5a7b45 |
class Management(base.ManagerWithFind): <NEW_LINE> <INDENT> resource_class = Instance <NEW_LINE> def list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _list(self, url, response_key, limit=None, marker=None): <NEW_LINE> <INDENT> resp, body = self.api.client.get(limit_url(url, limit, marker)) <NEW_LINE> if not body: <NEW_LINE> <INDENT> raise Exception("Call to " + url + " did not return a body.") <NEW_LINE> <DEDENT> links = body.get('links', []) <NEW_LINE> next_links = [link['href'] for link in links if link['rel'] == 'next'] <NEW_LINE> next_marker = None <NEW_LINE> for link in next_links: <NEW_LINE> <INDENT> parsed_url = urlutils.urlparse(link) <NEW_LINE> query_dict = dict(urlutils.parse_qsl(parsed_url.query)) <NEW_LINE> next_marker = query_dict.get('marker', None) <NEW_LINE> <DEDENT> instances = body[response_key] <NEW_LINE> instances = [self.resource_class(self, res) for res in instances] <NEW_LINE> return Paginated(instances, next_marker=next_marker, links=links) <NEW_LINE> <DEDENT> def show(self, instance): <NEW_LINE> <INDENT> return self._get("/mgmt/instances/%s" % base.getid(instance), 'instance') <NEW_LINE> <DEDENT> def index(self, deleted=None, limit=None, marker=None): <NEW_LINE> <INDENT> form = '' <NEW_LINE> if deleted is not None: <NEW_LINE> <INDENT> if deleted: <NEW_LINE> <INDENT> form = "?deleted=true" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = "?deleted=false" <NEW_LINE> <DEDENT> <DEDENT> url = "/mgmt/instances%s" % form <NEW_LINE> return self._list(url, "instances", limit, marker) <NEW_LINE> <DEDENT> def root_enabled_history(self, instance): <NEW_LINE> <INDENT> url = "/mgmt/instances/%s/root" % base.getid(instance) <NEW_LINE> resp, body = self.api.client.get(url) <NEW_LINE> if not body: <NEW_LINE> <INDENT> raise Exception("Call to " + url + " did not return a body.") <NEW_LINE> <DEDENT> return RootHistory(self, body['root_history']) <NEW_LINE> <DEDENT> def _action(self, instance_id, body): <NEW_LINE> <INDENT> url = 
"/mgmt/instances/%s/action" % instance_id <NEW_LINE> resp, body = self.api.client.post(url, body=body) <NEW_LINE> check_for_exceptions(resp, body) <NEW_LINE> <DEDENT> def stop(self, instance_id): <NEW_LINE> <INDENT> body = {'stop': {}} <NEW_LINE> self._action(instance_id, body) <NEW_LINE> <DEDENT> def reboot(self, instance_id): <NEW_LINE> <INDENT> body = {'reboot': {}} <NEW_LINE> self._action(instance_id, body) <NEW_LINE> <DEDENT> def migrate(self, instance_id, host=None): <NEW_LINE> <INDENT> if host: <NEW_LINE> <INDENT> body = {'migrate': {'host': host}} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> body = {'migrate': {}} <NEW_LINE> <DEDENT> self._action(instance_id, body) <NEW_LINE> <DEDENT> def update(self, instance_id): <NEW_LINE> <INDENT> body = {'update': {}} <NEW_LINE> self._action(instance_id, body) <NEW_LINE> <DEDENT> def reset_task_status(self, instance_id): <NEW_LINE> <INDENT> body = {'reset-task-status': {}} <NEW_LINE> self._action(instance_id, body) | Manage :class:`Instances` resources. | 62598fc5f548e778e596b88a |
class CustomLocationOperationsList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, } <NEW_LINE> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[CustomLocationOperation]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CustomLocationOperationsList, self).__init__(**kwargs) <NEW_LINE> self.next_link = kwargs.get('next_link', None) <NEW_LINE> self.value = kwargs['value'] | Lists of Custom Locations operations.
All required parameters must be populated in order to send to Azure.
:param next_link: Next page of operations.
:type next_link: str
:param value: Required. Array of customLocationOperation.
:type value: list[~azure.mgmt.extendedlocation.v2021_08_15.models.CustomLocationOperation] | 62598fc5956e5f7376df57f4 |
class FeatureViewSet(ListFeaturesMixin, mixins.ListModelMixin, GenericViewSet): <NEW_LINE> <INDENT> serializer_class = FeatureSerializer <NEW_LINE> filter_fields = ('ftype', 'build', 'chr', 'start', 'end') | Given a feature type (e.g. gene, marker, region), build and genomic range
return the location(s) & basic details or all features in the region.
---
list:
response_serializer: FeatureSerializer
parameters:
- name: ftype
description: gene, marker or region.
required: true
type: string
paramType: query
- name: build
description: genome build (e.g. hg38).
required: true
type: string
paramType: query
- name: chr
description: chromosome (e.g. chr1).
required: true
type: string
paramType: query
- name: start
description: start position.
required: false
type: integer
paramType: query
- name: end
description: end position.
required: false
type: integer
paramType: query | 62598fc550812a4eaa620d5b |
class FieldViewDialog(): <NEW_LINE> <INDENT> def __init__(self, parent, window_caption, fields_dict, status_enable, status_inactivate): <NEW_LINE> <INDENT> self.toplevel = parent <NEW_LINE> self.fields_dict = copy.deepcopy(fields_dict) <NEW_LINE> self.fieldviews = [] <NEW_LINE> self.notebook = None <NEW_LINE> self.dialog_window = Gtk.Dialog(window_caption, parent, Gtk.DialogFlags.MODAL, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK)) <NEW_LINE> self.dialog_window.set_title(window_caption) <NEW_LINE> self.dialog_window.set_resizable(True) <NEW_LINE> self.dialog_window.set_border_width(5) <NEW_LINE> self.dialog_window.set_size_request(int(self.toplevel.get_size_request()[0]*0.6),int(self.toplevel.get_size_request()[1]*0.6)) <NEW_LINE> self.dialog_window.set_default_response(Gtk.ResponseType.OK) <NEW_LINE> content_area = self.dialog_window.get_content_area() <NEW_LINE> action_area = self.dialog_window.get_action_area() <NEW_LINE> self.notebook = Gtk.Notebook() <NEW_LINE> content_area.pack_start(self.notebook, True, True, 0) <NEW_LINE> action_area.props.margin_top = 12 <NEW_LINE> action_area.props.margin_bottom = 6 <NEW_LINE> for title, fields in self.fields_dict.items(): <NEW_LINE> <INDENT> def get_field(code): <NEW_LINE> <INDENT> return fields[code] <NEW_LINE> <DEDENT> def set_field(code, value): <NEW_LINE> <INDENT> fields[code]['value'] = value <NEW_LINE> <DEDENT> scrolled_window = Gtk.ScrolledWindow() <NEW_LINE> listbox = Gtk.ListBox() <NEW_LINE> listbox.props.margin_top = 6 <NEW_LINE> scrolled_window.add_with_viewport(listbox) <NEW_LINE> tab_label = Gtk.Label(title) <NEW_LINE> self.notebook.append_page(scrolled_window, tab_label) <NEW_LINE> field_view = FieldView(self.toplevel, listbox, 'status_enable', 'status_inactivate') <NEW_LINE> field_view.update(fields, None, get_field, set_field) <NEW_LINE> self.fieldviews.append(field_view) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.dialog_window.show_all() 
<NEW_LINE> response = self.dialog_window.run() <NEW_LINE> if response == Gtk.ResponseType.OK: <NEW_LINE> <INDENT> self.dialog_window.destroy() <NEW_LINE> return self.fields_dict <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dialog_window.destroy() <NEW_LINE> return None | Creates a dialog box for entry of custom data fields
Arguments:
parent: Parent Window
fields: Field values to display | 62598fc5d8ef3951e32c7fd2 |
class TemporaryBuildDirectory: <NEW_LINE> <INDENT> def __init__(self, output_file_name): <NEW_LINE> <INDENT> self.orig_cwd = os.getcwd() <NEW_LINE> self.tmpdir = None <NEW_LINE> self.output_file_name = output_file_name <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.tmpdir = self.get_temp_directory() <NEW_LINE> shutil.copytree(os.getcwd(), self.tmpdir) <NEW_LINE> os.chdir(self.tmpdir) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, _a, _b, _c): <NEW_LINE> <INDENT> os.chdir(self.orig_cwd) <NEW_LINE> shutil.copy( os.path.join(self.tmpdir, self.output_file_name), self.output_file_name ) <NEW_LINE> shutil.rmtree(self.tmpdir, onerror=self.__onerror) <NEW_LINE> <DEDENT> def get_temp_directory(self): <NEW_LINE> <INDENT> tmp_base = None <NEW_LINE> if os.path.exists("/tmp"): <NEW_LINE> <INDENT> tmp_base = "/tmp" <NEW_LINE> <DEDENT> elif os.path.exists("\\temp"): <NEW_LINE> <INDENT> tmp_base = "\\temp" <NEW_LINE> <DEDENT> elif os.path.exists("\\windows\\temp"): <NEW_LINE> <INDENT> tmp_base = "\\windows\\temp" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> import tempfile <NEW_LINE> tmp_base = tempfile.gettempdir() <NEW_LINE> <DEDENT> tmpdir = os.path.join(tmp_base, "gladtex.build") <NEW_LINE> if os.path.exists(tmpdir): <NEW_LINE> <INDENT> shutil.rmtree(tmpdir, onerror=self.__onerror) <NEW_LINE> <DEDENT> return tmpdir <NEW_LINE> <DEDENT> def __onerror(self, func, path, exc_info): <NEW_LINE> <INDENT> if not os.access(path, os.W_OK): <NEW_LINE> <INDENT> os.chmod(path, stat.S_IWUSR) <NEW_LINE> func(path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exc_info | Context handler to guard the build process.
Upon entering the context, the source is copied to a temporary directory and
the program changes to this directory. After all build actions have been
done, the output file is copied back to the original directory, the program
resets the current working directory and deletes the temporary directory. | 62598fc55fdd1c0f98e5e280 |
class DummySkipOperator(DummyOperator): <NEW_LINE> <INDENT> ui_color = '#e8b7e4' <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> raise AirflowSkipException | Dummy operator which always skips the task. | 62598fc555399d3f05626805 |
class RedshiftVPCConfig(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.clusters = {} | Redshift configuration for a single VPC
:ivar clusters: Dictionary of clusters [name] | 62598fc55fc7496912d483f1 |
class LocationDetailAPIView(DetailViewUpdateDelete): <NEW_LINE> <INDENT> queryset = Location.objects.all() <NEW_LINE> serializer_class = location_serializers['LocationDetailSerializer'] <NEW_LINE> permission_classes = [IsAuthenticated, IsAdminUser] <NEW_LINE> lookup_field = 'slug' | Updates a record. | 62598fc53d592f4c4edbb19f |
class CheckSyscall(common.LinuxPlugin): <NEW_LINE> <INDENT> __name = "check_syscall" <NEW_LINE> def Find_sys_call_table_size(self): <NEW_LINE> <INDENT> for func_name, rewind in [("system_call_fastpath", 0), ("ret_from_sys_call", 40)]: <NEW_LINE> <INDENT> func = self.profile.get_constant_object( func_name, target="Function").Rewind(rewind) <NEW_LINE> for instruction in func.Decompose(10): <NEW_LINE> <INDENT> if instruction.mnemonic == "CMP": <NEW_LINE> <INDENT> return 1 + (instruction.operands[1].value & 0xffffffff) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return len(filter(lambda x: x.startswith("__syscall_meta__"), self.profile.constants)) or 0x300 <NEW_LINE> <DEDENT> def Find_ia32_sys_call_table_size(self): <NEW_LINE> <INDENT> func = self.profile.get_constant_object( "sysenter_do_call", target="Function").Rewind(20) <NEW_LINE> for instruction in func.Decompose(10): <NEW_LINE> <INDENT> if instruction.mnemonic == "CMP": <NEW_LINE> <INDENT> return (instruction.operands[1].value & 0xffffffff) + 1 <NEW_LINE> <DEDENT> <DEDENT> import pdb; pdb.set_trace() <NEW_LINE> return len(filter(lambda x: x.startswith("__syscall_meta__"), self.profile.constants)) or 0x300 <NEW_LINE> <DEDENT> def CheckSyscallTables(self): <NEW_LINE> <INDENT> lsmod = self.session.plugins.lsmod(session=self.session) <NEW_LINE> for table_name, size_finder in [ ("ia32_sys_call_table", self.Find_ia32_sys_call_table_size), ("sys_call_table", self.Find_sys_call_table_size)]: <NEW_LINE> <INDENT> table = self.profile.get_constant_object( table_name, target="Array", target_args=dict( count=size_finder(), target="Pointer", target_args=dict( target="Function" ) ) ) <NEW_LINE> for i, entry in enumerate(table): <NEW_LINE> <INDENT> yield table_name, i, entry, lsmod.ResolveSymbolName(entry.deref()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def render(self, renderer): <NEW_LINE> <INDENT> renderer.table_header([ ("Table Name", "table", "6"), ("Index", "index", "[addr]"), ("Address", "address", "[addrpad]"), ("Symbol", 
"symbol", "<30")]) <NEW_LINE> for table_name, i, call_addr, sym_name in self.CheckSyscallTables(): <NEW_LINE> <INDENT> renderer.table_row(table_name, i, call_addr, sym_name or "Unknown", highlight=None if sym_name else "important") | Checks if the system call table has been altered. | 62598fc54428ac0f6e658813 |
class DownloadedUser(db.DynamicDocument): <NEW_LINE> <INDENT> meta = { 'indexes': ['email'] } <NEW_LINE> downloaded_on = db.DateTimeField(required=False, default=datetime.utcnow()) <NEW_LINE> email = db.StringField(required=True, max_length=255) <NEW_LINE> phone = db.StringField(required=True, max_length=255) <NEW_LINE> has_been_emailed = db.BooleanField(default=False) | When a user downloads Pallet and we take their number and email | 62598fc5099cdd3c63675559 |
class PubsubProjectsSubscriptionsModifyPushConfigRequest(_messages.Message): <NEW_LINE> <INDENT> modifyPushConfigRequest = _messages.MessageField('ModifyPushConfigRequest', 1) <NEW_LINE> subscription = _messages.StringField(2, required=True) | A PubsubProjectsSubscriptionsModifyPushConfigRequest object.
Fields:
modifyPushConfigRequest: A ModifyPushConfigRequest resource to be passed
as the request body.
subscription: The name of the subscription. Format is
`projects/{project}/subscriptions/{sub}`. | 62598fc53617ad0b5ee06436 |
class MainTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = main.app.test_client() <NEW_LINE> <DEDENT> def test_hello_world(self): <NEW_LINE> <INDENT> rv = self.app.get('/') <NEW_LINE> print(rv.data) <NEW_LINE> assert("smartgrid1" in rv.data.lower()) | This class uses the Flask tests app to run an integration test against a
local instance of the server. | 62598fc57c178a314d78d78e |
class AutoLoader(object): <NEW_LINE> <INDENT> pass | Base class for automatic loaders (e.g. Git) | 62598fc5851cf427c66b85a4 |
class TestMongoengineFix(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> app = Eve(settings=SETTINGS) <NEW_LINE> app.debug = True <NEW_LINE> ext = EveMongoengine(app) <NEW_LINE> ext.add_model(TwoFaceDoc) <NEW_LINE> cls.app = app <NEW_LINE> cls.client = app.test_client() <NEW_LINE> <DEDENT> def test_switch_queryset(self): <NEW_LINE> <INDENT> t1 = TwoFaceDoc(s='x').save() <NEW_LINE> t2 = TwoFaceDoc(s='a').save() <NEW_LINE> t3 = TwoFaceDoc(s='b').save() <NEW_LINE> t4 = TwoFaceDoc(s='abc').save() <NEW_LINE> self.app.data.default_queryset = 'all_objects' <NEW_LINE> r = self.client.get('/twofacedoc/').get_json() <NEW_LINE> returned = set([x['s'] for x in r['_items']]) <NEW_LINE> self.assertSetEqual(set(['x', 'abc']), returned) <NEW_LINE> self.app.data.default_queryset = 'objects' <NEW_LINE> r = self.client.get('/twofacedoc/').get_json() <NEW_LINE> returned = set([x['s'] for x in r['_items']]) <NEW_LINE> self.assertSetEqual(set(['x', 'abc', 'a', 'b']), returned) | Test if non-standard querysets defined in datalayer work as expected. | 62598fc55fdd1c0f98e5e282 |
class EventHandler(logging.Handler): <NEW_LINE> <INDENT> def emit(self, record: Type[logging.LogRecord]) -> None: <NEW_LINE> <INDENT> print('EVENT EMITTER', record.getMessage()) | A custom handler class that emits each log entry as event | 62598fc5aad79263cf42eac4 |
class TanhLayer(Layer): <NEW_LINE> <INDENT> def fprop(self, inputs): <NEW_LINE> <INDENT> return np.tanh(inputs) <NEW_LINE> <DEDENT> def bprop(self, inputs, outputs, grads_wrt_outputs): <NEW_LINE> <INDENT> return (1. - outputs ** 2) * grads_wrt_outputs <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'TanhLayer' | Layer implementing an element-wise hyperbolic tangent transformation. | 62598fc5283ffb24f3cf3b74 |
class LocationEFS(AWSObject): <NEW_LINE> <INDENT> resource_type = "AWS::DataSync::LocationEFS" <NEW_LINE> props: PropsDictType = { "Ec2Config": (Ec2Config, True), "EfsFilesystemArn": (str, True), "Subdirectory": (str, False), "Tags": (Tags, False), } | `LocationEFS <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datasync-locationefs.html>`__ | 62598fc53346ee7daa3377c0 |
class UserForm(forms.Form): <NEW_LINE> <INDENT> email = forms.EmailField(label=_("Email Address")) <NEW_LINE> password = forms.CharField(label=_("Password"), widget=forms.PasswordInput(render_value=False)) <NEW_LINE> def __init__(self, request, *args, **kwargs): <NEW_LINE> <INDENT> initial = {} <NEW_LINE> for value in request.COOKIES.values(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> validate_email(value) <NEW_LINE> <DEDENT> except ValidationError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> initial["email"] = value <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> super(UserForm, self).__init__(initial=initial, *args, **kwargs) <NEW_LINE> <DEDENT> def authenticate(self): <NEW_LINE> <INDENT> self._user = authenticate(username=self.cleaned_data.get("email", ""), password=self.cleaned_data.get("password", "")) <NEW_LINE> <DEDENT> def login(self, request): <NEW_LINE> <INDENT> login(request, self._user) | Fields for signup & login. | 62598fc5ec188e330fdf8b84 |
class ChangePasswordView(generics.UpdateAPIView): <NEW_LINE> <INDENT> serializer_class = ChangePasswordSerializer <NEW_LINE> permission_classes = (PrivateTokenAccessPermission, ) <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> obj = self.request.user <NEW_LINE> return obj <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> serializer = self.get_serializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> if not self.object.check_password(serializer.data.get("old_password")): <NEW_LINE> <INDENT> return Response({"old_password": [message.OLD_PASSOWRD_MISMATCH]}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> self.object.set_password(serializer.data.get("new_password")) <NEW_LINE> self.object.save() <NEW_LINE> return Response([message.MESSAGE_CHANGE_PASSWORD_SUCCESS], status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> return Response([serializer.errors], status=status.HTTP_400_BAD_REQUEST) | An endpoint for changing password. | 62598fc526068e7796d4cc4c |
class MRVBCodeGenerator(TD2CodeGenerator): <NEW_LINE> <INDENT> def __init__(self, document_type: str, country_code: str, surname: str, given_names: str, document_number: str, nationality: str, birth_date: str, sex: str, expiry_date: str, optional_data="", transliteration=dictionary.latin_based(), force=False): <NEW_LINE> <INDENT> TD2CodeGenerator.__init__(self, document_type, country_code, surname, given_names, document_number, nationality, birth_date, sex, expiry_date, optional_data, transliteration, force) <NEW_LINE> <DEDENT> @property <NEW_LINE> def optional_data(self) -> str: <NEW_LINE> <INDENT> return self._optional_data <NEW_LINE> <DEDENT> @optional_data.setter <NEW_LINE> def optional_data(self, value: str): <NEW_LINE> <INDENT> self._optional_data = check.field(transliterate(value, self.transliteration), 8, "id number", "<") <NEW_LINE> <DEDENT> @property <NEW_LINE> def final_hash(self) -> None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def _line2(self): <NEW_LINE> <INDENT> return (self.document_number + self.document_number_hash + self.nationality + self.birth_date + self.birth_date_hash + self.sex + self.expiry_date + self.expiry_date_hash + self.optional_data) | Calculate the string code for machine readable zone visas of type B (MRVB)
Params:
document_type (str): The First letter must be 'V'
country_code (str): 3 letters code (ISO 3166-1) or country name (in English)
surname (str): Primary identifier(s)
given_names (str): Secondary identifier(s)
document_number (str): Document number
nationality (str): 3 letters code (ISO 3166-1) or country name
birth_date (str): YYMMDD
sex (str): Genre. Male: 'M', Female: 'F' or Undefined: 'X', "<" or ""
expiry_date (str): YYMMDD
optional_data (str): Optional personal data at the discretion of the issuing State.
Non-mandatory field. Empty string by default.
transliteration (dict): Transliteration dictionary for non-ascii chars. Latin based by default
force (bool): Disables checks for country, nationality and document_type fields.
Allows to use 3-letter-codes not included in the countries dictionary
and to use document_type codes without restrictions. | 62598fc599fddb7c1ca62f65 |
class SnsIdUserStatus(object): <NEW_LINE> <INDENT> def __init__(self, userExisting=None, phoneNumberRegistered=None, sameDevice=None, accountMigrationCheckType=None,): <NEW_LINE> <INDENT> self.userExisting = userExisting <NEW_LINE> self.phoneNumberRegistered = phoneNumberRegistered <NEW_LINE> self.sameDevice = sameDevice <NEW_LINE> self.accountMigrationCheckType = accountMigrationCheckType <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.userExisting = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.phoneNumberRegistered = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.sameDevice = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 4: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.accountMigrationCheckType = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec 
is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('SnsIdUserStatus') <NEW_LINE> if self.userExisting is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('userExisting', TType.BOOL, 1) <NEW_LINE> oprot.writeBool(self.userExisting) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.phoneNumberRegistered is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('phoneNumberRegistered', TType.BOOL, 2) <NEW_LINE> oprot.writeBool(self.phoneNumberRegistered) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.sameDevice is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('sameDevice', TType.BOOL, 3) <NEW_LINE> oprot.writeBool(self.sameDevice) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.accountMigrationCheckType is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('accountMigrationCheckType', TType.I32, 4) <NEW_LINE> oprot.writeI32(self.accountMigrationCheckType) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- userExisting
- phoneNumberRegistered
- sameDevice
- accountMigrationCheckType | 62598fc54527f215b58ea1c0 |
class TestSequence(unittest.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> rptable = RPTable() <NEW_LINE> oh_init_rpt(rptable) <NEW_LINE> error, update_count, update_timestamp = oh_get_rpt_info(rptable) <NEW_LINE> self.assertEqual(error, SA_OK) <NEW_LINE> self.assertEqual(oh_add_resource(rptable, rptentries[0], None, 0), 0) <NEW_LINE> error, update_count, update_timestamp = oh_get_rpt_info(None) <NEW_LINE> self.assertEqual(error != SA_OK, True) | runTest : Starting with an empty RPTable, adds 1 resource to it.
Checks rpt info to see if update count was updated, but it passes
NULL for a table.
If oh_get_rpt_info returns error, the test passes, otherwise it failed.
Return value: 0 on success, 1 on failure | 62598fc5f548e778e596b88e |
class Msg: <NEW_LINE> <INDENT> def __init__(self, code_rate=None, sample_rate=None): <NEW_LINE> <INDENT> self.code_rate = 1023000 if code_rate is None else code_rate <NEW_LINE> self.sample_rate = self.code_rate * 10 if sample_rate is None else sample_rate <NEW_LINE> self.initial_time = fractions.Fraction(0) <NEW_LINE> self.msg = -1 <NEW_LINE> <DEDENT> def sample(self, time): <NEW_LINE> <INDENT> time_step = fractions.Fraction(1, self.code_rate) <NEW_LINE> if (time - self.initial_time) > time_step: <NEW_LINE> <INDENT> self.msg = 1 if np.random.random() < 0.5 else -1 <NEW_LINE> self.initial_time = self.initial_time + time_step <NEW_LINE> <DEDENT> return self.msg | Msg generate msg randomly | 62598fc5956e5f7376df57f6 |
class MyQueue(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.input_stack = [] <NEW_LINE> self.output_stack = [] <NEW_LINE> <DEDENT> def push(self, x): <NEW_LINE> <INDENT> self.input_stack.append(x) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.output_stack == []: <NEW_LINE> <INDENT> while self.input_stack != []: <NEW_LINE> <INDENT> self.output_stack.append(self.input_stack.pop()) <NEW_LINE> <DEDENT> <DEDENT> return self.output_stack.pop() <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> if self.output_stack == []: <NEW_LINE> <INDENT> while self.input_stack != []: <NEW_LINE> <INDENT> self.output_stack.append(self.input_stack.pop()) <NEW_LINE> <DEDENT> <DEDENT> return self.output_stack[-1] <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> return self.input_stack == [] and self.output_stack == [] | 双栈法
input_stack 用来处理push的数据
output_stack 专门用来处理弹出和返回队列头的操作 | 62598fc54a966d76dd5ef1c6 |
class TTFontMaker: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tables = {} <NEW_LINE> <DEDENT> def add(self, tag, data): <NEW_LINE> <INDENT> if tag == 'head': <NEW_LINE> <INDENT> data = splice(data, 8, b'\0\0\0\0') <NEW_LINE> <DEDENT> self.tables[tag] = data <NEW_LINE> <DEDENT> def makeStream(self): <NEW_LINE> <INDENT> stm = getBytesIO() <NEW_LINE> write = stm.write <NEW_LINE> tables = self.tables <NEW_LINE> numTables = len(tables) <NEW_LINE> searchRange = 1 <NEW_LINE> entrySelector = 0 <NEW_LINE> while searchRange * 2 <= numTables: <NEW_LINE> <INDENT> searchRange = searchRange * 2 <NEW_LINE> entrySelector = entrySelector + 1 <NEW_LINE> <DEDENT> searchRange = searchRange * 16 <NEW_LINE> rangeShift = numTables * 16 - searchRange <NEW_LINE> write(pack(">lHHHH", 0x00010000, numTables, searchRange, entrySelector, rangeShift)) <NEW_LINE> offset = 12 + numTables * 16 <NEW_LINE> wStr = lambda x:write(bytes(tag,'latin1')) <NEW_LINE> tables_items = list(sorted(tables.items())) <NEW_LINE> for tag, data in tables_items: <NEW_LINE> <INDENT> if tag == 'head': <NEW_LINE> <INDENT> head_start = offset <NEW_LINE> <DEDENT> checksum = calcChecksum(data) <NEW_LINE> wStr(tag) <NEW_LINE> write(pack(">LLL", checksum, offset, len(data))) <NEW_LINE> paddedLength = (len(data)+3)&~3 <NEW_LINE> offset = offset + paddedLength <NEW_LINE> <DEDENT> for tag, data in tables_items: <NEW_LINE> <INDENT> data += b"\0\0\0" <NEW_LINE> write(data[:len(data)&~3]) <NEW_LINE> <DEDENT> checksum = calcChecksum(stm.getvalue()) <NEW_LINE> checksum = add32(0xB1B0AFBA, -checksum) <NEW_LINE> stm.seek(head_start + 8) <NEW_LINE> write(pack('>L', checksum)) <NEW_LINE> return stm.getvalue() | Basic TTF file generator | 62598fc5aad79263cf42eac6 |
class QuantityError(AttribDict): <NEW_LINE> <INDENT> defaults = {"uncertainty": None, "lower_uncertainty": None, "upper_uncertainty": None, "confidence_level": None} <NEW_LINE> warn_on_non_default_key = True <NEW_LINE> def __init__(self, uncertainty=None, lower_uncertainty=None, upper_uncertainty=None, confidence_level=None): <NEW_LINE> <INDENT> super(QuantityError, self).__init__() <NEW_LINE> self.uncertainty = uncertainty <NEW_LINE> self.lower_uncertainty = lower_uncertainty <NEW_LINE> self.upper_uncertainty = upper_uncertainty <NEW_LINE> self.confidence_level = confidence_level <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return any([getattr(self, key) is not None for key in self.defaults]) <NEW_LINE> <DEDENT> __nonzero__ = __bool__ | Uncertainty information for a physical quantity.
:type uncertainty: float
:param uncertainty: Uncertainty as the absolute value of symmetric
deviation from the main value.
:type lower_uncertainty: float
:param lower_uncertainty: Uncertainty as the absolute value of deviation
from the main value towards smaller values.
:type upper_uncertainty: float
:param upper_uncertainty: Uncertainty as the absolute value of deviation
from the main value towards larger values.
:type confidence_level: float
:param confidence_level: Confidence level of the uncertainty, given in
percent (0-100). | 62598fc5adb09d7d5dc0a86d |
class Repository(object): <NEW_LINE> <INDENT> def __init__(self, **query): <NEW_LINE> <INDENT> self.query = query <NEW_LINE> <DEDENT> def all(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def find(self, id): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def where(self, **query): <NEW_LINE> <INDENT> return self.filter(**query).all() <NEW_LINE> <DEDENT> def filter(self, **query): <NEW_LINE> <INDENT> new_query = self.query.copy() <NEW_LINE> new_query.update(query) <NEW_LINE> return self.__class__(**new_query) | Repository.
Base contract for repository classes. | 62598fc526068e7796d4cc4e |
class PurchaseRequestEditableListTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'purchase_request_editable_list' | Test Purchase Request Editable List module | 62598fc5fff4ab517ebcdada |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.