code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Page11(ToughSchedulingCasesPage):
    """Scheduling stress case: a touch handler so expensive that the
    browser falls back to scrolling after a timeout."""

    def __init__(self, page_set):
        page_url = ('file://tough_scheduling_cases/'
                    'touch_handler_scrolling.html?super_slow_handler')
        super(Page11, self).__init__(url=page_url, page_set=page_set)
        # Inject a 200 ms synthetic delay into Blink input-event handling.
        self.synthetic_delays = {
            'blink.HandleInputEvent': {'target_duration': 0.2},
        }
Why: Super expensive touch handler causes browser to scroll after a timeout.
62598fa6498bea3a75a57a0f
class Window:
    """Utility functions for defining window in DataFrames.

    When ordering is not defined, an unbounded window frame is used by
    default; when ordering is defined, a growing window frame is used.
    """

    # Frame boundaries must fit in a Java long when sent over Py4J.
    _JAVA_MIN_LONG = -(1 << 63)
    _JAVA_MAX_LONG = (1 << 63) - 1
    # Offsets at/beyond these thresholds are clamped to the unbounded sentinels.
    _PRECEDING_THRESHOLD = max(-sys.maxsize, _JAVA_MIN_LONG)
    _FOLLOWING_THRESHOLD = min(sys.maxsize, _JAVA_MAX_LONG)
    unboundedPreceding: int = _JAVA_MIN_LONG
    unboundedFollowing: int = _JAVA_MAX_LONG
    currentRow: int = 0

    @staticmethod
    @since(1.4)
    def partitionBy(*cols: Union["ColumnOrName", List["ColumnOrName_"]]) -> "WindowSpec":
        """Creates a :class:`WindowSpec` with the partitioning defined."""
        sc = SparkContext._active_spark_context
        assert sc is not None and sc._jvm is not None
        jspec = sc._jvm.org.apache.spark.sql.expressions.Window.partitionBy(_to_java_cols(cols))
        return WindowSpec(jspec)

    @staticmethod
    @since(1.4)
    def orderBy(*cols: Union["ColumnOrName", List["ColumnOrName_"]]) -> "WindowSpec":
        """Creates a :class:`WindowSpec` with the ordering defined."""
        sc = SparkContext._active_spark_context
        assert sc is not None and sc._jvm is not None
        jspec = sc._jvm.org.apache.spark.sql.expressions.Window.orderBy(_to_java_cols(cols))
        return WindowSpec(jspec)

    @staticmethod
    def rowsBetween(start: int, end: int) -> "WindowSpec":
        """Creates a :class:`WindowSpec` with a row-based frame from
        `start` to `end` (both inclusive, relative to the current row).
        """
        # Clamp out-of-range offsets to the unbounded sentinels.
        if start <= Window._PRECEDING_THRESHOLD:
            start = Window.unboundedPreceding
        if end >= Window._FOLLOWING_THRESHOLD:
            end = Window.unboundedFollowing
        sc = SparkContext._active_spark_context
        assert sc is not None and sc._jvm is not None
        jspec = sc._jvm.org.apache.spark.sql.expressions.Window.rowsBetween(start, end)
        return WindowSpec(jspec)

    @staticmethod
    def rangeBetween(start: int, end: int) -> "WindowSpec":
        """Like :meth:`rowsBetween`, but `start`/`end` are offsets on the
        ordering value of the current row rather than row counts.
        """
        if start <= Window._PRECEDING_THRESHOLD:
            start = Window.unboundedPreceding
        if end >= Window._FOLLOWING_THRESHOLD:
            end = Window.unboundedFollowing
        sc = SparkContext._active_spark_context
        assert sc is not None and sc._jvm is not None
        jspec = sc._jvm.org.apache.spark.sql.expressions.Window.rangeBetween(start, end)
        return WindowSpec(jspec)
Utility functions for defining window in DataFrames. .. versionadded:: 1.4 Notes ----- When ordering is not defined, an unbounded window frame (rowFrame, unboundedPreceding, unboundedFollowing) is used by default. When ordering is defined, a growing window frame (rangeFrame, unboundedPreceding, currentRow) is used by default. Examples -------- >>> # ORDER BY date ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW >>> window = Window.orderBy("date").rowsBetween(Window.unboundedPreceding, Window.currentRow) >>> # PARTITION BY country ORDER BY date RANGE BETWEEN 3 PRECEDING AND 3 FOLLOWING >>> window = Window.orderBy("date").partitionBy("country").rangeBetween(-3, 3)
62598fa61b99ca400228f4a6
class MeshParser(object):
    """Abstract base class for mesh-file parsers.

    Subclasses must implement ``__init__`` (populating ``nodes``,
    ``elements`` and ``edge_collections``) and ``_parse_section``.
    """

    def __init__(self):
        raise NotImplementedError(
            "This function must be defined by an inheriting class.")

    def savetxt(self):
        """Dump the parsed mesh to text files in the working directory."""
        np.savetxt("nodes.txt", self.nodes)
        np.savetxt("elements.txt", self.elements, fmt="%d")
        for label, edge_set in self.edge_collections.items():
            np.savetxt(label + '_nodes.txt',
                       np.fromiter(edge_set, int), fmt='%d')

    def _parse_section(self, pattern, line_parse_function):
        raise NotImplementedError(
            "This function must be defined by an inheriting class.")
Properties ---------- * elements : A numpy array listing the node numbers of every element; for example, print(t.elements) => [[ 1 9 4 10 11 8] [ 1 2 9 5 12 10] [ 2 3 9 6 13 12] [ 3 4 9 7 11 13]] for a quadratic mesh with four elements. * nodes : a array of every node's coordinates, where row number corresponds to node number - 1. * edges : print(t.edge_collections) => {'boundary': set([(3, 4, 7, 3), (4, 1, 8, 4), (2, 3, 6, 2), (1, 2, 5, 1)])} Methods ------- * savetxt() : Save the mesh data (edge collections, nodes, elements).
62598fa6851cf427c66b81b6
class DescribeTextStatRequest(AbstractModel):
    """Request payload for the DescribeTextStat API.

    Attributes:
        AuditType: audit strategy selector.
        Filters: optional list of ``Filters`` query conditions.
    """

    def __init__(self):
        self.AuditType = None
        self.Filters = None

    def _deserialize(self, params):
        """Populate this request from a plain ``dict``."""
        self.AuditType = params.get("AuditType")
        raw_filters = params.get("Filters")
        if raw_filters is not None:
            self.Filters = []
            for raw in raw_filters:
                parsed = Filters()
                parsed._deserialize(raw)
                self.Filters.append(parsed)
DescribeTextStat请求参数结构体
62598fa6d268445f26639afa
class LocalClass:
    """Tabular formatter for a mapping of local variables.

    Builds ``VARIABLE | TYPE | VALUE`` rows whose column widths are
    scaled proportionally to fit the requested terminal width.
    """

    def __init__(self, local_vars, columns=80):
        self.var_names = list(local_vars.keys())
        self.types = [type(local_vars[name]).__name__ for name in self.var_names]
        self.values = [str(local_vars[name]) for name in self.var_names]
        self._add_headers()
        self._scale_columns(columns)

    def _scale_columns(self, columns):
        """Distribute *columns* among the three fields proportionally."""
        widest_name = max(len(entry) for entry in self.var_names)
        widest_type = max(len(entry) for entry in self.types)
        widest_value = max(len(entry) for entry in self.values)
        natural_width = widest_name + widest_type + widest_value
        scale = float(columns) / float(natural_width)
        # The -2 leaves room for the ' | ' separators between fields.
        self.MAX_KEY = math.floor(scale * widest_name) - 2
        self.MAX_TYPE = math.floor(scale * widest_type) - 2
        self.MAX_VALUES = math.floor(scale * widest_value) - 2

    def _add_headers(self):
        """Prepend the header row to each column list."""
        self.var_names.insert(0, 'VARIABLE')
        self.types.insert(0, 'TYPE')
        self.values.insert(0, 'VALUE')

    def _make_row(self, key, tp, val):
        """Format one padded table row."""
        return '{key} | {tp} | {val}'.format(
            key=left_string(key, self.MAX_KEY),
            tp=left_string(tp, self.MAX_TYPE),
            val=left_string(val, self.MAX_VALUES)
        )

    def __str__(self):
        rows = [self._make_row(name, self.types[idx], self.values[idx])
                for idx, name in enumerate(self.var_names)]
        return '\n'.join(rows)
Formats the ``local_vars`` object for display in the tty. Can handle any kind of object and formats it in a +-----+------+-------+ | KEY | TYPE | VALUE | +-----+------+-------+ -like table Args ---- local_vars : object Object to display in the table; each key becomes one row columns : int Width (in columns) of the output stream. Default: 80
62598fa645492302aabfc3bf
class Category(BaseModel):
    """Model definition for Category."""

    IdCategory = models.AutoField(_("ID city"), primary_key=True)
    Description = models.CharField(
        _("Description"), max_length=255, blank=False, null=False, unique=True)
    historical = HistoricalRecords()

    @property
    def _history_user(self):
        # django-simple-history hook: reports who made the change.
        return self.changed_by

    @_history_user.setter
    def _history_user(self, value):
        self.changed_by = value

    class Meta:
        verbose_name = _("Category")
        verbose_name_plural = _("Categories")

    def __str__(self):
        # BUG FIX: the field is ``Description`` (capital D); returning
        # ``self.description`` raised AttributeError whenever the object
        # was rendered.
        return self.Description
Model definition for Category.
62598fa6a8370b77170f02c9
class TestingConfig(Config):
    """Configuration used while running the test suite.

    Turns on the TESTING and DEBUG flags on top of the base ``Config``.
    """
    TESTING = True
    DEBUG = True
Configurations for Testing
62598fa68e71fb1e983bb9a0
class NetworkRuleSet(Resource):
    """Network rule set resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Fully qualified resource ID for the resource.
    :ivar name: The name of the resource.
    :ivar type: The type of the resource.
    :param trusted_service_access_enabled: Whether Trusted Service Access
        is enabled or not.
    :param default_action: Default action for the rule set ("Allow"/"Deny").
    :param virtual_network_rules: List of VirtualNetwork rules.
    :param ip_rules: List of IP rules.
    """

    # Server-populated, read-only properties.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps python attribute -> REST wire key/type for (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'trusted_service_access_enabled': {'key': 'properties.trustedServiceAccessEnabled', 'type': 'bool'},
        'default_action': {'key': 'properties.defaultAction', 'type': 'str'},
        'virtual_network_rules': {'key': 'properties.virtualNetworkRules', 'type': '[NWRuleSetVirtualNetworkRules]'},
        'ip_rules': {'key': 'properties.ipRules', 'type': '[NWRuleSetIpRules]'},
    }

    def __init__(
        self,
        *,
        trusted_service_access_enabled: Optional[bool] = None,
        default_action: Optional[Union[str, "DefaultAction"]] = None,
        virtual_network_rules: Optional[List["NWRuleSetVirtualNetworkRules"]] = None,
        ip_rules: Optional[List["NWRuleSetIpRules"]] = None,
        **kwargs
    ):
        super(NetworkRuleSet, self).__init__(**kwargs)
        self.trusted_service_access_enabled = trusted_service_access_enabled
        self.default_action = default_action
        self.virtual_network_rules = virtual_network_rules
        self.ip_rules = ip_rules
Description of topic resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts". :vartype type: str :param trusted_service_access_enabled: Value that indicates whether Trusted Service Access is Enabled or not. :type trusted_service_access_enabled: bool :param default_action: Default Action for Network Rule Set. Possible values include: "Allow", "Deny". :type default_action: str or ~azure.mgmt.eventhub.v2018_01_01_preview.models.DefaultAction :param virtual_network_rules: List VirtualNetwork Rules. :type virtual_network_rules: list[~azure.mgmt.eventhub.v2018_01_01_preview.models.NWRuleSetVirtualNetworkRules] :param ip_rules: List of IpRules. :type ip_rules: list[~azure.mgmt.eventhub.v2018_01_01_preview.models.NWRuleSetIpRules]
62598fa6435de62698e9bce4
class InfaError(Exception):
    """Base class for all Infa-specific errors."""
Baseclass for all Infa errors.
62598fa6b7558d589546351e
class BindingWrapper(Binding):
    """Wraps a binding implementation so the wrapped target needs no OSGI
    dependencies, which makes it easier to test.

    Each hook is forwarded to ``self.target`` when the target defines it.
    """

    def __init__(self, id, target):
        self.target = target
        super(BindingWrapper, self).__init__(id)

    def validateItemType(self, item, bindingConfig):
        # Optional hook on the target.
        if hasattr(self.target, 'validateItemType'):
            self.target.validateItemType(item, bindingConfig)

    def processBindingConfiguration(self, context, item, bindingConfig):
        self.target.processBindingConfiguration(context, item, bindingConfig)

    def addBindingConfiguration(self, context, bindingConfig):
        self._binding_configs[context].append(bindingConfig)

    def removeConfigurations(self, context):
        if context in self._binding_configs:
            self.log.debug("{}.removeConfigurations({})".format(self, context))
            del(self._binding_configs[context])

    def findBindingConfiguration(self, predicate):
        """Return the first stored config matching *predicate*, or None."""
        try:
            return next(self.findBindingConfigurations(predicate))
        except StopIteration:
            return None

    def findBindingConfigurations(self, predicate):
        """Yield every stored config matching *predicate*."""
        for cfgs in self._binding_configs.values():
            for cfg in cfgs:
                if predicate(cfg):
                    yield cfg

    def receiveCommand(self, item_name, command):
        if hasattr(self.target, 'receiveCommand'):
            self.target.receiveCommand(item_name, command)

    def receiveUpdate(self, item_name, state):
        if hasattr(self.target, 'receiveUpdate'):
            # BUG FIX: previously checked for 'receiveUpdate' but then
            # called target.receiveCommand, routing state updates to the
            # command handler.
            self.target.receiveUpdate(item_name, state)

    def updated(self, dictionary):
        if hasattr(self.target, 'updated'):
            self.target.updated(dictionary)

    def poll(self):
        if hasattr(self.target, 'poll'):
            self.target.poll()

    def start_polling(self, period_millis):
        """Schedule a periodic poll() via the Quartz scheduler."""
        self._polling_job_key = openhab.quartz.schedule_periodic_callback(self.poll, period_millis)

    def stop_polling(self):
        if self._polling_job_key:
            openhab.quartz.get_scheduler().deleteJob(self._polling_job_key)
            self._polling_job_key = None

    def dispose(self):
        """Unregister listeners, stop polling and dispose the target."""
        for registration in self._registrations:
            registration.unregister()
        self._registrations = []
        self.stop_polling()
        if hasattr(self.target, 'dispose'):
            self.target.dispose()
Wraps a binding implementation. The goal is to remove OSGI dependencies in the binding implementation for easier testing.
62598fa666656f66f7d5a2df
class Pattern(object):
    """Base class for LED output patterns.

    Concrete patterns override ``_calculate_step`` to fill the channel
    buffer for one animation frame.
    """

    def __init__(self, config, config_global):
        # ``config_defaults`` is expected to be set by the subclass.
        if not self.config_defaults:
            self.config_defaults = {}
        self.config = config
        configdict.extend_deep(self.config, self.config_defaults.copy())
        self.config_global = config_global
        self.values = config_global['value']
        self.update_config()

    def update_config(self):
        """Re-derive the cached channel geometry from ``config_global``."""
        self.channel_count = self.config_global['channel_count']
        self.pixel_count = self.config_global['pixel_count']
        self.pixel_index_max = self.pixel_count - 1
        self.repeat_count = self.config_global['repeat_count']
        self.repeat_snake = self.config_global['repeat_snake']
        self.update_interval = self.config_global['update_interval']
        self.mode_16bit = self.config_global['mode_16bit']
        self.color_channels = self.config_global['color_channels']
        self.color_channels_count = len(self.color_channels)
        if self.mode_16bit:
            # 16-bit mode sends a high and a low byte per color channel.
            self.color_channels_count = self.color_channels_count * 2
        self.total_channel_count = (
            self.pixel_count * self.color_channels_count
        )
        if self.repeat_count > 0:
            self.total_channel_count *= self.repeat_count

    def _calculate_16bit_values(self, value):
        """Split *value* into ``(high_byte, low_byte)`` honoring 16-bit mode."""
        # Dead pre-initialization of high/low bytes removed: the helper
        # always returns both values.
        return calculate_16bit_values(value, self.mode_16bit)

    def _hsv_01_to_rgb_16bit(self, hue, saturation, value):
        """Convert 0..1 HSV to the 16-bit-aware RGB tuple."""
        return hsv_01_to_rgb_16bit(hue, saturation, value, self.mode_16bit)

    def _calculate_step(self, universe):
        """Return a zero-filled byte array sized for one output frame."""
        self.update_config()
        data_output = array.array('B')
        data_output.append(0)
        data_output *= self.total_channel_count
        return data_output
Base Pattern Class.
62598fa6d7e4931a7ef3bf89
class Category(models.Model):
    """A content category identified by a unique slug."""

    # Human-readable name; 'Name Category' is the verbose field label.
    title = models.CharField('Name Category', max_length=50)
    # URL-safe unique identifier.
    slug = models.SlugField(unique=True)

    class Meta:
        verbose_name = 'Category'
        verbose_name_plural = 'Categories'

    def __str__(self):
        return self.title
Categories
62598fa6e5267d203ee6b7fb
class InsufficientStorageError(YaDiskError):
    """Thrown when the server returns code 509 (insufficient storage)."""
    pass
Thrown when the server returns code 509.
62598fa6f7d966606f747ed3
class TracedGreenlet(gevent.Greenlet):
    """OpenTracing-aware greenlet that propagates the active span from
    the spawning (parent) greenlet into the child."""

    def __init__(self, *args, **kwargs):
        # Capture the parent's active span at construction time, i.e.
        # while still running in the parent greenlet's context.
        self._active_span = tracer.active_span
        super(TracedGreenlet, self).__init__(*args, **kwargs)

    def run(self, *args, **kwargs):
        # Re-activate the captured span inside the child greenlet, then
        # drop our reference so this object does not keep the span alive.
        tracer.active_span_source.make_active(self._active_span)
        del self._active_span
        # NOTE(review): *args/**kwargs are accepted but not forwarded;
        # gevent's Greenlet.run takes no arguments, so this appears
        # intentional — confirm before changing.
        super(TracedGreenlet, self).run()
OpenTracing-aware helper class that ensures the context is propagated from the parent greenlet to the child when a new greenlet is initialized.
62598fa656ac1b37e63020dc
class Piece(Marker):
    """A draggable SVG game piece; represents the user choice when
    deployed inside the 3D open cube."""

    def __init__(self, gui, x, y, fill, r, g, b, board, pid):
        SIDE = 70  # avatar width/height in pixels
        self.board, self.fill, self.pid = board, fill, pid
        # r, g, b are marker helper objects (expose on_over()/hide()),
        # not color components.
        self.red, self.green, self.blue = r, g, b
        self.avatar = gui.image(href=REPO % fill, x=x, y=y, width=SIDE, height=SIDE)
        self.avatar.addEventListener('mouseover', self.on_over)
        self.avatar.addEventListener('mouseout', self.on_out)
        self.avatar.addEventListener('click', self.on_click)
        self.house = board

    def show(self, x, y):
        # Hide while repositioning so the move is not visible mid-flight.
        self.avatar.setAttribute("visibility", 'hidden')
        self.avatar.setAttribute('x', x)
        self.avatar.setAttribute('y', y)
        self.avatar.setAttribute("visibility", 'visible')

    def do_markers(self, *a):
        # State slot: swapped at runtime for _on_over (placed) or _busy (reset).
        pass

    def _busy(self, *a):
        # Inactive-state no-op.
        pass

    def on_over(self, ev):
        self.do_markers(ev)

    def _on_over(self, ev):
        # Forward the hover to the three markers with this piece's cell.
        i, j, k = self._ijk
        self.red.on_over(ev, i, j, k)
        self.green.on_over(ev, i, j, k)
        self.blue.on_over(ev, i, j, k)

    def next_jig(self):
        # Advances the board's jig once, then disables itself.
        self.board.next_jig()
        self.next_jig = self._busy

    def _next_jig(self):
        # Re-armable copy of next_jig, restored by reset().
        self.board.next_jig()
        self.next_jig = self._busy

    def on_click(self, ev):
        self.board.drag(self)

    def reset(self, x, y):
        """Return the piece to the board at screen position (x, y)."""
        self.house.remove(self)
        self.house = self.board
        self.avatar.setAttribute("opacity", 1.0)
        self.show(x, y)
        self.do_markers = self._busy
        self.next_jig = self._next_jig

    def place(self, z, y, x, house):
        """Drop the piece into cube cell (z, y, x) owned by *house*."""
        self.house.remove(self)
        self.house = house
        # Higher layers (larger z) are rendered more opaque.
        self.avatar.setAttribute("opacity", 0.4 + z * 0.3)
        self._ijk = (z, y, x)
        OFFX, OFFY = 170 - 35, 170 - 35  # board origin offset in pixels
        ax = OFFX + x * 100 + 71 * z
        ay = OFFY + y * 100 + 71 * z
        self.show(ax, ay)
        self.do_markers = self._on_over
        self.next_jig()

    def on_out(self, ev):
        self.red.hide()
        self.green.hide()
        self.blue.hide()
Represents the user choice when deployed inside the 3D open cube. :ref:`piece`
62598fa6cc0a2c111447aefe
class Conv(nn.Module):
    """Basic convolutional layer with optional batch normalization,
    non-linearity, weight normalization and dropout.

    Parameters
    ----------
    n_in : int
        Number of input channels.
    filter_size : int
        Square kernel size.
    n_out : int
        Number of output channels.
    non_linearity : str or None
        One of 'relu', 'elu', 'selu', 'tanh', 'sigmoid' or None.
    batch_norm : bool
        Apply BatchNorm2d after the convolution.
    weight_norm : bool
        Apply weight normalization to the convolution weights.
    dropout : float
        Dropout2d probability; 0. disables dropout.
    initialize : str
        Weight initialization scheme; '' keeps PyTorch defaults.
    """

    def __init__(self, n_in, filter_size, n_out, non_linearity=None,
                 batch_norm=False, weight_norm=False, dropout=0.,
                 initialize='glorot_uniform'):
        super(Conv, self).__init__()
        self.conv = nn.Conv2d(n_in, n_out, filter_size,
                              padding=int(np.ceil(filter_size / 2)))
        self.bn = None
        if batch_norm:
            self.bn = nn.BatchNorm2d(n_out)
        if weight_norm:
            self.conv = nn.utils.weight_norm(self.conv, name='weight')
        if non_linearity is None:
            self.non_linearity = None
        elif non_linearity == 'relu':
            self.non_linearity = nn.ReLU()
        elif non_linearity == 'elu':
            self.non_linearity = nn.ELU()
        elif non_linearity == 'selu':
            self.non_linearity = nn.SELU()
        elif non_linearity == 'tanh':
            self.non_linearity = nn.Tanh()
        elif non_linearity == 'sigmoid':
            self.non_linearity = nn.Sigmoid()
        else:
            raise Exception('Non-linearity ' + str(non_linearity) + ' not found.')
        # BUG FIX: self.dropout was left undefined when dropout == 0.,
        # making forward() raise AttributeError.
        self.dropout = None
        if dropout > 0.:
            self.dropout = nn.Dropout2d(dropout)
        # Use the in-place initializers; the non-underscore variants are
        # deprecated (removed in modern PyTorch).
        if initialize == 'normal':
            init.normal_(self.conv.weight)
        elif initialize == 'glorot_uniform':
            init.xavier_uniform_(self.conv.weight)
        elif initialize == 'glorot_normal':
            init.xavier_normal_(self.conv.weight)
        elif initialize == 'kaiming_uniform':
            init.kaiming_uniform_(self.conv.weight)
        elif initialize == 'kaiming_normal':
            init.kaiming_normal_(self.conv.weight)
        elif initialize == 'orthogonal':
            init.orthogonal_(self.conv.weight)
        elif initialize == '':
            pass
        else:
            raise Exception('Parameter initialization ' + str(initialize) + ' not found.')
        if batch_norm:
            init.constant_(self.bn.weight, 1.)
            init.constant_(self.bn.bias, 0.)
        init.constant_(self.conv.bias, 0.)

    def forward(self, input):
        """Apply conv -> (batch norm) -> (non-linearity) -> (dropout)."""
        output = self.conv(input)
        if self.bn is not None:
            output = self.bn(output)
        if self.non_linearity is not None:
            output = self.non_linearity(output)
        if self.dropout is not None:
            output = self.dropout(output)
        return output
Basic convolutional layer with optional batch normalization, non-linearity, weight normalization and dropout.
62598fa6a8370b77170f02ca
class StringMock(StringLike):
    """A simple mock of built-in strings using the StringLike class."""

    def __init__(self, string):
        # Raw wrapped string value.
        self.string = string

    def __str__(self):
        # text_type is provided by StringLike — presumably the text class
        # (str) to convert through; confirm against the base class.
        return self.text_type(self.string)
A simple mock of built-in strings using the StringLike class.
62598fa6cc0a2c111447aeff
class Dframe(pd.DataFrame):
    """A data frame that indexes like R: ``self[rows, cols]`` takes two
    entries, rows and columns."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __getattr__(self, name):
        # Fall back to column access for unknown attributes.
        return super().__getitem__(name)

    def __getitem__(self, arg):
        """Require a (rows, columns) pair, R-style.

        BUG FIX: the old guard (`not tuple AND len != 2`) let through
        non-tuple arguments of length 2 (e.g. two-char strings) and
        tuples of the wrong length; indexing must always receive an
        exact (rows, columns) 2-tuple.
        """
        if not (isinstance(arg, tuple) and len(arg) == 2):
            raise Exception("Rows and columns must be supplied.")
        rows = arg[0]
        cols = arg[1]
        print("rows = ", rows)
        print("cols = ", cols)

    @property
    def nrow(self):
        """Number of rows (R's ``nrow``)."""
        return self.shape[0]

    @property
    def ncol(self):
        """Number of columns (R's ``ncol``)."""
        return self.shape[1]
A data frame that indexes like R. self[] takes two entries, rows and columns.
62598fa6aad79263cf42e6c4
class ProductsStub(object):
    """Client stub for the ``product.Products`` gRPC service.

    Interface exported by the server.
    """

    def __init__(self, channel):
        """Bind the RPC callables to *channel* (a grpc.Channel)."""
        # Unary Id request -> unary Record response.
        self.GetRecord = channel.unary_unary(
            '/product.Products/GetRecord',
            request_serializer=Id.SerializeToString,
            response_deserializer=Record.FromString,
        )
        # Unary AllRecords request -> server-streamed Records.
        self.ListRecords = channel.unary_stream(
            '/product.Products/ListRecords',
            request_serializer=AllRecords.SerializeToString,
            response_deserializer=Record.FromString,
        )
Interface exported by the server.
62598fa6f548e778e596b494
class RTClientAPIError(Exception):
    """Raised when an RT-related operation or communication fails."""
To be raised on RT-related operation/communication failures.
62598fa68c0ade5d55dc3608
class OpenWeather:
    """Load an exported OpenWeather history CSV into an xarray dataset."""

    def __init__(self):
        self.dataset = None

    @classmethod
    def read_openweather_to_xarray(cls, fn_openweather, date_start=None, date_end=None):
        """Read the CSV and attach scalar site metadata, dropping the
        per-row lon/lat/city_name columns."""
        try:
            dataset = cls.read_openweather_data(fn_openweather, date_start, date_end)
        except Exception as err:
            # Narrowed from a bare ``except``; keep the original message
            # but preserve the underlying cause for debugging.
            raise Exception("Problem reading OpenWeather file: " + fn_openweather) from err
        # BUG FIX: longitude/latitude were swapped — longitude was taken
        # from the 'lat' column and latitude from 'lon'.
        dataset["longitude"] = float(dataset["lon"][0])
        dataset["latitude"] = float(dataset["lat"][0])
        dataset["site_name"] = str(dataset["city_name"][0])
        dataset = dataset.drop_vars(["lon", "lat", "city_name"])
        return dataset

    @classmethod
    def read_openweather_data(cls, filnam, date_start=None, date_end=None):
        """Parse the raw OpenWeather CSV into an xarray dataset,
        optionally masked to [date_start, date_end]."""
        import datetime
        dataset = xr.Dataset()
        # Timestamps look like '2020-01-01 00:00:00 +0000 UTC'.
        custom_date_parser = lambda x: datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S +0000 UTC")
        data = pd.read_csv(filnam, delimiter=',', parse_dates=['dt_iso'],
                           date_parser=custom_date_parser)
        data.rename(columns={'dt_iso': 'time'}, inplace=True)
        data.set_index('time', inplace=True)
        # Keep only the variables of interest; drop redundant/unused columns.
        data.drop(columns=['dt', 'timezone', 'temp', 'feels_like', 'temp_min',
                           'temp_max', 'sea_level', 'grnd_level', 'humidity',
                           'rain_1h', 'rain_3h', 'snow_1h', 'snow_3h',
                           'clouds_all', 'weather_id', 'weather_main',
                           'weather_description', 'weather_icon'],
                  inplace=True)
        dataset = data.to_xarray()
        if date_start is not None:
            dataset = dataset.where(dataset.time >= date_start)
        if date_end is not None:
            dataset = dataset.where(dataset.time <= date_end)
        return dataset
Class to load in an export OpenWeather history file at Hawarden Airport into an xarray dataset.
62598fa6090684286d593653
class GerritAccessor(object):
    """Limited Gerrit functionality for canned presubmit checks to work.

    To avoid excessive Gerrit calls, caches change details per issue.
    """

    def __init__(self, host):
        self.host = host
        # issue number (int) -> change-detail dict from gerrit_util.
        self.cache = {}

    def _FetchChangeDetail(self, issue):
        # Uncached fetch; 404s are mapped to a friendlier error message.
        try:
            return gerrit_util.GetChangeDetail(
                self.host, str(issue),
                ['ALL_REVISIONS', 'DETAILED_LABELS', 'ALL_COMMITS'])
        except gerrit_util.GerritError as e:
            if e.http_status == 404:
                raise Exception('Either Gerrit issue %s doesn\'t exist, or '
                                'no credentials to fetch issue details' % issue)
            raise

    def GetChangeInfo(self, issue):
        """Return (cached) change details for *issue*."""
        assert issue
        cache_key = int(issue)
        if cache_key not in self.cache:
            self.cache[cache_key] = self._FetchChangeDetail(issue)
        return self.cache[cache_key]

    def GetChangeDescription(self, issue, patchset=None):
        """Return the commit message of *patchset* (default: current revision)."""
        info = self.GetChangeInfo(issue)
        if patchset is not None:
            # for/else: raise only when no revision matched the patchset.
            for rev, rev_info in info['revisions'].items():
                if str(rev_info['_number']) == str(patchset):
                    break
            else:
                raise Exception('patchset %s doesn\'t exist in issue %s' % (
                    patchset, issue))
        else:
            rev = info['current_revision']
            rev_info = info['revisions'][rev]
        return rev_info['commit']['message']

    def GetDestRef(self, issue):
        """Return the change's destination branch as a fully-qualified ref."""
        ref = self.GetChangeInfo(issue)['branch']
        if not ref.startswith('refs/'):
            ref = 'refs/heads/%s' % ref
        return ref

    def GetChangeOwner(self, issue):
        """Return the change owner's e-mail address."""
        return self.GetChangeInfo(issue)['owner']['email']

    def GetChangeReviewers(self, issue, approving_only=True):
        """Return reviewer e-mails; only max-score approvers when
        *approving_only* is true."""
        changeinfo = self.GetChangeInfo(issue)
        if approving_only:
            labelinfo = changeinfo.get('labels', {}).get('Code-Review', {})
            values = labelinfo.get('values', {}).keys()
            try:
                max_value = max(int(v) for v in values)
                reviewers = [r for r in labelinfo.get('all', [])
                             if r.get('value', 0) == max_value]
            except ValueError:
                # No Code-Review label values defined on this change.
                reviewers = []
        else:
            reviewers = changeinfo.get('reviewers', {}).get('REVIEWER', [])
        return [r.get('email') for r in reviewers]

    def UpdateDescription(self, description, issue):
        """Overwrite the change's commit message without notifying anyone."""
        gerrit_util.SetCommitMessage(self.host, issue, description, notify='NONE')
Limited Gerrit functionality for canned presubmit checks to work. To avoid excessive Gerrit calls, caches the results.
62598fa62ae34c7f260aafd1
@inherit_doc
class _DecisionTreeClassifierParams(_DecisionTreeParams, _TreeClassifierParams):
    """Params for :py:class:`DecisionTreeClassifier` and
    :py:class:`DecisionTreeClassificationModel`.
    """
    pass
Params for :py:class:`DecisionTreeClassifier` and :py:class:`DecisionTreeClassificationModel`.
62598fa6bd1bec0571e1503b
class CacheError(Exception):
    """Base exception for all cache related errors."""
Base exception for all cache related errors.
62598fa645492302aabfc3c1
class InputThetvdbFavorites(object):
    """Creates a list of entries for your series marked as favorites at
    thetvdb.com for use in configure_series.

    Example::

        configure_series:
          from:
            thetvdb_favorites:
              username: some_username
              account_id: some_account_id
    """

    schema = {
        'type': 'object',
        'properties': {
            'username': {'type': 'string'},
            'account_id': {'type': 'string'},
            # When true, strip trailing " (YYYY)" from series names.
            'strip_dates': {'type': 'boolean'}
        },
        'required': ['username', 'account_id'],
        'additionalProperties': False
    }

    @cached('thetvdb_favorites')
    @plugin.internet(log)
    @with_session
    def on_task_input(self, task, config, session=None):
        """Return one Entry per favorite series; the favorite-id list is
        refreshed from thetvdb at most every 10 minutes."""
        user_favorites = session.query(TVDBUserFavorite).filter(TVDBUserFavorite.username == config['username']).first()
        if not user_favorites:
            user_favorites = TVDBUserFavorite(username=config['username'])
            session.add(user_favorites)
        if user_favorites.updated and user_favorites.updated > datetime.now() - timedelta(minutes=10):
            log.debug('Using cached thetvdb favorite series information for account %s' % config['username'])
        else:
            try:
                req = TVDBRequest(username=config['username'], account_id=config['account_id']).get('user/favorites')
                user_favorites.series_ids = [int(f_id) for f_id in req['favorites']]
            except RequestException as e:
                # Best effort: keep the stale id list on network failure.
                log.error('Error retrieving favorites from thetvdb: %s' % str(e))
            user_favorites.updated = datetime.now()
        entries = []
        for series_id in user_favorites.series_ids:
            try:
                series = lookup_series(tvdb_id=series_id)
            except LookupError as e:
                # Skip unresolvable ids but keep processing the rest.
                log.error('Error looking up %s from thetvdb: %s' % (series_id, e.args[0]))
            else:
                series_name = series.name
                if config.get('strip_dates'):
                    # Drop a trailing " (YYYY)" year suffix from the name.
                    series_name = re.sub(r'\s+\(\d{4}\)$', '', series_name)
                entries.append(Entry(series_name, '', tvdb_id=series.id))
        return entries
Creates a list of entries for your series marked as favorites at thetvdb.com for use in configure_series. Example: configure_series: from: thetvdb_favorites: username: some_username account_id: some_password
62598fa64e4d562566372315
class ON(): <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.onArg1Dic = {} <NEW_LINE> self.onArg2Dic = {} <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> print(self.x) <NEW_LINE> print(self.y) <NEW_LINE> <DEDENT> def assign_values(self): <NEW_LINE> <INDENT> self.onArg1Dic = {"value": 10, "attribute": "is on top", "placeholder": self.x, "proposition": "on"} <NEW_LINE> self.onArg2Dic = {"value": 1, "attribute": "is on bottom", "placeholder": self.y, "proposition": "on"} <NEW_LINE> return self.onArg1Dic, self.onArg2Dic <NEW_LINE> <DEDENT> def on_test(self): <NEW_LINE> <INDENT> if(self.onArg1Dic["value"] > self.onArg2Dic["value"]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def on_check(self, toCheckX, toCheckY): <NEW_LINE> <INDENT> if self.x == toCheckX: <NEW_LINE> <INDENT> if self.y == toCheckY: <NEW_LINE> <INDENT> print('both on_check x and y passed') <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('only on_check x passed, but y failed') <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print('both on_check failed') <NEW_LINE> return False
ON Proposition
62598fa6627d3e7fe0e06d9d
class CtdpfCklWfpRecoveredMetadataParticle(CtdpfCklWfpMetadataParticle): <NEW_LINE> <INDENT> _data_particle_type = DataParticleType.RECOVERED_METADATA
Class for the recovered ctdpf_ckl_wfp metadata particle
62598fa663d6d428bbee26a2
@metaclassify(ABCMeta) <NEW_LINE> class NonStringIterable: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def __subclasshook__(cls, C): <NEW_LINE> <INDENT> if cls is NonStringIterable: <NEW_LINE> <INDENT> if (not issubclass(C, (str, bytes)) and issubclass(C, Iterable)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return NotImplemented
Allows isinstance check for iterable that is not a string
62598fa61f5feb6acb162b12
@admin.register(Hashtag) <NEW_LINE> class HashtagAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> raw_id_fields = ('documents', )
Hashtag admin model
62598fa64428ac0f6e658412
class MessageClientError(Exception): <NEW_LINE> <INDENT> pass
A class for message client exceptions
62598fa6925a0f43d25e7f2e
class SamplerBuilderSimpleFG(SamplerBuilder): <NEW_LINE> <INDENT> def build(self): <NEW_LINE> <INDENT> samplers = list() <NEW_LINE> agg_kernel = calc_avg_kernel(self.patch_size) <NEW_LINE> for orient in self._get_orients(): <NEW_LINE> <INDENT> patches = self._build_patches(orient) <NEW_LINE> w = self._calc_weights(patches, agg_kernel, orient) <NEW_LINE> samplers.append(Sampler(patches, w.weights_flat)) <NEW_LINE> <DEDENT> self._sampler_xy = SamplerCollection(*samplers) <NEW_LINE> self._sampler_z = self._sampler_xy <NEW_LINE> return self <NEW_LINE> <DEDENT> def _calc_weights(self, patches, agg_kernel, orient): <NEW_LINE> <INDENT> calc_mask = CalcHeadMaskSimple(patches) <NEW_LINE> weights = SampleWeights(patches, (calc_mask.fg_mask, )) <NEW_LINE> self._figure_pool.append((orient, calc_mask)) <NEW_LINE> self._figure_pool.append((orient, patches)) <NEW_LINE> self._figure_pool.append((orient, weights)) <NEW_LINE> return weights
Builds a :class:`sssrlib.sample.Sampler` to sample patches in foreground.
62598fa6097d151d1a2c0f18
class BaseError(Exception): <NEW_LINE> <INDENT> pass
Base error class for all things C{codec}.
62598fa616aa5153ce4003f3
class GetFullUser(TLObject): <NEW_LINE> <INDENT> __slots__ = ["id"] <NEW_LINE> ID = 0xca30a5b1 <NEW_LINE> QUALNAME = "functions.users.GetFullUser" <NEW_LINE> def __init__(self, *, id): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(b: BytesIO, *args) -> "GetFullUser": <NEW_LINE> <INDENT> id = TLObject.read(b) <NEW_LINE> return GetFullUser(id=id) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> b = BytesIO() <NEW_LINE> b.write(Int(self.ID, False)) <NEW_LINE> b.write(self.id.write()) <NEW_LINE> return b.getvalue()
Attributes: LAYER: ``112`` Attributes: ID: ``0xca30a5b1`` Parameters: id: Either :obj:`InputUserEmpty <pyrogram.api.types.InputUserEmpty>`, :obj:`InputUserSelf <pyrogram.api.types.InputUserSelf>`, :obj:`InputUser <pyrogram.api.types.InputUser>` or :obj:`InputUserFromMessage <pyrogram.api.types.InputUserFromMessage>` Returns: :obj:`UserFull <pyrogram.api.types.UserFull>`
62598fa68e7ae83300ee8f92
class FetcherInvalidPageError(ValueError): <NEW_LINE> <INDENT> pass
Raised when fetched page is not suitable for further parsing
62598fa632920d7e50bc5f47
class TaskPolicy(object, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> _schema = {} <NEW_LINE> def before_task_start(self, task): <NEW_LINE> <INDENT> utils.evaluate_object_fields(self, task.get_expression_context()) <NEW_LINE> self._validate() <NEW_LINE> <DEDENT> def after_task_complete(self, task): <NEW_LINE> <INDENT> utils.evaluate_object_fields(self, task.get_expression_context()) <NEW_LINE> self._validate() <NEW_LINE> <DEDENT> def _validate(self): <NEW_LINE> <INDENT> props = inspect_utils.get_public_fields(self) <NEW_LINE> try: <NEW_LINE> <INDENT> jsonschema.validate(props, self._schema) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise exc.InvalidModelException( "Invalid data type in %s: %s. Value(s) can be shown after " "YAQL evaluating. If you use YAQL here, please correct it." % (self.__class__.__name__, str(e)) )
Task policy. Provides interface to perform any work after a task has completed. An example of task policy may be 'retry' policy that makes engine to run a task repeatedly if it finishes with a failure.
62598fa656ac1b37e63020dd
class CoincidenceCoreReconstruction(object): <NEW_LINE> <INDENT> def __init__(self, cluster): <NEW_LINE> <INDENT> self.estimator = CenterMassAlgorithm <NEW_LINE> self.cluster = cluster <NEW_LINE> <DEDENT> def reconstruct_coincidence(self, coincidence, station_numbers=None, initial={}): <NEW_LINE> <INDENT> p, x, y, z = ([], [], [], []) <NEW_LINE> try: <NEW_LINE> <INDENT> self.cluster.set_timestamp(coincidence[0][1]['timestamp']) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return (nan, nan) <NEW_LINE> <DEDENT> for station_number, event in coincidence: <NEW_LINE> <INDENT> if station_numbers is not None: <NEW_LINE> <INDENT> if station_number not in station_numbers: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> station = self.cluster.get_station(station_number) <NEW_LINE> p_station = station_density(event, range(4), station) <NEW_LINE> if not isnan(p_station): <NEW_LINE> <INDENT> sx, sy, sz = station.calc_center_of_mass_coordinates() <NEW_LINE> p.append(p_station) <NEW_LINE> x.append(sx) <NEW_LINE> y.append(sy) <NEW_LINE> z.append(sz) <NEW_LINE> <DEDENT> <DEDENT> if len(p) >= 3: <NEW_LINE> <INDENT> core_x, core_y = self.estimator.reconstruct_common(p, x, y, z, initial) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> core_x, core_y = (nan, nan) <NEW_LINE> <DEDENT> return core_x, core_y <NEW_LINE> <DEDENT> def reconstruct_coincidences(self, coincidences, station_numbers=None, progress=True, initials=[]): <NEW_LINE> <INDENT> coincidences = pbar(coincidences, show=progress) <NEW_LINE> coin_init = izip_longest(coincidences, initials) <NEW_LINE> cores = [self.reconstruct_coincidence(coincidence, station_numbers, initial) for coincidence, initial in coin_init] <NEW_LINE> if len(cores): <NEW_LINE> <INDENT> core_x, core_y = zip(*cores) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> core_x, core_y = ((), ()) <NEW_LINE> <DEDENT> return core_x, core_y
Reconstruct core for coincidences This class is aware of 'coincidences' and 'clusters'. Initialize this class with a 'cluster' and you can reconstruct a coincidence using :meth:`reconstruct_coincidence`. :param cluster: :class:`sapphire.clusters.BaseCluster` object.
62598fa6fff4ab517ebcd6d5
class TrainTestDiff(object): <NEW_LINE> <INDENT> def __init__(self, datasets): <NEW_LINE> <INDENT> self.datasets = datasets <NEW_LINE> <DEDENT> def plot_cont_diff(self, features, kind="box", col_wrap=3, size=4, aspect=1, title=None): <NEW_LINE> <INDENT> return plot_continuous_diff(self.datasets, features, kind, col_wrap, size, aspect, title) <NEW_LINE> <DEDENT> def plot_cat_diff(self, features, col_wrap=3, kind="prop", title=None): <NEW_LINE> <INDENT> return plot_categorical_diff( self.datasets, features, kind=kind, col_wrap=col_wrap, title=title)
Helper class to ease distribution analysis on the same datasets
62598fa6e76e3b2f99fd8927
class CreateCollectionIndexStatement(Statement): <NEW_LINE> <INDENT> def __init__(self, collection, index_name, is_unique): <NEW_LINE> <INDENT> super(CreateCollectionIndexStatement, self).__init__(target=collection) <NEW_LINE> self._index_name = index_name <NEW_LINE> self._is_unique = is_unique <NEW_LINE> self._fields = [] <NEW_LINE> <DEDENT> def field(self, document_path, column_type, is_required): <NEW_LINE> <INDENT> self._fields.append((document_path, column_type, is_required,)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> fields = [item for sublist in self._fields for item in sublist] <NEW_LINE> return self._connection.execute_nonquery( "xplugin", "create_collection_index", True, self._target.schema.name, self._target.name, self._index_name, self._is_unique, *fields)
A statement that creates an index on a collection. Args: collection (mysqlx.Collection): Collection. index_name (string): Index name. is_unique (bool): `True` if the index is unique.
62598fa6009cb60464d01410
class RubricList(AACOnlyMixin,ListView): <NEW_LINE> <INDENT> model = Rubric <NEW_LINE> template_name = "makeReports/Rubric/rubricList.html" <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Rubric.objects.order_by("-date")
View to list rubrics in reverse chronological order
62598fa6f548e778e596b495
class TokenSequence (ListReplacing, AbstractToken): <NEW_LINE> <INDENT> def __call__(self, parser, origCursor): <NEW_LINE> <INDENT> o = [] <NEW_LINE> only = False <NEW_LINE> onlyVal = None <NEW_LINE> for g in self.desc: <NEW_LINE> <INDENT> if g is Whitespace: <NEW_LINE> <INDENT> parser.skip(parser.whitespace) <NEW_LINE> <DEDENT> r = parser.scan(g) <NEW_LINE> if parser.last is None: <NEW_LINE> <INDENT> raise NotMatched <NEW_LINE> <DEDENT> if isinstance(g, Only): <NEW_LINE> <INDENT> only = True <NEW_LINE> onlyVal = r <NEW_LINE> continue <NEW_LINE> <DEDENT> if not isinstance(g, (Skip, Omit, _Whitespace)) and not only: <NEW_LINE> <INDENT> o.append(r) <NEW_LINE> <DEDENT> <DEDENT> if only: <NEW_LINE> <INDENT> return onlyVal <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return o <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self in rstack: <NEW_LINE> <INDENT> return '...' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rstack.append(self) <NEW_LINE> d = '(' + ' + '.join([repr(t) for t in self.desc]) + ')' <NEW_LINE> rstack.pop() <NEW_LINE> return d <NEW_LINE> <DEDENT> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> if hasattr(other, '__iter__'): <NEW_LINE> <INDENT> return TokenSequence(self.desc + list(other)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return TokenSequence(self.desc + [other]) <NEW_LINE> <DEDENT> <DEDENT> def __sub__(self, other): <NEW_LINE> <INDENT> if hasattr(other, '__iter__'): <NEW_LINE> <INDENT> return TokenSequence(self.desc + [Whitespace] + list(other)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return TokenSequence(self.desc + [Whitespace, other]) <NEW_LINE> <DEDENT> <DEDENT> def __iadd__(self, other): <NEW_LINE> <INDENT> if hasattr(other, '__iter__'): <NEW_LINE> <INDENT> self.desc += list(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.desc.append(other) <NEW_LINE> <DEDENT> return self
A class whose instances match a sequence of tokens. Returns a corresponding list of return values from L{ZestyParser.scan}. Some special types, L{Skip}, L{Omit}, and L{Only}, are allowed in the sequence. These are wrappers for other token objects adding special behaviours. If it encounters a L{Skip} token, it will process it with L{ZestyParser.skip}, ignore whether it matched, and not include it in the list. If it encounters a L{Omit} token, it will still require that it match (the default behaviour), but it will not be included in the list. If the sequence contains an L{Only} token, its result will be returned instead of the usual list, though it still requires that subsequent tokens match. Multiple L{Only} tokens are meaningless and L{TokenSequence}'s behavior in that case is undefined. @ivar desc: A list of token objects. @type desc: list
62598fa6baa26c4b54d4f1a1
class RollbackRequest(proto.Message): <NEW_LINE> <INDENT> database = proto.Field(proto.STRING, number=1) <NEW_LINE> transaction = proto.Field(proto.BYTES, number=2)
The request for [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. Attributes: database (str): Required. The database name. In the format: ``projects/{project_id}/databases/{database_id}``. transaction (bytes): Required. The transaction to roll back.
62598fa64428ac0f6e658413
class DirectoryFetcher(Iterable[Story], Sized, Fetcher): <NEW_LINE> <INDENT> prefetch_meta = False <NEW_LINE> prefetch_data = False <NEW_LINE> def __init__( self, meta_path: Union[Path, str] = None, data_path: Union[Path, str] = None, flavors: Iterable[Flavor] = tuple(), ) -> None: <NEW_LINE> <INDENT> self.meta_path = get_path(meta_path) <NEW_LINE> self.data_path = get_path(data_path) <NEW_LINE> self.length: Optional[int] = None <NEW_LINE> self.flavors = frozenset(flavors) <NEW_LINE> <DEDENT> def iter_path_keys(self, path: Optional[Path]) -> Iterator[int]: <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not path.is_dir(): <NEW_LINE> <INDENT> raise StorySourceError(f"Path is not a directory: {path}") <NEW_LINE> <DEDENT> for item in Path(path).iterdir(): <NEW_LINE> <INDENT> if not item.is_file(): <NEW_LINE> <INDENT> raise StorySourceError(f"Path is not a file: {item}") <NEW_LINE> <DEDENT> if not item.name.isdigit(): <NEW_LINE> <INDENT> raise StorySourceError(f"Name is not a digit: {item}") <NEW_LINE> <DEDENT> yield int(item.name) <NEW_LINE> <DEDENT> <DEDENT> def list_keys(self) -> Set[int]: <NEW_LINE> <INDENT> meta_keys = self.iter_path_keys(self.meta_path) <NEW_LINE> data_keys = self.iter_path_keys(self.data_path) <NEW_LINE> return set(chain(meta_keys, data_keys)) <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> if self.length is None: <NEW_LINE> <INDENT> self.length = len(self.list_keys()) <NEW_LINE> <DEDENT> return self.length <NEW_LINE> <DEDENT> def __iter__(self) -> Iterator[Story]: <NEW_LINE> <INDENT> for key in sorted(self.list_keys()): <NEW_LINE> <INDENT> yield self.fetch(key) <NEW_LINE> <DEDENT> <DEDENT> def read_file(self, path: Path) -> bytes: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return path.read_bytes() <NEW_LINE> <DEDENT> except FileNotFoundError as e: <NEW_LINE> <INDENT> raise InvalidStoryError("File does not exist.") from e <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> 
raise StorySourceError("Unable to read file.") from e <NEW_LINE> <DEDENT> <DEDENT> def fetch_data(self, key: int) -> bytes: <NEW_LINE> <INDENT> if self.data_path is None: <NEW_LINE> <INDENT> raise StorySourceError("Data path is undefined.") <NEW_LINE> <DEDENT> path = self.data_path / str(key) <NEW_LINE> raw = self.read_file(path) <NEW_LINE> return raw <NEW_LINE> <DEDENT> def fetch_meta(self, key: int) -> Dict[str, Any]: <NEW_LINE> <INDENT> if self.meta_path is None: <NEW_LINE> <INDENT> raise StorySourceError("Meta path is undefined.") <NEW_LINE> <DEDENT> path = self.meta_path / str(key) <NEW_LINE> raw = self.read_file(path) <NEW_LINE> return json.loads(raw.decode())
Fetches stories from file system.
62598fa6090684286d593654
class TokenVarExtractor(object): <NEW_LINE> <INDENT> def __init__(self, token): <NEW_LINE> <INDENT> self.token_content = token.split_contents() <NEW_LINE> self.tag_name = self.token_content.pop(0) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def split(item): <NEW_LINE> <INDENT> key, sep, value = item.rpartition('=') <NEW_LINE> if key: <NEW_LINE> <INDENT> key = key.strip() <NEW_LINE> if re.match('\w+', key): <NEW_LINE> <INDENT> return key, value.strip() <NEW_LINE> <DEDENT> <DEDENT> return None, item <NEW_LINE> <DEDENT> def has_more(self): <NEW_LINE> <INDENT> return bool(self.token_content) <NEW_LINE> <DEDENT> def pop(self, key=None): <NEW_LINE> <INDENT> if not self.token_content: <NEW_LINE> <INDENT> raise template.TemplateSyntaxError( u'Template tag argument is missing: {0}'.format(key)) <NEW_LINE> <DEDENT> if key: <NEW_LINE> <INDENT> for item in self.token_content: <NEW_LINE> <INDENT> ret_key, ret_value = self.split(item) <NEW_LINE> if ret_key == key: <NEW_LINE> <INDENT> self.token_content.remove(item) <NEW_LINE> return ret_value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> item = self.token_content.pop(0) <NEW_LINE> ret_key, ret_value = self.split(item) <NEW_LINE> if ret_key: <NEW_LINE> <INDENT> if key: <NEW_LINE> <INDENT> raise template.TemplateSyntaxError( u'Keyword argument not found: {0}'.format(key)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise template.TemplateSyntaxError( u'Positional argument expected; keyword found: {0}' .format(item)) <NEW_LINE> <DEDENT> <DEDENT> return item <NEW_LINE> <DEDENT> def kwargs(self): <NEW_LINE> <INDENT> ret = {} <NEW_LINE> for item in self.token_content: <NEW_LINE> <INDENT> key, value = self.split(item) <NEW_LINE> if not key: <NEW_LINE> <INDENT> raise template.TemplateSyntaxError( 'Unexpected positional argument: {0}'.format(item)) <NEW_LINE> <DEDENT> ret[key] = value <NEW_LINE> <DEDENT> return ret
Extracts variables from split content of the token. Used to extract both positional and keyword arguments. :param token: The token object, passed to template function
62598fa663d6d428bbee26a3
class SessionStateSuccessSchema(BaseSchema): <NEW_LINE> <INDENT> level = fields.Int( required=True, description="User level", example=0, ) <NEW_LINE> picture_uri = fields.Str( required=False, description="User picture", example="https://c8.patreon.com/2/200/561356054", ) <NEW_LINE> name = fields.Str( required=False, description="User name", example="ExampleUsername123", ) <NEW_LINE> email = fields.Str( required=False, description="User email", example="example@gmail.com", ) <NEW_LINE> created = fields.Number( required=False, description="Session time created", example=1627793093.676484, ) <NEW_LINE> uuid = fields.Str( required=True, description="Installation uuid", example="b429fcc7-9ce1-bcb3-2b8a-b094747f226e", )
Schema for returning session data
62598fa66aa9bd52df0d4dbb
class FoilPresenterImport(SongImport): <NEW_LINE> <INDENT> def __init__(self, manager, **kwargs): <NEW_LINE> <INDENT> log.debug('initialise FoilPresenterImport') <NEW_LINE> SongImport.__init__(self, manager, **kwargs) <NEW_LINE> self.FoilPresenter = FoilPresenter(self.manager, self) <NEW_LINE> <DEDENT> def doImport(self): <NEW_LINE> <INDENT> self.import_wizard.progress_bar.setMaximum(len(self.import_source)) <NEW_LINE> parser = etree.XMLParser(remove_blank_text=True) <NEW_LINE> for file_path in self.import_source: <NEW_LINE> <INDENT> if self.stop_import_flag: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.import_wizard.increment_progress_bar(WizardStrings.ImportingType % os.path.basename(file_path)) <NEW_LINE> try: <NEW_LINE> <INDENT> parsed_file = etree.parse(file_path, parser) <NEW_LINE> xml = etree.tostring(parsed_file).decode() <NEW_LINE> self.FoilPresenter.xml_to_song(xml) <NEW_LINE> <DEDENT> except etree.XMLSyntaxError: <NEW_LINE> <INDENT> self.logError(file_path, SongStrings.XMLSyntaxError) <NEW_LINE> log.exception('XML syntax error in file %s' % file_path)
This provides the Foilpresenter import.
62598fa6d486a94d0ba2bec0
class APIClientMetaclass(type): <NEW_LINE> <INDENT> config_class = ConfigClass <NEW_LINE> @staticmethod <NEW_LINE> def add_methods_for_endpoint(methods, name, endpoint, config): <NEW_LINE> <INDENT> methods[name] = classmethod(method_factory(endpoint, '_make_request')) <NEW_LINE> <DEDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> base_config = get_base_attr('Config', bases, attrs) <NEW_LINE> attrs['_config'] = config = cls.config_class(base_config) <NEW_LINE> methods = {} <NEW_LINE> attr_list = list(attrs.items()) <NEW_LINE> for name, endpoint in attr_list: <NEW_LINE> <INDENT> if not isinstance(endpoint, APIEndpoint): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cls.add_methods_for_endpoint(methods, name, endpoint, config) <NEW_LINE> del attrs[name] <NEW_LINE> <DEDENT> attrs.update(methods) <NEW_LINE> return type.__new__(cls, name, bases, attrs)
Makes API call methods from APIEndpoint definitions on the APIClient class (this 'metaclass magic' is similar to Django Model class where fields are defined on the class, and transformed by the metaclass into usable attrs) eg class ThingServiceClient(APIClient): class Config: base_url = 'http://things.depop.com' get_things = GetEndpoint( '/things/{user_id}/', # url format string (('type', True),), # required querystring param (validated on call) ) Results in a client method you can call like: data = MyAPI.get_things(user_id=2345, type='cat') Which will perform a request like: GET http://things.depop.com/things/2345/?type=cat If response was "Content-Type: application/json" then `data` is already deserialized. We use `raise_for_status` so anything >= 400 will raise a `requests.HTTPError`.
62598fa62ae34c7f260aafd3
class PackageViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = Package.objects.all().order_by("-id") <NEW_LINE> serializer_class = PackageSerializer <NEW_LINE> paginate_by = 20
API endpoint that allows packages to be viewed or edited.
62598fa6eab8aa0e5d30bc7c
class DiscreteModule(torch.nn.Module): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, probs=None, logits=None): <NEW_LINE> <INDENT> super(DiscreteModule, self).__init__() <NEW_LINE> if probs is None and logits is None: <NEW_LINE> <INDENT> raise ValueError("Expectingt the given 'probs' xor 'logits' to be different than None.") <NEW_LINE> <DEDENT> if probs is not None and logits is not None: <NEW_LINE> <INDENT> raise ValueError("Expecting the given 'probs' xor 'logits' to be None.") <NEW_LINE> <DEDENT> if probs is not None: <NEW_LINE> <INDENT> if not isinstance(probs, torch.nn.Module): <NEW_LINE> <INDENT> raise TypeError("Expecting the probs to be an instance of `torch.nn.Module`, instead got: " "{}".format(type(probs))) <NEW_LINE> <DEDENT> self._probs = probs <NEW_LINE> self._logits = lambda x: None <NEW_LINE> <DEDENT> if logits is not None: <NEW_LINE> <INDENT> if not isinstance(logits, torch.nn.Module): <NEW_LINE> <INDENT> raise TypeError("Expecting the logits to be an instance of `torch.nn.Module`, instead got: " "{}".format(type(logits))) <NEW_LINE> <DEDENT> self._logits = logits <NEW_LINE> self._probs = lambda x: None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def logits(self): <NEW_LINE> <INDENT> return self._logits <NEW_LINE> <DEDENT> @property <NEW_LINE> def probs(self): <NEW_LINE> <INDENT> return self._probs
Discrete probability module. Discrete probability module from which several discrete probability distributions (such as Bernoulli, Categorical, and others) inherit from.
62598fa6be8e80087fbbef54
class TestGetInboundEmailEventsByUuid(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGetInboundEmailEventsByUuid(self): <NEW_LINE> <INDENT> pass
GetInboundEmailEventsByUuid unit test stubs
62598fa64428ac0f6e658414
class News(BaseModel, db.Model): <NEW_LINE> <INDENT> __tablename__ = "info_news" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> title = db.Column(db.String(256), nullable=False) <NEW_LINE> source = db.Column(db.String(64), nullable=False) <NEW_LINE> digest = db.Column(db.String(512), nullable=False) <NEW_LINE> content = db.Column(db.Text, nullable=False) <NEW_LINE> clicks = db.Column(db.Integer, default=0) <NEW_LINE> index_image_url = db.Column(db.String(256)) <NEW_LINE> category_id = db.Column(db.Integer, db.ForeignKey("info_category.id")) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey("info_user.id")) <NEW_LINE> status = db.Column(db.Integer, default=0) <NEW_LINE> reason = db.Column(db.String(256)) <NEW_LINE> comments = db.relationship("Comment", lazy="dynamic") <NEW_LINE> def to_review_dict(self): <NEW_LINE> <INDENT> resp_dict = { "id": self.id, "title": self.title, "create_time": self.create_time.strftime("%Y-%m-%d %H:%M:%S"), "status": self.status, "reason": self.reason if self.reason else "" } <NEW_LINE> return resp_dict <NEW_LINE> <DEDENT> def to_basic_dict(self): <NEW_LINE> <INDENT> resp_dict = { "id": self.id, "title": self.title, "source": self.source, "digest": self.digest, "create_time": self.create_time.strftime("%Y-%m-%d %H:%M:%S"), "index_image_url": self.index_image_url, "clicks": self.clicks, } <NEW_LINE> return resp_dict <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> resp_dict = { "id": self.id, "title": self.title, "source": self.source, "digest": self.digest, "create_time": self.create_time.strftime("%Y-%m-%d %H:%M:%S"), "content": self.content, "comments_count": self.comments.count(), "clicks": self.clicks, "category": self.category.to_dict(), "index_image_url": self.index_image_url, "author": self.user.to_dict() if self.user else None } <NEW_LINE> return resp_dict
新闻
62598fa601c39578d7f12c72
class WebcastEncoder(models.Model): <NEW_LINE> <INDENT> name = models.CharField( max_length = 100, help_text = "Enter the stream type name", ) <NEW_LINE> description = models.CharField( max_length = 200, help_text = "Enter a short description", ) <NEW_LINE> vendor = models.ForeignKey( HardwareVendor, ) <NEW_LINE> ip_address = models.CharField( max_length = 12, help_text = "ex. 192.168.0.1", blank = True, null = True, ) <NEW_LINE> active = models.BooleanField( default=True, ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Model representing a stream type
62598fa64f88993c371f0483
class Translate: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def sweet(self, input): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> priceUSD = result['data']['prices'][1]['price'] <NEW_LINE> await self.bot.say("BTC PRICE " + priceUSD) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> await self.bot.say("Error.")
Translate to Sweetish for trigger
62598fa6925a0f43d25e7f30
class State: <NEW_LINE> <INDENT> def __init__(self, jug_1: int, jug_2: int): <NEW_LINE> <INDENT> self._jug_1 = jug_1 <NEW_LINE> self._jug_2 = jug_2 <NEW_LINE> <DEDENT> def __eq__(self, other: object): <NEW_LINE> <INDENT> return self._jug_1 == other._jug_1 and self._jug_2 == other._jug_2 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '(' + str(self._jug_1) + ', ' + str(self._jug_2) + ')' <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return State(self._jug_1, self._jug_2) <NEW_LINE> <DEDENT> def fill_jug_1(self): <NEW_LINE> <INDENT> self._jug_1 = JUG_1_MAX <NEW_LINE> return self <NEW_LINE> <DEDENT> def fill_jug_2(self): <NEW_LINE> <INDENT> self._jug_2 = JUG_2_MAX <NEW_LINE> return self <NEW_LINE> <DEDENT> def empty_jug_1(self): <NEW_LINE> <INDENT> self._jug_1 = 0 <NEW_LINE> return self <NEW_LINE> <DEDENT> def empty_jug_2(self): <NEW_LINE> <INDENT> self._jug_2 = 0 <NEW_LINE> return self <NEW_LINE> <DEDENT> def pour_jug_1_to_jug_2(self): <NEW_LINE> <INDENT> while self._jug_1 > 0 and self._jug_2 < 3: <NEW_LINE> <INDENT> self._jug_1 -=1 <NEW_LINE> self._jug_2 +=1 <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def pour_jug_2_to_jug_1(self): <NEW_LINE> <INDENT> while self._jug_1 < 5 and self._jug_2 > 0: <NEW_LINE> <INDENT> self._jug_1 +=1 <NEW_LINE> self._jug_2 -=1 <NEW_LINE> <DEDENT> return self
State of the jugs
62598fa68e7ae83300ee8f94
class Calculator: <NEW_LINE> <INDENT> def __init__(self, a, b=25): <NEW_LINE> <INDENT> self.a = a <NEW_LINE> self.b = b <NEW_LINE> <DEDENT> def addition(self): <NEW_LINE> <INDENT> return self.a + self.b <NEW_LINE> <DEDENT> def subtraction(self): <NEW_LINE> <INDENT> return self.a - self.b <NEW_LINE> <DEDENT> def multiplication(self): <NEW_LINE> <INDENT> return self.a * self.b <NEW_LINE> <DEDENT> def division(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.a / self.b <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return "It is impossible to divide by zero."
Do addition, subtraction, multiplication and division.
62598fa6d7e4931a7ef3bf8d
class IdentAlloc(object): <NEW_LINE> <INDENT> def __init__(self, idrange): <NEW_LINE> <INDENT> self.__used = [] <NEW_LINE> self.__free = [x for x in range(idrange)] <NEW_LINE> <DEDENT> def free(self, oldid): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> used_index = self.__used.index(oldid) <NEW_LINE> self.__free.append(oldid) <NEW_LINE> del self.__used[used_index] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> if len(self.__free) < 1: <NEW_LINE> <INDENT> raise IdentFetchError("no more ID's in range") <NEW_LINE> <DEDENT> newid = self.__free.pop(0) <NEW_LINE> self.__used.append(newid) <NEW_LINE> return newid
Manage unique identity numbers in range
62598fa6cb5e8a47e493c0f1
class PlugnhackAPI(object): <NEW_LINE> <INDENT> def __init__(self,core): <NEW_LINE> <INDENT> self.Core = core <NEW_LINE> self.action_monitor = "monitor" <NEW_LINE> self.action_start_monitoring = "startMonitoring" <NEW_LINE> self.action_stop_monitoring = "stopMonitoring" <NEW_LINE> self.action_oracle = "oracle" <NEW_LINE> self.param_message = "message" <NEW_LINE> self.param_id = "id" <NEW_LINE> self.param_url = "url" <NEW_LINE> self.extension = plugnhack_extension.PlugnhackExtension() <NEW_LINE> <DEDENT> def handle_api_action(self, command_name, message): <NEW_LINE> <INDENT> response = tuple() <NEW_LINE> if command_name == self.action_monitor: <NEW_LINE> <INDENT> monitor_message = message.get(self.param_message) <NEW_LINE> page_id = message.get(self.param_id) <NEW_LINE> json_msg = json.loads(monitor_message) <NEW_LINE> resp = self.extension.message_received(client_message.ClientMessage(page_id,json_msg)) <NEW_LINE> if resp is not None: <NEW_LINE> <INDENT> response = resp <NEW_LINE> <DEDENT> <DEDENT> elif command_name == self.action_oracle: <NEW_LINE> <INDENT> self.extension.oracle_invoked(message.get(self.param_id)) <NEW_LINE> <DEDENT> elif command_name == self.action_start_monitoring: <NEW_LINE> <INDENT> monitor_url = message.get(self.param_url) <NEW_LINE> try: <NEW_LINE> <INDENT> page_id = self.extension.start_monitoring(monitor_url) <NEW_LINE> response = self.param_id, page_id <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.Core.write_event("Illegal url\n", 'a'); <NEW_LINE> <DEDENT> <DEDENT> elif command_name == self.action_stop_monitoring: <NEW_LINE> <INDENT> page_id = message.get(self.param_id) <NEW_LINE> self.extension.stop_monitoring(page_id) <NEW_LINE> <DEDENT> return response
PlugnhackAPI handles commands from user.
62598fa676e4537e8c3ef49f
class JRNLImporter(object): <NEW_LINE> <INDENT> names = ["jrnl"] <NEW_LINE> @staticmethod <NEW_LINE> def import_(journal, input=None): <NEW_LINE> <INDENT> old_cnt = len(journal.entries) <NEW_LINE> old_entries = journal.entries <NEW_LINE> if input: <NEW_LINE> <INDENT> with codecs.open(input, "r", "utf-8") as f: <NEW_LINE> <INDENT> other_journal_txt = f.read() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> other_journal_txt = util.py23_read() <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> util.prompt("[Entries NOT imported into journal.]") <NEW_LINE> sys.exit(0) <NEW_LINE> <DEDENT> <DEDENT> journal.import_(other_journal_txt) <NEW_LINE> new_cnt = len(journal.entries) <NEW_LINE> util.prompt("[{0} imported to {1} journal]".format(new_cnt - old_cnt, journal.name)) <NEW_LINE> journal.write()
This plugin imports entries from other jrnl files.
62598fa65fdd1c0f98e5de8a
class SystemData(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'created_by': {'key': 'createdBy', 'type': 'str'}, 'created_by_type': {'key': 'createdByType', 'type': 'str'}, 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SystemData, self).__init__(**kwargs) <NEW_LINE> self.created_by = kwargs.get('created_by', None) <NEW_LINE> self.created_by_type = kwargs.get('created_by_type', None) <NEW_LINE> self.created_at = kwargs.get('created_at', None) <NEW_LINE> self.last_modified_by = kwargs.get('last_modified_by', None) <NEW_LINE> self.last_modified_by_type = kwargs.get('last_modified_by_type', None) <NEW_LINE> self.last_modified_at = kwargs.get('last_modified_at', None)
Metadata pertaining to creation and last modification of the resource. :param created_by: The identity that created the resource. :type created_by: str :param created_by_type: The type of identity that created the resource. Possible values include: "User", "Application", "ManagedIdentity", "Key". :type created_by_type: str or ~storage_cache_management_client.models.CreatedByType :param created_at: The timestamp of resource creation (UTC). :type created_at: ~datetime.datetime :param last_modified_by: The identity that last modified the resource. :type last_modified_by: str :param last_modified_by_type: The type of identity that last modified the resource. Possible values include: "User", "Application", "ManagedIdentity", "Key". :type last_modified_by_type: str or ~storage_cache_management_client.models.CreatedByType :param last_modified_at: The timestamp of resource last modification (UTC). :type last_modified_at: ~datetime.datetime
62598fa660cbc95b0636423f
class PluginAdapterV3(PluginAdapterV2): <NEW_LINE> <INDENT> node_roles_config_name = 'node_roles.yaml' <NEW_LINE> volumes_config_name = 'volumes.yaml' <NEW_LINE> deployment_tasks_config_name = 'deployment_tasks.yaml' <NEW_LINE> network_roles_config_name = 'network_roles.yaml' <NEW_LINE> def sync_metadata_to_db(self): <NEW_LINE> <INDENT> super(PluginAdapterV3, self).sync_metadata_to_db() <NEW_LINE> data_to_update = {} <NEW_LINE> db_config_metadata_mapping = { 'attributes_metadata': self.environment_config_name, 'roles_metadata': self.node_roles_config_name, 'volumes_metadata': self.volumes_config_name, 'network_roles_metadata': self.network_roles_config_name, 'deployment_tasks': self.deployment_tasks_config_name, 'tasks': self.task_config_name } <NEW_LINE> for attribute, config in six.iteritems(db_config_metadata_mapping): <NEW_LINE> <INDENT> config_file_path = os.path.join(self.plugin_path, config) <NEW_LINE> attribute_data = self._load_config(config_file_path) <NEW_LINE> if attribute_data: <NEW_LINE> <INDENT> data_to_update[attribute] = attribute_data <NEW_LINE> <DEDENT> <DEDENT> Plugin.update(self.plugin, data_to_update)
Plugin wrapper class for package version >= 3.0.0
62598fa65166f23b2e2432ca
class UnitMovable(UnitBird): <NEW_LINE> <INDENT> name_struct = "unit_movable" <NEW_LINE> name_struct_file = "unit" <NEW_LINE> struct_description = "adds attack and armor properties to units." <NEW_LINE> data_format = ( (dataformat.READ_EXPORT, None, dataformat.IncludeMembers(cls=UnitBird)), (dataformat.READ, "default_armor", "int16_t"), (dataformat.READ, "attack_count", "uint16_t"), (dataformat.READ, "attacks", dataformat.SubdataMember(ref_type=HitType, length="attack_count")), (dataformat.READ, "armor_count", "uint16_t"), (dataformat.READ, "armors", dataformat.SubdataMember(ref_type=HitType, length="armor_count")), (dataformat.READ_EXPORT, "interaction_type", dataformat.EnumLookupMember( raw_type = "int16_t", type_name = "interaction_types", lookup_dict = { -1: "UNIT", 4: "BUILDING", 6: "DOCK", 10: "WALL", }, )), (dataformat.READ, "max_range", "float"), (dataformat.READ, "blast_radius", "float"), (dataformat.READ, "reload_time0", "float"), (dataformat.READ, "projectile_unit_id", "int16_t"), (dataformat.READ, "accuracy_percent", "int16_t"), (dataformat.READ, "tower_mode", "int8_t"), (dataformat.READ, "delay", "int16_t"), (dataformat.READ, "projectile_graphics_displacement_lr", "float"), (dataformat.READ, "projectile_graphics_displacement_distance", "float"), (dataformat.READ, "projectile_graphics_displacement_height", "float"), (dataformat.READ_EXPORT, "blast_level", dataformat.EnumLookupMember( raw_type = "int8_t", type_name = "range_damage_type", lookup_dict = { 0: "RESSOURCES", 1: "TREES", 2: "NEARBY_UNITS", 3: "TARGET_ONLY", }, )), (dataformat.READ, "min_range", "float"), (dataformat.READ, "garrison_recovery_rate", "float"), (dataformat.READ_EXPORT, "attack_graphic", "int16_t"), (dataformat.READ, "melee_armor_displayed", "int16_t"), (dataformat.READ, "attack_displayed", "int16_t"), (dataformat.READ, "range_displayed", "float"), (dataformat.READ, "reload_time1", "float"), ) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__()
type_id >= 60
62598fa63617ad0b5ee06046
class AuthTool(cp.Tool): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> cp.Tool.__init__(self, 'before_handler', self._authenticate) <NEW_LINE> <DEDENT> def _authenticate(self): <NEW_LINE> <INDENT> if not self.get_user(): <NEW_LINE> <INDENT> raise cp.HTTPError(401, 'Unauthorized') <NEW_LINE> <DEDENT> <DEDENT> def sign_in(self, email, password): <NEW_LINE> <INDENT> r = None, None <NEW_LINE> user = UserProvider.get_by_email(cp.request.db, email) <NEW_LINE> if user and user.enabled and AuthTool.match_password(password, user.password): <NEW_LINE> <INDENT> token = AuthTool.generate_token() <NEW_LINE> cp.engine.publish('auth-sign_in', token, user) <NEW_LINE> r = token, user <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> def sign_out(self): <NEW_LINE> <INDENT> token = self.get_token() <NEW_LINE> cp.engine.publish('auth-sign_out', token) <NEW_LINE> <DEDENT> def get_token(self): <NEW_LINE> <INDENT> r = None <NEW_LINE> if hasattr(cp.request, 'json'): <NEW_LINE> <INDENT> r = cp.request.json['token'] <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> def get_user(self): <NEW_LINE> <INDENT> token = self.get_token() <NEW_LINE> user = cp.engine.publish('auth-get_user', token).pop() <NEW_LINE> return user <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def encode_password(password): <NEW_LINE> <INDENT> password = str.encode(password, encoding='UTF-8') <NEW_LINE> password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) <NEW_LINE> return password_hash.decode('utf-8') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def match_password(attempt, password): <NEW_LINE> <INDENT> attempt = str.encode(attempt, encoding='UTF-8') <NEW_LINE> password = str.encode(password, encoding='UTF-8') <NEW_LINE> return bcrypt.checkpw(attempt, password) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def generate_token(): <NEW_LINE> <INDENT> return str(uuid.uuid4())
Auth tool for JSON requests. User have to give us a token each time he ask for data.
62598fa691af0d3eaad39d02
class Radius(Packet): <NEW_LINE> <INDENT> name = "RADIUS" <NEW_LINE> fields_desc = [ ByteEnumField("code", 1, _packet_codes), ByteField("id", 0), FieldLenField( "len", None, "attributes", "H", adjust=lambda pkt, x: len(pkt.attributes) + 20 ), XStrFixedLenField("authenticator", "", 16), _RADIUSAttrPacketListField( "attributes", [], RadiusAttribute, length_from=lambda pkt: pkt.len - 20 ) ] <NEW_LINE> def compute_authenticator(self, packed_request_auth, shared_secret): <NEW_LINE> <INDENT> data = prepare_packed_data(self, packed_request_auth) <NEW_LINE> radius_mac = hashlib.md5(data + shared_secret) <NEW_LINE> return radius_mac.digest() <NEW_LINE> <DEDENT> def post_build(self, p, pay): <NEW_LINE> <INDENT> p += pay <NEW_LINE> length = self.len <NEW_LINE> if length is None: <NEW_LINE> <INDENT> length = len(p) <NEW_LINE> p = p[:2] + struct.pack("!H", length) + p[4:] <NEW_LINE> <DEDENT> return p
Implements a RADIUS packet (RFC 2865).
62598fa64428ac0f6e658415
class ClusterView(MapView): <NEW_LINE> <INDENT> template_name = 'map/map_clusters.html' <NEW_LINE> def format_centroids(self, centroids, num_points, sizes): <NEW_LINE> <INDENT> centroid_data_dict = {"type": "FeatureCollection", "features": []} <NEW_LINE> for centroid, num, size in zip(centroids, num_points, sizes): <NEW_LINE> <INDENT> centroid_dict = {"type": "Feature", "geometry": {"type": "Point", "coordinates": [str(centroid[1]), str(centroid[0])]}, "properties": {"size": str(size), "numPoints": str(num)}, } <NEW_LINE> centroid_data_dict["features"].append(centroid_dict) <NEW_LINE> <DEDENT> return centroid_data_dict <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> data_dict = super(MapView, self).get_context_data(**kwargs) <NEW_LINE> request_date = self.get_date(**kwargs) <NEW_LINE> points = self.get_queryset(request_date) <NEW_LINE> if len(points) == 0: <NEW_LINE> <INDENT> point_clusters, centroids = [], [] <NEW_LINE> num_points, sizes = [], [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> DBSCAN = DBSCANClustering(points) <NEW_LINE> centroids, num_points, sizes = DBSCAN.compute_clusters() <NEW_LINE> print(centroids) <NEW_LINE> print(num_points) <NEW_LINE> print(sizes) <NEW_LINE> <DEDENT> data_dict["date"] = {"day": request_date.day, "month": request_date.month, "year": request_date.year} <NEW_LINE> data_dict["mode"] = "cluster" <NEW_LINE> data_dict["centroids"] = self.format_centroids(centroids, num_points, sizes) <NEW_LINE> return data_dict
Display points with cluster colors, and centroids
62598fa64e4d562566372318
class PartTrackingList(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = PartTrackingInfo.objects.all() <NEW_LINE> serializer_class = PartTrackingInfoSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticatedOrReadOnly,) <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filter_class = PartTrackingFilter
get: Return a list of all PartTrackingInfo objects (with optional query filter) post: Create a new PartTrackingInfo object
62598fa68a43f66fc4bf2071
class ForbiddenError(RPCError): <NEW_LINE> <INDENT> code = 403 <NEW_LINE> message = 'FORBIDDEN'
Privacy violation. For example, an attempt to write a message to someone who has blacklisted the current user.
62598fa62ae34c7f260aafd5
class LogicAppReceiver(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'resource_id': {'required': True}, 'callback_url': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'callback_url': {'key': 'callbackUrl', 'type': 'str'}, 'use_common_alert_schema': {'key': 'useCommonAlertSchema', 'type': 'bool'}, } <NEW_LINE> def __init__( self, *, name: str, resource_id: str, callback_url: str, use_common_alert_schema: Optional[bool] = False, **kwargs ): <NEW_LINE> <INDENT> super(LogicAppReceiver, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.resource_id = resource_id <NEW_LINE> self.callback_url = callback_url <NEW_LINE> self.use_common_alert_schema = use_common_alert_schema
A logic app receiver. All required parameters must be populated in order to send to Azure. :param name: Required. The name of the logic app receiver. Names must be unique across all receivers within an action group. :type name: str :param resource_id: Required. The azure resource id of the logic app receiver. :type resource_id: str :param callback_url: Required. The callback url where http request sent to. :type callback_url: str :param use_common_alert_schema: Indicates whether to use common alert schema. :type use_common_alert_schema: bool
62598fa624f1403a9268582d
class Client: <NEW_LINE> <INDENT> m_Controller = Controller() <NEW_LINE> m_Controller.AddOperation(GoAhead(step=12)) <NEW_LINE> m_Controller.AddOperation(GoBack(step=4)) <NEW_LINE> m_Controller.AddOperation(GoLeft(step=3)) <NEW_LINE> m_Controller.AddOperation(GoRight(step=5)) <NEW_LINE> m_Controller.Execute() <NEW_LINE> print("\n") <NEW_LINE> m_Controller.AddOperation(GoAhead(step=20)) <NEW_LINE> m_Controller.AddOperation(GoBack(step=44)) <NEW_LINE> m_Controller.AddOperation(GoLeft(step=34)) <NEW_LINE> m_Controller.AddOperation(GoRight(step=50)) <NEW_LINE> m_Controller.Execute()
This class creates a ConcreteCommand object and sets its receiver.
62598fa657b8e32f52508095
class ProxyMiddleware(): <NEW_LINE> <INDENT> def __init__(self, orderno, secret, host, port): <NEW_LINE> <INDENT> self.logger = logging.getLogger(__name__) <NEW_LINE> self.orderno = orderno <NEW_LINE> self.secret =secret <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> <DEDENT> def get_proxy(self): <NEW_LINE> <INDENT> proxy, auth = xdl_proxy(self.orderno, self.secret, self.host, self.port) <NEW_LINE> return proxy, auth <NEW_LINE> <DEDENT> def process_request(self, request, spider): <NEW_LINE> <INDENT> if request.meta.get('retry_times'): <NEW_LINE> <INDENT> proxy, auth = self.get_proxy() <NEW_LINE> self.logger.debug('使用代理: {proxy}'.format(proxy=self.host + ":" + self.port)) <NEW_LINE> request.meta['proxy'] = proxy['http'] <NEW_LINE> request.headers['Proxy-Authorization'] = auth <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_crawler(cls, crawler): <NEW_LINE> <INDENT> settings = crawler.settings <NEW_LINE> return cls( orderno = settings.get('PROXY_XDL_ORDERNO'), secret = settings.get('PROXY_XDL_SECRET'), host = settings.get('PROXY_XDL_HOST'), port = settings.get('PROXY_XDL_PORT') )
动态代理
62598fa6656771135c489576
class MRRamp(MRJob): <NEW_LINE> <INDENT> def mapper(self, _, line): <NEW_LINE> <INDENT> t = track.load_track(line) <NEW_LINE> if t and t['duration'] > 60 and len(t['segments']) > 20: <NEW_LINE> <INDENT> segments = t['segments'] <NEW_LINE> half_track = t['duration'] / 2 <NEW_LINE> first_half = 0 <NEW_LINE> second_half = 0 <NEW_LINE> first_count = 0 <NEW_LINE> second_count = 0 <NEW_LINE> for i in xrange(len(segments)): <NEW_LINE> <INDENT> seg = segments[i] <NEW_LINE> if seg['loudness_max'] < -40 and seg['duration'] > 30: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> seg_loudness = seg['loudness_max'] * seg['duration'] <NEW_LINE> if seg['start'] + seg['duration'] <= half_track: <NEW_LINE> <INDENT> seg_loudness = seg['loudness_max'] * seg['duration'] <NEW_LINE> first_half += seg_loudness <NEW_LINE> first_count += 1 <NEW_LINE> <DEDENT> elif seg['start'] < half_track and seg['start'] + seg['duration'] > half_track: <NEW_LINE> <INDENT> first_seg_loudness = seg['loudness_max'] * (half_track - seg['start']) <NEW_LINE> first_half += first_seg_loudness <NEW_LINE> first_count += 1 <NEW_LINE> second_seg_loudness = seg['loudness_max'] * (seg['duration'] - (half_track - seg['start'])) <NEW_LINE> second_half += second_seg_loudness <NEW_LINE> second_count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seg_loudness = seg['loudness_max'] * seg['duration'] <NEW_LINE> second_half += seg_loudness <NEW_LINE> second_count += 1 <NEW_LINE> <DEDENT> <DEDENT> if first_count > 10 and second_count > 10: <NEW_LINE> <INDENT> ramp_factor = second_half / half_track - first_half / half_track <NEW_LINE> if YIELD_ALL or ramp_factor > 10 or ramp_factor < -10: <NEW_LINE> <INDENT> yield (t['artist_name'], t['title'], t['track_id']), ramp_factor
A map-reduce job that calculates the ramp factor
62598fa6d268445f26639afd
class NeuronalStrengthDifferenceEnergyFunction: <NEW_LINE> <INDENT> def __call__(self, matrix: np.ndarray) -> float: <NEW_LINE> <INDENT> return np.mean(np.power(np.sum(matrix - matrix.T, axis=1), 2))
Evaluate the network energy associated with the neural differences in in- and out- strength of a given neuron. These two values are close to the same for any given neuron. In order to preserve this nodal associativity the difference between neuron in-strengths and out-strengths is taken. If near zero, then it matches expectations.
62598fa64428ac0f6e658416
class TestSandboxSetPositionBalanceRequest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testSandboxSetPositionBalanceRequest(self): <NEW_LINE> <INDENT> pass
SandboxSetPositionBalanceRequest unit test stubs
62598fa68da39b475be030d6
class RaggedFloat(object): <NEW_LINE> <INDENT> def __init__(self, ragged: Union[str, _k2.RaggedFloat, _k2.RaggedShape], values: Optional[torch.Tensor] = None): <NEW_LINE> <INDENT> if isinstance(ragged, str): <NEW_LINE> <INDENT> ragged = _k2.RaggedFloat(ragged) <NEW_LINE> assert values is None <NEW_LINE> <DEDENT> elif isinstance(ragged, _k2.RaggedShape): <NEW_LINE> <INDENT> assert values is not None <NEW_LINE> ragged = _k2.RaggedFloat(ragged, values) <NEW_LINE> <DEDENT> assert isinstance(ragged, _k2.RaggedFloat) <NEW_LINE> self.ragged = ragged <NEW_LINE> if values is not None: <NEW_LINE> <INDENT> self._values = values <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._values = ragged.values() <NEW_LINE> <DEDENT> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return str(self.ragged) <NEW_LINE> <DEDENT> @property <NEW_LINE> def values(self) -> torch.Tensor: <NEW_LINE> <INDENT> return self._values <NEW_LINE> <DEDENT> @property <NEW_LINE> def grad(self) -> torch.Tensor: <NEW_LINE> <INDENT> return self._values.grad <NEW_LINE> <DEDENT> @property <NEW_LINE> def requires_grad(self) -> bool: <NEW_LINE> <INDENT> return self._values.requires_grad <NEW_LINE> <DEDENT> def requires_grad_(self, requires_grad: bool) -> 'RaggedFloat': <NEW_LINE> <INDENT> self._values.requires_grad_(requires_grad) <NEW_LINE> return self <NEW_LINE> <DEDENT> def to(self, device: Union[torch.device, str]) -> 'RaggedFloat': <NEW_LINE> <INDENT> if isinstance(device, str): <NEW_LINE> <INDENT> device = torch.device(device) <NEW_LINE> <DEDENT> assert device.type in ('cpu', 'cuda') <NEW_LINE> if device == self.values.device: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> ragged_shape = self.ragged.shape().to(device) <NEW_LINE> values = self.values.to(device) <NEW_LINE> return RaggedFloat(ragged_shape, values)
A ragged float tensor. It is a wrapper of :class:`_k2.RaggedFloat`, whose purpose is to implement autograd for :class:`_k2.RaggedFloat`. Currently, it is used only in `k2.ragged.normalize_scores`.
62598fa6435de62698e9bce9
class XYCoords(Generic[CoordType], metaclass=XYCoordMeta): <NEW_LINE> <INDENT> x: CoordType <NEW_LINE> y: CoordType <NEW_LINE> X: CoordType <NEW_LINE> Y: CoordType
Provides an x,y coordinate pair type for the type checker.
62598fa61f5feb6acb162b16
class CeleryBzrsyncdJobLayer(AppServerLayer): <NEW_LINE> <INDENT> celeryd = None <NEW_LINE> @classmethod <NEW_LINE> @profiled <NEW_LINE> def setUp(cls): <NEW_LINE> <INDENT> cls.celeryd = celeryd('bzrsyncd_job') <NEW_LINE> cls.celeryd.__enter__() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @profiled <NEW_LINE> def tearDown(cls): <NEW_LINE> <INDENT> cls.celeryd.__exit__(None, None, None) <NEW_LINE> cls.celeryd = None
Layer for tests that run jobs that read from branches via Celery.
62598fa64f88993c371f0484
class DebugServer(StoppableThread): <NEW_LINE> <INDENT> def __init__(self, local=None, host="localhost", port=2000): <NEW_LINE> <INDENT> self.__server_socket = None <NEW_LINE> self.__connections = [] <NEW_LINE> self.__local = local <NEW_LINE> self.__host = host <NEW_LINE> self.__port = port <NEW_LINE> StoppableThread.__init__(self, "debug server thread") <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while self._run_state.is_running(): <NEW_LINE> <INDENT> if self.__server_socket is None: <NEW_LINE> <INDENT> self.__setup_server_socket() <NEW_LINE> <DEDENT> session = self.__accept_connection() <NEW_LINE> if session is not None: <NEW_LINE> <INDENT> self.__connections.append(session) <NEW_LINE> session.start() <NEW_LINE> <DEDENT> if len(self.__connections) > 0: <NEW_LINE> <INDENT> remaining_connections = [] <NEW_LINE> for connection in self.__connections: <NEW_LINE> <INDENT> if connection.isAlive(): <NEW_LINE> <INDENT> remaining_connections.append(connection) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> connection.join(1) <NEW_LINE> <DEDENT> <DEDENT> self.__connections = remaining_connections <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __accept_connection(self): <NEW_LINE> <INDENT> if self.__server_socket is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> client, addr = self.__server_socket.accept() <NEW_LINE> return DebugConnection(self.__local, client, self.__host, self.__port) <NEW_LINE> <DEDENT> except socket.timeout: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.exception( "Failure while accepting new debug connection. 
Resetting socket" ) <NEW_LINE> self.__close_socket() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def __setup_server_socket(self): <NEW_LINE> <INDENT> if self.__server_socket is not None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.__server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.__server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> self.__server_socket.settimeout(1) <NEW_LINE> self.__server_socket.bind((self.__host, self.__port)) <NEW_LINE> self.__server_socket.listen(5) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.exception( "Failure while accepting new debug connection. Resetting socket" ) <NEW_LINE> self.__close_socket() <NEW_LINE> <DEDENT> <DEDENT> def __close_socket(self): <NEW_LINE> <INDENT> if self.__server_socket is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__server_socket.shutdown(socket.SHUT_RDWR) <NEW_LINE> self.__server_socket = None <NEW_LINE> <DEDENT> except socket.error: <NEW_LINE> <INDENT> self.__server_socket = None
A HTTP Server that accepts connections from local host to an interactive Python shell. This can be used for debugging purposes. The interactive Python shell allows you to inspect the state of the running Python process including global variables, etc. This currently creates a new thread for every incoming connection.
62598fa6435de62698e9bcea
class EveryNthEpochExtension(TrainExtension): <NEW_LINE> <INDENT> def __init__(self, nth_epoch, including_zero=True): <NEW_LINE> <INDENT> self.nth_epoch = nth_epoch <NEW_LINE> self.including_zero = including_zero <NEW_LINE> self._count = 0 <NEW_LINE> <DEDENT> def on_monitor(self, model, dataset, algorithm): <NEW_LINE> <INDENT> if self._count == 0 and self.including_zero or self._count !=0 and self._count % self.nth_epoch == 0: <NEW_LINE> <INDENT> self.apply(model, dataset, algorithm) <NEW_LINE> <DEDENT> self._count += 1 <NEW_LINE> <DEDENT> def apply(self, model, dataset, algorithm): <NEW_LINE> <INDENT> raise NotImplementedError
Apply some method every Nth epoch. Abstract base class.
62598fa6d7e4931a7ef3bf90
class Profile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> nb_adv_played = models.IntegerField(blank=True) <NEW_LINE> nb_adv_created = models.IntegerField(blank=True)
Based on the user model, we extend it with new info
62598fa69c8ee823130400ea
class Find(ClientActionStub): <NEW_LINE> <INDENT> in_rdfvalue = rdf_client.FindSpec <NEW_LINE> out_rdfvalues = [rdf_client.FindSpec]
Recurses through a directory returning files which match conditions.
62598fa6e5267d203ee6b801
class SerialReader(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, port, chunkSize=1024, chunks=5000): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.buffer = np.zeros(chunks*chunkSize, dtype=np.uint16) <NEW_LINE> self.chunks = chunks <NEW_LINE> self.chunkSize = chunkSize <NEW_LINE> self.ptr = 0 <NEW_LINE> self.port = port <NEW_LINE> self.sps = 0.0 <NEW_LINE> self.exitFlag = False <NEW_LINE> self.exitMutex = threading.Lock() <NEW_LINE> self.dataMutex = threading.Lock() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> exitMutex = self.exitMutex <NEW_LINE> dataMutex = self.dataMutex <NEW_LINE> buffer = self.buffer <NEW_LINE> port = self.port <NEW_LINE> count = 0 <NEW_LINE> sps = None <NEW_LINE> lastUpdate = pg.ptime.time() <NEW_LINE> while True: <NEW_LINE> <INDENT> with exitMutex: <NEW_LINE> <INDENT> if self.exitFlag: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> data = port.read(self.chunkSize*2) <NEW_LINE> data = np.fromstring(data, dtype=np.uint16) <NEW_LINE> count += self.chunkSize <NEW_LINE> now = pg.ptime.time() <NEW_LINE> dt = now-lastUpdate <NEW_LINE> if dt > 1.0: <NEW_LINE> <INDENT> if sps is None: <NEW_LINE> <INDENT> sps = count / dt <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sps = sps * 0.9 + (count / dt) * 0.1 <NEW_LINE> <DEDENT> count = 0 <NEW_LINE> lastUpdate = now <NEW_LINE> <DEDENT> with dataMutex: <NEW_LINE> <INDENT> buffer[self.ptr:self.ptr+self.chunkSize] = data <NEW_LINE> self.ptr = (self.ptr + self.chunkSize) % buffer.shape[0] <NEW_LINE> if sps is not None: <NEW_LINE> <INDENT> self.sps = sps <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self, num, downsample=1): <NEW_LINE> <INDENT> with self.dataMutex: <NEW_LINE> <INDENT> ptr = self.ptr <NEW_LINE> if ptr-num < 0: <NEW_LINE> <INDENT> data = np.empty(num, dtype=np.uint16) <NEW_LINE> data[:num-ptr] = self.buffer[ptr-num:] <NEW_LINE> data[num-ptr:] = self.buffer[:ptr] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = 
self.buffer[self.ptr-num:self.ptr].copy() <NEW_LINE> <DEDENT> rate = self.sps <NEW_LINE> <DEDENT> data = data.astype(np.float32) * (3.3 / 2**12) <NEW_LINE> if downsample > 1: <NEW_LINE> <INDENT> data = data.reshape(num/downsample,downsample).mean(axis=1) <NEW_LINE> num = data.shape[0] <NEW_LINE> return np.linspace(0, (num-1)*1e-6*downsample, num), data, rate <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.linspace(0, (num-1)*1e-6, num), data, rate <NEW_LINE> <DEDENT> <DEDENT> def exit(self): <NEW_LINE> <INDENT> with self.exitMutex: <NEW_LINE> <INDENT> self.exitFlag = True
Defines a thread for reading and buffering serial data. By default, about 5MSamples are stored in the buffer. Data can be retrieved from the buffer by calling get(N)
62598fa64f6381625f199438
class OSMWay(object): <NEW_LINE> <INDENT> def __init__(self, way_id, nodes_id, attrs): <NEW_LINE> <INDENT> super(OSMWay, self).__init__() <NEW_LINE> if not isinstance(attrs, dict): <NEW_LINE> <INDENT> raise TypeError('attrs should be type of dict') <NEW_LINE> <DEDENT> self.id = way_id <NEW_LINE> self.nodes_id = nodes_id <NEW_LINE> self.attrs = attrs
docstring for Way
62598fa6fff4ab517ebcd6da
class TestViewsDepends(ModuleTestCase): <NEW_LINE> <INDENT> module = 'sale_data_warehouse'
Test views and depends
62598fa666673b3332c302bf
class NotifyHandler(webapp2.RequestHandler): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> logging.info('Got a notification with payload %s', self.request.body) <NEW_LINE> data = json.loads(self.request.body) <NEW_LINE> userid = data['userToken'] <NEW_LINE> self.mirror_service = util.create_service( 'mirror', 'v1', StorageByKeyName(Credentials, userid, 'credentials').get()) <NEW_LINE> if data.get('collection') == 'locations': <NEW_LINE> <INDENT> self._handle_locations_notification(data) <NEW_LINE> <DEDENT> elif data.get('collection') == 'timeline': <NEW_LINE> <INDENT> self._handle_timeline_notification(data) <NEW_LINE> <DEDENT> <DEDENT> def _handle_locations_notification(self, data): <NEW_LINE> <INDENT> location = self.mirror_service.locations().get(id=data['itemId']).execute() <NEW_LINE> text = 'Python Quick Start says you are at %s by %s.' % (location.get('latitude'), location.get('longitude')) <NEW_LINE> body = { 'text': text, 'location': location, 'menuItems': [{'action': 'NAVIGATE'}], 'notification': {'level': 'DEFAULT'} } <NEW_LINE> self.mirror_service.timeline().insert(body=body).execute() <NEW_LINE> <DEDENT> def _handle_timeline_notification(self, data): <NEW_LINE> <INDENT> for user_action in data.get('userActions', []): <NEW_LINE> <INDENT> item = self.mirror_service.timeline().get(id=data['itemId']).execute() <NEW_LINE> if user_action.get('type') == 'SHARE': <NEW_LINE> <INDENT> body = { 'text': 'Python Quick Start got your photo! %s' % item.get('text', '') } <NEW_LINE> self.mirror_service.timeline().patch( id=data['itemId'], body=body).execute() <NEW_LINE> break <NEW_LINE> <DEDENT> elif user_action.get('type') == 'LAUNCH': <NEW_LINE> <INDENT> note_text = item.get('text', ''); <NEW_LINE> utterance = choice(CAT_UTTERANCES) <NEW_LINE> item['text'] = None <NEW_LINE> item['html'] = ("<article class='auto-paginate'>" + "<p class='text-auto-size'>" + "Oh, did you say " + note_text + "? 
" + utterance + "</p>" + "<footer><p>Python Quick Start</p></footer></article>") <NEW_LINE> item['menuItems'] = [{ 'action': 'DELETE' }]; <NEW_LINE> self.mirror_service.timeline().update( id=item['id'], body=item).execute() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.info( "I don't know what to do with this notification: %s", user_action)
Request Handler for notification pings.
62598fa6cc0a2c111447af04
class ResultsDialog(BaseDialog): <NEW_LINE> <INDENT> SELECTED = 4 <NEW_LINE> URL = 5 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(ResultsDialog, self).__init__('Results', ('Close', gtk.RESPONSE_CLOSE, 'Add selected torrents', gtk.RESPONSE_YES), ui_file='results.ui') <NEW_LINE> self.set_default_response(gtk.RESPONSE_YES) <NEW_LINE> self.results_store = self.builder.get_object('results_store') <NEW_LINE> <DEDENT> def _format_date(self, str_date, input_format=None, output_format=None): <NEW_LINE> <INDENT> if input_format is None: <NEW_LINE> <INDENT> input_format = '%a, %d %b %Y %H:%M:%S %Z' <NEW_LINE> <DEDENT> if output_format is None: <NEW_LINE> <INDENT> output_format = '%d-%b-%Y' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> now = datetime.utcnow() <NEW_LINE> as_datetime = datetime.strptime(str_date, input_format) <NEW_LINE> delta = now - as_datetime <NEW_LINE> if delta.days <= 1: <NEW_LINE> <INDENT> output_date = 'Today' <NEW_LINE> <DEDENT> elif 1 < delta.days and delta.days < 2: <NEW_LINE> <INDENT> output_date = 'Yesterday' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_date = as_datetime.strftime(output_format) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> output_date = '-' <NEW_LINE> <DEDENT> return output_date <NEW_LINE> <DEDENT> def _format_votes(self, votes): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> votes = int(votes) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> votes = 0 <NEW_LINE> <DEDENT> color = votes >= 0 and '#006400' or 'red' <NEW_LINE> votes_data = '<span color="%s"><b>%+d</b></span>' % (color, votes) <NEW_LINE> return votes_data <NEW_LINE> <DEDENT> def populate(self, results): <NEW_LINE> <INDENT> self.results_store.clear() <NEW_LINE> for result in results: <NEW_LINE> <INDENT> pub_date = self._format_date(result['pubDate']) <NEW_LINE> votes = self._format_votes(result['votes']) <NEW_LINE> row = [result['title'], result['seeds'], result['leechers'], result['size'], False, result['url'], pub_date, votes] <NEW_LINE> 
self.results_store.append(row) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def selected(self): <NEW_LINE> <INDENT> selected = [t[self.URL] for t in self.results_store if t[self.SELECTED]] <NEW_LINE> return selected <NEW_LINE> <DEDENT> def on_torrent_toggled(self, renderer, path): <NEW_LINE> <INDENT> current_value = renderer.get_active() <NEW_LINE> tree_iter = self.results_store.get_iter_from_string(path) <NEW_LINE> self.results_store.set(tree_iter, self.SELECTED, not current_value)
Torrent results dialog.
62598fa60a50d4780f7052d2
class ConfigStub: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> def section(self, name): <NEW_LINE> <INDENT> return self.data[name] <NEW_LINE> <DEDENT> def get(self, sect, opt): <NEW_LINE> <INDENT> data = self.data[sect] <NEW_LINE> try: <NEW_LINE> <INDENT> return data[opt] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise configexc.NoOptionError(opt, sect)
Stub for basekeyparser.config. Attributes: data: The config data to return.
62598fa691f36d47f2230e1e
class ApplicationGatewayAvailableWafRuleSetsResult(Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ApplicationGatewayFirewallRuleSet]'}, } <NEW_LINE> def __init__(self, value=None): <NEW_LINE> <INDENT> self.value = value
Response for ApplicationGatewayAvailableWafRuleSets API service call. :param value: The list of application gateway rule sets. :type value: list of :class:`ApplicationGatewayFirewallRuleSet <azure.mgmt.network.v2017_06_01.models.ApplicationGatewayFirewallRuleSet>`
62598fa6d53ae8145f918382
class IAMUserMFAEnabledCheck(IAMUserCheck): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> if self.user_dict['mfa_active'] == 'true': <NEW_LINE> <INDENT> self.status = common.CheckState.PASS <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.status = common.CheckState.FAIL
Checks if the account has MFA enabled.
62598fa6bd1bec0571e1503e
class AcceptOwnerEvent(ChainEvent): <NEW_LINE> <INDENT> pass
An event signaling that a message is accepted to the -owner address.
62598fa6dd821e528d6d8e2b
class LinearFeatureBaseline(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, reg_coeff=1e-5): <NEW_LINE> <INDENT> super(LinearFeatureBaseline, self).__init__() <NEW_LINE> self.input_size = input_size <NEW_LINE> self._reg_coeff = reg_coeff <NEW_LINE> self.linear = nn.Linear(self.feature_size, 1, bias=False) <NEW_LINE> self.linear.weight.data.zero_() <NEW_LINE> <DEDENT> @property <NEW_LINE> def feature_size(self): <NEW_LINE> <INDENT> return 2 * self.input_size + 4 <NEW_LINE> <DEDENT> def _feature(self, episodes): <NEW_LINE> <INDENT> ones = episodes.mask.unsqueeze(2) <NEW_LINE> observations = episodes.observations * ones <NEW_LINE> cum_sum = torch.cumsum(ones, dim=0) * ones <NEW_LINE> al = cum_sum / 100.0 <NEW_LINE> return torch.cat([observations, observations ** 2, al, al ** 2, al ** 3, ones], dim=2) <NEW_LINE> <DEDENT> def fit(self, episodes): <NEW_LINE> <INDENT> featmat = self._feature(episodes).view(-1, self.feature_size) <NEW_LINE> returns = episodes.returns.view(-1, 1) <NEW_LINE> reg_coeff = self._reg_coeff <NEW_LINE> eye = torch.eye(self.feature_size, dtype=torch.float32, device=self.linear.weight.device) <NEW_LINE> for _ in range(5): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> coeffs, _ = torch.gels( torch.matmul(featmat.t(), returns), torch.matmul(featmat.t(), featmat) + reg_coeff * eye ) <NEW_LINE> break <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> reg_coeff += 10 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError('Unable to solve the normal equations in ' '`LinearFeatureBaseline`. The matrix X^T*X (with X the design ' 'matrix) is not full-rank, regardless of the regularization ' '(maximum regularization: {0}).'.format(reg_coeff)) <NEW_LINE> <DEDENT> self.linear.weight.data = coeffs.data.t() <NEW_LINE> <DEDENT> def forward(self, episodes): <NEW_LINE> <INDENT> features = self._feature(episodes) <NEW_LINE> return self.linear(features)
Linear baseline based on handcrafted features, as described in [1] (Supplementary Material 2). [1] Yan Duan, Xi Chen, Rein Houthooft, John Schulman, Pieter Abbeel, "Benchmarking Deep Reinforcement Learning for Continuous Control", 2016 (https://arxiv.org/abs/1604.06778)
62598fa645492302aabfc3c7
class NullfallBaeumeStraeucher(Draw_Bodenbedeckung): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(NullfallBaeumeStraeucher, self).__init__() <NEW_LINE> self.bodenbedeckung = 4 <NEW_LINE> self.planfall = False
Implementation for rpc_tools.bewohner_schaetzen (Button)
62598fa64e4d56256637231b
class ListMeta(type): <NEW_LINE> <INDENT> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> doc = attrs['__doc__'] <NEW_LINE> tab_size = 4 <NEW_LINE> min_indent = min([len(a) - len(b) for a, b in zip(doc.splitlines(), [l.lstrip() for l in doc.splitlines()])]) <NEW_LINE> doc = "".join([line[min_indent:] + '\n' for line in doc.splitlines()]) <NEW_LINE> doc += "Quick method reference:\n\n" <NEW_LINE> doc += "\t{0}.".format(name) + "\n\t{0}.".format(name).join( ["update(changes)", "strip_style()", "get_data()", "to_graph_objs()", "validate()", "to_string()", "force_clean()"]) + "\n\n" <NEW_LINE> attrs['__doc__'] = doc.expandtabs(tab_size) <NEW_LINE> return super(ListMeta, mcs).__new__(mcs, name, bases, attrs)
A meta class for PlotlyList class creation. The sole purpose of this meta class is to properly create the __doc__ attribute so that running help(Obj), where Obj is a subclass of PlotlyList, will return useful information for that object.
62598fa6e5267d203ee6b802
class BackupOperationStatusesOperations(object): <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> @distributed_trace <NEW_LINE> def get( self, vault_name: str, resource_group_name: str, operation_id: str, **kwargs: Any ) -> "_models.OperationStatus": <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> request = build_get_request( vault_name=vault_name, resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, operation_id=operation_id, template_url=self.get.metadata['url'], ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> deserialized = self._deserialize('OperationStatus', pipeline_response) <NEW_LINE> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized <NEW_LINE> <DEDENT> get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupOperations/{operationId}'}
BackupOperationStatusesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.recoveryservicesbackup.activestamp.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
62598fa663d6d428bbee26a8
class UUIDModel(models.Model): <NEW_LINE> <INDENT> id = models.UUIDField( primary_key=True, default=uuid.uuid4, editable=False ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True
This abstract model automatically uses UUID fields for the models instead of auto-incrementing integers.
62598fa68e71fb1e983bb9a8
@python_2_unicode_compatible <NEW_LINE> class CourseVideoUploadsEnabledByDefault(ConfigurationModel): <NEW_LINE> <INDENT> KEY_FIELDS = ('course_id',) <NEW_LINE> course_id = CourseKeyField(max_length=255, db_index=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> not_en = "Not " <NEW_LINE> if self.enabled: <NEW_LINE> <INDENT> not_en = "" <NEW_LINE> <DEDENT> return u"Course '{course_key}': Video Uploads {not_enabled}Enabled by default.".format( course_key=six.text_type(self.course_id), not_enabled=not_en )
Enables video uploads for a specific course. .. no_pii: .. toggle_name: CourseVideoUploadsEnabledByDefault.course_id .. toggle_implementation: ConfigurationModel .. toggle_default: False .. toggle_description: Allow video uploads for a specific course. This enables the "Video Uploads" menu in the CMS. .. toggle_use_cases: open_edx .. toggle_creation_date: 2017-11-10 .. toggle_target_removal_date: None .. toggle_warnings: None .. toggle_tickets: https://github.com/edx/edx-platform/pull/16536
62598fa68e7ae83300ee8f98
class TestIsWithinVisibleSpectrum(unittest.TestCase): <NEW_LINE> <INDENT> def test_is_within_visible_spectrum(self): <NEW_LINE> <INDENT> self.assertTrue( is_within_visible_spectrum(np.array([0.3205, 0.4131, 0.5100])) ) <NEW_LINE> self.assertFalse( is_within_visible_spectrum(np.array([-0.0005, 0.0031, 0.0010])) ) <NEW_LINE> self.assertTrue( is_within_visible_spectrum(np.array([0.4325, 0.3788, 0.1034])) ) <NEW_LINE> self.assertFalse( is_within_visible_spectrum(np.array([0.0025, 0.0088, 0.0340])) ) <NEW_LINE> <DEDENT> def test_n_dimensional_is_within_visible_spectrum(self): <NEW_LINE> <INDENT> a = np.array([0.3205, 0.4131, 0.5100]) <NEW_LINE> b = is_within_visible_spectrum(a) <NEW_LINE> a = np.tile(a, (6, 1)) <NEW_LINE> b = np.tile(b, 6) <NEW_LINE> np.testing.assert_almost_equal(is_within_visible_spectrum(a), b) <NEW_LINE> a = np.reshape(a, (2, 3, 3)) <NEW_LINE> b = np.reshape(b, (2, 3)) <NEW_LINE> np.testing.assert_almost_equal(is_within_visible_spectrum(a), b) <NEW_LINE> <DEDENT> @ignore_numpy_errors <NEW_LINE> def test_nan_is_within_visible_spectrum(self): <NEW_LINE> <INDENT> cases = [-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan] <NEW_LINE> cases = set(permutations(cases * 3, r=3)) <NEW_LINE> for case in cases: <NEW_LINE> <INDENT> is_within_visible_spectrum(case)
Define :func:`colour.volume.spectrum.is_within_visible_spectrum` definition unit tests methods.
62598fa6ac7a0e7691f72401
class IllegalArgumentError(ValueError): <NEW_LINE> <INDENT> pass
bad argument passed to function
62598fa6cb5e8a47e493c0f3
class Permission(models.Model): <NEW_LINE> <INDENT> TYPE_CHOICES = ( (0,'菜单'), (1,'按钮'), (2,'接口'), ) <NEW_LINE> title = models.CharField(max_length=32,verbose_name='权限名称') <NEW_LINE> url = models.CharField(max_length=128,verbose_name='URL',blank=True,null=True,unique=True) <NEW_LINE> parent_id = models.IntegerField(verbose_name='上级菜单ID',blank=True,null=True,default=-1) <NEW_LINE> per_type = models.IntegerField(choices=TYPE_CHOICES,verbose_name='权限类型') <NEW_LINE> authority = models.CharField(verbose_name='权限标识',max_length=64,blank=True,null=True) <NEW_LINE> order_number = models.IntegerField(verbose_name='序号',blank=True,null=True) <NEW_LINE> menu_icon = models.CharField(max_length=64,verbose_name='菜单图标',blank=True,null=True) <NEW_LINE> c_time = models.DateTimeField(auto_now_add=True, null=True, verbose_name='创建时间', blank=True) <NEW_LINE> u_time = models.DateTimeField(auto_now=True, null=True, verbose_name='更新时间', blank=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'permission' <NEW_LINE> verbose_name = "权限表" <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def save(self, force_insert=False, force_update=False, using=None, update_fields=None): <NEW_LINE> <INDENT> conn = get_redis_connection() <NEW_LINE> conn.hincrby('user_or_role_update', 'counter') <NEW_LINE> super(Permission, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
权限表
62598fa632920d7e50bc5f4d
class T(): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def cat(cls): <NEW_LINE> <INDENT> print(cls.__module__) <NEW_LINE> print(cls.__doc__)
class doc
62598fa638b623060ffa8f8d
@python_2_unicode_compatible <NEW_LINE> class ContactPreference(models.Model): <NEW_LINE> <INDENT> name = models.CharField(_("name"), max_length=255) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("contact preference") <NEW_LINE> verbose_name_plural = _("contact preferences") <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> icons = { 'by email': 'sent', 'by snail mail': 'envelope', 'by phone': 'phone-alt', 'by website': 'globe', } <NEW_LINE> title=self.name.capitalize() <NEW_LINE> template='<i class="glyphicon glyphicon-{cls}" title="{title}"></i> ' <NEW_LINE> return format_html(template, cls=icons.get(self.name), title=title) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Contact preference for a profile, whether by email, telephone or snail mail.
62598fa67d847024c075c2bb
class SecondOrStepTimer(object): <NEW_LINE> <INDENT> def __init__(self, every_secs=None, every_steps=None): <NEW_LINE> <INDENT> self._every_secs = every_secs <NEW_LINE> self._every_steps = every_steps <NEW_LINE> self._last_triggered_step = None <NEW_LINE> self._last_triggered_time = None <NEW_LINE> if self._every_secs is None and self._every_steps is None: <NEW_LINE> <INDENT> raise ValueError("Either every_secs or every_steps should be provided.") <NEW_LINE> <DEDENT> if (self._every_secs is not None) and (self._every_steps is not None): <NEW_LINE> <INDENT> raise ValueError("Can not provide both every_secs and every_steps.") <NEW_LINE> <DEDENT> <DEDENT> def should_trigger_for_step(self, step): <NEW_LINE> <INDENT> if self._last_triggered_step is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self._last_triggered_step == step: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._every_secs is not None: <NEW_LINE> <INDENT> if time.time() >= self._last_triggered_time + self._every_secs: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> if self._every_steps is not None: <NEW_LINE> <INDENT> if step >= self._last_triggered_step + self._every_steps: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def update_last_triggered_step(self, step): <NEW_LINE> <INDENT> current_time = time.time() <NEW_LINE> if self._last_triggered_time is None: <NEW_LINE> <INDENT> elapsed_secs = None <NEW_LINE> elapsed_steps = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> elapsed_secs = current_time - self._last_triggered_time <NEW_LINE> elapsed_steps = step - self._last_triggered_step <NEW_LINE> <DEDENT> self._last_triggered_time = current_time <NEW_LINE> self._last_triggered_step = step <NEW_LINE> return (elapsed_secs, elapsed_steps) <NEW_LINE> <DEDENT> def last_triggered_step(self): <NEW_LINE> <INDENT> return self._last_triggered_step
Timer that triggers at most once every N seconds or once every N steps.
62598fa6167d2b6e312b6e67
class occupancy(tflAPI): <NEW_LINE> <INDENT> def getBikePointByIDs(self, ids): <NEW_LINE> <INDENT> return super(occupancy, self).sendRequestUnified( f"/Occupancy/BikePoints/{self.arrayToCSV(ids)}", {} ) <NEW_LINE> <DEDENT> def getCarParkByID(self, id): <NEW_LINE> <INDENT> return super(occupancy, self).sendRequestUnified( f"/Occupancy/CarPark/{id}", {} ) <NEW_LINE> <DEDENT> def getChargeConnectorByID(self, id): <NEW_LINE> <INDENT> return super(occupancy, self).sendRequestUnified( f"/Occupancy/ChargeConnector/{id}", {} ) <NEW_LINE> <DEDENT> def getAllCarParks(self): <NEW_LINE> <INDENT> return super(occupancy, self).sendRequestUnified( f"/Occupancy/CarPark", {} ) <NEW_LINE> <DEDENT> def getAllChargeConnectors(self): <NEW_LINE> <INDENT> return super(occupancy, self).sendRequestUnified( f"/Occupancy/ChargeConnector", {} )
Occupancy from Unified API
62598fa6f7d966606f747edb