code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class Panel(object): <NEW_LINE> <INDENT> def __init__(self, drawing, larghezza, altezza, model, struct = None): <NEW_LINE> <INDENT> self.drawing = drawing <NEW_LINE> self.l = larghezza <NEW_LINE> self.h = altezza <NEW_LINE> self.model = model <NEW_LINE> self.x = 0.0 <NEW_LINE> self.y = 0.0 <NEW_LINE> self.z = 0.0 <NEW_LINE> if struct: <NEW_LINE> <INDENT> self.setOnStruct(struct) <NEW_LINE> <DEDENT> <DEDENT> def setOnStruct(self, struct): <NEW_LINE> <INDENT> self.l = struct.l <NEW_LINE> self.h = struct.h <NEW_LINE> self.model = struct.model <NEW_LINE> <DEDENT> def basePanel(self): <NEW_LINE> <INDENT> x = self.x <NEW_LINE> y = self.y <NEW_LINE> l = self.l <NEW_LINE> h = self.h <NEW_LINE> LayerStruttura = self.drawing.dsLayerManager.GetLayer("lamiera") <NEW_LINE> LayerStruttura.Activate() <NEW_LINE> Coordinates1 = array.array('d',(x,h,x,y,l,y,l,h)) <NEW_LINE> plineExt = self.drawing.dsSketchManager.InsertPolyline2D(Coordinates1, True ) <NEW_LINE> <DEDENT> def quoteBasePanel(self): <NEW_LINE> <INDENT> x = self.x <NEW_LINE> y = self.y <NEW_LINE> z = self.z <NEW_LINE> l = self.l <NEW_LINE> h = self.h <NEW_LINE> LayerQuote = self.drawing.dsLayerManager.GetLayer("quote_lamiera") <NEW_LINE> LayerQuote.Activate() <NEW_LINE> pql1 = array.array('d',(x,y,z)) <NEW_LINE> pql2 = array.array('d',(l,y,z)) <NEW_LINE> pqlq = array.array('d',(l/2,y-200,z)) <NEW_LINE> ql = self.drawing.dsSketchManager.InsertAlignedDimension(pql1, pql2, pqlq, "") | class for draw panels | 62598fa74a966d76dd5eedf2 |
class WorksetPreview(object,IDisposable): <NEW_LINE> <INDENT> def Dispose(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseUnmanagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __enter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __repr__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Id=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> IsDefaultWorkset=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> IsValidObject=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> Name=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> Owner=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> UniqueId=property(lambda self: object(),lambda self,v: None,lambda self: None) | Represents an accessor for workset data which can be obtained from an unopened document. | 62598fa766656f66f7d5a300 |
class ISYInsteonBinarySensorEntity(ISYBinarySensorEntity): <NEW_LINE> <INDENT> def __init__(self, node, force_device_class=None, unknown_state=None) -> None: <NEW_LINE> <INDENT> super().__init__(node, force_device_class) <NEW_LINE> self._negative_node = None <NEW_LINE> self._heartbeat_device = None <NEW_LINE> if self._node.status == ISY_VALUE_UNKNOWN: <NEW_LINE> <INDENT> self._computed_state = unknown_state <NEW_LINE> self._status_was_unknown = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._computed_state = bool(self._node.status) <NEW_LINE> self._status_was_unknown = False <NEW_LINE> <DEDENT> <DEDENT> async def async_added_to_hass(self) -> None: <NEW_LINE> <INDENT> await super().async_added_to_hass() <NEW_LINE> self._node.control_events.subscribe(self._positive_node_control_handler) <NEW_LINE> if self._negative_node is not None: <NEW_LINE> <INDENT> self._negative_node.control_events.subscribe( self._negative_node_control_handler ) <NEW_LINE> <DEDENT> <DEDENT> def add_heartbeat_device(self, device) -> None: <NEW_LINE> <INDENT> self._heartbeat_device = device <NEW_LINE> <DEDENT> def _heartbeat(self) -> None: <NEW_LINE> <INDENT> if self._heartbeat_device is not None: <NEW_LINE> <INDENT> self._heartbeat_device.heartbeat() <NEW_LINE> <DEDENT> <DEDENT> def add_negative_node(self, child) -> None: <NEW_LINE> <INDENT> self._negative_node = child <NEW_LINE> if ( self._negative_node.status != ISY_VALUE_UNKNOWN and self._negative_node.status == self._node.status ): <NEW_LINE> <INDENT> self._computed_state = None <NEW_LINE> <DEDENT> <DEDENT> def _negative_node_control_handler(self, event: object) -> None: <NEW_LINE> <INDENT> if event.control == CMD_ON: <NEW_LINE> <INDENT> _LOGGER.debug( "Sensor %s turning Off via the Negative node sending a DON command", self.name, ) <NEW_LINE> self._computed_state = False <NEW_LINE> self.schedule_update_ha_state() <NEW_LINE> self._heartbeat() <NEW_LINE> <DEDENT> <DEDENT> def _positive_node_control_handler(self, event: object) -> 
None: <NEW_LINE> <INDENT> if event.control == CMD_ON: <NEW_LINE> <INDENT> _LOGGER.debug( "Sensor %s turning On via the Primary node sending a DON command", self.name, ) <NEW_LINE> self._computed_state = True <NEW_LINE> self.schedule_update_ha_state() <NEW_LINE> self._heartbeat() <NEW_LINE> <DEDENT> if event.control == CMD_OFF: <NEW_LINE> <INDENT> _LOGGER.debug( "Sensor %s turning Off via the Primary node sending a DOF command", self.name, ) <NEW_LINE> self._computed_state = False <NEW_LINE> self.schedule_update_ha_state() <NEW_LINE> self._heartbeat() <NEW_LINE> <DEDENT> <DEDENT> def on_update(self, event: object) -> None: <NEW_LINE> <INDENT> if self._status_was_unknown and self._computed_state is None: <NEW_LINE> <INDENT> self._computed_state = bool(self._node.status) <NEW_LINE> self._status_was_unknown = False <NEW_LINE> self.schedule_update_ha_state() <NEW_LINE> self._heartbeat() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_on(self) -> bool: <NEW_LINE> <INDENT> if self._computed_state is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.device_class == DEVICE_CLASS_MOISTURE: <NEW_LINE> <INDENT> return not self._computed_state <NEW_LINE> <DEDENT> return self._computed_state | Representation of an ISY994 Insteon binary sensor device.
Often times, a single device is represented by multiple nodes in the ISY,
allowing for different nuances in how those devices report their on and
off events. This class turns those multiple nodes into a single Home
Assistant entity and handles both ways that ISY binary sensors can work. | 62598fa767a9b606de545edc |
@python_2_unicode_compatible <NEW_LINE> class LogLevel(object): <NEW_LINE> <INDENT> def __init__(self, level, name, traceback=False): <NEW_LINE> <INDENT> self.level = level <NEW_LINE> self.name = name <NEW_LINE> self.traceback = traceback <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<LogLevel %s (%s)>' % (self.name, self.level) <NEW_LINE> <DEDENT> def __int__(self): <NEW_LINE> <INDENT> return self.level <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | A logging level
Args:
level (int): Log level
name (unicode): Level name
traceback (bool): Include traceback when logging | 62598fa799cbb53fe6830de6 |
class IndexListError(C8ServerError): <NEW_LINE> <INDENT> pass | Failed to retrieve collection indexes. | 62598fa74f6381625f199446 |
class AWSServer(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=20) <NEW_LINE> description = models.CharField(max_length=100, null=True, blank=True) <NEW_LINE> aws_access_key = models.CharField(max_length=20) <NEW_LINE> aws_secret_key = models.CharField(max_length=40) <NEW_LINE> aws_region = models.CharField(max_length=20, default='us-east-1') <NEW_LINE> user_name = models.CharField(max_length=20, default='Administrator') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_connection(self): <NEW_LINE> <INDENT> conn = boto.ec2.connect_to_region(self.aws_region, aws_access_key_id=self.aws_access_key, aws_secret_access_key=self.aws_secret_key) <NEW_LINE> all_instances = conn.get_all_instances(filters={'tag:Name': self.name}) <NEW_LINE> if not all_instances: <NEW_LINE> <INDENT> raise AWSNotFoundException("AWS Instance not found, check settings.") <NEW_LINE> <DEDENT> inst = all_instances[0].instances[0] <NEW_LINE> return inst <NEW_LINE> <DEDENT> def start_server(self): <NEW_LINE> <INDENT> inst = self.get_connection() <NEW_LINE> inst.start() <NEW_LINE> <DEDENT> def stop_server(self): <NEW_LINE> <INDENT> inst = self.get_connection() <NEW_LINE> inst.stop() <NEW_LINE> <DEDENT> def get_server_state(self): <NEW_LINE> <INDENT> inst = self.get_connection() <NEW_LINE> return inst.state | AWSServer represents a single EC2 server at AWS | 62598fa756b00c62f0fb27c4 |
class PriceNotFound(UserError): <NEW_LINE> <INDENT> def __init__(self, msg='Price not found'): <NEW_LINE> <INDENT> super(PriceNotFound, self).__init__(msg) | Raised when a price is not found | 62598fa7fff4ab517ebcd6f5 |
class Graph(object): <NEW_LINE> <INDENT> def __init__(self, edges=None, directed=False): <NEW_LINE> <INDENT> self.directed = directed <NEW_LINE> self.original_edges = edges <NEW_LINE> self.out_mapping = defaultdict(lambda: defaultdict(float)) <NEW_LINE> self.in_mapping = defaultdict(lambda: defaultdict(float)) <NEW_LINE> self._edges = [] <NEW_LINE> if edges: <NEW_LINE> <INDENT> self._add_edges(edges) <NEW_LINE> <DEDENT> <DEDENT> def _add_edge(self, source, target, weight=None): <NEW_LINE> <INDENT> if (source, target) not in self._edges: <NEW_LINE> <INDENT> self._edges.append((source, target)) <NEW_LINE> self.out_mapping[source][target] = weight <NEW_LINE> self.in_mapping[target][source] = weight <NEW_LINE> <DEDENT> if ( not self.directed and (source != target) and (target, source) not in self._edges ): <NEW_LINE> <INDENT> self._edges.append((target, source)) <NEW_LINE> self.out_mapping[target][source] = weight <NEW_LINE> self.in_mapping[source][target] = weight <NEW_LINE> <DEDENT> <DEDENT> def _add_edges(self, edges): <NEW_LINE> <INDENT> for edge in edges: <NEW_LINE> <INDENT> self._add_edge(*edge) <NEW_LINE> <DEDENT> <DEDENT> def add_loops(self): <NEW_LINE> <INDENT> self._add_edges((x, x) for x in self.vertices) <NEW_LINE> <DEDENT> @property <NEW_LINE> def edges(self): <NEW_LINE> <INDENT> return self._edges <NEW_LINE> <DEDENT> @property <NEW_LINE> def vertices(self): <NEW_LINE> <INDENT> return list(self.out_mapping.keys()) <NEW_LINE> <DEDENT> def out_dict(self, source): <NEW_LINE> <INDENT> return self.out_mapping[source] <NEW_LINE> <DEDENT> def out_vertices(self, source): <NEW_LINE> <INDENT> return list(self.out_mapping[source].keys()) <NEW_LINE> <DEDENT> def in_dict(self, target): <NEW_LINE> <INDENT> return self.in_mapping[target] <NEW_LINE> <DEDENT> def in_vertices(self, source): <NEW_LINE> <INDENT> return list(self.in_mapping[source].keys()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = "<Graph: {}>".format(repr(self.original_edges)) <NEW_LINE> 
return s | Weighted and directed graph class.
This class is intended for the graph associated to a Markov process,
since it gives easy access to the neighbors of a particular state.
Vertices can be any hashable Python object.
Initialize with a list of edges:
[[node1, node2, weights], ...]
Weights can be omitted for an undirected graph.
For efficiency, neighbors are cached in dictionaries. Undirected
graphs are implemented as directed graphs in which every edge (s, t)
has the opposite edge (t, s).
Attributes
----------
directed: Boolean indicating whether the graph is directed
original_edges: the edges passed into the initializer
out_mapping: a dictionary mapping all heads to dictionaries that map
all tails to their edge weights (None means no weight)
in_mapping: a dictionary mapping all tails to dictionaries that map
all heads to their edge weights (none means to weight)
Properties
----------
vertices: the set of vertices in the graph
edges: the set of current edges in the graph | 62598fa721bff66bcd722b77 |
class Colors(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._colors = {} <NEW_LINE> <DEDENT> def __setitem__(self, key_, value): <NEW_LINE> <INDENT> if isinstance(value, Color): <NEW_LINE> <INDENT> self._colors[key_] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._colors[key_] = Color(*value) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key_): <NEW_LINE> <INDENT> return self._colors[key_] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for key_ in self._colors: <NEW_LINE> <INDENT> yield key_ | Color collection and wrapper for wx.ColourDatabase. Provides
dictionary syntax access (colors[key]) and predefined colors. | 62598fa78c0ade5d55dc3619 |
class gnaf_feb_2020_gda2020: <NEW_LINE> <INDENT> ... <NEW_LINE> class CSVFormat: <NEW_LINE> <INDENT> __slots__= ('Description','Format','FilePath','PathToFiles','DownURL', 'ZipDir', 'ZipPath') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.Description = 'gnaf_feb_2020_gda2020' <NEW_LINE> self.Format = 'CSV' <NEW_LINE> self.PathToFiles = 'FEB20_GNAF_PipeSeparatedValue_GDA2020/G-NAF/G-NAF_FEBRUARY_2020/' <NEW_LINE> self.FilePath = 'FEB20_GNAF_PipeSeparatedValue_GDA2020{slash}G-NAF{slash}G-NAF_FEBRUARY_2020{slash}Standard{slash}ACT_ADDRESS_ALIAS_psv.psv' <NEW_LINE> self.DownURL = 'https://data.gov.au/data/dataset/19432f89-dc3a-4ef3-b943-5326ef1dbecc/resource/fdce090a-b356-4afe-91bb-c78fbf88082a/download/feb20_gnaf_pipeseparatedvalue_gda2020.zip' <NEW_LINE> self.ZipDir = 'csv' <NEW_LINE> self.ZipPath ='feb20_gnaf_pipeseparatedvalue_gda2020.zip' | ABS Australian Boundary | 62598fa756ac1b37e63020fd |
class HTTPMethods(object): <NEW_LINE> <INDENT> GET = 'get' <NEW_LINE> POST = 'post' <NEW_LINE> DELETE = 'delete' | HTTP methods that can be used with Buttercoin's API. | 62598fa71b99ca400228f4b8 |
class Genresult(ndb.Model): <NEW_LINE> <INDENT> name = ndb.StringProperty(indexed=True) <NEW_LINE> email = ndb.StringProperty(indexed=True) <NEW_LINE> date = ndb.DateTimeProperty(auto_now_add=True) <NEW_LINE> generationname = ndb.StringProperty(indexed=True) <NEW_LINE> imageurl = ndb.StringProperty(indexed=False) <NEW_LINE> like = ndb.StringProperty(indexed=True) <NEW_LINE> renderstatus = ndb.StringProperty(indexed=True) <NEW_LINE> script = ndb.StringProperty(indexed=False) <NEW_LINE> renderniter = ndb.StringProperty(indexed=False) <NEW_LINE> rendersize = ndb.StringProperty(indexed=False) <NEW_LINE> publishstatus = ndb.StringProperty(indexed=True) <NEW_LINE> publishtitle = ndb.StringProperty(indexed=False) <NEW_LINE> publishdescription = ndb.StringProperty(indexed=False) <NEW_LINE> publishcategories = ndb.StringProperty(indexed=False) <NEW_LINE> publishtags = ndb.StringProperty(indexed=False) <NEW_LINE> def string(self): <NEW_LINE> <INDENT> content = [] <NEW_LINE> content.append("** " + self.name) <NEW_LINE> content.append(" - name: " + str(self.name)) <NEW_LINE> content.append(" - email: " + str(self.email)) <NEW_LINE> content.append(" - date: " + datedump(self.date)) <NEW_LINE> content.append(" - generationname: " + str(self.generationname)) <NEW_LINE> content.append(" - imageurl: " + str(self.imageurl)) <NEW_LINE> content.append(" - like: " + str(self.like)) <NEW_LINE> content.append(" - renderstatus: " + str(self.renderstatus)) <NEW_LINE> content.append(" - script: " + str(self.script)) <NEW_LINE> content.append(" - renderniter: " + str(self.renderniter)) <NEW_LINE> content.append(" - rendersize: " + str(self.rendersize)) <NEW_LINE> content.append(" - publishstatus: " + str(self.publishstatus)) <NEW_LINE> content.append(" - publishtitle: " + str(self.publishtitle)) <NEW_LINE> content.append(" - publishdescription: " + str(self.publishdescription)) <NEW_LINE> content.append(" - publishcategories: " + str(self.publishcategories)) <NEW_LINE> content.append(" - 
publishtags: " + str(self.publishtags)) <NEW_LINE> return "\n".join(content) | A main model for representing a gen result. | 62598fa799fddb7c1ca62d71 |
class Graph(object): <NEW_LINE> <INDENT> def __init__(self, graph_dict=None,oriented=False): <NEW_LINE> <INDENT> if oriented == True: <NEW_LINE> <INDENT> print ("Oriented graphs are not implemented yet") <NEW_LINE> <DEDENT> if graph_dict == None: <NEW_LINE> <INDENT> graph_dict = {} <NEW_LINE> <DEDENT> self.__graph_dict = graph_dict <NEW_LINE> <DEDENT> def vertices(self): <NEW_LINE> <INDENT> return list(self.__graph_dict.keys()); | Graph represented as dict of node:edges | 62598fa785dfad0860cbf9fd |
class HyperlinkURLDestination(Element): <NEW_LINE> <INDENT> Self = StringField(required=True) <NEW_LINE> Name = StringField() <NEW_LINE> DestinationURL = StringField() <NEW_LINE> Hidden = BooleanField() <NEW_LINE> DestinationUniqueKey = IntField() <NEW_LINE> Properties = EmbeddedDocumentField(Properties) | A hyperlink page destination specifies a web address as the destination for
a hyperlink. | 62598fa7eab8aa0e5d30bc9c |
class VPCRouteTableDefinition(nixops.resources.ResourceDefinition): <NEW_LINE> <INDENT> config: VpcRouteTableOptions <NEW_LINE> @classmethod <NEW_LINE> def get_type(cls): <NEW_LINE> <INDENT> return "vpc-route-table" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_resource_type(cls): <NEW_LINE> <INDENT> return "vpcRouteTables" <NEW_LINE> <DEDENT> def show_type(self): <NEW_LINE> <INDENT> return "{0}".format(self.get_type()) | Definition of a VPC route table | 62598fa7097d151d1a2c0f3a |
@dataclasses.dataclass <NEW_LINE> class Validator(amicus.base.Quirk): <NEW_LINE> <INDENT> validations: ClassVar[Sequence[str]] = [] <NEW_LINE> converters: ClassVar[amicus.base.Catalog] = amicus.base.Catalog() <NEW_LINE> """ Public Methods """ <NEW_LINE> def validate(self, validations: Sequence[str] = None) -> None: <NEW_LINE> <INDENT> if validations is None: <NEW_LINE> <INDENT> validations = self.validations <NEW_LINE> <DEDENT> for name in validations: <NEW_LINE> <INDENT> if hasattr(self, f'_validate_{name}'): <NEW_LINE> <INDENT> kwargs = {name: getattr(self, name)} <NEW_LINE> validated = getattr(self, f'_validate_{name}')(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> converter = self._initialize_converter(name = name) <NEW_LINE> try: <NEW_LINE> <INDENT> validated = converter.validate( item = getattr(self, name), instance = self) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> validated = getattr(self, name) <NEW_LINE> <DEDENT> <DEDENT> setattr(self, name, validated) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> """ Private Methods """ <NEW_LINE> def _initialize_converter(self, name: str) -> Converter: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> converter = self.converters[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise KeyError( f'No local or stored type validator exists for {name}') <NEW_LINE> <DEDENT> return converter() | Mixin for calling validation methods
Args:
validations (List[str]): a list of attributes that need validating.
Each item in 'validations' should have a corresponding method named
f'_validate_{name}' or match a key in 'converters'. Defaults to an
empty list.
converters (amicus.base.Catalog):
| 62598fa7656771135c489593 |
class BulkProcessor(log_publisher.LogPublisher): <NEW_LINE> <INDENT> def __init__(self, logs_in_counter=None, logs_rejected_counter=None): <NEW_LINE> <INDENT> super(BulkProcessor, self).__init__() <NEW_LINE> self.service_region = CONF.region <NEW_LINE> <DEDENT> def send_message(self, logs, global_dimensions=None, log_tenant_id=None): <NEW_LINE> <INDENT> num_of_msgs = len(logs) if logs else 0 <NEW_LINE> to_send_msgs = [] <NEW_LINE> LOG.debug('Bulk package <logs=%d, dimensions=%s, tenant_id=%s>', num_of_msgs, global_dimensions, log_tenant_id) <NEW_LINE> try: <NEW_LINE> <INDENT> for log_el in logs: <NEW_LINE> <INDENT> t_el = self._transform_message(log_el, global_dimensions, log_tenant_id) <NEW_LINE> if t_el: <NEW_LINE> <INDENT> to_send_msgs.append(t_el) <NEW_LINE> <DEDENT> <DEDENT> self._publish(to_send_msgs) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> LOG.error('Failed to send bulk package <logs=%d, dimensions=%s>', num_of_msgs, global_dimensions) <NEW_LINE> LOG.exception(ex) <NEW_LINE> <DEDENT> <DEDENT> def _transform_message(self, log_element, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> validation.validate_log_message(log_element) <NEW_LINE> log_envelope = model.Envelope.new_envelope( log=log_element, tenant_id=args[1], region=self.service_region, dimensions=self._get_dimensions(log_element, global_dims=args[0]) ) <NEW_LINE> msg_payload = (super(BulkProcessor, self) ._transform_message(log_envelope)) <NEW_LINE> return msg_payload <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> LOG.error('Log transformation failed, rejecting log') <NEW_LINE> LOG.exception(ex) <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> def _create_envelope(self, log_element, tenant_id, dimensions=None): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def _get_dimensions(self, log_element, global_dims=None): <NEW_LINE> <INDENT> local_dims = log_element.get('dimensions', {}) <NEW_LINE> if not global_dims: <NEW_LINE> <INDENT> global_dims = {} 
<NEW_LINE> <DEDENT> if local_dims: <NEW_LINE> <INDENT> validation.validate_dimensions(local_dims) <NEW_LINE> <DEDENT> dimensions = global_dims.copy() <NEW_LINE> dimensions.update(local_dims) <NEW_LINE> return dimensions | BulkProcessor for effective log processing and publishing.
BulkProcessor is customized version of
:py:class:`monasca_log_api.app.base.log_publisher.LogPublisher`
that utilizes processing of bulk request inside single loop. | 62598fa7bd1bec0571e1504c |
class nDynamic(serverModel.serverModel): <NEW_LINE> <INDENT> def __init__ (self): <NEW_LINE> <INDENT> self.noServers= 1 <NEW_LINE> self.updateInterval= 0 <NEW_LINE> self.lastUpdate= 0 <NEW_LINE> <DEDENT> def parseJSON(self,js,fName): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.noServers= int (js.pop("number")) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print >> sys.stderr, "Number of servers specified in json for server model must be int" <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.updateInterval= int (js.pop("update_interval")) <NEW_LINE> self.lastUpdate= -self.updateInterval <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print >> sys.stderr, "Number of servers specified in json for server model must be int" <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def chooseServers( clusters, servers, qoeMod): <NEW_LINE> <INDENT> clusters.sort() <NEW_LINE> clusters.reverse() <NEW_LINE> centres= [] <NEW_LINE> slist=list(servers) <NEW_LINE> for c in clusters: <NEW_LINE> <INDENT> dc= c.getClosest(slist, qoeMod) <NEW_LINE> slist.remove(dc) <NEW_LINE> centres.append(dc) <NEW_LINE> <DEDENT> return centres <NEW_LINE> <DEDENT> def updateServers(self,time,session, netMod, qoeMod, routeMod,demMod): <NEW_LINE> <INDENT> if session.changed == False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if time - self.lastUpdate < self.updateInterval: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> choice= netMod.locations() <NEW_LINE> if (self.noServers == len(choice)): <NEW_LINE> <INDENT> session.datacentres= choice <NEW_LINE> return <NEW_LINE> <DEDENT> clusters= cluster.clusterUsers(session.getUsers(), self.noServers, qoeMod) <NEW_LINE> centres= self.chooseServers(clusters,choice,qoeMod) <NEW_LINE> session.datacentres= centres | server model chooses n servers to be clustered | 
62598fa738b623060ffa8fa9 |
class InternalServerError(ApiError): <NEW_LINE> <INDENT> def __init__(self, message=None, response=None): <NEW_LINE> <INDENT> status = 500 <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> status = response.status_code <NEW_LINE> <DEDENT> if message is None: <NEW_LINE> <INDENT> message = 'Internal Server Error' <NEW_LINE> <DEDENT> super().__init__( message=message, status=status, response=response, ) | Internal server error class.
The server has encountered a situation it doesn't know how to handle. | 62598fa72ae34c7f260aaff3 |
class getSimpleChannelContacts_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.LIST, 'ids', (TType.STRING,None), None, ), ) <NEW_LINE> def __init__(self, ids=None,): <NEW_LINE> <INDENT> self.ids = ids <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.ids = [] <NEW_LINE> (_etype1237, _size1234) = iprot.readListBegin() <NEW_LINE> for _i1238 in xrange(_size1234): <NEW_LINE> <INDENT> _elem1239 = iprot.readString() <NEW_LINE> self.ids.append(_elem1239) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getSimpleChannelContacts_args') <NEW_LINE> if self.ids is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ids', TType.LIST, 1) <NEW_LINE> oprot.writeListBegin(TType.STRING, len(self.ids)) <NEW_LINE> for iter1240 in self.ids: <NEW_LINE> <INDENT> oprot.writeString(iter1240) <NEW_LINE> <DEDENT> 
oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.ids) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- ids | 62598fa78da39b475be030f4 |
class tempal_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'userid', None, None, ), ) <NEW_LINE> def __init__(self, userid=None,): <NEW_LINE> <INDENT> self.userid = userid <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.userid = iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('tempal_args') <NEW_LINE> if self.userid is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('userid', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.userid) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.userid) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> 
<INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- userid | 62598fa745492302aabfc3e2 |
class DistributionSelectionSepc(unittest.TestCase): <NEW_LINE> <INDENT> def test_gaussian_distribution(self): <NEW_LINE> <INDENT> case = JTKCycleRun(np.ones(TEST_N), 2*np.arange(TEST_N), None, 2, distribution="normal") <NEW_LINE> self.assertTrue(isinstance(case.distribution, NormalDistribution)) <NEW_LINE> <DEDENT> def test_exact_distribution(self): <NEW_LINE> <INDENT> case = JTKCycleRun(np.ones(TEST_N), 2*np.arange(TEST_N), None, 2, distribution="harding") <NEW_LINE> self.assertTrue(isinstance(case.distribution, HardingDistribution)) | Describe the Null Distribution boolean selector. | 62598fa7435de62698e9bd08 |
class AssociationSet(CSDLElement): <NEW_LINE> <INDENT> XMLNAME = (EDM_NAMESPACE, 'AssociationSet') <NEW_LINE> XMLATTR_Name = ('name', validate_simple_identifier, None) <NEW_LINE> XMLATTR_Association = 'associationName' <NEW_LINE> XMLCONTENT = xml.ElementType.ElementContent <NEW_LINE> def __init__(self, parent): <NEW_LINE> <INDENT> CSDLElement.__init__(self, parent) <NEW_LINE> self.name = "Default" <NEW_LINE> self.associationName = "" <NEW_LINE> self.association = None <NEW_LINE> self.Documentation = None <NEW_LINE> self.AssociationSetEnd = [] <NEW_LINE> self.TypeAnnotation = [] <NEW_LINE> self.ValueAnnotation = [] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_element_class(cls, name): <NEW_LINE> <INDENT> if xmlns.match_expanded_names((EDM_NAMESPACE, 'End'), name, EDM_NAMESPACE_ALIASES): <NEW_LINE> <INDENT> return AssociationSetEnd <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_children(self): <NEW_LINE> <INDENT> if self.Documentation: <NEW_LINE> <INDENT> yield self.Documentation <NEW_LINE> <DEDENT> for child in itertools.chain( self.AssociationSetEnd, self.TypeAnnotation, self.ValueAnnotation, CSDLElement.get_children(self)): <NEW_LINE> <INDENT> yield child <NEW_LINE> <DEDENT> <DEDENT> def update_set_refs(self, scope, stop_on_errors=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.association = scope[self.associationName] <NEW_LINE> if not isinstance(self.association, Association): <NEW_LINE> <INDENT> raise KeyError("%s is not an Association" % self.associationName) <NEW_LINE> <DEDENT> for iEnd in self.AssociationSetEnd: <NEW_LINE> <INDENT> iEnd.update_set_refs(scope, stop_on_errors) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.association = None <NEW_LINE> if stop_on_errors: <NEW_LINE> <INDENT> raise | Represents an association set in the metadata model.
The purpose of the association set is to bind the ends of an
association to entity sets in the container.
Contrast this with the association element which merely describes
the association between entity types.
At first sight this part of the entity data model can be confusing
but imagine an entity container that contains two entity sets
that have the same entity type. Any navigation properties that
reference this type will need to be explicitly bound to one or
other of the entity sets in the container.
As an aside, it isn't really clear if the model was intended to
be used this way. It may have been intended that the entity type
in the definition of an entity set should be unique within the
scope of the entity container. | 62598fa760cbc95b0636425e |
class SignedOffBy(CommitRule): <NEW_LINE> <INDENT> name = "body-requires-signed-off-by" <NEW_LINE> id = "CAP1" <NEW_LINE> def validate(self, commit): <NEW_LINE> <INDENT> for line in commit.message.body: <NEW_LINE> <INDENT> if line.startswith("Signed-off-by"): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> msg = "Body does not contain a 'Signed-Off-By' line" <NEW_LINE> return [RuleViolation(self.id, msg, line_nr=1)] | This rule will enforce that each commit contains a "Signed-off-by" line. | 62598fa75166f23b2e2432e9 |
class BST: <NEW_LINE> <INDENT> def __init__(self, root: TreeNode = None) -> None: <NEW_LINE> <INDENT> self.root = root <NEW_LINE> <DEDENT> def insert(self, key: int, value: Any) -> None: <NEW_LINE> <INDENT> if self.root is None: <NEW_LINE> <INDENT> self.root = TreeNode(key, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.root.insert(key, value) <NEW_LINE> <DEDENT> <DEDENT> def size(self) -> int: <NEW_LINE> <INDENT> if self.root is None: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.root.size <NEW_LINE> <DEDENT> def height(self) -> int: <NEW_LINE> <INDENT> if self.root is None: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return self.root.height <NEW_LINE> <DEDENT> def in_order_traversal(self) -> None: <NEW_LINE> <INDENT> self.root.in_order_traversal() <NEW_LINE> <DEDENT> def pre_order_traversal(self) -> None: <NEW_LINE> <INDENT> self.root.pre_order_traversal() <NEW_LINE> <DEDENT> def post_order_traversal(self) -> None: <NEW_LINE> <INDENT> self.root.post_order_traversal() <NEW_LINE> <DEDENT> def level_order_traversal(self) -> None: <NEW_LINE> <INDENT> self.root.level_order_traversal() | Implementation of the Binary Search Tree ADT. Has the following methods:
__init__(TreeNode=None), insert(int, Any), size(), height(), in_order_traversal(),
pre_order_traversal(), post_order_traversal(), level_order_traversal() | 62598fa7009cb60464d01431 |
class ConvertToSensorUnits(ProcessingNode): <NEW_LINE> <INDENT> nodeClass = 'common' <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> ProcessingNode.__init__(self, name = 'convert to sensor units', mode = 'uneditable', category = 'test', tags = ['convert', 'unit', 'physical'], **kwargs ) <NEW_LINE> <DEDENT> def execute(self, stream, process_limits = None): <NEW_LINE> <INDENT> for tr in stream.traces: <NEW_LINE> <INDENT> station = self.parentStack.project.geometry_inventory.get_station(network = tr.stats.network, name = tr.stats.station, location = tr.stats.location) <NEW_LINE> if len(station) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one stations. This is not yet supported.') <NEW_LINE> <DEDENT> station = station[0] <NEW_LINE> channel = station.get_channel(name = tr.stats.channel) <NEW_LINE> if len(channel) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one channels. This is not yet supported.') <NEW_LINE> <DEDENT> channel = channel[0] <NEW_LINE> stream_tb = channel.get_stream(start_time = tr.stats.starttime, end_time = tr.stats.endtime) <NEW_LINE> if len(stream_tb) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one recorder streams. This is not yet supported.') <NEW_LINE> <DEDENT> rec_stream = stream_tb[0].item <NEW_LINE> rec_stream_param = rec_stream.get_parameter(start_time = tr.stats.starttime, end_time = tr.stats.endtime) <NEW_LINE> if len(rec_stream_param) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one recorder stream parameters. This is not yet supported.') <NEW_LINE> <DEDENT> rec_stream_param = rec_stream_param[0] <NEW_LINE> components_tb = rec_stream.get_component(start_time = tr.stats.starttime, end_time = tr.stats.endtime) <NEW_LINE> if len(components_tb) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one components. 
This is not yet supported.') <NEW_LINE> <DEDENT> component = components_tb[0].item <NEW_LINE> comp_param = component.get_parameter(start_time = tr.stats.starttime, end_time = tr.stats.endtime) <NEW_LINE> if len(comp_param) > 1: <NEW_LINE> <INDENT> raise ValueError('There are more than one parameters for this component. This is not yet supported.') <NEW_LINE> <DEDENT> comp_param = comp_param[0] <NEW_LINE> tr.data = tr.data * rec_stream_param.bitweight / (rec_stream_param.gain * comp_param.sensitivity) <NEW_LINE> tr.stats.unit = component.output_unit.strip() | Detrend a timeseries.
This node uses the detrend method of the obspy stream class to remove the
trend from a timeseries. | 62598fa74f6381625f199447 |
class StationParser(jsonparser.JSONParser): <NEW_LINE> <INDENT> def parse_JSON(self, JSON_string): <NEW_LINE> <INDENT> if JSON_string is None: <NEW_LINE> <INDENT> raise parse_response_error.ParseResponseError('JSON data is None') <NEW_LINE> <DEDENT> d = json.loads(JSON_string) <NEW_LINE> try: <NEW_LINE> <INDENT> name = d['station']['name'] <NEW_LINE> station_ID = d['station']['id'] <NEW_LINE> station_type = d['station']['type'] <NEW_LINE> status = d['station']['status'] <NEW_LINE> lat = d['station']['coord']['lat'] <NEW_LINE> if 'lon' in d['station']['coord']: <NEW_LINE> <INDENT> lon = d['station']['coord']['lon'] <NEW_LINE> <DEDENT> elif 'lng' in d['station']['coord']: <NEW_LINE> <INDENT> lon = d['station']['coord']['lng'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lon = None <NEW_LINE> <DEDENT> if 'distance' in d: <NEW_LINE> <INDENT> distance = d['distance'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> distance = None <NEW_LINE> <DEDENT> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> error_msg = ''.join((__name__, ': unable to read JSON data', )) <NEW_LINE> raise parse_response_error.ParseResponseError(error_msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'last' in d: <NEW_LINE> <INDENT> last_weather = weather.weather_from_dictionary(d['last']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> last_weather = None <NEW_LINE> <DEDENT> <DEDENT> return station.Station(name, station_ID, station_type, status, lat, lon, distance, last_weather) | Concrete *JSONParser* implementation building a *Station* instance
out of raw JSON data coming from OWM Weather API responses. | 62598fa791af0d3eaad39d22 |
class Joystick(): <NEW_LINE> <INDENT> def __init__(self, StepperInstance, JoystickIndex): <NEW_LINE> <INDENT> self.Stepper = StepperInstance <NEW_LINE> pygame.init() <NEW_LINE> self.PygameJoystick = pygame.joystick.Joystick(JoystickIndex) <NEW_LINE> self.PygameJoystick.init() <NEW_LINE> <DEDENT> def get_axes(self, horizontal_axis, vertical_axis, zoom_axis): <NEW_LINE> <INDENT> logging.debug(type(self.PygameJoystick)) <NEW_LINE> horiAxis = self.PygameJoystick.get_axis(horizontal_axis) <NEW_LINE> vertAxis = self.PygameJoystick.get_axis(vertical_axis) <NEW_LINE> zoomAxis = self.PygameJoystick.get_axis(zoom_axis) <NEW_LINE> return (horiAxis, vertAxis, zoom_axis) <NEW_LINE> <DEDENT> def begin_joystick(self, loop = True, horizontal_axis = 0, vertical_axis = 1, zoom_axis = 3): <NEW_LINE> <INDENT> currentHorizontalAxis, currentVerticalAxis, currentZoomAxis = self.get_axes(horizontal_axis, vertical_axis, zoom_axis) <NEW_LINE> logging.debug(currentHorizontalAxis) <NEW_LINE> self.Stepper.write_axes(currentVerticalAxis, currentHorizontalAxis) <NEW_LINE> if self.PygameJoystick.get_button(0) == 1: <NEW_LINE> <INDENT> loop = False <NEW_LINE> <DEDENT> if loop == True: <NEW_LINE> <INDENT> self.begin_joystick(loop, horizontal_axis, vertical_axis, zoom_axis) | A class that uses a joystick to control an arduino
'StepperInstance', takes an instance of Controller().Stepper() | 62598fa70a50d4780f7052f0 |
class Output(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, cfg): <NEW_LINE> <INDENT> self.cfg = cfg <NEW_LINE> self.sessions = {} <NEW_LINE> self.ips = {} <NEW_LINE> self.re_sessionlog = re.compile( '.*HoneyPotTransport,([0-9]+),[0-9.]+$') <NEW_LINE> if self.cfg.has_option('honeypot', 'sensor_name'): <NEW_LINE> <INDENT> self.sensor = self.cfg.get('honeypot', 'sensor_name') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sensor = socket.gethostname() <NEW_LINE> <DEDENT> self.start(cfg) <NEW_LINE> <DEDENT> def logDispatch(self, *msg, **kw): <NEW_LINE> <INDENT> ev = kw <NEW_LINE> ev['message'] = msg <NEW_LINE> self.emit(ev) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def start(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def stop(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def handleLog(self, session, event): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def emit(self, event): <NEW_LINE> <INDENT> if 'printed' in event: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not 'eventid' in event: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ev = copy.copy(event) <NEW_LINE> if 'isError' in ev: <NEW_LINE> <INDENT> del ev['isError'] <NEW_LINE> <DEDENT> ev['sensor'] = self.sensor <NEW_LINE> if not 'time' in ev: <NEW_LINE> <INDENT> ev['timestamp'] = datetime.datetime.today().isoformat() + 'Z' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ev['timestamp'] = datetime.datetime.fromtimestamp(ev['time']).isoformat() + 'Z' <NEW_LINE> del ev['time'] <NEW_LINE> <DEDENT> if 'sessionno' in ev: <NEW_LINE> <INDENT> sessionno = ev['sessionno'] <NEW_LINE> del ev['sessionno'] <NEW_LINE> <DEDENT> elif 'system' in ev: <NEW_LINE> <INDENT> match = self.re_sessionlog.match(ev['system']) <NEW_LINE> if not match: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> sessionno = int(match.groups()[0]) <NEW_LINE> del ev['system'] <NEW_LINE> <DEDENT> if sessionno in self.ips: 
<NEW_LINE> <INDENT> ev['src_ip'] = self.ips[sessionno] <NEW_LINE> <DEDENT> if ev['eventid'] == 'KIPP0001': <NEW_LINE> <INDENT> self.sessions[sessionno] = uuid.uuid4().hex <NEW_LINE> self.ips[sessionno] = ev['src_ip'] <NEW_LINE> del ev['system'] <NEW_LINE> <DEDENT> self.handleLog(self.sessions[sessionno], ev) <NEW_LINE> if ev['eventid'] == 'KIPP0011': <NEW_LINE> <INDENT> del self.sessions[sessionno] <NEW_LINE> del self.ips[sessionno] | This is the abstract base class intended to be inherited by kippo output plugins
Plugins require the mandatory methods: stop, start and handleLog | 62598fa763d6d428bbee26c5 |
class FeesUpdated(WebsocketFilterMessage[FeesUpdatedMessageData]): <NEW_LINE> <INDENT> event: ClassVar[str] = 'fee_update' <NEW_LINE> schema: ClassVar[PSJSONSchema] = PSJSONSchema({ 'properties': { 'bounty_fee': { **uint256, 'srckey': 'bountyFee' }, 'assertion_fee': { **uint256, 'srckey': 'assertionFee' } }, }) | FeesUpdated
doctest:
>>> event = mkevent({'bountyFee': 5000000000000000, 'assertionFee': 5000000000000000 })
>>> decoded_msg(FeesUpdated.serialize_message(event))
{'block_number': 117,
'data': {'assertion_fee': 5000000000000000, 'bounty_fee': 5000000000000000},
'event': 'fee_update',
'txhash': '0000000000000000000000000000000b'} | 62598fa77047854f4633f2ed |
class Kanmusu(models.Model): <NEW_LINE> <INDENT> shipname = models.CharField('艦名', max_length=255) <NEW_LINE> shipclass = models.CharField('艦種', max_length=255, blank=True) <NEW_LINE> shiplevel = models.IntegerField('レベル', blank=True, default=1) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.shipname | 艦娘 | 62598fa73539df3088ecc1c8 |
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'email' | Custom user model that suports using emiail instead of username | 62598fa78e7ae83300ee8fb6 |
class CustomTopo(Topo): <NEW_LINE> <INDENT> def __init__(self, linkopts1, linkopts2, linkopts3, fanout=2, **opts): <NEW_LINE> <INDENT> Topo.__init__(self, **opts) <NEW_LINE> self.fanout = fanout <NEW_LINE> c1 = self.addSwitch('c1') <NEW_LINE> for i in range(fanout): <NEW_LINE> <INDENT> a = self.addSwitch('a%d' % (i + 1)) <NEW_LINE> self.addLink(a, c1, **linkopts1) <NEW_LINE> for j in range(fanout): <NEW_LINE> <INDENT> e = self.addSwitch('e%d' % (j + 1 + i * fanout)) <NEW_LINE> self.addLink(e, a, **linkopts2) <NEW_LINE> for k in range(fanout): <NEW_LINE> <INDENT> h = self.addHost('h%d' % (k + 1 + i * fanout + j * fanout)) <NEW_LINE> self.addLink(h, e, **linkopts3) | Simple Data Center Topology | 62598fa73d592f4c4edbade1 |
class ServiceBusAuthentication(Model): <NEW_LINE> <INDENT> _attribute_map = { 'sas_key': {'key': 'sasKey', 'type': 'str'}, 'sas_key_name': {'key': 'sasKeyName', 'type': 'str'}, 'type': {'key': 'type', 'type': 'ServiceBusAuthenticationType'}, } <NEW_LINE> def __init__(self, sas_key=None, sas_key_name=None, type=None): <NEW_LINE> <INDENT> self.sas_key = sas_key <NEW_LINE> self.sas_key_name = sas_key_name <NEW_LINE> self.type = type | ServiceBusAuthentication.
:param sas_key: Gets or sets the SAS key.
:type sas_key: str
:param sas_key_name: Gets or sets the SAS key name.
:type sas_key_name: str
:param type: Gets or sets the authentication type. Possible values
include: 'NotSpecified', 'SharedAccessKey'
:type type: str or :class:`ServiceBusAuthenticationType
<azure.mgmt.scheduler.models.ServiceBusAuthenticationType>` | 62598fa76e29344779b00570 |
class PyFlaskCompress(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/libwilliam/flask-compress" <NEW_LINE> pypi = "Flask-Compress/Flask-Compress-1.4.0.tar.gz" <NEW_LINE> version('1.4.0', sha256='468693f4ddd11ac6a41bca4eb5f94b071b763256d54136f77957cfee635badb3') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-flask@0.9:', type=('build', 'run')) | Flask-Compress allows you to easily compress your Flask application's
responses with gzip. | 62598fa767a9b606de545ee0 |
class CachingSkyMap(BaseSkyMap): <NEW_LINE> <INDENT> def __init__(self, numTracts, config=None, version=0): <NEW_LINE> <INDENT> super(CachingSkyMap, self).__init__(config) <NEW_LINE> self._numTracts = numTracts <NEW_LINE> self._tractCache = [None] * self._numTracts <NEW_LINE> self._tractInfo = None <NEW_LINE> self._version = version <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return (self.__class__, (self.config, self._version)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for i in range(self._numTracts): <NEW_LINE> <INDENT> yield self[i] <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._numTracts <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if index < 0 or index > self._numTracts: <NEW_LINE> <INDENT> raise IndexError("Index out of range: %d vs %d" % (index, self._numTracts)) <NEW_LINE> <DEDENT> if self._tractCache[index] is not None: <NEW_LINE> <INDENT> return self._tractCache[index] <NEW_LINE> <DEDENT> tract = self.generateTract(index) <NEW_LINE> self._tractCache[index] = tract <NEW_LINE> return tract <NEW_LINE> <DEDENT> def generateTract(self, index): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses must define this method.") | A SkyMap that generates its tracts on request and caches them.
Parameters
----------
numTracts : `int`
Number of tracts to create.
config : `lsst.skymap.BaseSkyMapConfig` (optional)
The configuration for this SkyMap; if None use the default config.
version : `int` or `tuple` of `int` (optional)
Software version of this class, to retain compatibility with old
instances.
Notes
-----
A subclass should define
* __init__ to calculate the required number of tracts (and pass it up)
* generateTract to generate a tract
Subclassers should also check that the arguments to the constructor are
consistent with the below __reduce__ method. | 62598fa792d797404e388aef |
@python_2_unicode_compatible <NEW_LINE> class ImportItem(UUIDModelMixin, TimestampedModelMixin, models.Model): <NEW_LINE> <INDENT> ct_limit = ( models.Q(app_label="alibrary", model="media") | models.Q(app_label="alibrary", model="release") | models.Q(app_label="alibrary", model="artist") | models.Q(app_label="alibrary", model="label") ) <NEW_LINE> import_session = models.ForeignKey( Import, verbose_name=_("Import"), null=True, related_name="importitem_set" ) <NEW_LINE> content_type = models.ForeignKey(ContentType, limit_choices_to=ct_limit) <NEW_LINE> object_id = models.PositiveIntegerField() <NEW_LINE> content_object = GenericForeignKey("content_type", "object_id") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = "importer" <NEW_LINE> verbose_name = _("Import Item") <NEW_LINE> verbose_name_plural = _("Import Items") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return "%s | %s" % ( ContentType.objects.get_for_model(self.content_object), self.content_object.name, ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return "%s" % (self.pk) <NEW_LINE> <DEDENT> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ImportItem, self).save(*args, **kwargs) | stores relations to objects created/assigned during the specific import | 62598fa75166f23b2e2432eb |
class ReadOnlyModelAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> def has_add_permission(self, request, obj=None): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def has_delete_permission(self, request, obj=None): <NEW_LINE> <INDENT> return False | 只读model admin | 62598fa7a17c0f6771d5c149 |
class TzOffset(tzinfo): <NEW_LINE> <INDENT> def __init__(self, offset): <NEW_LINE> <INDENT> self._offset = timedelta(hours=offset) <NEW_LINE> <DEDENT> def utcoffset(self, dt): <NEW_LINE> <INDENT> return self._offset <NEW_LINE> <DEDENT> def tzname(self, dt): <NEW_LINE> <INDENT> return 'TzOffset: {}'.format(self._offset.hours) <NEW_LINE> <DEDENT> def dst(self, dt): <NEW_LINE> <INDENT> return timedelta(0) | Minimal implementation of a timezone offset to help testing with timezone
aware datetimes. | 62598fa70c0af96317c56296 |
class VoucherForm(forms.Form): <NEW_LINE> <INDENT> amount = forms.IntegerField(label=_(u"Amount"), required=True, help_text=_(u'Number of vouchers to create')) <NEW_LINE> value = forms.FloatField(label=_(u"Value"), required=True) <NEW_LINE> start_date = forms.DateField(label=_(u"Start date"), required=True) <NEW_LINE> end_date = forms.DateField(label=_(u"End date"), required=True) <NEW_LINE> kind_of = forms.ChoiceField(label=_(u"Kind of"), choices=KIND_OF_CHOICES, required=True) <NEW_LINE> effective_from = forms.FloatField(label=_(u"Effective from"), required=True, help_text=_(u"Minimum cart price from which the vouchers are valid")) <NEW_LINE> tax = forms.ChoiceField(label=_(u"Tax"), required=False) <NEW_LINE> limit = forms.IntegerField(label=_(u"Limit"), initial=1, required=True, help_text=_(u'How many times voucher can be used')) <NEW_LINE> sums_up = forms.BooleanField(label=_(u"Sums up"), initial=True, required=False, help_text=_(u'Determines if voucher can be summed up with other discounts')) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(VoucherForm, self).__init__(*args, **kwargs) <NEW_LINE> taxes = [["", "---"]] <NEW_LINE> taxes.extend([(t.id, t.rate) for t in Tax.objects.all()]) <NEW_LINE> self.fields["tax"].choices = taxes <NEW_LINE> self.fields["start_date"].widget.attrs = {'class': 'date-picker'} <NEW_LINE> self.fields["end_date"].widget.attrs = {'class': 'date-picker'} | Form to add a Voucher.
| 62598fa744b2445a339b68f9 |
class WalltimeError(AbiFWError): <NEW_LINE> <INDENT> ERROR_CODE = ErrorCode.WALLTIME | Exception raised when the calculation didn't complete within the specified walltime. | 62598fa78e71fb1e983bb9c7 |
class StringsResponse(BaseModel): <NEW_LINE> <INDENT> language: Optional[str] = Field(None, description="Language of the returned strings") <NEW_LINE> age: Optional[List[AgeString]] = Field(None, description="List of all strings and their IDs for ages") <NEW_LINE> civ: Optional[List[CivilizationString]] = Field( None, description="List of all strings and their IDs for civilizations" ) <NEW_LINE> game_type: Optional[List[GameTypeString]] = Field( None, description="List of all strings and their IDs for game types" ) <NEW_LINE> leaderboard: Optional[List[LeaderBoardString]] = Field( None, description="List of all strings and their IDs for leaderboards" ) <NEW_LINE> map_size: Optional[List[MapSizeString]] = Field( None, description="List of all strings and their IDs for map sizes" ) <NEW_LINE> map_type: Optional[List[MapTypeString]] = Field( None, description="List of all strings and their IDs for map types" ) <NEW_LINE> rating_type: Optional[List[RatingTypeString]] = Field( None, description="List of all strings and their IDs for rating types" ) <NEW_LINE> resources: Optional[List[ResourcesString]] = Field( None, description="List of all strings and their IDs for resources" ) <NEW_LINE> speed: Optional[List[SpeedString]] = Field( None, description="List of all strings and their IDs for game speeds" ) <NEW_LINE> victory: Optional[List[VictoryString]] = Field( None, description="List of all strings and their IDs for victory types" ) <NEW_LINE> visibility: Optional[List[VisibilityString]] = Field( None, description="List of all strings and their IDs for visibility" ) | An object to encapsulate the response from the strings API endpoint. | 62598fa73539df3088ecc1c9 |
class TrieTree: <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> self.children = {} <NEW_LINE> self.end = False <NEW_LINE> self.parent = parent <NEW_LINE> self._value = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self.end = True <NEW_LINE> self._value = value <NEW_LINE> <DEDENT> @value.deleter <NEW_LINE> def value(self): <NEW_LINE> <INDENT> self.end = False <NEW_LINE> self._value = None <NEW_LINE> <DEDENT> def __child(self, key): <NEW_LINE> <INDENT> current = self <NEW_LINE> for char in key: <NEW_LINE> <INDENT> if char not in current.children: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> current = current.children[char] <NEW_LINE> <DEDENT> return current <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> current = self <NEW_LINE> for char in key: <NEW_LINE> <INDENT> if char not in current.children: <NEW_LINE> <INDENT> current.children[char] = TrieTree(current) <NEW_LINE> <DEDENT> current = current.children[char] <NEW_LINE> <DEDENT> current.value = value <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> child = self.__child(key) <NEW_LINE> if child is None or not child.end: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> return child.value <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> child = self.__child(key) <NEW_LINE> if child is None: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> del child.value <NEW_LINE> parent = child.parent <NEW_LINE> for char in reversed(key): <NEW_LINE> <INDENT> if (not parent.children[char].children and not parent.children[char].end): <NEW_LINE> <INDENT> del parent.children[char] <NEW_LINE> parent = parent.parent <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> try: <NEW_LINE> 
<INDENT> return self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> child = self.__child(key) <NEW_LINE> return child.end if child is not None else False <NEW_LINE> <DEDENT> def anagrams(self, letters, key=''): <NEW_LINE> <INDENT> result = [] <NEW_LINE> if self.end: <NEW_LINE> <INDENT> result.append(key) <NEW_LINE> <DEDENT> for letter, child in self.children.items(): <NEW_LINE> <INDENT> if letter in letters: <NEW_LINE> <INDENT> result.extend( child.anagrams( letters, key + letter ) ) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def print(self, indent='...'): <NEW_LINE> <INDENT> return "({})".format(self.value) + ''.join( '\n{indent}{name} {rest}'.format( indent=indent, name=name, rest=child.print(indent + '...') ) for name, child in self.children.items() ) | Trie tree. Also called digital, radix or prefix tree.
To create new tree just use default constructor: TrieTree() | 62598fa710dbd63aa1c70ac7 |
class FakeStream(object): <NEW_LINE> <INDENT> def __init__(self, data, size): <NEW_LINE> <INDENT> self._count = 0 <NEW_LINE> self._data = data <NEW_LINE> self._size = size <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._data) <NEW_LINE> <DEDENT> def read(self, *args): <NEW_LINE> <INDENT> if self._count < self._size: <NEW_LINE> <INDENT> data = self._data[self._count:self._count+size//3] <NEW_LINE> self._count += len(data) <NEW_LINE> return data <NEW_LINE> <DEDENT> if self._count == len(self._data): <NEW_LINE> <INDENT> return b'' <NEW_LINE> <DEDENT> raise Exception("break connection") | Send data and simulate a connectivity issue | 62598fa70a50d4780f7052f1 |
class HelperExceptionInner(Exception): <NEW_LINE> <INDENT> pass | Exception for tests to raise to test exception handling. | 62598fa721bff66bcd722b7b |
class StreamSummary(StreamResult): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._hook = _StreamToTestRecord(self._gather_test) <NEW_LINE> self._handle_status = { 'success': self._success, 'skip': self._skip, 'exists': self._exists, 'fail': self._fail, 'xfail': self._xfail, 'uxsuccess': self._uxsuccess, 'unknown': self._incomplete, 'inprogress': self._incomplete, } <NEW_LINE> <DEDENT> def startTestRun(self): <NEW_LINE> <INDENT> super().startTestRun() <NEW_LINE> self.failures = [] <NEW_LINE> self.errors = [] <NEW_LINE> self.testsRun = 0 <NEW_LINE> self.skipped = [] <NEW_LINE> self.expectedFailures = [] <NEW_LINE> self.unexpectedSuccesses = [] <NEW_LINE> self._hook.startTestRun() <NEW_LINE> <DEDENT> def status(self, *args, **kwargs): <NEW_LINE> <INDENT> super().status(*args, **kwargs) <NEW_LINE> self._hook.status(*args, **kwargs) <NEW_LINE> <DEDENT> def stopTestRun(self): <NEW_LINE> <INDENT> super().stopTestRun() <NEW_LINE> self._hook.stopTestRun() <NEW_LINE> <DEDENT> def wasSuccessful(self): <NEW_LINE> <INDENT> return (not self.failures and not self.errors) <NEW_LINE> <DEDENT> def _gather_test(self, test_record): <NEW_LINE> <INDENT> if test_record.status == 'exists': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.testsRun += 1 <NEW_LINE> case = test_record.to_test_case() <NEW_LINE> self._handle_status[test_record.status](case) <NEW_LINE> <DEDENT> def _incomplete(self, case): <NEW_LINE> <INDENT> self.errors.append((case, "Test did not complete")) <NEW_LINE> <DEDENT> def _success(self, case): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _skip(self, case): <NEW_LINE> <INDENT> if 'reason' not in case._details: <NEW_LINE> <INDENT> reason = "Unknown" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reason = case._details['reason'].as_text() <NEW_LINE> <DEDENT> self.skipped.append((case, reason)) <NEW_LINE> <DEDENT> def _exists(self, case): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _fail(self, case): <NEW_LINE> 
<INDENT> message = _details_to_str(case._details, special="traceback") <NEW_LINE> self.errors.append((case, message)) <NEW_LINE> <DEDENT> def _xfail(self, case): <NEW_LINE> <INDENT> message = _details_to_str(case._details, special="traceback") <NEW_LINE> self.expectedFailures.append((case, message)) <NEW_LINE> <DEDENT> def _uxsuccess(self, case): <NEW_LINE> <INDENT> case._outcome = 'addUnexpectedSuccess' <NEW_LINE> self.unexpectedSuccesses.append(case) | A specialised StreamResult that summarises a stream.
The summary uses the same representation as the original
unittest.TestResult contract, allowing it to be consumed by any test
runner. | 62598fa74f88993c371f0494 |
class TestLoggerDevelop(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.log_level = 'WARNING' <NEW_LINE> <DEDENT> def test_logging_message(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> message = input('Enter some log message or q to exit: ') <NEW_LINE> if 'q' in message: <NEW_LINE> <INDENT> sys.exit() <NEW_LINE> <DEDENT> logg = interfaceLog(self.log_level, message) | Tests for LoggerDevelop | 62598fa7627d3e7fe0e06dc1 |
class NoAuthBaseTestCase(BaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> BaseTestCase.setUp(self) <NEW_LINE> self.app.config['LOGIN_DISABLED'] = True <NEW_LINE> self.app.login_manager.init_app(self.app) | Base class for Flask unit tests without authentication.
Apart from the functionality provided by the BaseTestClass, this class disables authentication checking, so that
login_required decorators will be ignored. | 62598fa799cbb53fe6830deb |
class ThermalMap(ColorMap): <NEW_LINE> <INDENT> def __call__(self, x): <NEW_LINE> <INDENT> if x < 0.33: <NEW_LINE> <INDENT> return color.RGBColor(x/0.33, 0, 0) <NEW_LINE> <DEDENT> if x < 0.67: <NEW_LINE> <INDENT> return color.RGBColor(1, (x-0.33)/0.34, 0) <NEW_LINE> <DEDENT> return color.RGBColor(1, 1, (x-0.67)/0.33) | black->red->yellow->white | 62598fa701c39578d7f12c95 |
class FritzDevice: <NEW_LINE> <INDENT> def __init__(self, mac, name=None): <NEW_LINE> <INDENT> self._mac = mac <NEW_LINE> self._name = name <NEW_LINE> self._ip_address = None <NEW_LINE> self._last_activity = None <NEW_LINE> self._connected = False <NEW_LINE> <DEDENT> def update(self, dev_info, dev_home, consider_home): <NEW_LINE> <INDENT> utc_point_in_time = dt_util.utcnow() <NEW_LINE> if not self._name: <NEW_LINE> <INDENT> self._name = dev_info.name or self._mac.replace(":", "_") <NEW_LINE> <DEDENT> if not dev_home and self._last_activity: <NEW_LINE> <INDENT> self._connected = ( utc_point_in_time - self._last_activity ).total_seconds() < consider_home <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._connected = dev_home <NEW_LINE> <DEDENT> if self._connected: <NEW_LINE> <INDENT> self._ip_address = dev_info.ip_address <NEW_LINE> self._last_activity = utc_point_in_time <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._ip_address = None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_connected(self): <NEW_LINE> <INDENT> return self._connected <NEW_LINE> <DEDENT> @property <NEW_LINE> def mac_address(self): <NEW_LINE> <INDENT> return self._mac <NEW_LINE> <DEDENT> @property <NEW_LINE> def hostname(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def ip_address(self): <NEW_LINE> <INDENT> return self._ip_address <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_activity(self): <NEW_LINE> <INDENT> return self._last_activity | FritzScanner device. | 62598fa71b99ca400228f4ba |
class CarrierParcelTemplate(orm.Model): <NEW_LINE> <INDENT> _name = 'carrier.parcel.template' <NEW_LINE> _description = 'Parcel template' <NEW_LINE> _rec_name = 'name' <NEW_LINE> def _get_volumetric_weight( self, cr, uid, ids, fields=None, args=None, context=None): <NEW_LINE> <INDENT> res = {} <NEW_LINE> for template in self.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> res[template.id] = ( template.length * template.width * template.height / 5000.0) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> _columns = { 'is_active': fields.boolean('Attivo'), 'name': fields.char('Name'), 'no_label': fields.boolean('No label'), 'carrier_supplier_id': fields.many2one('carrier.supplier', 'Carrier'), 'length': fields.float('Length', digits=(16, 2), required=True), 'width': fields.float('Width', digits=(16, 2), required=True), 'height': fields.float('Height', digits=(16, 2), required=True), 'dimension_uom_id': fields.many2one('product.uom', 'Product UOM'), 'weight': fields.function( _get_volumetric_weight, string='Weight volumetric', digits=(16, 2), type='float', help='Volumetric weight (H x L x P / 5000)', readonly=True), 'weight_uom_id': fields.many2one('product.uom', 'Product UOM'), 'carrier_connection_id': fields.many2one( 'carrier.connection', 'Carrier Connection', help='Force carrier connection for small package'), 'package_type': fields.selection( [ ('GENERIC', 'Generic'), ('ENVELOPE', 'Envelope'), ('DOCUMENTS', 'Documents'), ], 'Package type', required=True), } <NEW_LINE> _defaults = { 'package_type': lambda *x: 'GENERIC', } | Model name: Parcels template
| 62598fa799fddb7c1ca62d73 |
class Handler(object): <NEW_LINE> <INDENT> def __init__(self, app=None, request=None, settings={}): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.request = request <NEW_LINE> self.settings = settings <NEW_LINE> <DEDENT> def handle(self, **m): <NEW_LINE> <INDENT> method = self.request.method.lower() <NEW_LINE> if hasattr(self, method): <NEW_LINE> <INDENT> self.settings.log.debug("calling method %s on handler '%s' " %(self.request.method, m['handler'])) <NEW_LINE> del m['handler'] <NEW_LINE> return getattr(self, method)(**m) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return werkzeug.exceptions.MethodNotAllowed() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def context(self): <NEW_LINE> <INDENT> d = dict( handler = self, js_jquery_link = self.settings['js_resources']("jquery"), js_head_link = self.settings['js_resources']("head"), jslinks = self.settings['js_resources'](), csslinks = self.settings['css_resources'](), virtual_path = self.settings.virtual_path, ) <NEW_LINE> return PageContext(d) | a request handler which is also the base class for an application | 62598fa74428ac0f6e658438 |
class CredentialTypeAccess(BaseAccess): <NEW_LINE> <INDENT> model = CredentialType <NEW_LINE> def can_read(self, obj): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def can_use(self, obj): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_method_capability(self, method, obj, parent_obj): <NEW_LINE> <INDENT> if obj.managed_by_tower: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return super(CredentialTypeAccess, self).get_method_capability(method, obj, parent_obj) <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> return self.model.objects.all() | I can see credentials types when:
- I'm authenticated
I can create when:
- I'm a superuser:
I can change when:
- I'm a superuser and the type is not "managed by Tower" | 62598fa7eab8aa0e5d30bca0 |
class DmozSpider(Spider): <NEW_LINE> <INDENT> name = "dmoz" <NEW_LINE> allowed_domains = ['dmoztools.net'] <NEW_LINE> start_urls = [ "http://dmoztools.net/Computers/Programming/Languages/Python/Books/", "http://dmoztools.net/Arts/Design/Fashion/Magazines_and_E-zines/Women/" ] <NEW_LINE> def parse(self, response): <NEW_LINE> <INDENT> selector = Selector(response) <NEW_LINE> sites = selector.xpath('//div[@class="title-and-desc"]') <NEW_LINE> items =[] <NEW_LINE> for site in sites: <NEW_LINE> <INDENT> item = DmozItem() <NEW_LINE> item['title'] = site.xpath('a/div/text()').extract() <NEW_LINE> item['link'] = site.xpath('a/@href').extrat() <NEW_LINE> item['desc'] = site.xpath('div/text()').extract() <NEW_LINE> items.append(item) <NEW_LINE> <DEDENT> return items | 首先,Scrapy为爬虫的 start_urls属性中的每个URL创建了一个 scrapy.http.Request 对象 ,并将爬虫的parse 方法指定为回调函数。
然后,这些 Request被调度并执行,之后通过parse()方法返回scrapy.http.Response对象,并反馈给爬虫。 | 62598fa7097d151d1a2c0f3e |
class Operator: <NEW_LINE> <INDENT> def __call__(self, target, flick, alt_ext = None): <NEW_LINE> <INDENT> raise NotImplemented | Interface example for an Operator (but can actually be any callable with the same signature). | 62598fa756ac1b37e6302102 |
class Crawler(): <NEW_LINE> <INDENT> def __init__(self, handler): <NEW_LINE> <INDENT> self.__handler = handler <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> return self.__crawl_next() <NEW_LINE> <DEDENT> def quit(self): <NEW_LINE> <INDENT> if self.is_alive(): <NEW_LINE> <INDENT> self._Thread__stop() <NEW_LINE> thread.exit() <NEW_LINE> <DEDENT> <DEDENT> def __crawl_next(self): <NEW_LINE> <INDENT> sleep( min(time() - self.__handler._last_crawl_time, self.__handler._crawler_delay)) <NEW_LINE> self.__handler._last_crawl_time = time() <NEW_LINE> try: <NEW_LINE> <INDENT> page_url = self.__handler._queue.get(False) <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> page = Page(page_url, self.__handler) <NEW_LINE> self.__handler._last_crawl_time = time() <NEW_LINE> self.__handler._site[page.page_ID] = page <NEW_LINE> self.__handler._url_to_page_id[page_url] = page.page_ID <NEW_LINE> self.__handler._queue.task_done() <NEW_LINE> return self.__crawl_next() | A breadth-first crawler.
:param handler: A reference to the CrawlerHandler object coordinating the crawling. | 62598fa7a8370b77170f02f0 |
class Soupy(Fs): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.content = self.get_file_contents(self.filename) <NEW_LINE> self.soup = BeautifulSoup(self.content) <NEW_LINE> <DEDENT> def get_content(self): <NEW_LINE> <INDENT> return self.content <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> self.rewrite_file(self.filename, self.soup.renderContents()) <NEW_LINE> <DEDENT> def find_tags(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.soup.find_all(*args, **kwargs) <NEW_LINE> <DEDENT> def delete(self, tags): <NEW_LINE> <INDENT> if is_sequence(tags): <NEW_LINE> <INDENT> [s.extract() for s in tags] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tags.extract() | beautiful soup stuff | 62598fa73317a56b869be4d5 |
class MyCustomClassifier(BaseEstimator, ClassifierMixin): <NEW_LINE> <INDENT> def __init__(self, penalty="l1"): <NEW_LINE> <INDENT> BaseEstimator.__init__(self) <NEW_LINE> ClassifierMixin.__init__(self) <NEW_LINE> self.penalty = penalty <NEW_LINE> self.estimator = LogisticRegression(penalty=self.penalty, solver="liblinear") <NEW_LINE> <DEDENT> def fit(self, X, y, sample_weight=None): <NEW_LINE> <INDENT> self.estimator_ = self.estimator.fit(X, y, sample_weight=sample_weight) <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> return self.estimator_.predict(X) <NEW_LINE> <DEDENT> def predict_proba(self, X): <NEW_LINE> <INDENT> return self.estimator_.predict_proba(X) <NEW_LINE> <DEDENT> def decision_function(self, X): <NEW_LINE> <INDENT> return self.estimator_.decision_function(X) | does a simple logistic regression | 62598fa78e7ae83300ee8fb8 |
class TestChangesetTranslationsChecks(FakeCheckoutTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> changed_translation_patch = patch( "pontoon.sync.changeset.ChangeSet.changed_translations", new_callable=PropertyMock, ) <NEW_LINE> self.mock_changed_translations = changed_translation_patch.start() <NEW_LINE> self.addCleanup(changed_translation_patch.stop) <NEW_LINE> <DEDENT> def test_bulk_check_translations_no_translations(self): <NEW_LINE> <INDENT> self.mock_changed_translations.return_value = [] <NEW_LINE> assert self.changeset.bulk_check_translations() == set() <NEW_LINE> assert not Error.objects.exists() <NEW_LINE> assert not Warning.objects.exists() <NEW_LINE> <DEDENT> def test_bulk_check_valid_translations(self): <NEW_LINE> <INDENT> translation1, translation2 = TranslationFactory.create_batch( 2, locale=self.translated_locale, entity=self.main_db_entity, approved=True, date=aware_datetime(2015, 1, 1), ) <NEW_LINE> self.mock_changed_translations.return_value = [ translation1, translation2, ] <NEW_LINE> assert self.changeset.bulk_check_translations() == { translation1.pk, translation2.pk, } <NEW_LINE> assert not Error.objects.exists() <NEW_LINE> assert not Warning.objects.exists() <NEW_LINE> <DEDENT> def test_bulk_check_invalid_translations(self): <NEW_LINE> <INDENT> invalid_translation, valid_translation = TranslationFactory.create_batch( 2, locale=self.translated_locale, entity=self.main_db_entity, approved=True, date=aware_datetime(2015, 1, 1), ) <NEW_LINE> invalid_translation.string = "a\nb" <NEW_LINE> invalid_translation.save() <NEW_LINE> invalid_translation.memory_entries.all().delete() <NEW_LINE> valid_translation.memory_entries.all().delete() <NEW_LINE> self.mock_changed_translations.return_value = [ invalid_translation, valid_translation, ] <NEW_LINE> valid_translations = self.changeset.bulk_check_translations() <NEW_LINE> assert valid_translations == {valid_translation.pk} <NEW_LINE> (error,) = 
Error.objects.all() <NEW_LINE> assert error.library == FailedCheck.Library.PONTOON <NEW_LINE> assert error.message == "Newline characters are not allowed" <NEW_LINE> assert error.translation == invalid_translation <NEW_LINE> self.changeset.translations_to_update = { valid_translation.pk: valid_translation } <NEW_LINE> self.changeset.bulk_create_translation_memory_entries(valid_translations) <NEW_LINE> assert not invalid_translation.memory_entries.exists() <NEW_LINE> assert valid_translation.memory_entries.count() == 1 | Semi-integration tests for translation checks during a sync. | 62598fa7e5267d203ee6b821 |
class CORSMiddleware(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.paths = getattr(settings, "CORS_PATHS", DEFAULT_CORS_PATHS) <NEW_LINE> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> content_type = response.get('content-type', '').split(";")[0].lower() <NEW_LINE> for path, types, allowed in self.paths: <NEW_LINE> <INDENT> if request.path.startswith(path) and content_type in types: <NEW_LINE> <INDENT> for domain in allowed: <NEW_LINE> <INDENT> response['Access-Control-Allow-Origin'] = domain <NEW_LINE> response['Access-Control-Allow-Methods'] = 'PUT, DELETE, POST, GET, OPTIONS' <NEW_LINE> response['Access-Control-Max-Age'] = 1000 <NEW_LINE> response['Access-Control-Allow-Headers'] = 'Authorization, Content-Type, Origin, X-Requested-With, X-Requested-By, *' <NEW_LINE> response['Access-Control-Allow-Credentials'] = "true" <NEW_LINE> response['Access-Control-Expose-Headers'] = 'Authorization, Content-Type, Origin, X-Requested-With, X-Requested-By, *' <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> return response | From https://github.com/acdha/django-sugar/blob/master/sugar/middleware/cors.py
Middleware that serves up representations with a CORS header to
allow third parties to use your web api from JavaScript without
requiring them to proxy it.
See: http://www.w3.org/TR/cors/
Installation
------------
1. Add to ``settings.MIDDLEWARE_CLASSES``::
'sugar.middleware.cors.CORSMiddleware',
2. Optionally, configure ``settings.CORS_PATHS`` if the default settings
aren't appropriate for your application. ``CORS_PATHS`` should be a
list of (path, content_types, headers) values where content_types and
headers are lists of mime types and (key, value) pairs, respectively.
Processing occurs first to last so you should order ``CORS_PATHS``
items from most to least specific.
See ``DEFAULT_CORS_PATHS`` for an example. | 62598fa78da39b475be030f8 |
class URLSafeSerializerMixin(Serializer): <NEW_LINE> <INDENT> default_serializer = _CompactJSON <NEW_LINE> def load_payload( self, payload: bytes, *args: _t.Any, serializer: _t.Optional[_t.Any] = None, **kwargs: _t.Any, ) -> _t.Any: <NEW_LINE> <INDENT> decompress = False <NEW_LINE> if payload.startswith(b"."): <NEW_LINE> <INDENT> payload = payload[1:] <NEW_LINE> decompress = True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> json = base64_decode(payload) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise BadPayload( "Could not base64 decode the payload because of an exception", original_error=e, ) from e <NEW_LINE> <DEDENT> if decompress: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> json = zlib.decompress(json) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise BadPayload( "Could not zlib decompress the payload before decoding the payload", original_error=e, ) from e <NEW_LINE> <DEDENT> <DEDENT> return super().load_payload(json, *args, **kwargs) <NEW_LINE> <DEDENT> def dump_payload(self, obj: _t.Any) -> bytes: <NEW_LINE> <INDENT> json = super().dump_payload(obj) <NEW_LINE> is_compressed = False <NEW_LINE> compressed = zlib.compress(json) <NEW_LINE> if len(compressed) < (len(json) - 1): <NEW_LINE> <INDENT> json = compressed <NEW_LINE> is_compressed = True <NEW_LINE> <DEDENT> base64d = base64_encode(json) <NEW_LINE> if is_compressed: <NEW_LINE> <INDENT> base64d = b"." + base64d <NEW_LINE> <DEDENT> return base64d | Mixed in with a regular serializer it will attempt to zlib
compress the string to make it shorter if necessary. It will also
base64 encode the string so that it can safely be placed in a URL. | 62598fa7ac7a0e7691f72421 |
class geogrid: <NEW_LINE> <INDENT> def __init__(self, iface): <NEW_LINE> <INDENT> self.iface = iface <NEW_LINE> self.plugin_dir = os.path.dirname(__file__) <NEW_LINE> locale = QSettings().value('locale/userLocale')[0:2] <NEW_LINE> locale_path = os.path.join( self.plugin_dir, 'i18n', 'geogrid_{}.qm'.format(locale)) <NEW_LINE> if os.path.exists(locale_path): <NEW_LINE> <INDENT> self.translator = QTranslator() <NEW_LINE> self.translator.load(locale_path) <NEW_LINE> if qVersion() > '4.3.3': <NEW_LINE> <INDENT> QCoreApplication.installTranslator(self.translator) <NEW_LINE> <DEDENT> <DEDENT> self.dlg = geogridDialog(self.iface) <NEW_LINE> self.actions = [] <NEW_LINE> self.menu = self.tr(u'&Geo Grid') <NEW_LINE> self.toolbar = self.iface.addToolBar(u'geogrid') <NEW_LINE> self.toolbar.setObjectName(u'geogrid') <NEW_LINE> <DEDENT> def tr(self, message): <NEW_LINE> <INDENT> return QCoreApplication.translate('geogrid', message) <NEW_LINE> <DEDENT> def initGui(self): <NEW_LINE> <INDENT> self.action = QAction( QIcon(":/plugins/geogrid/icon.png"), u"Build Geo grid", self.iface.mainWindow()) <NEW_LINE> self.action.triggered.connect(self.run) <NEW_LINE> self.iface.addToolBarIcon(self.action) <NEW_LINE> self.iface.addPluginToMenu(u"&Geo Grid", self.action) <NEW_LINE> self.help_action = QAction( QIcon(":/plugins/geogrid/help.png"), u"Help on Geo grid", self.iface.mainWindow()) <NEW_LINE> self.help_action.triggered.connect(self.help) <NEW_LINE> self.iface.addPluginToMenu(u"&Geo Grid", self.help_action) <NEW_LINE> <DEDENT> def unload(self): <NEW_LINE> <INDENT> self.iface.removePluginMenu(u"&Geo Grid", self.action) <NEW_LINE> self.iface.removePluginMenu(u"&Geo Grid", self.help_action) <NEW_LINE> self.iface.removeToolBarIcon(self.action) <NEW_LINE> <DEDENT> def help(self): <NEW_LINE> <INDENT> webbrowser.open(currentPath + "/help/help_geogrid.html") <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.dlg.show() <NEW_LINE> result = self.dlg.exec_() <NEW_LINE> if result: <NEW_LINE> 
<INDENT> self.dlg.run() <NEW_LINE> pass | QGIS Plugin Implementation. | 62598fa72c8b7c6e89bd36dc |
class SKUAdmin(object): <NEW_LINE> <INDENT> model_icon = 'fa fa-gift' <NEW_LINE> list_display = ['id', 'name', 'price', 'stock','is_launched', 'sales', 'comments'] <NEW_LINE> search_fields = ['id','name','stock'] <NEW_LINE> list_filter = ['category','is_launched','stock'] <NEW_LINE> list_editable = ['price', 'stock','is_launched'] <NEW_LINE> show_detail_fields = ['name'] <NEW_LINE> readonly_fields = ['sales', 'comments'] <NEW_LINE> data_charts = { "sku_amount": {'title': '库存', "x-field": "id", "y-field": ('stock',), }, "sales_count": {'title': '销量', "x-field": "id", "y-field": ('sales',), }, } | 商品Admin管理类 | 62598fa766656f66f7d5a306 |
class ShopStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.Ping = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/Ping', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.Ping.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.Ping.Response.FromString, ) <NEW_LINE> self.ShopInfo = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/ShopInfo', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.ShopInfo.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.ShopInfo.Response.FromString, ) <NEW_LINE> self.EnrollMember = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/EnrollMember', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.EnrollMember.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.EnrollMember.Response.FromString, ) <NEW_LINE> self.CheckZipcode = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/CheckZipcode', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.CheckZipcode.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.CheckZipcode.Response.FromString, ) <NEW_LINE> self.VerifyMember = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/VerifyMember', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.VerifyMember.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.VerifyMember.Response.FromString, ) <NEW_LINE> self.SubmitOrder = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/SubmitOrder', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.SubmitOrder.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.SubmitOrder.Response.FromString, ) <NEW_LINE> self.GetOrder = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/GetOrder', 
request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.GetOrder.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.GetOrder.Response.FromString, ) <NEW_LINE> self.ShareOrder = channel.unary_unary( '/bloombox.schema.services.shop.v1.Shop/ShareOrder', request_serializer=shop_dot_v1_dot_ShopService__v1__pb2.ShareOrder.Request.SerializeToString, response_deserializer=shop_dot_v1_dot_ShopService__v1__pb2.ShareOrder.Response.FromString, ) | Specifies the retail shop service, which provides functionality for pickup and delivery orders, member verification,
member enrollment, and more. | 62598fa74a966d76dd5eedf8 |
class TestWorkspace(unittest.TestCase): <NEW_LINE> <INDENT> tmp_examples_path = os.path.join(sys.prefix, 'share', 'doc', 'steelscript', 'examples', 'steelscript') <NEW_LINE> tmp_workspace_path = os.path.join(tmp_examples_path, 'test-workspace') <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> mkdir(os.path.join(sys.prefix, 'share')) <NEW_LINE> mkdir(os.path.join(sys.prefix, 'share', 'doc')) <NEW_LINE> mkdir(os.path.join(sys.prefix, 'share', 'doc', 'steelscript')) <NEW_LINE> mkdir(os.path.join(sys.prefix, 'share', 'doc', 'steelscript', 'examples')) <NEW_LINE> mkdir(cls.tmp_examples_path) <NEW_LINE> for i in range(10): <NEW_LINE> <INDENT> mk_dummy_file(cls.tmp_examples_path, 'test_example_' + str(i) + '.py') <NEW_LINE> <DEDENT> cd = 'cd ' + cls.tmp_examples_path + ';' <NEW_LINE> cls.shell(cd + 'steel mkworkspace -d ' + cls.tmp_workspace_path) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> shutil.rmtree(cls.tmp_examples_path) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def shell(cls, cmd): <NEW_LINE> <INDENT> if cmd.startswith('steel' or cmd.startswith(cls.steel)): <NEW_LINE> <INDENT> opts = ' --loglevel debug --logfile -' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> opts = '' <NEW_LINE> <DEDENT> return shell('{cmd}{opts}'.format(cmd=cmd, opts=opts), exit_on_fail=False, save_output=True) | Simple test class which creates a temporary workspace for testing
Before the tests are run, this class will make a workspace in the root
directory of Steelscript as well as ten dummy example files to be used for
testing. Then when all tests have finished, it will delete both the
workspace and the examples files.
The examples path is stored in TestWorkspace.tmp_examples_path.
The workspace path is stored in TestWorkspace.tmp_workspace_path. | 62598fa7baa26c4b54d4f1c7 |
class CourseAccessHandler(object): <NEW_LINE> <INDENT> def scope_course_instructor(self, data): <NEW_LINE> <INDENT> course_ids = self._courses_with_access_type(data, 'instructor') <NEW_LINE> return ['instructor_courses'] if course_ids else None <NEW_LINE> <DEDENT> def scope_course_staff(self, data): <NEW_LINE> <INDENT> course_ids = self._courses_with_access_type(data, 'staff') <NEW_LINE> return ['staff_courses'] if course_ids else None <NEW_LINE> <DEDENT> def claim_instructor_courses(self, data): <NEW_LINE> <INDENT> return self._courses_with_access_type(data, 'instructor') <NEW_LINE> <DEDENT> def claim_staff_courses(self, data): <NEW_LINE> <INDENT> return self._courses_with_access_type(data, 'staff') <NEW_LINE> <DEDENT> def _courses_with_access_type(self, data, access_type): <NEW_LINE> <INDENT> user = data['user'] <NEW_LINE> values = set(data.get('values', [])) <NEW_LINE> courses = branding.get_visible_courses() <NEW_LINE> courses = (c for c in courses if has_access(user, access_type, c)) <NEW_LINE> course_ids = (unicode(c.id) for c in courses) <NEW_LINE> if values: <NEW_LINE> <INDENT> return [c for c in course_ids if c in values] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [c for c in course_ids] | Defines two new scopes: `course_instructor` and `course_staff`. Each one is
valid only if the user is instructor or staff of at least one course.
Each new scope has a corresponding claim: `instructor_courses` and
`staff_courses` that lists the course_ids for which the user as instructor
or staff privileges.
The claims support claim request values. In other words, if no claim is
requested it returns all the courses for the corresponding privileges. If a
claim request is used, then it only returns the from the list of requested
values that have the corresponding privileges.
For example, if the user is staff of course_a and course_b but not
course_c, the request:
scope = openid course_staff
will return:
{staff_courses: [course_a, course_b] }
If the request is:
claims = {userinfo: {staff_courses=[course_b, course_d]}}
the result will be:
{staff_courses: [course_b] }.
This is useful to quickly determine if a user has the right
privileges for a given course.
For a description of the function naming and arguments, see:
`oauth2_provider/oidc/handlers.py` | 62598fa77b25080760ed73c3 |
class QuadBorder(object): <NEW_LINE> <INDENT> def __init__(self, source, line_width, colour=None): <NEW_LINE> <INDENT> self.quads = [Quad(source) for i in range(4)] <NEW_LINE> self.line_width = line_width <NEW_LINE> if colour: <NEW_LINE> <INDENT> self.set_colour(colour) <NEW_LINE> <DEDENT> <DEDENT> def set_vertices(self, bl, tr): <NEW_LINE> <INDENT> self.quads[0].set_vertices(Point(bl.x, tr.y - self.line_width), tr, constants.DrawLevels.ui + 1) <NEW_LINE> self.quads[1].set_vertices(Point(tr.x - self.line_width, bl.y), tr, constants.DrawLevels.ui + 1) <NEW_LINE> self.quads[2].set_vertices(bl, Point(tr.x, bl.y + self.line_width), constants.DrawLevels.ui + 1) <NEW_LINE> self.quads[3].set_vertices(bl, Point(bl.x + self.line_width, tr.y), constants.DrawLevels.ui + 1) <NEW_LINE> <DEDENT> def set_colour(self, colour): <NEW_LINE> <INDENT> for quad in self.quads: <NEW_LINE> <INDENT> quad.set_colour(colour) <NEW_LINE> <DEDENT> <DEDENT> def enable(self): <NEW_LINE> <INDENT> for quad in self.quads: <NEW_LINE> <INDENT> quad.enable() <NEW_LINE> <DEDENT> <DEDENT> def disable(self): <NEW_LINE> <INDENT> for quad in self.quads: <NEW_LINE> <INDENT> quad.disable() <NEW_LINE> <DEDENT> <DEDENT> def delete(self): <NEW_LINE> <INDENT> for quad in self.quads: <NEW_LINE> <INDENT> quad.delete() | Class that draws the outline of a rectangle | 62598fa766673b3332c302e1 |
class ServerStub: <NEW_LINE> <INDENT> _stubs = {} <NEW_LINE> def __init__(self, method, is_notification, callable_object): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self._is_notification = is_notification <NEW_LINE> self._callable_object = callable_object <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rpc_function(cls, method='', prefix='', is_notification=False): <NEW_LINE> <INDENT> def wrapper(callable_object): <NEW_LINE> <INDENT> nonlocal prefix, method <NEW_LINE> prefix = prefix.strip('.') <NEW_LINE> if not method: <NEW_LINE> <INDENT> method = callable_object.__name__ <NEW_LINE> <DEDENT> if prefix: <NEW_LINE> <INDENT> method = '{}.{}'.format(prefix, method) <NEW_LINE> <DEDENT> if method in cls._stubs: <NEW_LINE> <INDENT> raise KeyError('JSON-RPC method "{}" duplicated definition.'.format(method)) <NEW_LINE> <DEDENT> cls._stubs[method] = cls(method, is_notification, callable_object) <NEW_LINE> return callable_object <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def rpc_class(cls, method='', prefix='', is_notification=False): <NEW_LINE> <INDENT> def wrapper(class_type): <NEW_LINE> <INDENT> nonlocal prefix, method <NEW_LINE> prefix = prefix.strip('.') <NEW_LINE> if not method: <NEW_LINE> <INDENT> method = class_type.__name__ <NEW_LINE> <DEDENT> if prefix: <NEW_LINE> <INDENT> method = '{}.{}'.format(prefix, method) <NEW_LINE> <DEDENT> if method in cls._stubs: <NEW_LINE> <INDENT> raise KeyError('JSON-RPC method "{}" has a duplicated definition.'.format(method)) <NEW_LINE> <DEDENT> cls._stubs[method] = cls(method, is_notification, class_type) <NEW_LINE> return class_type <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls, method): <NEW_LINE> <INDENT> return cls._stubs.get(method) <NEW_LINE> <DEDENT> @property <NEW_LINE> def method(self): <NEW_LINE> <INDENT> return self._method <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_notification(self): <NEW_LINE> <INDENT> return 
self._is_notification <NEW_LINE> <DEDENT> @property <NEW_LINE> def callable_object(self): <NEW_LINE> <INDENT> return self._callable_object | Server side RPC stub
This class contains server side RPC implementations.
Decorate RPC implementations by :meth:`rpc_function`.
eg::
ServerStub.rpc_function()
def hello(name):
return 'Hello %s!' % name
will define a RPC, whose method name is `hello`.
The `hello()` function will be called when `hello` RPC received. | 62598fa799fddb7c1ca62d74 |
class RandomVerticalFlip(object): <NEW_LINE> <INDENT> def __init__(self, prob=0.5): <NEW_LINE> <INDENT> self.prob = prob <NEW_LINE> <DEDENT> def __call__(self, item): <NEW_LINE> <INDENT> check_image_is_numpy(item['image']) <NEW_LINE> if random.random() > self.prob: <NEW_LINE> <INDENT> return item <NEW_LINE> <DEDENT> item['image'] = item['image'][::-1, :] <NEW_LINE> image_height = item['image'].shape[0] <NEW_LINE> if 'annotations' in item: <NEW_LINE> <INDENT> temp = item['annotations'][:, 1].copy() <NEW_LINE> item['annotations'][:, 1] = image_height - item['annotations'][:, 3] <NEW_LINE> item['annotations'][:, 3] = image_height - temp <NEW_LINE> <DEDENT> if 'masks' in item: <NEW_LINE> <INDENT> item['masks'] = item['masks'][:, ::-1, :] <NEW_LINE> <DEDENT> return item | Add Class helper | 62598fa7462c4b4f79dbb924 |
@gin.configurable <NEW_LINE> class EnvironmentDynamics(tf.Module, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> @abc.abstractproperty <NEW_LINE> def batch_size(self) -> types.Int: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def observation_spec(self) -> types.TensorSpec: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def action_spec(self) -> types.TensorSpec: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def observation(self, env_time: types.Int) -> types.NestedTensor: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def reward(self, observation: types.NestedTensor, env_time: types.Int) -> types.NestedTensor: <NEW_LINE> <INDENT> pass | Abstract class to represent a non-stationary environment dynamics.
This class is used with the NonStationaryStochasticEnvironment class below to
obtain a non-stationary environment.
To define a dynamics, derive from this class and define the abstract methods
and properties below.
To work correctly with graph and eager mode, Tensorflow variables must be
defined in the constructor of this class. When used within a
`BanditTFEnvironment` autodeps in reset and step functions will handle
automatically the operation order. | 62598fa7097d151d1a2c0f40 |
class TelescopeParameter(List): <NEW_LINE> <INDENT> klass = TelescopeParameterLookup <NEW_LINE> def __init__(self, dtype=float, default_value=None, **kwargs): <NEW_LINE> <INDENT> if not isinstance(dtype, type): <NEW_LINE> <INDENT> raise ValueError("dtype should be a type") <NEW_LINE> <DEDENT> if isinstance(default_value, dtype): <NEW_LINE> <INDENT> default_value = [("type", "*", default_value)] <NEW_LINE> <DEDENT> super().__init__(default_value=default_value, **kwargs) <NEW_LINE> self._dtype = dtype <NEW_LINE> <DEDENT> def validate(self, obj, value): <NEW_LINE> <INDENT> if isinstance(value, self._dtype): <NEW_LINE> <INDENT> value = [("type", "*", value)] <NEW_LINE> <DEDENT> normalized_value = [] <NEW_LINE> for pattern in value: <NEW_LINE> <INDENT> if len(pattern) != 3: <NEW_LINE> <INDENT> raise TraitError( "pattern should be a tuple of (command, argument, value)" ) <NEW_LINE> <DEDENT> command, arg, val = pattern <NEW_LINE> if not isinstance(val, self._dtype): <NEW_LINE> <INDENT> raise TraitError(f"Value should be a {self._dtype}") <NEW_LINE> <DEDENT> if not isinstance(command, str): <NEW_LINE> <INDENT> raise TraitError("command must be a string") <NEW_LINE> <DEDENT> if command not in ["type", "id"]: <NEW_LINE> <INDENT> raise TraitError("command must be one of: '*', 'type', 'id'") <NEW_LINE> <DEDENT> if command == "type": <NEW_LINE> <INDENT> if not isinstance(arg, str): <NEW_LINE> <INDENT> raise TraitError("'type' argument should be a string") <NEW_LINE> <DEDENT> <DEDENT> if command == "id": <NEW_LINE> <INDENT> arg = int(arg) <NEW_LINE> <DEDENT> val = self._dtype(val) <NEW_LINE> normalized_value.append((command, arg, val)) <NEW_LINE> <DEDENT> normalized_value = TelescopeParameterLookup(normalized_value) <NEW_LINE> super().validate(obj, normalized_value) <NEW_LINE> return normalized_value <NEW_LINE> <DEDENT> def set(self, obj, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> old_value = obj._trait_values[self.name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> 
<INDENT> old_value = self.default_value <NEW_LINE> <DEDENT> super().set(obj, value) <NEW_LINE> if getattr(old_value, '_subarray', None) is not None: <NEW_LINE> <INDENT> obj._trait_values[self.name].attach_subarray(old_value._subarray) | Allow a parameter value to be specified as a simple value (of type *dtype*),
or as a list of patterns that match different telescopes.
The patterns are given as a list of 3-tuples in the
form: `[(command, argument, value), ...]`.
Command can be one of:
- 'type': argument is then a telescope type string (e.g.
`('type', 'SST_ASTRI_CHEC', 4.0)` to apply to all telescopes of that type,
or use a wildcard like "LST*", or "*" to set a pure default value for all
telescopes.
- 'id': argument is a specific telescope ID `['id', 89, 5.0]`)
These are evaluated in-order, so you can first set a default value, and then set
values for specific telescopes or types to override them.
Examples
--------
.. code-block: python
tel_param = [
('type', '*', 5.0), # default for all
('type', 'LST_*', 5.2),
('type', 'MST_MST_NectarCam', 4.0),
('type', 'MST_MST_FlashCam', 4.5),
('id', 34, 4.0), # override telescope 34 specifically
]
.. code-block: python
tel_param = 4.0 # sets this value for all telescopes | 62598fa724f1403a9268583f |
class AclObject(object): <NEW_LINE> <INDENT> swagger_types = { 'accessrights': 'list[str]', 'accesstype': 'str', 'inherit_flags': 'list[str]', 'op': 'str', 'trustee': 'MemberObject' } <NEW_LINE> attribute_map = { 'accessrights': 'accessrights', 'accesstype': 'accesstype', 'inherit_flags': 'inherit_flags', 'op': 'op', 'trustee': 'trustee' } <NEW_LINE> def __init__(self, accessrights=None, accesstype=None, inherit_flags=None, op=None, trustee=None): <NEW_LINE> <INDENT> self._accessrights = None <NEW_LINE> self._accesstype = None <NEW_LINE> self._inherit_flags = None <NEW_LINE> self._op = None <NEW_LINE> self._trustee = None <NEW_LINE> self.discriminator = None <NEW_LINE> if accessrights is not None: <NEW_LINE> <INDENT> self.accessrights = accessrights <NEW_LINE> <DEDENT> if accesstype is not None: <NEW_LINE> <INDENT> self.accesstype = accesstype <NEW_LINE> <DEDENT> if inherit_flags is not None: <NEW_LINE> <INDENT> self.inherit_flags = inherit_flags <NEW_LINE> <DEDENT> if op is not None: <NEW_LINE> <INDENT> self.op = op <NEW_LINE> <DEDENT> if trustee is not None: <NEW_LINE> <INDENT> self.trustee = trustee <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def accessrights(self): <NEW_LINE> <INDENT> return self._accessrights <NEW_LINE> <DEDENT> @accessrights.setter <NEW_LINE> def accessrights(self, accessrights): <NEW_LINE> <INDENT> self._accessrights = accessrights <NEW_LINE> <DEDENT> @property <NEW_LINE> def accesstype(self): <NEW_LINE> <INDENT> return self._accesstype <NEW_LINE> <DEDENT> @accesstype.setter <NEW_LINE> def accesstype(self, accesstype): <NEW_LINE> <INDENT> self._accesstype = accesstype <NEW_LINE> <DEDENT> @property <NEW_LINE> def inherit_flags(self): <NEW_LINE> <INDENT> return self._inherit_flags <NEW_LINE> <DEDENT> @inherit_flags.setter <NEW_LINE> def inherit_flags(self, inherit_flags): <NEW_LINE> <INDENT> self._inherit_flags = inherit_flags <NEW_LINE> <DEDENT> @property <NEW_LINE> def op(self): <NEW_LINE> <INDENT> return self._op <NEW_LINE> <DEDENT> 
@op.setter <NEW_LINE> def op(self, op): <NEW_LINE> <INDENT> self._op = op <NEW_LINE> <DEDENT> @property <NEW_LINE> def trustee(self): <NEW_LINE> <INDENT> return self._trustee <NEW_LINE> <DEDENT> @trustee.setter <NEW_LINE> def trustee(self, trustee): <NEW_LINE> <INDENT> self._trustee = trustee <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AclObject): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fa78e7ae83300ee8fb9 |
class TestEnum(TestBase): <NEW_LINE> <INDENT> a = auto() <NEW_LINE> b = auto() | Tests the false positive for enums. | 62598fa7a8ecb03325871127 |
class WCCSVProduct(WCProduct): <NEW_LINE> <INDENT> sku_key = 'meta:MYOB SKU' <NEW_LINE> second_sku_key = 'sku' <NEW_LINE> stock_level_key = 'stock' <NEW_LINE> stock_status_key = 'stock_status' <NEW_LINE> managing_stock_key = 'manage_stock' <NEW_LINE> title_key = 'post_title' <NEW_LINE> id_key = 'ID' <NEW_LINE> @property <NEW_LINE> def pid(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return int(super(WCCSVProduct, self).pid) <NEW_LINE> <DEDENT> except TypeError as exc: <NEW_LINE> <INDENT> raise Exception("could not get PID for %s: %s" % (self, exc)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def stock_status(self): <NEW_LINE> <INDENT> return super(WCCSVProduct, self).stock_status == 'instock' <NEW_LINE> <DEDENT> @stock_status.setter <NEW_LINE> def stock_status(self, value): <NEW_LINE> <INDENT> value = 'instock' if value else 'outofstock' <NEW_LINE> self[self.stock_status_key] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def managing_stock(self): <NEW_LINE> <INDENT> return super(WCCSVProduct, self).managing_stock == 'yes' <NEW_LINE> <DEDENT> @managing_stock.setter <NEW_LINE> def managing_stock(self, value): <NEW_LINE> <INDENT> assert isinstance(value, bool), "value must be boolean, not %s: %s" % ( type(value), repr(value) ) <NEW_LINE> self[self.managing_stock_key] = 'yes' if value else 'no' | Interface for a WooCommerce CSV Product. | 62598fa77047854f4633f2f1
class LoopEnv(discrete.DiscreteEnv): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.state = 0 <NEW_LINE> self.action_space = spaces.Discrete(2) <NEW_LINE> self.observation_space = spaces.Discrete(9) <NEW_LINE> nA = 2 <NEW_LINE> nS = 9 <NEW_LINE> isd = np.zeros(nS) <NEW_LINE> isd[0] = 1. <NEW_LINE> P = {s: {a: [] for a in range(nA)} for s in range(nS)} <NEW_LINE> P[0][0] = [(1., 1, 0, False)] <NEW_LINE> P[0][1] = [(1., 5, 0, False)] <NEW_LINE> P[1][0] = [(1., 2, 0, False)] <NEW_LINE> P[1][1] = [(1., 2, 0, False)] <NEW_LINE> P[2][0] = [(1., 3, 0, False)] <NEW_LINE> P[2][1] = [(1., 3, 0, False)] <NEW_LINE> P[3][0] = [(1., 4, 0, False)] <NEW_LINE> P[3][1] = [(1., 4, 0, False)] <NEW_LINE> P[4][0] = [(1., 0, 1, False)] <NEW_LINE> P[4][1] = [(1., 0, 1, False)] <NEW_LINE> P[5][0] = [(1., 0, 0, False)] <NEW_LINE> P[5][1] = [(1., 6, 0, False)] <NEW_LINE> P[6][0] = [(1., 0, 0, False)] <NEW_LINE> P[6][1] = [(1., 7, 0, False)] <NEW_LINE> P[7][0] = [(1., 0, 0, False)] <NEW_LINE> P[7][1] = [(1., 8, 0, False)] <NEW_LINE> P[8][0] = [(1., 0, 2, False)] <NEW_LINE> P[8][1] = [(1., 0, 2, False)] <NEW_LINE> super(LoopEnv, self).__init__(nS, nA, P, isd) | Loop environment
Loop: This domain consists of two loops, as shown in Figure
3(b). Actions are deterministic. The problem here is that a
learning algorithm may have already converged on action a for state 0 before the larger reward available in state 8 has
been backed up. Here the optimal policy is to do action b
everywhere.
C. J. Watkins. Models of Delayed Reinforcement Learning.
PhD thesis, Psychology Department, Cambridge
University, 1989 | 62598fa7656771135c489599
class LRUCache(BaseCaching): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.indexes = Counter() <NEW_LINE> <DEDENT> def put(self, key, item): <NEW_LINE> <INDENT> if key is not None and item is not None: <NEW_LINE> <INDENT> if self.cache_data.get(key) is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tmp = max(self.indexes, key=self.indexes.get) <NEW_LINE> self.indexes[key] = self.indexes[tmp] + 1 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.indexes[key] = 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.indexes[key] = self.indexes[max(self.indexes, key=self.indexes.get)] + 1 <NEW_LINE> <DEDENT> self.cache_data[key] = item <NEW_LINE> <DEDENT> if len(self.indexes) > BaseCaching.MAX_ITEMS: <NEW_LINE> <INDENT> tmp = min(self.indexes, key=self.indexes.get) <NEW_LINE> print('DISCARD:', tmp) <NEW_LINE> self.cache_data.pop(tmp) <NEW_LINE> self.indexes.pop(tmp) <NEW_LINE> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> if key is not None and self.indexes.get(key) is not None: <NEW_LINE> <INDENT> self.indexes[key] = self.indexes[max(self.indexes, key=self.indexes.get)] + 1 <NEW_LINE> return self.cache_data.get(key) | lru caching system | 62598fa7d486a94d0ba2bee5 |
class ColorBackgroundSettings(BackgroundSettings,IDisposable): <NEW_LINE> <INDENT> def Dispose(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseUnmanagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __enter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Color=property(lambda self: object(),lambda self,v: None,lambda self: None) | Represents the rendering color background settings. | 62598fa7bd1bec0571e1504f |
class repeat(object): <NEW_LINE> <INDENT> def next(self): <NEW_LINE> <INDENT> return None | repeat(object [,times]) -> create an iterator which returns the object
for the specified number of times. If not specified, returns the object
endlessly. | 62598fa77d43ff248742738e |
class Price(models.Model): <NEW_LINE> <INDENT> product = models.ForeignKey(Product, on_delete=models.CASCADE, related_name='prices') <NEW_LINE> unit = models.CharField(_('Unit of measurement'), max_length=5, choices=MEASURING_UNIT_CHOICES, default=KILOGRAM) <NEW_LINE> price = models.IntegerField(_('Price per unit')) <NEW_LINE> currency = models.CharField(_('Currency'), max_length=5, choices=CURRENCIES, default='AED') <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return f' {self.currency} {self.price} {self.unit}' | product unit price | 62598fa72c8b7c6e89bd36dd |
class DocumentNotFoundError(Error): <NEW_LINE> <INDENT> pass | Raised when document is not found in db. | 62598fa73d592f4c4edbade5 |
class Orc(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://orc.apache.org/" <NEW_LINE> url = "https://github.com/apache/orc/archive/rel/release-1.6.5.tar.gz" <NEW_LINE> version('1.6.5', sha256='df5885db8fa2e4435db8d486c6c7fc4e2c565d6197eee27729cf9cbdf36353c0') <NEW_LINE> depends_on('maven') <NEW_LINE> depends_on('openssl') <NEW_LINE> depends_on('zlib@1.2.11:') <NEW_LINE> depends_on('pcre') <NEW_LINE> depends_on('protobuf@3.5.1:') <NEW_LINE> depends_on('zstd@1.4.5:') <NEW_LINE> depends_on('googletest@1.8.0:') <NEW_LINE> depends_on('snappy@1.1.7:') <NEW_LINE> depends_on('lz4@1.7.5:') <NEW_LINE> patch('thirdparty.patch') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> args = [] <NEW_LINE> args.append('-DCMAKE_CXX_FLAGS=' + self.compiler.cxx_pic_flag) <NEW_LINE> args.append('-DCMAKE_C_FLAGS=' + self.compiler.cc_pic_flag) <NEW_LINE> args.append('-DINSTALL_VENDORED_LIBS:BOOL=OFF') <NEW_LINE> args.append('-DBUILD_LIBHDFSPP:BOOL=OFF') <NEW_LINE> args.append('-DBUILD_TOOLS:BOOL=OFF') <NEW_LINE> args.append('-DBUILD_CPP_TESTS:BOOL=OFF') <NEW_LINE> for x in ('snappy', 'zlib', 'zstd', 'lz4', 'protobuf'): <NEW_LINE> <INDENT> args.append('-D{0}_HOME={1}'.format(x.upper(), self.spec[x].prefix)) <NEW_LINE> <DEDENT> return args | the smallest, fastest columnar storage for Hadoop
workloads. | 62598fa72ae34c7f260aaffa |
class MyMutEmbedding(gluon.nn.Block): <NEW_LINE> <INDENT> def __init__(self, dims, out_put_dim, is_reshape=False, **kwargs): <NEW_LINE> <INDENT> super(MyMutEmbedding, self).__init__(**kwargs) <NEW_LINE> self.is_reshape = is_reshape <NEW_LINE> self.dims = list(dims) <NEW_LINE> self.out_put_dim = out_put_dim <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> for i in range(len(dims)): <NEW_LINE> <INDENT> temp_embedding = gluon.nn.Embedding(input_dim=dims[i], output_dim=out_put_dim) <NEW_LINE> self._children.append(temp_embedding) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> embeding_list = [] <NEW_LINE> for i in range(len(self.dims)): <NEW_LINE> <INDENT> temp = x[:, i] <NEW_LINE> embedding_temp = self._children[i](temp) <NEW_LINE> embeding_list.append(embedding_temp) <NEW_LINE> <DEDENT> result = embeding_list[0] <NEW_LINE> for i in range(1, len(self.dims)): <NEW_LINE> <INDENT> result = mxnet.nd.concat(result, embeding_list[i], dim=1) <NEW_LINE> <DEDENT> if self.is_reshape: <NEW_LINE> <INDENT> result = result.reshape((x.shape[0], x.shape[1], self.out_put_dim)) <NEW_LINE> <DEDENT> return result | 自己的embedding层 | 62598fa745492302aabfc3e8 |
class InferHeaderFields(beam.PTransform): <NEW_LINE> <INDENT> def __init__(self, defined_headers, allow_incompatible_records=False): <NEW_LINE> <INDENT> self._defined_headers = defined_headers <NEW_LINE> self._allow_incompatible_records = allow_incompatible_records <NEW_LINE> <DEDENT> def expand(self, pcoll): <NEW_LINE> <INDENT> return (pcoll | 'InferHeaderFields' >> beam.ParDo( _InferHeaderFields(), self._defined_headers) | 'MergeHeaders' >> merge_headers.MergeHeaders( split_alternate_allele_info_fields=True, allow_incompatible_records=self._allow_incompatible_records)) | Extracts inferred header fields from `Variant` records. | 62598fa7be383301e0253710 |
class StorageExplorer(AmazonS3Handler, GoogleStorageHandler, AzureStorageHandler): <NEW_LINE> <INDENT> def __init__(self, host, logger): <NEW_LINE> <INDENT> super().__init__(host, logger) <NEW_LINE> self.host = host <NEW_LINE> self.logger = logger <NEW_LINE> self.buckets_found = set() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_image_sources_from_html(soup): <NEW_LINE> <INDENT> images = soup.select("img") <NEW_LINE> return {img.get("src") for img in images if img.get("src")} <NEW_LINE> <DEDENT> def _add_to_found_storage(self, storage_url): <NEW_LINE> <INDENT> storage_url = self._normalize_url(storage_url) <NEW_LINE> bucket = S3Bucket(storage_url) <NEW_LINE> if bucket.url not in self.storage_urls_found: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.request_handler.send("GET", url=storage_url) <NEW_LINE> if self._is_amazon_s3_bucket(res): <NEW_LINE> <INDENT> self.storage_urls_found.add(bucket.url) <NEW_LINE> self.s3_buckets.add(bucket) <NEW_LINE> <DEDENT> <DEDENT> except RequestHandlerException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run(self, soup): <NEW_LINE> <INDENT> img_srcs = self._get_image_sources_from_html(soup) <NEW_LINE> urls = {src for src in img_srcs if self._is_s3_url(src)} <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> self._add_to_found_storage(url) <NEW_LINE> <DEDENT> if self.s3_buckets: <NEW_LINE> <INDENT> self.logger.info("{} S3 buckets discovered. Testing for permissions".format(COLORED_COMBOS.NOTIFY)) <NEW_LINE> for bucket in self.s3_buckets: <NEW_LINE> <INDENT> if bucket.no_scheme_url in self.storage_urls_found: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._test_s3_bucket_permissions(bucket) <NEW_LINE> <DEDENT> <DEDENT> if self.num_files_found > 0: <NEW_LINE> <INDENT> self.logger.info( "{} Found {}{}{} sensitive files in S3 buckets. 
inspect web scan logs for more information.".format( COLORED_COMBOS.GOOD, COLOR.GREEN, self.num_files_found, COLOR.RESET)) <NEW_LINE> <DEDENT> elif any(b.vulnerable for b in self.s3_buckets): <NEW_LINE> <INDENT> self.logger.info("{} No sensitive files found in target's cloud storage".format(COLORED_COMBOS.BAD)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info("{} Could not access target's cloud storage." " All permissions are set properly".format(COLORED_COMBOS.BAD)) | Find and test privileges of target cloud storage and look for sensitive files in it.
Can lead to finding .git/.DS_Store/etc files with tokens, passwords and more. | 62598fa7435de62698e9bd0e |
class SettingsError(Exception): <NEW_LINE> <INDENT> pass | Error in game settings | 62598fa74e4d56256637233d |
class CreateTokenView(ObtainAuthToken): <NEW_LINE> <INDENT> serializer_class = AuthTokenSerialiezr <NEW_LINE> renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES | Create new auth token for user | 62598fa73617ad0b5ee0606b |
class ErrorMessageWidget(QLabel): <NEW_LINE> <INDENT> pass | class representing line with error message shown
to user | 62598fa767a9b606de545ee4 |
class RestClient(Client): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(RestClient, self).__init__(None) <NEW_LINE> <DEDENT> def call(self, session, cmd, action, options, args, method): <NEW_LINE> <INDENT> pass | Class handles REST call for cli | 62598fa744b2445a339b68fb |
class HierarchicalConcurrent(SimpleSequential): <NEW_LINE> <INDENT> def __init__(self, data_format="channels_last", **kwargs): <NEW_LINE> <INDENT> super(HierarchicalConcurrent, self).__init__(**kwargs) <NEW_LINE> self.axis = get_channel_axis(data_format) <NEW_LINE> <DEDENT> def call(self, x, training=None): <NEW_LINE> <INDENT> out = [] <NEW_LINE> y_prev = None <NEW_LINE> for block in self.children: <NEW_LINE> <INDENT> y = block(x, training=training) <NEW_LINE> print(y.shape) <NEW_LINE> if y_prev is not None: <NEW_LINE> <INDENT> y = y + y_prev <NEW_LINE> <DEDENT> out.append(y) <NEW_LINE> y_prev = y <NEW_LINE> <DEDENT> out = tf.concat(out, axis=self.axis) <NEW_LINE> return out | A container for hierarchical concatenation of blocks with parameters.
Parameters:
----------
data_format : str, default 'channels_last'
The ordering of the dimensions in tensors. | 62598fa74f6381625f19944a |
class trace(AffAtom): <NEW_LINE> <INDENT> def __init__(self, expr) -> None: <NEW_LINE> <INDENT> super(trace, self).__init__(expr) <NEW_LINE> <DEDENT> @AffAtom.numpy_numeric <NEW_LINE> def numeric(self, values): <NEW_LINE> <INDENT> return np.trace(values[0]) <NEW_LINE> <DEDENT> def validate_arguments(self) -> None: <NEW_LINE> <INDENT> shape = self.args[0].shape <NEW_LINE> if self.args[0].ndim != 2 or shape[0] != shape[1]: <NEW_LINE> <INDENT> raise ValueError("Argument to trace must be a square matrix.") <NEW_LINE> <DEDENT> <DEDENT> def shape_from_args(self) -> Tuple[int, ...]: <NEW_LINE> <INDENT> return tuple() <NEW_LINE> <DEDENT> def is_atom_log_log_convex(self) -> bool: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_atom_log_log_concave(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def graph_implementation( self, arg_objs, shape: Tuple[int, ...], data=None ) -> Tuple[lo.LinOp, List[Constraint]]: <NEW_LINE> <INDENT> return (lu.trace(arg_objs[0]), []) | The sum of the diagonal entries of a matrix.
Parameters
----------
expr : Expression
The expression to sum the diagonal of. | 62598fa7cc0a2c111447af28 |
class QStageDisplay(QOffsetDisplay): <NEW_LINE> <INDENT> def __init__(self, jump_signal = None, q_label = None, **kwds): <NEW_LINE> <INDENT> super().__init__(**kwds) <NEW_LINE> self.jump_signal = jump_signal <NEW_LINE> self.jump_size = None <NEW_LINE> self.q_label = q_label <NEW_LINE> self.adjust_mode = False <NEW_LINE> self.tooltips = ["click to adjust", "use scroll wheel to move stage"] <NEW_LINE> self.setFocusPolicy(QtCore.Qt.ClickFocus) <NEW_LINE> self.setToolTip(self.tooltips[0]) <NEW_LINE> <DEDENT> def paintBackground(self, painter): <NEW_LINE> <INDENT> if self.adjust_mode: <NEW_LINE> <INDENT> color = QtGui.QColor(180, 180, 180) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> color = QtGui.QColor(255, 255, 255) <NEW_LINE> <DEDENT> if (self.value < self.warning_low) or (self.value > self.warning_high): <NEW_LINE> <INDENT> color = QtGui.QColor(255, 0, 0) <NEW_LINE> <DEDENT> painter.setPen(color) <NEW_LINE> painter.setBrush(color) <NEW_LINE> painter.drawRect(0, 0, self.width(), self.height()) <NEW_LINE> <DEDENT> def mousePressEvent(self, event): <NEW_LINE> <INDENT> if self.functionality is not None: <NEW_LINE> <INDENT> self.adjust_mode = not self.adjust_mode <NEW_LINE> if self.adjust_mode: <NEW_LINE> <INDENT> self.setToolTip(self.tooltips[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setToolTip(self.tooltips[0]) <NEW_LINE> <DEDENT> self.update() <NEW_LINE> <DEDENT> <DEDENT> def setFunctionality(self, functionality): <NEW_LINE> <INDENT> super().setFunctionality(functionality) <NEW_LINE> self.has_center_bar = self.functionality.getParameter("has_center_bar") <NEW_LINE> self.scale_max = self.functionality.getParameter("maximum") <NEW_LINE> self.scale_min = self.functionality.getParameter("minimum") <NEW_LINE> self.warning_high = self.functionality.getParameter("warning_high") <NEW_LINE> self.warning_low = self.functionality.getParameter("warning_low") <NEW_LINE> self.scale_range = 1.0/(self.scale_max - self.scale_min) <NEW_LINE> 
self.updateValue(self.functionality.getCurrentPosition()) <NEW_LINE> self.functionality.zStagePosition.connect(self.updateValue) <NEW_LINE> <DEDENT> def setJumpSize(self, jump_size): <NEW_LINE> <INDENT> self.jump_size = jump_size <NEW_LINE> <DEDENT> def updateValue(self, value): <NEW_LINE> <INDENT> if self.isEnabled(): <NEW_LINE> <INDENT> super().updateValue(value) <NEW_LINE> self.q_label.setText("{0:.3f}".format(value)) <NEW_LINE> <DEDENT> <DEDENT> def wheelEvent(self, event): <NEW_LINE> <INDENT> if self.adjust_mode and (not event.angleDelta().isNull()): <NEW_LINE> <INDENT> if (event.angleDelta().y() > 0): <NEW_LINE> <INDENT> self.jump_signal.emit(self.jump_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.jump_signal.emit(-self.jump_size) <NEW_LINE> <DEDENT> event.accept() | Z stage position. | 62598fa74f88993c371f0496 |
class ResponseProxy(AttributeExposer): <NEW_LINE> <INDENT> __moya_exposed_attributes__ = ["url", "text", "status_code", "headers", "cookies", "history", "content", "json", "encoding"] <NEW_LINE> def __init__(self, req, url, method): <NEW_LINE> <INDENT> self._req = req <NEW_LINE> self._url = url <NEW_LINE> self._method = method <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<httpresponse {} "{}">'.format(self._method, self._url) <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._req.url <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> return self._req.text <NEW_LINE> <DEDENT> @property <NEW_LINE> def status_code(self): <NEW_LINE> <INDENT> return self._req.status_code <NEW_LINE> <DEDENT> @property <NEW_LINE> def headers(self): <NEW_LINE> <INDENT> return dict(self._req.headers) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cookies(self): <NEW_LINE> <INDENT> return dict(self._req.cookies) <NEW_LINE> <DEDENT> @property <NEW_LINE> def history(self): <NEW_LINE> <INDENT> self._req.history <NEW_LINE> <DEDENT> @property <NEW_LINE> def content(self): <NEW_LINE> <INDENT> return self._req.content <NEW_LINE> <DEDENT> @property <NEW_LINE> def json(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._req.json() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def encoding(self): <NEW_LINE> <INDENT> return self._req.encoding | Proxy for a request object | 62598fa7a219f33f346c6730 |
class OriginTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_creationInfo(self): <NEW_LINE> <INDENT> orig = Origin() <NEW_LINE> self.assertEqual(orig.creation_info, None) <NEW_LINE> orig = Origin(creation_info={}) <NEW_LINE> self.assertTrue(isinstance(orig.creation_info, CreationInfo)) <NEW_LINE> orig = Origin(creation_info=CreationInfo(author='test2')) <NEW_LINE> self.assertTrue(isinstance(orig.creation_info, CreationInfo)) <NEW_LINE> self.assertEqual(orig.creation_info.author, 'test2') <NEW_LINE> orig = Origin(creation_info={'author': 'test'}) <NEW_LINE> self.assertEqual(orig.creation_info, orig['creation_info']) <NEW_LINE> self.assertEqual(orig.creation_info.author, 'test') <NEW_LINE> self.assertEqual(orig['creation_info']['author'], 'test') <NEW_LINE> orig.creation_info.agency_id = "muh" <NEW_LINE> self.assertEqual(orig.creation_info, orig['creation_info']) <NEW_LINE> self.assertEqual(orig.creation_info.agency_id, 'muh') <NEW_LINE> self.assertEqual(orig['creation_info']['agency_id'], 'muh') <NEW_LINE> <DEDENT> def test_multipleOrigins(self): <NEW_LINE> <INDENT> origin = Origin() <NEW_LINE> origin.public_id = 'smi:ch.ethz.sed/origin/37465' <NEW_LINE> origin.time = UTCDateTime(0) <NEW_LINE> origin.latitude = 12 <NEW_LINE> origin.latitude_errors.confidence_level = 95 <NEW_LINE> origin.longitude = 42 <NEW_LINE> origin.depth_type = 'from location' <NEW_LINE> self.assertEqual(origin.latitude, 12) <NEW_LINE> self.assertEqual(origin.latitude_errors.confidence_level, 95) <NEW_LINE> self.assertEqual(origin.latitude_errors.uncertainty, None) <NEW_LINE> self.assertEqual(origin.longitude, 42) <NEW_LINE> origin2 = Origin() <NEW_LINE> origin2.latitude = 13.4 <NEW_LINE> self.assertEqual(origin2.depth_type, None) <NEW_LINE> self.assertEqual(origin2.resource_id, None) <NEW_LINE> self.assertEqual(origin2.latitude, 13.4) <NEW_LINE> self.assertEqual(origin2.latitude_errors.confidence_level, None) <NEW_LINE> self.assertEqual(origin2.longitude, None) | Test suite for 
obspy.core.event.Origin | 62598fa756ac1b37e6302105 |
class TestModuleAnalyzer(TestCaseAnalyzer): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.tmp_path = tempfile.mkdtemp(prefix='coqua_') <NEW_LINE> data_path = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> data_path = os.path.join(data_path, 'data') <NEW_LINE> repo_name = 'graaltest' <NEW_LINE> cls.repo_path = os.path.join(cls.tmp_path, repo_name) <NEW_LINE> fdout, _ = tempfile.mkstemp(dir=cls.tmp_path) <NEW_LINE> zip_path = os.path.join(data_path, repo_name + '.zip') <NEW_LINE> subprocess.check_call(['unzip', '-qq', zip_path, '-d', cls.tmp_path]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> shutil.rmtree(cls.tmp_path) <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> vuln_analyzer = VulnAnalyzer() <NEW_LINE> self.assertIsInstance(vuln_analyzer, VulnAnalyzer) <NEW_LINE> self.assertIsInstance(vuln_analyzer.bandit, Bandit) <NEW_LINE> <DEDENT> def test_analyze(self): <NEW_LINE> <INDENT> module_path = os.path.join(self.tmp_path, 'graaltest', 'perceval') <NEW_LINE> vuln_analyzer = VulnAnalyzer() <NEW_LINE> result = vuln_analyzer.analyze(module_path) <NEW_LINE> self.assertIn('loc_analyzed', result) <NEW_LINE> self.assertTrue(type(result['loc_analyzed']), int) <NEW_LINE> self.assertIn('num_vulns', result) <NEW_LINE> self.assertTrue(type(result['num_vulns']), int) <NEW_LINE> self.assertIn('by_severity', result) <NEW_LINE> self.assertTrue(type(result['by_severity']), dict) <NEW_LINE> self.assertIn('undefined', result['by_severity']) <NEW_LINE> self.assertTrue(type(result['by_severity']['undefined']), int) <NEW_LINE> self.assertIn('low', result['by_severity']) <NEW_LINE> self.assertTrue(type(result['by_severity']['low']), int) <NEW_LINE> self.assertIn('medium', result['by_severity']) <NEW_LINE> self.assertTrue(type(result['by_severity']['medium']), int) <NEW_LINE> self.assertIn('high', result['by_severity']) <NEW_LINE> self.assertTrue(type(result['by_severity']['high']), 
int) <NEW_LINE> self.assertIn('by_confidence', result) <NEW_LINE> self.assertTrue(type(result['by_confidence']), dict) <NEW_LINE> self.assertIn('undefined', result['by_confidence']) <NEW_LINE> self.assertTrue(type(result['by_confidence']['undefined']), int) <NEW_LINE> self.assertIn('low', result['by_confidence']) <NEW_LINE> self.assertTrue(type(result['by_confidence']['low']), int) <NEW_LINE> self.assertIn('medium', result['by_confidence']) <NEW_LINE> self.assertTrue(type(result['by_confidence']['medium']), int) <NEW_LINE> self.assertIn('high', result['by_confidence']) <NEW_LINE> self.assertTrue(type(result['by_confidence']['high']), int) | ModuleAnalyzer tests | 62598fa7498bea3a75a57a36 |
class Packages(object):
    """The handler for /packages/*.

    This handler is in charge of packages (but not package versions, which are
    the responsibility of the PackageVersions class).
    """

    @handlers.json_or_html_action
    def index(self, page=1, format='html'):
        """List all packages, most recently updated first.

        page -- 1-based page number (NOTE(review): may arrive as a string
            from the query string; it is int()-ed before use but compared
            to 1 un-coerced below — confirm callers always pass an int or
            the default).
        format -- 'json' for the API listing (50 per page); anything else
            renders the HTML index with the Pager's default page size.
        """
        if format == 'json':
            # JSON listing: each entry is the URL of that package's 'show'
            # action, plus prev/next links so clients can crawl all pages.
            pager = Pager(int(page), "/packages.json?page=%d",
                          Package.all().order('-updated'), per_page=50)
            return json.dumps({
                "packages": [
                    handlers.request().url(action='show', id=package.name)
                    for package in pager.get_items()
                ],
                "prev": pager.prev_url,
                "next": pager.next_url,
                "pages": pager.page_count
            })
        else:
            pager = Pager(int(page), "/packages?page=%d",
                          Package.all().order('-updated'))
            title = 'All Packages'
            # Only pages past the first mention the page number in the title.
            if page != 1: title = 'Page %s | %s' % (page, title)
            return handlers.render("packages/index",
                                   packages=pager.get_items(),
                                   pagination=pager.render_pagination(),
                                   layout={'title': title})

    @handlers.json_or_html_action
    def show(self, id, format='html'):
        """Show a single package as JSON metadata or an HTML page.

        id -- the package identifier from the URL; unused directly here —
            handlers.request().package is already resolved upstream
            (presumably by the routing/decorator layer — confirm).
        format -- 'json' or 'html'; any other value is a 404.
        """
        if format == 'json':
            package = handlers.request().package
            versions = [str(version.version) for version in package.version_set]
            return json.dumps({
                "name": package.name,
                "uploaders": [uploader.email() for uploader in package.uploaders],
                "versions": versions
            })
        elif format == 'html':
            package = handlers.request().package
            version_count = package.version_set.count()
            title = package.name
            # README/CHANGELOG default to absent; filled in only when the
            # latest version carries them.
            readme = None
            readme_filename = None
            changelog = None
            changelog_filename = None
            if package.latest_version:
                # With at least one version, the page title includes the
                # latest version number.
                title = '%s %s' % (package.name, package.latest_version.version)
                if package.latest_version.readme:
                    readme = package.latest_version.readme.render()
                    readme_filename = package.latest_version.readme.filename
                if package.latest_version.changelog:
                    changelog = package.latest_version.changelog.render()
                    changelog_filename = package.latest_version.changelog.filename
            return handlers.render(
                "packages/show",
                package=package,
                # Show at most the 10 newest versions; link to the full
                # version list only when there are more than fit here.
                versions=package.version_set.order('-sort_order').fetch(10),
                version_count=version_count,
                show_versions_link=version_count > 10,
                readme=readme,
                readme_filename=readme_filename,
                changelog=changelog,
                changelog_filename=changelog_filename,
                layout={'title': title})
        else:
            raise handlers.http_error(404)
class GridCell(object): <NEW_LINE> <INDENT> def __init__(self, xmin, ymin, xmax, ymax, number, index): <NEW_LINE> <INDENT> self.xmin = xmin <NEW_LINE> self.ymin = ymin <NEW_LINE> self.xmax = xmax <NEW_LINE> self.ymax = ymax <NEW_LINE> self.cell_number = number <NEW_LINE> self.index = index | A simple class for determining data values over a grid. | 62598fa74428ac0f6e65843c |
class ssd1325(greyscale_device):
    """Serial interface to a 4-bit greyscale SSD1325 OLED display.

    On creation, an initialization sequence is pumped to the
    display to properly configure it. Further control commands can then be
    called to affect the brightness and other settings.
    """

    def __init__(self, serial_interface=None, width=128, height=64, rotate=0,
                 mode="RGB", framebuffer=None, **kwargs):
        # nibble_order=1 selects the pixel-nibble packing used when
        # rendering 4-bit pixels into bytes (presumably low nibble first
        # for this controller — confirm against greyscale_device).
        super(ssd1325, self).__init__(luma.core.const.common, serial_interface,
                                      width, height, rotate, mode, framebuffer,
                                      nibble_order=1, **kwargs)

    def _supported_dimensions(self):
        # Only the controller's native 128x64 panel size is supported.
        return [(128, 64)]

    def _init_sequence(self):
        # Raw SSD1325 command stream. Per the SSD1325 datasheet these are
        # (roughly): 0xAE display off; 0xB3 0xF2 clock divide/oscillator;
        # 0xA8 0x3F multiplex ratio (64 rows); 0xA2 0x4C display offset;
        # 0xA1 0x00 start line; 0xAD 0x02 master config; 0xA0 0x50 remap;
        # 0x86 full current range; 0xB8 + 8 values greyscale table;
        # 0xB2 0x51 row period; 0xB1 0x55 phase length; 0xB4 0x03 and
        # 0xB0 0x28 pre-charge settings; 0xBC/0xBE/0xBF voltage levels;
        # 0xA4 normal display mode.
        # NOTE(review): values kept verbatim — verify against the datasheet
        # before changing any byte.
        self.command(
            0xAE, 0xB3, 0xF2, 0xA8, 0x3F, 0xA2, 0x4C, 0xA1, 0x00, 0xAD,
            0x02, 0xA0, 0x50, 0x86, 0xB8, 0x01, 0x11, 0x22, 0x32, 0x43,
            0x54, 0x65, 0x76, 0xB2, 0x51, 0xB1, 0x55, 0xB4, 0x03, 0xB0,
            0x28, 0xBC, 0x01, 0xBE, 0x00, 0xBF, 0x02, 0xA4)

    def _set_position(self, top, right, bottom, left):
        # 0x15 sets the column address window, 0x75 the row window.
        # Horizontal bounds are shifted right by one because each byte
        # holds two 4-bit pixels (column addresses count byte pairs).
        self.command(
            0x15, left >> 1, (right - 1) >> 1,
            0x75, top, bottom - 1)
class AddToCartForm(forms.Form):
    """A generic form for adding a product to a cart - should post to
    cart.views.add.

    Subclasses may declare extra fields; anything beyond
    product_type/product_id/quantity is treated as a product option
    (see get_options).
    """

    # ContentType of the product model, carried in a hidden field.
    product_type = forms.ModelChoiceField(
        queryset=product_type_queryset,
        widget=forms.widgets.HiddenInput()
    )
    # Primary key of the product within that model.
    product_id = forms.IntegerField(
        min_value=1,
        widget=forms.widgets.HiddenInput()
    )
    quantity = forms.IntegerField(min_value=1, initial=1)

    def __init__(self, *args, **kwargs):
        """Accept two extra keyword arguments on top of forms.Form's:

        single -- if truthy, hide the quantity field and fix it at 1.
        product_instance -- optional product used to pre-fill the hidden
            product_id/product_type fields.
        """
        single = kwargs.pop('single', False)
        # BUG FIX: use None (not False) as the no-instance sentinel; the
        # truthiness check below is unchanged, so behavior is identical.
        product_instance = kwargs.pop('product_instance', None)
        # BUG FIX: the original captured and returned super().__init__()'s
        # result — __init__ always returns None, so the capture/return was
        # dead weight that read as if it carried meaning.
        super(AddToCartForm, self).__init__(*args, **kwargs)
        if single:
            self.fields['quantity'].initial = 1
            self.fields['quantity'].widget = forms.widgets.HiddenInput()
        if product_instance:
            self.fields['product_id'].initial = product_instance.id
            self.fields['product_type'].initial = ContentType.objects.get_for_model(product_instance).id

    def clean_product_id(self):
        """Validate that product_id refers to an existing product."""
        if self.get_product():
            return self.cleaned_data['product_id']
        raise forms.ValidationError('Invalid product')

    def get_product(self):
        """Return the product instance, or None if it cannot be resolved."""
        # BUG FIX: if product_type failed its own validation it is absent
        # from cleaned_data, and the original raised KeyError (a 500) here
        # instead of letting clean_product_id report a form error.
        product_type = self.cleaned_data.get('product_type')
        if product_type is None:
            return None
        product_class = product_type.model_class()
        try:
            return product_class.objects.get(pk=self.cleaned_data['product_id'])
        except product_class.DoesNotExist:
            return None

    def get_options(self):
        """Return cleaned values of any extra (subclass-declared) fields."""
        skip = ('product_id', 'product_type', 'quantity')
        return {field: value
                for field, value in self.cleaned_data.items()
                if field not in skip}

    def get_quantity(self):
        """Return the validated quantity."""
        return self.cleaned_data['quantity']

    def add(self, request):
        """Add the product to the request's cart with quantity and options."""
        Cart(request).add(self.get_product(), self.cleaned_data['quantity'],
                          self.get_options())
class NFSessionBase(object):
    """Base class holding the HTTP session used to talk to Netflix."""

    # NOTE(review): presumably callback slots registered by subclasses —
    # confirm against the subclasses; set here only as a class default.
    slots = None
    # requests.Session instance, (re)created by _init_session().
    session = None
    # Whether to verify SSL certificates; mirrors the add-on setting.
    verify_ssl = True

    def __init__(self):
        self.verify_ssl = bool(g.ADDON.getSettingBool('ssl_verification'))
        self.is_prefetch_login = False
        self._init_session()

    @common.time_execution(immediate=True)
    def _init_session(self):
        """Create a fresh requests session, closing any previous one."""
        try:
            self.session.close()
            common.info('Session closed')
        except AttributeError:
            # No previous session existed (self.session was still None).
            pass
        self.session = requests.session()
        self.session.headers.update({
            'User-Agent': common.get_user_agent(enable_android_mediaflag_fix=True),
            'Accept-Encoding': 'gzip'
        })
        common.info('Initialized new session')

    def update_session_data(self, old_esn=None):
        """Persist the current session state: headers, cookies and ESN.

        old_esn -- ESN in effect before this update; defaults to the one
            currently stored when not supplied.
        """
        self.set_session_header_data()
        cookies.save(self.account_hash, self.session.cookies)
        cookies.log_cookie(self.session.cookies)
        _update_esn(g.get_esn() if old_esn is None else old_esn)

    def set_session_header_data(self):
        """Attach Netflix-specific headers for the active profile."""
        try:
            self.session.headers.update({
                'x-netflix.nq.stack': 'prod',
                'x-netflix.request.client.user.guid': g.LOCAL_DB.get_active_profile_guid()
            })
        except ProfilesMissing:
            # No active profile yet; leave the headers unchanged.
            pass

    @property
    def account_hash(self):
        """URL-safe base64 of the account e-mail, used as the cookie-store key."""
        from base64 import urlsafe_b64encode
        return urlsafe_b64encode(
            common.get_credentials().get('email', 'NoMail').encode('utf-8')).decode('utf-8')

    @property
    def auth_url(self):
        """The authURL token persisted in the local session table."""
        return g.LOCAL_DB.get_value('auth_url', table=TABLE_SESSION)

    @auth_url.setter
    def auth_url(self, value):
        g.LOCAL_DB.set_value('auth_url', value, TABLE_SESSION)
class MedicalFile(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> patient = models.ForeignKey(PatientInfo, on_delete=models.CASCADE, related_name='medical_file_of') <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated = models.DateTimeField(auto_now=True) | 病理档案表 | 62598fa7379a373c97d98f2b |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.