code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class LogModel(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def create(cls): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def put(self): <NEW_LINE> <INDENT> return app.log_mongo.insert(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find(cls, query): <NEW_LINE> <INDENT> return app.log_mongo.find(cls,query) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def aggregate(cls, statement): <NEW_LINE> <INDENT> return app.log_mongo.aggregate(cls,statement) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, data): <NEW_LINE> <INDENT> obj = cls() <NEW_LINE> [setattr(obj, k, data.get(k)) for k in data] <NEW_LINE> return obj
存储log数据 写一次后基本不修改 有查询需求
62598f9f3c8af77a43b67e42
class BOARD: <NEW_LINE> <INDENT> DIO0 = 5 <NEW_LINE> DIO1 = 23 <NEW_LINE> DIO2 = 24 <NEW_LINE> DIO3 = 25 <NEW_LINE> LED = 18 <NEW_LINE> SWITCH = 4 <NEW_LINE> spi = None <NEW_LINE> low_band = True <NEW_LINE> @staticmethod <NEW_LINE> def setup(): <NEW_LINE> <INDENT> GPIO.setmode(GPIO.BCM) <NEW_LINE> GPIO.setup(BOARD.LED, GPIO.OUT) <NEW_LINE> GPIO.output(BOARD.LED, 0) <NEW_LINE> GPIO.setup(BOARD.SWITCH, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) <NEW_LINE> for gpio_pin in [BOARD.DIO0, BOARD.DIO1, BOARD.DIO2, BOARD.DIO3]: <NEW_LINE> <INDENT> GPIO.setup(gpio_pin, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) <NEW_LINE> <DEDENT> BOARD.blink(.1, 2) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def teardown(): <NEW_LINE> <INDENT> GPIO.cleanup() <NEW_LINE> BOARD.spi.close() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def SpiDev(spi_bus=0, spi_cs=0): <NEW_LINE> <INDENT> BOARD.spi = spidev.SpiDev() <NEW_LINE> BOARD.spi.open(spi_bus, spi_cs) <NEW_LINE> BOARD.spi.max_speed_hz = 5000000 <NEW_LINE> return BOARD.spi <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def add_event_detect(dio_number, callback): <NEW_LINE> <INDENT> GPIO.add_event_detect(dio_number, GPIO.RISING, callback=callback) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def add_events(cb_dio0, cb_dio1, cb_dio2, cb_dio3, cb_dio4, cb_dio5, switch_cb=None): <NEW_LINE> <INDENT> BOARD.add_event_detect(BOARD.DIO0, callback=cb_dio0) <NEW_LINE> BOARD.add_event_detect(BOARD.DIO1, callback=cb_dio1) <NEW_LINE> BOARD.add_event_detect(BOARD.DIO2, callback=cb_dio2) <NEW_LINE> BOARD.add_event_detect(BOARD.DIO3, callback=cb_dio3) <NEW_LINE> if switch_cb is not None: <NEW_LINE> <INDENT> GPIO.add_event_detect(BOARD.SWITCH, GPIO.RISING, callback=switch_cb, bouncetime=300) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def led_on(value=1): <NEW_LINE> <INDENT> GPIO.output(BOARD.LED, value) <NEW_LINE> return value <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def led_off(): <NEW_LINE> <INDENT> GPIO.output(BOARD.LED, 0) <NEW_LINE> return 0 <NEW_LINE> 
<DEDENT> @staticmethod <NEW_LINE> def blink(time_sec, n_blink): <NEW_LINE> <INDENT> if n_blink == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> BOARD.led_on() <NEW_LINE> for i in range(n_blink): <NEW_LINE> <INDENT> time.sleep(time_sec) <NEW_LINE> BOARD.led_off() <NEW_LINE> time.sleep(time_sec) <NEW_LINE> BOARD.led_on() <NEW_LINE> <DEDENT> BOARD.led_off()
Board initialisation/teardown and pin configuration is kept here. Also, information about the RF module is kept here. This is the Raspberry Pi board with one LED and a modtronix inAir9B.
62598f9fb7558d5895463433
class Tweet(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> text = models.CharField(max_length=160) <NEW_LINE> created_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> country = models.CharField(max_length=30) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.text
Tweet model
62598f9fd6c5a102081e1f4b
class NameStartsWithDollar(Exception): <NEW_LINE> <INDENT> pass
this exception is raised when a routine which name starts with a dollar sign ($) is ade
62598f9f7d43ff2487427305
class HubDescription(ComponentData): <NEW_LINE> <INDENT> def __init__(self, num, numeric_prefix=False, is_icetop=False, hub_type=HubType.ALL): <NEW_LINE> <INDENT> super(HubDescription, self).__init__("stringHub", num, numeric_prefix=numeric_prefix) <NEW_LINE> connlist = [] <NEW_LINE> if hub_type in (HubType.ALL, HubType.PHYSICS_ONLY): <NEW_LINE> <INDENT> connlist += [ ("rdoutReq", Connector.INPUT), ("rdoutData", Connector.OUTPUT), ] <NEW_LINE> if is_icetop: <NEW_LINE> <INDENT> connlist.append(("icetopHit", Connector.OUTPUT)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> connlist.append(("stringHit", Connector.OUTPUT)) <NEW_LINE> <DEDENT> <DEDENT> if hub_type in (HubType.ALL, HubType.SECONDARY_ONLY): <NEW_LINE> <INDENT> connlist += [ ("moniData", Connector.OUTPUT), ("snData", Connector.OUTPUT), ("tcalData", Connector.OUTPUT), ] <NEW_LINE> <DEDENT> self.connections = connlist <NEW_LINE> <DEDENT> def get_fake_client(self, def_dom_geom, quiet=False): <NEW_LINE> <INDENT> if not self.is_fake: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return StringHub(self.name, self.num, def_dom_geom, self.connections, quiet=quiet)
StringHub data
62598f9f2ae34c7f260aaee7
class CNMLRadio(object): <NEW_LINE> <INDENT> def __init__(self, rid, protocol, snmp_name, snmp_index, ssid, mode, gain, angle, channel, clients, parent): <NEW_LINE> <INDENT> self.id = rid <NEW_LINE> self.protocol = protocol <NEW_LINE> self.snmp_name = snmp_name <NEW_LINE> self.snmp_index = snmp_index <NEW_LINE> self.ssid = ssid <NEW_LINE> self.mode = mode <NEW_LINE> self.antenna_gain = gain <NEW_LINE> self.antenna_angle = angle <NEW_LINE> self.channel = channel <NEW_LINE> self.clients_accepted = clients <NEW_LINE> self.interfaces = dict() <NEW_LINE> self.parentDevice = parent <NEW_LINE> <DEDENT> def getInterfaces(self): <NEW_LINE> <INDENT> return self.interfaces.values() <NEW_LINE> <DEDENT> def addInterface(self, iface): <NEW_LINE> <INDENT> self.interfaces[iface.id] = iface <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parseLxml(r, parent): <NEW_LINE> <INDENT> rid = int(r.get('id')) <NEW_LINE> protocol = r.get('protocol') <NEW_LINE> snmp_name = r.get('snmp_name') or None <NEW_LINE> snmp_index = r.get('snmp_index') or None <NEW_LINE> ssid = r.get('ssid') <NEW_LINE> mode = r.get('mode') <NEW_LINE> antenna_gain = r.get('antenna_gain') <NEW_LINE> antenna_angle = r.get('antenna_angle') <NEW_LINE> channel = r.get('channel') or 0 <NEW_LINE> channel = int(channel) <NEW_LINE> clients = r.get('clients_accepted') == 'Yes' <NEW_LINE> newradio = CNMLRadio(rid, protocol, snmp_name, snmp_index, ssid, mode, antenna_gain, antenna_angle, channel, clients, parent) <NEW_LINE> return newradio <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parseMinidom(r, parent): <NEW_LINE> <INDENT> rid = int(r.getAttribute('id')) <NEW_LINE> protocol = r.getAttribute('protocol') <NEW_LINE> snmp_name = r.getAttribute('snmp_name') or None <NEW_LINE> snmp_index = r.getAttribute('snmp_index') or None <NEW_LINE> ssid = r.getAttribute('ssid') <NEW_LINE> mode = r.getAttribute('mode') <NEW_LINE> antenna_gain = r.getAttribute('antenna_gain') <NEW_LINE> antenna_angle = r.getAttribute('antenna_angle') 
<NEW_LINE> channel = r.getAttribute('channel') or 0 <NEW_LINE> channel = int(channel) <NEW_LINE> clients = r.getAttribute('clients_accepted') == 'Yes' <NEW_LINE> newradio = CNMLRadio(rid, protocol, snmp_name, snmp_index, ssid, mode, antenna_gain, antenna_angle, channel, clients, parent) <NEW_LINE> return newradio <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parse(r, parent): <NEW_LINE> <INDENT> if LXML: <NEW_LINE> <INDENT> return CNMLRadio.parseLxml(r, parent) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CNMLRadio.parseMinidom(r, parent) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "<CNMLRadio Id:%s>" % (self.id)
This CNMLRadio class represents a radio of a device in the network
62598f9f66656f66f7d5a1f7
class Singleton(ManagedProperties): <NEW_LINE> <INDENT> _instances = {} <NEW_LINE> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> result = super().__new__(cls, *args, **kwargs) <NEW_LINE> S.register(result) <NEW_LINE> return result <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> if self not in Singleton._instances: <NEW_LINE> <INDENT> Singleton._instances[self] = super().__call__(*args, **kwargs) <NEW_LINE> <DEDENT> return Singleton._instances[self] <NEW_LINE> def __getnewargs__(self): <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> self.__getnewargs__ = __getnewargs__
Metaclass for singleton classes. A singleton class has only one instance which is returned every time the class is instantiated. Additionally, this instance can be accessed through the global registry object ``S`` as ``S.<class_name>``. Examples ======== >>> from sympy import S, Basic >>> from sympy.core.singleton import Singleton >>> from sympy.core.compatibility import with_metaclass >>> class MySingleton(Basic, metaclass=Singleton): ... pass >>> Basic() is Basic() False >>> MySingleton() is MySingleton() True >>> S.MySingleton is MySingleton() True Notes ===== Instance creation is delayed until the first time the value is accessed. (SymPy versions before 1.0 would create the instance during class creation time, which would be prone to import cycles.) This metaclass is a subclass of ManagedProperties because that is the metaclass of many classes that need to be Singletons (Python does not allow subclasses to have a different metaclass than the superclass, except the subclass may use a subclassed metaclass).
62598f9f55399d3f05626328
class TestNotifyGroup(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.hass = get_test_home_assistant() <NEW_LINE> self.events = [] <NEW_LINE> self.assertTrue(setup_component(self.hass, notify.DOMAIN, { 'notify': [{ 'name': 'demo1', 'platform': 'demo' }, { 'name': 'demo2', 'platform': 'demo' }] })) <NEW_LINE> self.service = group.get_service(self.hass, {'services': [ {'service': 'demo1'}, {'service': 'demo2', 'data': {'target': 'unnamed device', 'data': {'test': 'message'}}}]}) <NEW_LINE> assert self.service is not None <NEW_LINE> def record_event(event): <NEW_LINE> <INDENT> self.events.append(event) <NEW_LINE> <DEDENT> self.hass.bus.listen("notify", record_event) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.hass.stop() <NEW_LINE> <DEDENT> def test_send_message_to_group(self): <NEW_LINE> <INDENT> self.service.send_message('Hello', title='Test notification') <NEW_LINE> self.hass.block_till_done() <NEW_LINE> self.assertTrue(len(self.events) == 2) <NEW_LINE> last_event = self.events[-1] <NEW_LINE> self.assertEqual(last_event.data[notify.ATTR_TITLE], 'Test notification') <NEW_LINE> self.assertEqual(last_event.data[notify.ATTR_MESSAGE], 'Hello') <NEW_LINE> <DEDENT> def test_send_message_with_data(self): <NEW_LINE> <INDENT> notify_data = {'hello': 'world'} <NEW_LINE> self.service.send_message('Hello', title='Test notification', data=notify_data) <NEW_LINE> self.hass.block_till_done() <NEW_LINE> last_event = self.events[-1] <NEW_LINE> self.assertEqual(last_event.data[notify.ATTR_TITLE], 'Test notification') <NEW_LINE> self.assertEqual(last_event.data[notify.ATTR_MESSAGE], 'Hello') <NEW_LINE> self.assertEqual(last_event.data[notify.ATTR_DATA], notify_data) <NEW_LINE> <DEDENT> def test_entity_data_passes_through(self): <NEW_LINE> <INDENT> notify_data = {'hello': 'world'} <NEW_LINE> self.service.send_message('Hello', title='Test notification', data=notify_data) <NEW_LINE> self.hass.block_till_done() <NEW_LINE> data = 
self.events[-1].data <NEW_LINE> assert { 'message': 'Hello', 'target': ['unnamed device'], 'title': 'Test notification', 'data': {'hello': 'world', 'test': 'message'} } == data
Test the notify.group platform.
62598f9f4f6381625f1993bf
class Message(Base): <NEW_LINE> <INDENT> __tablename__ = 'message' <NEW_LINE> message_id = Column(Integer, primary_key=True, index=True) <NEW_LINE> chat_user_id = Column(Integer, ForeignKey('chat_user.chat_user_id'), nullable=False) <NEW_LINE> account_id = Column(Integer, ForeignKey('account.account_id'), nullable=False) <NEW_LINE> channel_id = Column(Integer, ForeignKey('channel.channel_id'), nullable=False) <NEW_LINE> keyword_id = Column(Integer, ForeignKey('keyword.keyword_id'), nullable=False) <NEW_LINE> message_text = Column(String(10000), default=None) <NEW_LINE> message_is_mention = Column(Boolean(), default=None) <NEW_LINE> message_is_scheduled = Column(Boolean(), default=None) <NEW_LINE> message_is_fwd = Column(Boolean(), default=None) <NEW_LINE> message_is_reply = Column(Boolean(), default=None) <NEW_LINE> message_is_bot = Column(Boolean(), default=None) <NEW_LINE> message_is_group = Column(Boolean(), default=None) <NEW_LINE> message_is_private = Column(Boolean(), default=None) <NEW_LINE> message_is_channel = Column(Boolean(), default=None) <NEW_LINE> message_channel_size = Column(Integer, default=None) <NEW_LINE> message_tcreate = Column(DateTime, default=datetime.now()) <NEW_LINE> user = relationship('ChatUser', back_populates='messages') <NEW_LINE> account = relationship('Account', back_populates='messages') <NEW_LINE> channel = relationship('Channel', back_populates='messages') <NEW_LINE> notifications = relationship('Notification')
The actual message from a channel and from a user
62598f9f460517430c431f5e
class Cupcake: <NEW_LINE> <INDENT> cache = {} <NEW_LINE> class_name = 'Cupcake' <NEW_LINE> @staticmethod <NEW_LINE> def scale_recipe(ingredients, amount): <NEW_LINE> <INDENT> return [(name, qty * amount) for name, qty in ingredients] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls, name): <NEW_LINE> <INDENT> if name not in cls.cache: <NEW_LINE> <INDENT> print(f"Sorry, that {cls.class_name.lower()} doesn't exist") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return cls.cache[name] <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, name, flavor, price): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.flavor = flavor <NEW_LINE> self.price = price <NEW_LINE> self.qty = 0 <NEW_LINE> self.cache[self.name] = self <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<{self.class_name} name="{self.name}" qty={self.qty}>' <NEW_LINE> <DEDENT> def add_stock(self, amount): <NEW_LINE> <INDENT> self.qty += amount <NEW_LINE> <DEDENT> def sell(self, amount): <NEW_LINE> <INDENT> if self.qty == 0: <NEW_LINE> <INDENT> print(f'Sorry, these {self.class_name.lower()}s are sold out') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.qty -= amount <NEW_LINE> if self.qty < 0: <NEW_LINE> <INDENT> self.qty = 0
A cupcake.
62598f9fa219f33f346c6620
class Follower(PyoObject): <NEW_LINE> <INDENT> def __init__(self, input, freq=20, mul=1, add=0): <NEW_LINE> <INDENT> PyoObject.__init__(self, mul, add) <NEW_LINE> self._input = input <NEW_LINE> self._freq = freq <NEW_LINE> self._in_fader = InputFader(input) <NEW_LINE> in_fader, freq, mul, add, lmax = convertArgsToLists(self._in_fader, freq, mul, add) <NEW_LINE> self._base_objs = [Follower_base(wrap(in_fader,i), wrap(freq,i), wrap(mul,i), wrap(add,i)) for i in range(lmax)] <NEW_LINE> <DEDENT> def setInput(self, x, fadetime=0.05): <NEW_LINE> <INDENT> self._input = x <NEW_LINE> self._in_fader.setInput(x, fadetime) <NEW_LINE> <DEDENT> def setFreq(self, x): <NEW_LINE> <INDENT> self._freq = x <NEW_LINE> x, lmax = convertArgsToLists(x) <NEW_LINE> [obj.setFreq(wrap(x,i)) for i, obj in enumerate(self._base_objs)] <NEW_LINE> <DEDENT> def out(self, chnl=0, inc=1, dur=0, delay=0): <NEW_LINE> <INDENT> return self.play(dur, delay) <NEW_LINE> <DEDENT> def ctrl(self, map_list=None, title=None, wxnoserver=False): <NEW_LINE> <INDENT> self._map_list = [SLMap(1., 500., 'log', 'freq', self._freq)] <NEW_LINE> PyoObject.ctrl(self, map_list, title, wxnoserver) <NEW_LINE> <DEDENT> @property <NEW_LINE> def input(self): <NEW_LINE> <INDENT> return self._input <NEW_LINE> <DEDENT> @input.setter <NEW_LINE> def input(self, x): self.setInput(x) <NEW_LINE> @property <NEW_LINE> def freq(self): <NEW_LINE> <INDENT> return self._freq <NEW_LINE> <DEDENT> @freq.setter <NEW_LINE> def freq(self, x): self.setFreq(x)
Envelope follower. Output signal is the continuous mean amplitude of an input signal. :Parent: :py:class:`PyoObject` :Args: input : PyoObject Input signal to process. freq : float or PyoObject, optional Cutoff frequency of the filter in hertz. Default to 20. .. note:: The out() method is bypassed. Follower's signal can not be sent to audio outs. >>> s = Server().boot() >>> s.start() >>> sf = SfPlayer(SNDS_PATH + "/transparent.aif", loop=True, mul=.4).out() >>> fol = Follower(sf, freq=30) >>> n = Noise(mul=fol).out(1)
62598f9f442bda511e95c262
class HttpExchange(models.Model): <NEW_LINE> <INDENT> id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) <NEW_LINE> content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) <NEW_LINE> object_id = models.PositiveIntegerField() <NEW_LINE> related_object = GenericForeignKey('content_type', 'object_id') <NEW_LINE> date = models.DateTimeField(_('Date'), auto_now_add=True) <NEW_LINE> request_headers = models.JSONField( _('Request headers'), null=True, blank=True, ) <NEW_LINE> request_body = models.TextField(_('Request body')) <NEW_LINE> response_headers = models.JSONField( _('Request headers'), null=True, blank=True, ) <NEW_LINE> response_body = models.TextField(_('Response body')) <NEW_LINE> status_code = models.IntegerField( _('Status code'), default=status.HTTP_200_OK, ) <NEW_LINE> objects = HttpExchangeManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['-date'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return _('Exchange {0}').format(self.pk) <NEW_LINE> <DEDENT> @property <NEW_LINE> def failed(self): <NEW_LINE> <INDENT> return not (200 <= self.status_code < 300) <NEW_LINE> <DEDENT> def formatted_json(self, field): <NEW_LINE> <INDENT> value = getattr(self, field) or '' <NEW_LINE> try: <NEW_LINE> <INDENT> if not isinstance(value, dict): <NEW_LINE> <INDENT> value = json.loads(value) <NEW_LINE> <DEDENT> json_value = json.dumps(value, sort_keys=True, indent=2) <NEW_LINE> formatter = HtmlFormatter() <NEW_LINE> html = highlight(json_value, JsonLexer(), formatter) <NEW_LINE> return mark_safe(html) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def formatted_request_body(self): <NEW_LINE> <INDENT> return self.formatted_json('request_body') <NEW_LINE> <DEDENT> @property <NEW_LINE> def formatted_response_body(self): <NEW_LINE> <INDENT> return self.formatted_json('response_body')
HTTP request/response exchange.
62598f9f07f4c71912baf252
class StatsHuntCronFlow(cronjobs.SystemCronFlow): <NEW_LINE> <INDENT> frequency = rdfvalue.Duration("1d") <NEW_LINE> lifetime = rdfvalue.Duration("30m") <NEW_LINE> def GetOutputPlugins(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @flow.StateHandler() <NEW_LINE> def Start(self): <NEW_LINE> <INDENT> with hunts.GRRHunt.StartHunt( hunt_name="StatsHunt", output_plugins=self.GetOutputPlugins(), regex_rules=[], token=self.token) as hunt: <NEW_LINE> <INDENT> runner = hunt.GetRunner() <NEW_LINE> runner.args.client_rate = 0 <NEW_LINE> runner.args.client_limit = config_lib.CONFIG.Get("StatsHunt.ClientLimit") <NEW_LINE> runner.args.expiry_time = self.frequency <NEW_LINE> runner.args.description = "Stats hunt for high-res client info." <NEW_LINE> runner.Start()
A cron job which runs a continuous stats hunt on all clients. This hunt is designed to collect lightweight information from all clients with very high resolution (similar to poll period). We roll over to a new hunt to move to a new collection, and pick up any clients that might have fallen out of the collection loop due to a worker dying or some other problem.
62598f9f85dfad0860cbf978
class FSGlobalEventHandler(FSLocalEventHandler): <NEW_LINE> <INDENT> NODE_PREFIX = True <NEW_LINE> DEFAULT_LEVEL = 'verbose' <NEW_LINE> def __init__(self, command): <NEW_LINE> <INDENT> FSLocalEventHandler.__init__(self, command) <NEW_LINE> self._timer = None <NEW_LINE> self.status_changed = False <NEW_LINE> <DEDENT> def event_callback(self, evtype, **kwargs): <NEW_LINE> <INDENT> FSLocalEventHandler.event_callback(self, evtype, **kwargs) <NEW_LINE> if kwargs.get('status') in ('start', 'done', 'failed'): <NEW_LINE> <INDENT> self._update() <NEW_LINE> <DEDENT> <DEDENT> def handle_pre(self): <NEW_LINE> <INDENT> header = self.command.NAME.capitalize() <NEW_LINE> comps = self.fs.components.managed(supports=self.fs_action) <NEW_LINE> self.log_verbose("%s of %d component(s) of %s on %s" % (header, len(comps), self.fs.fs_name, comps.servers())) <NEW_LINE> <DEDENT> def pre(self, fs): <NEW_LINE> <INDENT> FSLocalEventHandler.pre(self, fs) <NEW_LINE> self._update() <NEW_LINE> <DEDENT> def ev_timer(self, timer): <NEW_LINE> <INDENT> filter_key = lambda t: t.state == INPROGRESS or t._list_action() <NEW_LINE> targets = self.fs.components.managed().filter(key=filter_key) <NEW_LINE> target_servers = targets.servers() <NEW_LINE> target_count = len(targets) <NEW_LINE> if target_count > 0 and self.status_changed: <NEW_LINE> <INDENT> self.status_changed = False <NEW_LINE> now = datetime.datetime.now().strftime("%H:%M") <NEW_LINE> if len(target_servers) > 8: <NEW_LINE> <INDENT> print("[%s] In progress for %d component(s) on %d servers ..." % (now, target_count, len(target_servers))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("[%s] In progress for %d component(s) on %s ..." 
% (now, target_count, target_servers)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _update(self): <NEW_LINE> <INDENT> self.status_changed = True <NEW_LINE> if self.verbose > 0 and not (self._timer and self._timer.is_valid()): <NEW_LINE> <INDENT> self._timer = task_self().timer(2.0, handler=self, interval=20, autoclose=True) <NEW_LINE> assert self._timer != None
Command event handler used when Shine is called for a global (admin) processing. This means local and distant commands could be executed.
62598f9f3539df3088ecc0bc
class Talkey(object): <NEW_LINE> <INDENT> def __init__(self, preferred_languages=None, preferred_factor=80.0, engine_preference=None, **config): <NEW_LINE> <INDENT> self.preferred_languages = preferred_languages or [] <NEW_LINE> self.preferred_factor = preferred_factor <NEW_LINE> engine_preference = engine_preference or enumerate_engines() <NEW_LINE> for ename in enumerate_engines(): <NEW_LINE> <INDENT> if ename not in engine_preference: <NEW_LINE> <INDENT> engine_preference.append(ename) <NEW_LINE> <DEDENT> <DEDENT> self.engines = [] <NEW_LINE> self.languages = set() <NEW_LINE> for ename in engine_preference: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> options = config.get(ename, {}).get('options', {}) <NEW_LINE> defaults = config.get(ename, {}).get('defaults', {}) <NEW_LINE> eng = create_engine(ename, options=options, defaults=defaults) <NEW_LINE> self.engines.append(eng) <NEW_LINE> languages = config.get(ename, {}).get('languages', {}) <NEW_LINE> for lang, conf in languages.items(): <NEW_LINE> <INDENT> eng.configure(language=lang, **conf) <NEW_LINE> <DEDENT> <DEDENT> except TTSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> for eng in self.engines: <NEW_LINE> <INDENT> self.languages.update(eng.languages.keys()) <NEW_LINE> <DEDENT> langid.set_languages(self.languages) <NEW_LINE> if not self.languages: <NEW_LINE> <INDENT> raise TTSError('No supported languages') <NEW_LINE> <DEDENT> <DEDENT> def classify(self, txt): <NEW_LINE> <INDENT> ranks = [] <NEW_LINE> for lang, score in langid.rank(txt): <NEW_LINE> <INDENT> if lang in self.preferred_languages: <NEW_LINE> <INDENT> score += self.preferred_factor <NEW_LINE> <DEDENT> ranks.append((lang, score)) <NEW_LINE> <DEDENT> ranks.sort(key=lambda x: x[1], reverse=True) <NEW_LINE> return ranks[0][0] <NEW_LINE> <DEDENT> def get_engine_for_lang(self, lang): <NEW_LINE> <INDENT> for eng in self.engines: <NEW_LINE> <INDENT> if lang in eng.languages.keys(): <NEW_LINE> <INDENT> return eng <NEW_LINE> <DEDENT> <DEDENT> 
raise TTSError('Could not match language') <NEW_LINE> <DEDENT> def say(self, txt, lang=None): <NEW_LINE> <INDENT> lang = lang or self.classify(txt) <NEW_LINE> self.get_engine_for_lang(lang).say(txt, language=lang)
Manages engines and allows multi-lingual say() ``preferred_languages`` A list of languages that are weighted in preference. This is a weighting to assist the detection of language by classify(). ``preferred_factor`` The weighting factor to prefer the ``preferred_languages`` list. Higher number skews towards preference. ``engine_preference`` Specify preferred engines in order of preference. ``**config`` Engine-specfic configuration, e.g.: .. code-block:: python # Key is the engine SLUG, in this case ``espeak`` espeak={ # Specify the engine options: 'options': { 'enabled': True, }, # Specify some default voice options 'defaults': { 'words_per_minute': 150, 'variant': 'f4', }, # Here you specify language-specific voice options # e.g. for english we prefer the mbrola en1 voice 'languages': { 'en': { 'voice': 'english-mb-en1', 'words_per_minute': 130 }, } }
62598f9f3d592f4c4edbacd5
class Unit: <NEW_LINE> <INDENT> def __init__(self, name, symbol, symbol_latex): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.symbol = symbol <NEW_LINE> self.symbol_latex = symbol_latex <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Unit({})".format(self.name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Unit of quantity. .. note:: Perhaps inherit from tuple or :class:`collections.namedTuple`?
62598f9f4527f215b58e9ceb
@register(names=('F[]', 'Float_t[]'), builtin=True) <NEW_LINE> class FloatArray(BaseArray): <NEW_LINE> <INDENT> type = 'F' <NEW_LINE> typename = 'Float_t' <NEW_LINE> convert = Float.convert <NEW_LINE> def __new__(cls, length, default=0., **kwargs): <NEW_LINE> <INDENT> return BaseArray.__new__( cls, 'f', [Float.convert(default)] * length) <NEW_LINE> <DEDENT> def __init__(self, length, default=0., **kwargs): <NEW_LINE> <INDENT> BaseArray.__init__(self, **kwargs) <NEW_LINE> self.default = Float.convert(default)
This is an array of floats
62598f9f56ac1b37e6301ff2
class SpamScores(models.Model): <NEW_LINE> <INDENT> score = models.FloatField() <NEW_LINE> count = models.IntegerField() <NEW_LINE> objects = SpamScoresManager() <NEW_LINE> def obj_to_dict(self): <NEW_LINE> <INDENT> vals = [(field.name, getattr(self, field.name)) for field in self._meta.fields] <NEW_LINE> return dict(vals) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> managed = False
spam scores
62598f9ff7d966606f747def
class Tsig(object): <NEW_LINE> <INDENT> algs = { "hmac-md5": 16, "hmac-sha1": 20, "hmac-sha224": 28, "hmac-sha256": 32, "hmac-sha384": 48, "hmac-sha512": 64 } <NEW_LINE> vocabulary = string.ascii_uppercase + string.ascii_lowercase + string.digits <NEW_LINE> def __init__(self, name=None, alg=None, key=None): <NEW_LINE> <INDENT> if not name: <NEW_LINE> <INDENT> nlabels = random.randint(1, 10) <NEW_LINE> self.name = "" <NEW_LINE> for i in range(nlabels): <NEW_LINE> <INDENT> label_len = random.randint(1, 63) <NEW_LINE> if len(self.name) + 1 + label_len > 253: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if i > 0: <NEW_LINE> <INDENT> self.name += "." <NEW_LINE> <DEDENT> self.name += "".join(random.choice(Tsig.vocabulary) for x in range(label_len)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.name = str(name) <NEW_LINE> <DEDENT> if not alg: <NEW_LINE> <INDENT> self.alg = random.choice(list(Tsig.algs.keys())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if alg not in Tsig.algs: <NEW_LINE> <INDENT> raise Failed("Unsupported TSIG algorithm %s" % alg) <NEW_LINE> <DEDENT> self.alg = alg <NEW_LINE> <DEDENT> if not key: <NEW_LINE> <INDENT> self.key = base64.b64encode(os.urandom(Tsig.algs[self.alg])). decode('ascii') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.key = str(key) <NEW_LINE> <DEDENT> if self.alg == "hmac-md5": <NEW_LINE> <INDENT> _alg = "hmac-md5.sig-alg.reg.int" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _alg = self.alg <NEW_LINE> <DEDENT> _key = dns.tsigkeyring.from_text({ self.name: self.key }) <NEW_LINE> self.key_params = dict(keyname=self.name, keyalgorithm=_alg, keyring=_key) <NEW_LINE> <DEDENT> def dump(self, filename): <NEW_LINE> <INDENT> s = dnstest.config.BindConf() <NEW_LINE> s.begin("key", self.name) <NEW_LINE> s.item("algorithm", self.alg) <NEW_LINE> s.item_str("secret", self.key) <NEW_LINE> s.end() <NEW_LINE> file = open(filename, mode="w") <NEW_LINE> file.write(s.conf) <NEW_LINE> file.close()
TSIG key generator
62598f9f16aa5153ce400307
class PrimitivePoint: <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return type(self) == type(other) and self.x == other.x and self.y == other.y <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '(%s, %s)' % (self.x, self.y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'PrimitivePoint(%r, %r)' % (self.x, self.y)
Test class for adding additional primitive
62598f9f6fb2d068a7693d38
class OrderEditVoucherForm(forms.ModelForm): <NEW_LINE> <INDENT> voucher = AjaxSelect2ChoiceField( queryset=Voucher.objects.all(), fetch_data_url=reverse_lazy('dashboard:ajax-vouchers'), min_input=0) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Order <NEW_LINE> fields = ['voucher'] <NEW_LINE> labels = { 'voucher': pgettext_lazy('Order voucher', 'Voucher')} <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.old_voucher = self.instance.voucher <NEW_LINE> if self.instance.voucher: <NEW_LINE> <INDENT> self.fields['voucher'].set_initial(self.instance.voucher) <NEW_LINE> <DEDENT> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> voucher = self.instance.voucher <NEW_LINE> if self.old_voucher != voucher: <NEW_LINE> <INDENT> if self.old_voucher: <NEW_LINE> <INDENT> decrease_voucher_usage(self.old_voucher) <NEW_LINE> <DEDENT> increase_voucher_usage(voucher) <NEW_LINE> <DEDENT> self.instance.discount_name = voucher.name or '' <NEW_LINE> recalculate_order(self.instance) <NEW_LINE> return super().save(commit)
Edit discount amount in an order.
62598f9f3eb6a72ae038a449
class WorkQueueManagerCleaner(BaseWorkerThread): <NEW_LINE> <INDENT> def __init__(self, queue, config): <NEW_LINE> <INDENT> BaseWorkerThread.__init__(self) <NEW_LINE> self.forbiddenStatus = ["aborted", "aborted-completed", "force-complete", "completed"] <NEW_LINE> self.queue = queue <NEW_LINE> self.config = config <NEW_LINE> self.reqmgr2Svc = ReqMgr(self.config.General.ReqMgr2ServiceURL) <NEW_LINE> myThread = threading.currentThread() <NEW_LINE> daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger, dbinterface=myThread.dbi) <NEW_LINE> self.finishedWorflowCheck = daoFactory(classname="Subscriptions.CountFinishedSubscriptionsByWorkflow") <NEW_LINE> <DEDENT> def setup(self, parameters): <NEW_LINE> <INDENT> t = random.randrange(self.idleTime) <NEW_LINE> self.logger.info('Sleeping for %d seconds before 1st loop' % t) <NEW_LINE> time.sleep(t) <NEW_LINE> <DEDENT> @timeFunction <NEW_LINE> def algorithm(self, parameters): <NEW_LINE> <INDENT> self.queue.logger.info("Start updating & cleaning...") <NEW_LINE> try: <NEW_LINE> <INDENT> self.queue.performQueueCleanupActions() <NEW_LINE> requests = self.reqmgr2Svc.getRequestByStatusFromMemoryCache(self.forbiddenStatus).getData() <NEW_LINE> results = self.finishedWorflowCheck.execute(workflowNames=requests) <NEW_LINE> requestsToKill = [reqInfo["workflow"] for reqInfo in results if reqInfo["open"] > 0] <NEW_LINE> self.queue.logger.info("Killing %d requests in WMBS ...", len(requestsToKill)) <NEW_LINE> self.queue.killWMBSWorkflows(requestsToKill) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> self.queue.logger.exception("Error cleaning queue: %s", str(ex)) <NEW_LINE> <DEDENT> self.queue.logger.info("Finished updating & cleaning.")
Cleans expired items, updates element status.
62598f9fa17c0f6771d5c042
class StochasticGradientDescent(TradingFactory): <NEW_LINE> <INDENT> def initialize(self, properties): <NEW_LINE> <INDENT> for middleware in common_middlewares(properties, self.identity): <NEW_LINE> <INDENT> self.use(middleware) <NEW_LINE> <DEDENT> self.rebalance_period = properties.get('rebalance_period', 5) <NEW_LINE> self.bet_amount = self.capital_base <NEW_LINE> self.max_notional = self.capital_base + 0.1 <NEW_LINE> self.min_notional = -self.capital_base <NEW_LINE> self.gradient_iterations = int( properties.get('gradient_iterations', 5)) <NEW_LINE> self.calculate_theta = calculate_theta( refresh_period=int(properties.get('refresh', 1)), window_length=int(properties.get('window', 60)) ) <NEW_LINE> <DEDENT> def event(self, data): <NEW_LINE> <INDENT> signals = {'buy': {}, 'sell': {}} <NEW_LINE> scale = {} <NEW_LINE> for stock in data: <NEW_LINE> <INDENT> thetaAndPrices = self.calculate_theta.handle_data( data, stock, self.gradient_iterations) <NEW_LINE> if thetaAndPrices is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> theta, historicalPrices = thetaAndPrices <NEW_LINE> indicator = np.dot(theta, historicalPrices) <NEW_LINE> hlen = sum([k * k for k in historicalPrices]) <NEW_LINE> tlen = sum([j * j for j in theta]) <NEW_LINE> indicator /= float(hlen * tlen) <NEW_LINE> current_Prices = data[stock].price <NEW_LINE> notional = self.portfolio.positions[stock].amount * current_Prices <NEW_LINE> transaction_price = indicator * self.capital_base * 10000 <NEW_LINE> if indicator >= 0 and notional < self.max_notional and self.elapsed_time.days % self.rebalance_period == 0: <NEW_LINE> <INDENT> if self.manager: <NEW_LINE> <INDENT> scale[stock] = abs(indicator * self.capital_base) <NEW_LINE> signals['buy'][stock] = data[stock] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.order(stock, transaction_price) <NEW_LINE> self.logger.notice("[{}] {} shares of {} bought.".format( self.datetime, transaction_price, stock)) <NEW_LINE> <DEDENT> <DEDENT> if indicator < 0 and 
notional > self.min_notional and self.elapsed_time.days % self.rebalance_period == 0: <NEW_LINE> <INDENT> if self.manager: <NEW_LINE> <INDENT> scale[stock] = abs(indicator * self.capital_base) <NEW_LINE> signals['sell'][stock] = data[stock] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.order(stock, transaction_price) <NEW_LINE> self.logger.notice("[{}] {} shares of {} sold.".format( self.datetime, abs(transaction_price), stock)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.manager.advise(scale=scale) <NEW_LINE> return signals
doc: Randomly chooses training data, gradually decreases the learning rate, and penalizes data points which deviate significantly from what's predicted. Here I used an averaged SGD method that is tested to outperform simply picking the last predictor value trained after a certain number of iterations. parameters: rebalance_period: time period to rebalance portfolio [default 5] gradient_iterations: hinge loss iterations [default 5] refresh: refresh period for theta computation [default 1] window: lookback window for theta computation [default 60]
62598f9f4e4d56256637222c
class GitRelease(github.GithubObject.CompletableGithubObject): <NEW_LINE> <INDENT> @property <NEW_LINE> def body(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._body) <NEW_LINE> return self._body.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._title) <NEW_LINE> return self._title.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def tag_name(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._tag_name) <NEW_LINE> return self._tag_name.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def author(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._author) <NEW_LINE> return self._author.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._url) <NEW_LINE> return self._url.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def upload_url(self): <NEW_LINE> <INDENT> self._completeIfNotSet(self._upload_url) <NEW_LINE> return self._upload_url.value <NEW_LINE> <DEDENT> def delete_release(self): <NEW_LINE> <INDENT> headers, data = self._requester.requestJsonAndCheck( "DELETE", self.url ) <NEW_LINE> return True <NEW_LINE> <DEDENT> def update_release(self, name, message, draft=False, prerelease=False): <NEW_LINE> <INDENT> assert isinstance(name, str), name <NEW_LINE> assert isinstance(message, str), message <NEW_LINE> assert isinstance(draft, bool), draft <NEW_LINE> assert isinstance(prerelease, bool), prerelease <NEW_LINE> post_parameters = { "tag_name": self.tag_name, "name": name, "body": message, "draft": draft, "prerelease": prerelease, } <NEW_LINE> headers, data = self._requester.requestJsonAndCheck( "PATCH", self.url, input=post_parameters ) <NEW_LINE> return github.GitRelease.GitRelease(self._requester, headers, data, completed=True) <NEW_LINE> <DEDENT> def _initAttributes(self): <NEW_LINE> <INDENT> self._body = github.GithubObject.NotSet <NEW_LINE> self._title = github.GithubObject.NotSet <NEW_LINE> self._tag_name = github.GithubObject.NotSet 
<NEW_LINE> self._author = github.GithubObject.NotSet <NEW_LINE> self._url = github.GithubObject.NotSet <NEW_LINE> self._upload_url = github.GithubObject.NotSet <NEW_LINE> <DEDENT> def _useAttributes(self, attributes): <NEW_LINE> <INDENT> if "body" in attributes: <NEW_LINE> <INDENT> self._body = self._makeStringAttribute(attributes["body"]) <NEW_LINE> <DEDENT> if "name" in attributes: <NEW_LINE> <INDENT> self._title = self._makeStringAttribute(attributes["name"]) <NEW_LINE> <DEDENT> if "tag_name" in attributes: <NEW_LINE> <INDENT> self._tag_name = self._makeStringAttribute(attributes["tag_name"]) <NEW_LINE> <DEDENT> if "author" in attributes: <NEW_LINE> <INDENT> self._author = self._makeClassAttribute(github.GitAuthor.GitAuthor, attributes["author"]) <NEW_LINE> <DEDENT> if "url" in attributes: <NEW_LINE> <INDENT> self._url = self._makeStringAttribute(attributes["url"]) <NEW_LINE> <DEDENT> if "upload_url" in attributes: <NEW_LINE> <INDENT> self._upload_url = self._makeStringAttribute(attributes["upload_url"])
This class represents GitRelease as returned for example by https://developer.github.com/v3/repos/releases
62598f9f92d797404e388a6a
class NavierStokesScalar(PDESubSystem): <NEW_LINE> <INDENT> def form(self, u, v_u, p, v_p, u_, nu, dt, u_1, u_2, f, c, v_c, c_, c_1, c_2, Pr, **kwargs): <NEW_LINE> <INDENT> if not self.prm['iteration_type'] == 'Newton': <NEW_LINE> <INDENT> info_red('Scheme is not linearized and requires Newton iteration type') <NEW_LINE> <DEDENT> U = 0.5*(u + u_1) <NEW_LINE> C = 0.5*(c + c_1) <NEW_LINE> U1 = 1.5*u_1 - 0.5*u_2 <NEW_LINE> C1 = 1.5*c_1 - 0.5*c_2 <NEW_LINE> Fu = (1./dt)*inner(u - u_1, v_u)*dx + self.conv(v_u, U, U1)*dx + nu*inner(grad(v_u), grad(U) + grad(U).T)*dx - inner(div(v_u), p)*dx - inner(v_p, div(u))*dx - inner(v_u, f)*dx <NEW_LINE> Fc = (1./dt)*inner(c - c_1, v_c)*dx + inner(dot(U1, grad(C)), v_c)*dx + nu/Pr*inner(grad(v_c), grad(C))*dx <NEW_LINE> return Fu + Fc
Fully coupled transient Navier-Stokes solver plus one passive scalar
62598f9fa17c0f6771d5c043
class ShtatToExcel: <NEW_LINE> <INDENT> def __init__(self, file: str): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.db = SqliteDB() <NEW_LINE> with self.db as cur: <NEW_LINE> <INDENT> cur.execute("SELECT * from salaries WHERE fio NOT LIKE '%Вакансия%' ORDER BY department_code;") <NEW_LINE> <DEDENT> self.work_data = cur.fetchall() <NEW_LINE> self.fill = PatternFill(fill_type='solid', start_color='c1c1c1', end_color='c2c2c2') <NEW_LINE> self.border = Border(left=Side(border_style='thin', color='FF000000'), right=Side(border_style='thin', color='FF000000'), top=Side(border_style='thin', color='FF000000'), bottom=Side(border_style='thin', color='FF000000'), diagonal=Side(border_style='thin', color='FF000000'), diagonal_direction=0, outline=Side(border_style='thin', color='FF000000'), vertical=Side(border_style='thin', color='FF000000'), horizontal=Side(border_style='thin', color='FF000000') ) <NEW_LINE> self.align_center = Alignment(horizontal='center', vertical='center', text_rotation=0, wrap_text=False, shrink_to_fit=False, indent=0) <NEW_LINE> self.align_left = Alignment(horizontal='right', vertical='center', text_rotation=0, wrap_text=False, shrink_to_fit=False, indent=0) <NEW_LINE> self.wb = Workbook() <NEW_LINE> self.ws = self.wb.active <NEW_LINE> self.ws.title = 'Штатное расписание' <NEW_LINE> <DEDENT> def shtat_to_excel(self) -> None: <NEW_LINE> <INDENT> rows = [["№ позиции", "Подразделение", "Должность", "ФИО", "Больничные дни", "Отпуск", "Простой"]] <NEW_LINE> for i in self.work_data: <NEW_LINE> <INDENT> rows.append([i[0], i[1], i[2], i[6], 0, 0, 0]) <NEW_LINE> <DEDENT> for row in rows: <NEW_LINE> <INDENT> self.ws.append(row) <NEW_LINE> <DEDENT> for i in range(0, self.ws.max_column): <NEW_LINE> <INDENT> self.ws[f'{chr(65 + i)}1'].fill = self.fill <NEW_LINE> <DEDENT> max_row = self.ws.max_row <NEW_LINE> i = 1 <NEW_LINE> while i <= max_row: <NEW_LINE> <INDENT> rd = self.ws.row_dimensions[i] <NEW_LINE> rd.height = 16 <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> 
for cellObj in self.ws[f'A1:{chr(64 + self.ws.max_column)}{self.ws.max_row}']: <NEW_LINE> <INDENT> for cell in cellObj: <NEW_LINE> <INDENT> self.ws[cell.coordinate].border = self.border <NEW_LINE> self.ws[cell.coordinate].alignment = self.align_center <NEW_LINE> <DEDENT> <DEDENT> for cellObj in self.ws[f'A1:{chr(64 + self.ws.max_column)}{self.ws.max_row}']: <NEW_LINE> <INDENT> for cell in cellObj: <NEW_LINE> <INDENT> self.ws[cell.coordinate].alignment = self.align_left <NEW_LINE> <DEDENT> <DEDENT> self.wb.save(self.file)
Класс, выгружающий штатное расписание в файл Excel для заполнения отклонений
62598f9f57b8e32f52508020
class WeakOrderedSet(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._items = {} <NEW_LINE> self._order = [] <NEW_LINE> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> oid = id(item) <NEW_LINE> if oid in self._items: <NEW_LINE> <INDENT> self._order.remove(oid) <NEW_LINE> self._order.append(oid) <NEW_LINE> return <NEW_LINE> <DEDENT> ref = weakref.ref(item, lambda x: self.remove(item)) <NEW_LINE> self._items[oid] = ref <NEW_LINE> self._order.append(oid) <NEW_LINE> <DEDENT> def remove(self, item): <NEW_LINE> <INDENT> oid = id(item) <NEW_LINE> if oid in self._items: <NEW_LINE> <INDENT> del self._items[oid] <NEW_LINE> self._order.remove(oid) <NEW_LINE> <DEDENT> <DEDENT> def empty(self): <NEW_LINE> <INDENT> self._items = {} <NEW_LINE> self._order = [] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._order) <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> oid = id(item) <NEW_LINE> return oid in self._items <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return (self._items[oid]() for oid in self._order) <NEW_LINE> <DEDENT> @property <NEW_LINE> def last(self): <NEW_LINE> <INDENT> if self._order: <NEW_LINE> <INDENT> oid = self._order[-1] <NEW_LINE> return self._items[oid]()
Maintain a set of items. Each item is stored as a weakref to avoid extending their lifetime. The values may be iterated over or the last item added may be accessed via the ``last`` property. If items are added more than once, the most recent addition will be remembered in the order: order = WeakOrderedSet() order.add('1') order.add('2') order.add('1') list(order) == ['2', '1'] order.last == '1'
62598f9f07f4c71912baf253
class AsyncContextManager: <NEW_LINE> <INDENT> async def __aenter__(self): <NEW_LINE> <INDENT> print('__aenter__') <NEW_LINE> await gen_cor() <NEW_LINE> <DEDENT> async def __aexit__(self, exec_type, exec_val, traceback): <NEW_LINE> <INDENT> print('__aexit__') <NEW_LINE> await Future('FUTURE')
A new protocol for asynchronous context managers: __aenter__ and __aexit__ must both return an awaitable.
62598f9f442bda511e95c263
class ClassDetailsGoogleClassroom(object): <NEW_LINE> <INDENT> openapi_types = { 'name': 'str', 'alternate_link': 'str', 'section': 'str', 'id': 'str' } <NEW_LINE> attribute_map = { 'name': 'name', 'alternate_link': 'alternateLink', 'section': 'section', 'id': 'id' } <NEW_LINE> def __init__(self, name=None, alternate_link=None, section=None, id=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self._alternate_link = None <NEW_LINE> self._section = None <NEW_LINE> self._id = None <NEW_LINE> self.discriminator = None <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if alternate_link is not None: <NEW_LINE> <INDENT> self.alternate_link = alternate_link <NEW_LINE> <DEDENT> if section is not None: <NEW_LINE> <INDENT> self.section = section <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def alternate_link(self): <NEW_LINE> <INDENT> return self._alternate_link <NEW_LINE> <DEDENT> @alternate_link.setter <NEW_LINE> def alternate_link(self, alternate_link): <NEW_LINE> <INDENT> self._alternate_link = alternate_link <NEW_LINE> <DEDENT> @property <NEW_LINE> def section(self): <NEW_LINE> <INDENT> return self._section <NEW_LINE> <DEDENT> @section.setter <NEW_LINE> def section(self, section): <NEW_LINE> <INDENT> self._section = section <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = 
list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ClassDetailsGoogleClassroom): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598f9f379a373c97d98e1e
class Trust(Enum): <NEW_LINE> <INDENT> NOTHING = auto() <NEW_LINE> INDEX = auto() <NEW_LINE> DISK = auto()
Which side do we trust in a sync? The disk, or the index? 'None' means don't do anything when there's an unknown dataset (they'll have to be indexed elsewhere)
62598f9f2ae34c7f260aaee9
class Section_6( Section ): <NEW_LINE> <INDENT> variables= [ ('B', 'Width of top & bottom flanges' ), ('D', 'Height of section' ), ('S', 'Thickness of top & bottom flanges' ), ('T', 'Thickness of vertical web' ), ] <NEW_LINE> solver= beamsect.Beam_6()
I-Section Built-Up Beam
62598f9f24f1403a926857b6
class C_A(object): <NEW_LINE> <INDENT> def __init__(self, years=[2020]): <NEW_LINE> <INDENT> self.years = years <NEW_LINE> self.df = pd.DataFrame() <NEW_LINE> <DEDENT> def extract(self): <NEW_LINE> <INDENT> init_df = pd.DataFrame({'pypeds_init': [True]}) <NEW_LINE> for year in self.years: <NEW_LINE> <INDENT> year = int(year) <NEW_LINE> year_info = get_ca(year) <NEW_LINE> year_fpath = zip_parser(url=year_info['url'], survey=year_info['survey']) <NEW_LINE> tmp_df = read_survey(year_fpath) <NEW_LINE> tmp_df.columns = tmp_df.columns.str.lower() <NEW_LINE> tmp_df.columns = tmp_df.columns.str.strip() <NEW_LINE> tmp_df['survey_year'] = int(year) <NEW_LINE> tmp_df['fall_year'] = int(year) -1 <NEW_LINE> init_df = init_df.append(tmp_df, ignore_index=True, sort=False) <NEW_LINE> <DEDENT> pd.options.mode.chained_assignment = None <NEW_LINE> init_df = init_df.loc[init_df.pypeds_init != True,] <NEW_LINE> init_df.drop(columns=['pypeds_init'], inplace=True) <NEW_LINE> self.df = self.df.append(init_df, ignore_index=True) <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> return (self.df) <NEW_LINE> <DEDENT> def transform(self, cip_label=True, award_level=True, first_major=True, grand_total=False, level_keep=None, cols=None): <NEW_LINE> <INDENT> tmpdf = self.df <NEW_LINE> if cip_label: <NEW_LINE> <INDENT> cips = datasets.cipcodes() <NEW_LINE> tmp = tmpdf <NEW_LINE> tmp = pd.merge(left=tmp, right=cips, on="cipcode", how="left") <NEW_LINE> tmpdf = tmp <NEW_LINE> <DEDENT> if award_level: <NEW_LINE> <INDENT> al = datasets.award_levels() <NEW_LINE> tmp = tmpdf <NEW_LINE> tmp = pd.merge(left=tmp, right=al, on="awlevel", how="left") <NEW_LINE> tmpdf = tmp <NEW_LINE> <DEDENT> if first_major: <NEW_LINE> <INDENT> tmp = tmpdf <NEW_LINE> tmp = tmp.loc[tmp.majornum == 1, ] <NEW_LINE> tmpdf = tmp <NEW_LINE> <DEDENT> if level_keep is not None: <NEW_LINE> <INDENT> assert isinstance(level_keep, list), 'level_keep must be a list' <NEW_LINE> if len(level_keep) > 0: <NEW_LINE> <INDENT> tmp = tmpdf 
<NEW_LINE> tmp = tmp.loc[tmp.awlevel.isin(level_keep), ] <NEW_LINE> tmpdf = tmp <NEW_LINE> <DEDENT> <DEDENT> if cols is not None: <NEW_LINE> <INDENT> assert isinstance(cols, list), 'cols must be a list' <NEW_LINE> if len(cols) > 0: <NEW_LINE> <INDENT> tmp = tmpdf <NEW_LINE> tmp_f = tmp >> select(cols) <NEW_LINE> tmpdf = tmp_f <NEW_LINE> <DEDENT> <DEDENT> self.df = tmpdf
Awards/degrees conferred by program (6-digit CIP code), award level, race/ethnicity, and gender
62598f9f627d3e7fe0e06cb3
class ShippingLineBuilder(SpecialOrderLineBuilder): <NEW_LINE> <INDENT> _model_name = None <NEW_LINE> def __init__(self, environment): <NEW_LINE> <INDENT> super(ShippingLineBuilder, self).__init__(environment) <NEW_LINE> self.product_ref = ('connector_ecommerce', 'product_product_shipping') <NEW_LINE> self.sequence = 999
Return values for a Shipping line
62598f9f66656f66f7d5a1f9
class XLSRenderer(gridlib.PyGridCellRenderer): <NEW_LINE> <INDENT> def __init__(self, cell): <NEW_LINE> <INDENT> gridlib.PyGridCellRenderer.__init__(self) <NEW_LINE> self.cell = cell <NEW_LINE> <DEDENT> def Draw(self, grid, attr, dc, rect, row, col, isSelected): <NEW_LINE> <INDENT> dc.SetBackgroundMode(wx.SOLID) <NEW_LINE> cell = self.cell <NEW_LINE> cell.background.Draw(dc, rect) <NEW_LINE> if cell.borders: <NEW_LINE> <INDENT> cell.borders.Draw(dc, rect) <NEW_LINE> <DEDENT> cell.text.Draw(dc, rect) <NEW_LINE> if cell.comment: <NEW_LINE> <INDENT> cell.comment.Draw(dc, rect) <NEW_LINE> <DEDENT> if isSelected: <NEW_LINE> <INDENT> gdc = wx.GCDC(dc) <NEW_LINE> sys_colour = wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT) <NEW_LINE> brush_colour = wx.Colour(sys_colour.Red(), sys_colour.Green(), sys_colour.Blue(), 90) <NEW_LINE> gdc.SetBrush(wx.Brush(brush_colour)) <NEW_LINE> gdc.SetPen(wx.TRANSPARENT_PEN) <NEW_LINE> gdc.DrawRectangleRect(rect)
This class is responsible for actually drawing the cell in the grid.
62598f9f460517430c431f5f
class IEModel: <NEW_LINE> <INDENT> def __init__(self, exec_net, inputs_info, input_key, output_key): <NEW_LINE> <INDENT> self.net = exec_net <NEW_LINE> self.inputs_info = inputs_info <NEW_LINE> self.input_key = input_key <NEW_LINE> self.output_key = output_key <NEW_LINE> self.reqs_ids = [] <NEW_LINE> <DEDENT> def _preprocess(self, img): <NEW_LINE> <INDENT> _, _, h, w = self.get_input_shape() <NEW_LINE> img = np.expand_dims(cv.resize(img, (w, h)).transpose(2, 0, 1), axis=0) <NEW_LINE> return img <NEW_LINE> <DEDENT> def forward(self, img): <NEW_LINE> <INDENT> res = self.net.infer(inputs={self.input_key: self._preprocess(img)}) <NEW_LINE> return list(res.values()) <NEW_LINE> <DEDENT> def forward_async(self, img): <NEW_LINE> <INDENT> id_ = len(self.reqs_ids) <NEW_LINE> self.net.start_async(request_id=id_, inputs={self.input_key: self._preprocess(img)}) <NEW_LINE> self.reqs_ids.append(id_) <NEW_LINE> <DEDENT> def grab_all_async(self): <NEW_LINE> <INDENT> outputs = [] <NEW_LINE> for id_ in self.reqs_ids: <NEW_LINE> <INDENT> self.net.requests[id_].wait(-1) <NEW_LINE> output_list = [self.net.requests[id_].output_blobs[key].buffer for key in self.output_key] <NEW_LINE> outputs.append(output_list) <NEW_LINE> <DEDENT> self.reqs_ids = [] <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def get_input_shape(self): <NEW_LINE> <INDENT> return self.inputs_info[self.input_key].input_data.shape
Class for inference of models in the Inference Engine format
62598f9fb7558d5895463436
class carvergui(Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Module.__init__(self, 'carvergui', CarverGui) <NEW_LINE> self.conf.addArgument({"name": "file", "input": typeId.Node|Argument.Single|Argument.Required, "description": "Node to search data in"}) <NEW_LINE> self.tags = "Search"
Search for header and footer of a selected mime-type in a node and create the corresponding file. You can use this modules for finding deleted data or data in slack space or in an unknown file system.
62598f9f6e29344779b00464
class FakeHost(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create_one_host(attrs=None): <NEW_LINE> <INDENT> attrs = attrs or {} <NEW_LINE> host_info = { "id": 1, "service_id": 1, "host": "host1", "uuid": 'host-id-' + uuid.uuid4().hex, "vcpus": 10, "memory_mb": 100, "local_gb": 100, "vcpus_used": 5, "memory_mb_used": 50, "local_gb_used": 10, "hypervisor_type": "xen", "hypervisor_version": 1, "hypervisor_hostname": "devstack1", "free_ram_mb": 50, "free_disk_gb": 50, "current_workload": 10, "running_vms": 1, "cpu_info": "", "disk_available_least": 1, "host_ip": "10.10.10.10", "supported_instances": "", "metrics": "", "pci_stats": "", "extra_resources": "", "stats": "", "numa_topology": "", "ram_allocation_ratio": 1.0, "cpu_allocation_ratio": 1.0 } <NEW_LINE> host_info.update(attrs) <NEW_LINE> host = fakes.FakeResource( info=copy.deepcopy(host_info), loaded=True) <NEW_LINE> return host
Fake one host.
62598f9f97e22403b383ad14
class Message(object): <NEW_LINE> <INDENT> def __init__(self, id, body, timestamp, attempts): <NEW_LINE> <INDENT> self._async_enabled = False <NEW_LINE> self._has_responded = False <NEW_LINE> self.id = id <NEW_LINE> self.body = body <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self.attempts = attempts <NEW_LINE> <DEDENT> def enable_async(self): <NEW_LINE> <INDENT> self._async_enabled = True <NEW_LINE> <DEDENT> def is_async(self): <NEW_LINE> <INDENT> return self._async_enabled <NEW_LINE> <DEDENT> def has_responded(self): <NEW_LINE> <INDENT> return self._has_responded <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> assert not self._has_responded <NEW_LINE> self._has_responded = True <NEW_LINE> self.respond(FIN) <NEW_LINE> <DEDENT> def requeue(self, **kwargs): <NEW_LINE> <INDENT> assert not self._has_responded <NEW_LINE> self._has_responded = True <NEW_LINE> self.respond(REQ, **kwargs) <NEW_LINE> <DEDENT> def touch(self): <NEW_LINE> <INDENT> assert not self._has_responded <NEW_LINE> self.respond(TOUCH)
A class representing a message received from ``nsqd``. If you want to perform asynchronous message processing use the :meth:`nsq.Message.enable_async` method, pass the message around, and respond using the appropriate instance method. :param id: the ID of the message :type id: string :param body: the raw message body :type body: string :param timestamp: the timestamp the message was produced :type timestamp: int :param attempts: the number of times this message was attempted :type attempts: int
62598f9f498bea3a75a5792a
class CarProperGenerator: <NEW_LINE> <INDENT> def __init__(self, probability_info): <NEW_LINE> <INDENT> self.__probability_info = probability_info <NEW_LINE> <DEDENT> def generate(self, direction, lane_index): <NEW_LINE> <INDENT> probabilities = self.__probability_info[direction][lane_index] <NEW_LINE> possibilities = [None, Car(direction, TurnDirection.RIGHT), Car(direction, TurnDirection.STRAIGHT), Car(direction, TurnDirection.LEFT)] <NEW_LINE> none_prob = 1. - sum(probabilities) <NEW_LINE> ret = choice(possibilities, 1, p=[none_prob] + probabilities) <NEW_LINE> if direction == 0: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return ret[0]
CarProperGenerator class
62598f9f8a43f66fc4bf1f84
class cached_property(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> def __get__(self, instance, type=None): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> res = instance.__dict__[self.func.__name__] = self.func(instance) <NEW_LINE> return res
This is a direct copy-paste of Django's cached property from https://github.com/django/django/blob/2456ffa42c33d63b54579eae0f5b9cf2a8cd3714/django/utils/functional.py#L38-50
62598f9f0a50d4780f7051e2
@config_entries.HANDLERS.register(HANGOUTS_DOMAIN) <NEW_LINE> class HangoutsFlowHandler(data_entry_flow.FlowHandler): <NEW_LINE> <INDENT> VERSION = 1 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._credentials = None <NEW_LINE> self._refresh_token = None <NEW_LINE> <DEDENT> async def async_step_user(self, user_input=None): <NEW_LINE> <INDENT> errors = {} <NEW_LINE> if configured_hangouts(self.hass) is not None: <NEW_LINE> <INDENT> return self.async_abort(reason="already_configured") <NEW_LINE> <DEDENT> if user_input is not None: <NEW_LINE> <INDENT> from hangups import get_auth <NEW_LINE> from .hangups_utils import (HangoutsCredentials, HangoutsRefreshToken, GoogleAuthError, Google2FAError) <NEW_LINE> self._credentials = HangoutsCredentials(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) <NEW_LINE> self._refresh_token = HangoutsRefreshToken(None) <NEW_LINE> try: <NEW_LINE> <INDENT> await self.hass.async_add_executor_job(get_auth, self._credentials, self._refresh_token) <NEW_LINE> return await self.async_step_final() <NEW_LINE> <DEDENT> except GoogleAuthError as err: <NEW_LINE> <INDENT> if isinstance(err, Google2FAError): <NEW_LINE> <INDENT> return await self.async_step_2fa() <NEW_LINE> <DEDENT> msg = str(err) <NEW_LINE> if msg == 'Unknown verification code input': <NEW_LINE> <INDENT> errors['base'] = 'invalid_2fa_method' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> errors['base'] = 'invalid_login' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.async_show_form( step_id='user', data_schema=vol.Schema({ vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str }), errors=errors ) <NEW_LINE> <DEDENT> async def async_step_2fa(self, user_input=None): <NEW_LINE> <INDENT> errors = {} <NEW_LINE> if user_input is not None: <NEW_LINE> <INDENT> from hangups import get_auth <NEW_LINE> from .hangups_utils import GoogleAuthError <NEW_LINE> self._credentials.set_verification_code(user_input[CONF_2FA]) <NEW_LINE> try: <NEW_LINE> <INDENT> await 
self.hass.async_add_executor_job(get_auth, self._credentials, self._refresh_token) <NEW_LINE> return await self.async_step_final() <NEW_LINE> <DEDENT> except GoogleAuthError: <NEW_LINE> <INDENT> errors['base'] = 'invalid_2fa' <NEW_LINE> <DEDENT> <DEDENT> return self.async_show_form( step_id=CONF_2FA, data_schema=vol.Schema({ vol.Required(CONF_2FA): str, }), errors=errors ) <NEW_LINE> <DEDENT> async def async_step_final(self): <NEW_LINE> <INDENT> return self.async_create_entry( title=self._credentials.get_email(), data={ CONF_EMAIL: self._credentials.get_email(), CONF_REFRESH_TOKEN: self._refresh_token.get() }) <NEW_LINE> <DEDENT> async def async_step_import(self, _): <NEW_LINE> <INDENT> return await self.async_step_user()
Config flow Google Hangouts.
62598f9fa219f33f346c6622
class BTAudioSink(BTAudio): <NEW_LINE> <INDENT> SIGNAL_CONNECTED = 'Connected' <NEW_LINE> SIGNAL_DISCONNECTED = 'Disconnected' <NEW_LINE> SIGNAL_PLAYING = 'Playing' <NEW_LINE> SIGNAL_STOPPED = 'Stopped' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> BTGenericDevice.__init__(self, addr='org.bluez.AudioSink', *args, **kwargs) <NEW_LINE> self._register_signal_name(BTAudioSink.SIGNAL_CONNECTED) <NEW_LINE> self._register_signal_name(BTAudioSink.SIGNAL_DISCONNECTED) <NEW_LINE> self._register_signal_name(BTAudioSink.SIGNAL_PLAYING) <NEW_LINE> self._register_signal_name(BTAudioSink.SIGNAL_STOPPED) <NEW_LINE> <DEDENT> def is_connected(self): <NEW_LINE> <INDENT> return self._interface.IsConnected()
Wrapper around dbus to encapsulate the org.bluez.AudioSink interface * **Connected(boolean) [readonly]**: Indicates if a stream is set up to an A2DP sink on the remote device. * **Playing(boolean) [readonly]**: Indicates if a stream is active to an A2DP sink on the remote device. See also: :py:class:`.BTAudio`
62598f9f097d151d1a2c0e31
class PDFComponent(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def close ( self, force = False ): <NEW_LINE> <INDENT> pass
Common base class.
62598f9f0c0af96317c5618a
class LoadCurveSupplyPlot(cea.plots.demand.DemandPlotBase): <NEW_LINE> <INDENT> name = "Load Curve Supply" <NEW_LINE> expected_parameters = { 'buildings': 'plots:buildings', 'scenario-name': 'general:scenario-name', 'timeframe': 'plots:timeframe', } <NEW_LINE> def __init__(self, project, parameters, cache): <NEW_LINE> <INDENT> super(LoadCurveSupplyPlot, self).__init__(project, parameters, cache) <NEW_LINE> self.analysis_fields = ["DH_hs_kWh", "DH_ww_kWh", 'SOLAR_ww_kWh', 'SOLAR_hs_kWh', "DC_cs_kWh", 'DC_cdata_kWh', 'DC_cre_kWh', 'PV_kWh', 'NG_hs_kWh', 'COAL_hs_kWh', 'OIL_hs_kWh', 'WOOD_hs_kWh', 'NG_ww_kWh', 'COAL_ww_kWh', 'OIL_ww_kWh', 'WOOD_ww_kWh', 'GRID_a_kWh', 'GRID_l_kWh', 'GRID_v_kWh', 'GRID_ve_kWh', 'GRID_cs_kWh', 'GRID_aux_kWh', 'GRID_data_kWh', 'GRID_pro_kWh', 'GRID_ww_kWh', 'GRID_hs_kWh', 'GRID_cdata_kWh', 'GRID_cre_kWh'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def layout(self): <NEW_LINE> <INDENT> return dict(barmode='relative', yaxis=dict(title='Final Energy Demand [MW]'), yaxis2=dict(title='Temperature [C]', overlaying='y', side='right')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> if set(self.buildings) != set(self.locator.get_zone_building_names()): <NEW_LINE> <INDENT> if len(self.buildings) == 1: <NEW_LINE> <INDENT> return "%s for Building %s (%s)" % (self.name, self.buildings[0], self.timeframe) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "%s for Selected Buildings (%s)" % (self.name, self.timeframe) <NEW_LINE> <DEDENT> <DEDENT> return "%s for District (%s)" % (self.name, self.timeframe) <NEW_LINE> <DEDENT> def calc_graph(self): <NEW_LINE> <INDENT> data = self.calculate_hourly_loads() <NEW_LINE> traces = [] <NEW_LINE> analysis_fields = self.remove_unused_fields(data, self.analysis_fields) <NEW_LINE> for field in analysis_fields: <NEW_LINE> <INDENT> y = data[field].values / 1E3 <NEW_LINE> name = NAMING[field] <NEW_LINE> trace = go.Bar(x=data.index, y=y, name=name, marker=dict(color=COLOR[field])) 
<NEW_LINE> traces.append(trace) <NEW_LINE> <DEDENT> data_T = self.calculate_external_temperature() <NEW_LINE> for field in ["T_ext_C"]: <NEW_LINE> <INDENT> y = data_T[field].values <NEW_LINE> name = NAMING[field] <NEW_LINE> trace = go.Scattergl(x=data_T.index, y=y, name=name, yaxis='y2', opacity=0.2) <NEW_LINE> traces.append(trace) <NEW_LINE> <DEDENT> return traces
Implement the load-curve-supply plot
62598f9fac7a0e7691f72314
class AbsolutePathError(MalformedRecordError): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"RECORD entry has an absolute path: {self.path!r}"
Raised when an entry in a wheel's :file:`RECORD` has an absolute path
62598f9f6aa9bd52df0d4cd5
class SameWorkers(ResourceConstraint): <NEW_LINE> <INDENT> def __init__( self, select_workers_1, select_workers_2, optional: Optional[bool] = False ): <NEW_LINE> <INDENT> super().__init__(optional) <NEW_LINE> self.select_workers_1 = select_workers_1 <NEW_LINE> self.select_workers_2 = select_workers_2 <NEW_LINE> for res_work_1 in select_workers_1.selection_dict: <NEW_LINE> <INDENT> if res_work_1 in select_workers_2.selection_dict: <NEW_LINE> <INDENT> self.set_z3_assertions( select_workers_1.selection_dict[res_work_1] == select_workers_2.selection_dict[res_work_1] )
Selected workers by both AlternateWorkers are constrained to be the same
62598f9f4527f215b58e9ced
class TestLoginInteractor(InteractorTestBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.__get_hash = lambda hash_text: "myhashedhash" <NEW_LINE> self.__hash_provider = Mock(HashProvider) <NEW_LINE> self.__hash_provider.hash_text = Mock(side_effect=self.__get_hash) <NEW_LINE> self.__hash_provider.verify_password = Mock(side_effect=self.__verify_password) <NEW_LINE> self.__target = LoginInteractor() <NEW_LINE> self.__persistence = Mock(AbstractPersistence) <NEW_LINE> self.__persistence.get_user = Mock(side_effect=self.__get_user_from_persistence) <NEW_LINE> self.__target.persistence = self.__persistence <NEW_LINE> self.__target.logger = Mock(Logger) <NEW_LINE> self.__target.set_hash_provider(self.__hash_provider) <NEW_LINE> <DEDENT> def __verify_password(self, entered_password, hashed_password): <NEW_LINE> <INDENT> return entered_password == hashed_password <NEW_LINE> <DEDENT> def __get_user_from_persistence(self, user): <NEW_LINE> <INDENT> if user.user_id == "correctpass": <NEW_LINE> <INDENT> return User.from_dict({"userid": "correctpass", "password": "mypassword"}) <NEW_LINE> <DEDENT> if user.user_id == "incorrectpass": <NEW_LINE> <INDENT> return User.from_dict({"userid": "incorrectpass", "password": "wrongpassword"}) <NEW_LINE> <DEDENT> return User() <NEW_LINE> <DEDENT> def test_is_logging_interactor(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.__target, LoggingInteractor) <NEW_LINE> <DEDENT> def test_execute_correct_user_password_logs_in(self): <NEW_LINE> <INDENT> u = self.__get_user("correctpass", "mypassword") <NEW_LINE> self.assertTrue(self.__target.execute(u)) <NEW_LINE> <DEDENT> def test_execute_incorrect_user_password_does_not_log_in(self): <NEW_LINE> <INDENT> u = self.__get_user("incorrectpass", "somethingsilly") <NEW_LINE> self.assertFalse(self.__target.execute(u)) <NEW_LINE> <DEDENT> def test_execute_user_does_not_exist_returns_false(self): <NEW_LINE> <INDENT> u = self.__get_user("unknown", "somepass") <NEW_LINE> 
self.assertFalse(self.__target.execute(u)) <NEW_LINE> <DEDENT> def test_execute_null_user_gives_type_error(self): <NEW_LINE> <INDENT> self.assertRaises(TypeError, self.__target.execute, None) <NEW_LINE> <DEDENT> def test_execute_empty_userid_gives_value_error(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, self.__target.execute, User()) <NEW_LINE> <DEDENT> def test_execute_empty_password_gives_value_error(self): <NEW_LINE> <INDENT> u = self.__get_user("userid", "") <NEW_LINE> self.assertRaises(ValueError, self.__target.execute, u) <NEW_LINE> <DEDENT> def __get_user(self, user_id, password): <NEW_LINE> <INDENT> return User.from_dict({"userid": user_id, "password": password})
Unit tests for the LoginInteractor class
62598f9f38b623060ffa8e9c
class FSUseRuletype(PolicyEnum): <NEW_LINE> <INDENT> fs_use_xattr = qpol.QPOL_FS_USE_XATTR <NEW_LINE> fs_use_trans = qpol.QPOL_FS_USE_TRANS <NEW_LINE> fs_use_task = qpol.QPOL_FS_USE_TASK
Enumeration of fs_use_* rule types.
62598f9fd7e4931a7ef3bea2
class Kill(command.Command): <NEW_LINE> <INDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> for serv in service.list_services(): <NEW_LINE> <INDENT> service.kill_service(serv['service'])
Kill all the running services.
62598f9fcc0a2c111447ae16
class TestTDescr(tb.IsDescription): <NEW_LINE> <INDENT> x = tb.Int32Col(dflt=0, shape=2, pos=0) <NEW_LINE> y = tb.FloatCol(dflt=1, shape=(2, 2)) <NEW_LINE> z = tb.UInt8Col(dflt=1) <NEW_LINE> z3 = tb.EnumCol({'r': 4, 'g': 2, 'b': 1}, 'r', 'int32', shape=2) <NEW_LINE> color = tb.StringCol(itemsize=4, dflt=b"ab", pos=2) <NEW_LINE> info = Info() <NEW_LINE> class Info(tb.IsDescription): <NEW_LINE> <INDENT> _v_pos = 1 <NEW_LINE> name = tb.StringCol(itemsize=2) <NEW_LINE> value = tb.ComplexCol(itemsize=16, pos=0) <NEW_LINE> y2 = tb.FloatCol(pos=1) <NEW_LINE> z2 = tb.UInt8Col() <NEW_LINE> class Info2(tb.IsDescription): <NEW_LINE> <INDENT> y3 = tb.Time64Col(shape=2) <NEW_LINE> name = tb.StringCol(itemsize=2) <NEW_LINE> value = tb.ComplexCol(itemsize=16, shape=2)
A description that has several nested columns.
62598f9f656771135c48948d
class RawAlterTableVisitor(object): <NEW_LINE> <INDENT> def _to_table(self, param): <NEW_LINE> <INDENT> if isinstance(param, (sa.Column, sa.Index, sa.schema.Constraint)): <NEW_LINE> <INDENT> ret = param.table <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = param <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def _to_table_name(self, param): <NEW_LINE> <INDENT> ret = self._to_table(param) <NEW_LINE> if isinstance(ret, sa.Table): <NEW_LINE> <INDENT> ret = ret.fullname <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def _do_quote_table_identifier(self, identifier): <NEW_LINE> <INDENT> return '"%s"'%identifier <NEW_LINE> <DEDENT> def start_alter_table(self, param): <NEW_LINE> <INDENT> table = self._to_table(param) <NEW_LINE> table_name = self._to_table_name(table) <NEW_LINE> self.append('\nALTER TABLE %s ' % self._do_quote_table_identifier(table_name)) <NEW_LINE> return table <NEW_LINE> <DEDENT> def _pk_constraint(self, table, column, status): <NEW_LINE> <INDENT> if isinstance(column, basestring): <NEW_LINE> <INDENT> column = getattr(table.c, name) <NEW_LINE> <DEDENT> ret = constraint.PrimaryKeyConstraint(*table.primary_key) <NEW_LINE> if status: <NEW_LINE> <INDENT> ret.c.append(column) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> names = [c.name for c in cons.c] <NEW_LINE> index = names.index(col.name) <NEW_LINE> del ret.c[index] <NEW_LINE> <DEDENT> if isinstance(pk, basestring): <NEW_LINE> <INDENT> ret.name = pk <NEW_LINE> <DEDENT> return ret
Common operations for ``ALTER TABLE`` statements.
62598f9f16aa5153ce400309
class Solution: <NEW_LINE> <INDENT> def reverse(self, head): <NEW_LINE> <INDENT> prev = None <NEW_LINE> while head != None: <NEW_LINE> <INDENT> tmp = head.next <NEW_LINE> head.next = prev <NEW_LINE> prev = head <NEW_LINE> head = tmp <NEW_LINE> <DEDENT> return prev
@param head: The first node of the linked list. @return: You should return the head of the reversed linked list. Reverse it in-place.
62598f9fd53ae8145f918298
class BusinessAccount(Account): <NEW_LINE> <INDENT> def __init__(self, acct_num: str, open_deposit: float): <NEW_LINE> <INDENT> super().__init__(acct_num, open_deposit) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'Business Account #{self.acct_num}\n\tBalance: {super().__str__()}'
Business Account as a child class to Account
62598f9f7cff6e4e811b582d
class IPv6Address(_BaseV6, _BaseAddress): <NEW_LINE> <INDENT> __slots__ = ('_ip', '__weakref__') <NEW_LINE> def __init__(self, address): <NEW_LINE> <INDENT> if isinstance(address, _compat_int_types): <NEW_LINE> <INDENT> self._check_int_address(address) <NEW_LINE> self._ip = address <NEW_LINE> return <NEW_LINE> <DEDENT> if isinstance(address, bytes): <NEW_LINE> <INDENT> self._check_packed_address(address, 16) <NEW_LINE> bvs = _compat_bytes_to_byte_vals(address) <NEW_LINE> self._ip = _compat_int_from_byte_vals(bvs, 'big') <NEW_LINE> return <NEW_LINE> <DEDENT> addr_str = _compat_str(address) <NEW_LINE> if '/' in addr_str: <NEW_LINE> <INDENT> raise AddressValueError("Unexpected '/' in %r" % address) <NEW_LINE> <DEDENT> self._ip = self._ip_int_from_string(addr_str) <NEW_LINE> <DEDENT> @property <NEW_LINE> def packed(self): <NEW_LINE> <INDENT> return v6_int_to_packed(self._ip) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_multicast(self): <NEW_LINE> <INDENT> return self in self._constants._multicast_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_reserved(self): <NEW_LINE> <INDENT> return any(self in x for x in self._constants._reserved_networks) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_link_local(self): <NEW_LINE> <INDENT> return self in self._constants._linklocal_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_site_local(self): <NEW_LINE> <INDENT> return self in self._constants._sitelocal_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_private(self): <NEW_LINE> <INDENT> return any(self in net for net in self._constants._private_networks) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_global(self): <NEW_LINE> <INDENT> return not self.is_private <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_unspecified(self): <NEW_LINE> <INDENT> return self._ip == 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_loopback(self): <NEW_LINE> <INDENT> return self._ip == 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def ipv4_mapped(self): <NEW_LINE> <INDENT> 
if (self._ip >> 32) != 0xFFFF: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return IPv4Address(self._ip & 0xFFFFFFFF) <NEW_LINE> <DEDENT> @property <NEW_LINE> def teredo(self): <NEW_LINE> <INDENT> if (self._ip >> 96) != 0x20010000: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), IPv4Address(~self._ip & 0xFFFFFFFF)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sixtofour(self): <NEW_LINE> <INDENT> if (self._ip >> 112) != 0x2002: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
Represent and manipulate single IPv6 Addresses.
62598f9fa8ecb03325871017
class SetFrameRangeLoader(api.Loader): <NEW_LINE> <INDENT> families = ["colorbleed.animation", "colorbleed.camera", "colorbleed.pointcache", "colorbleed.vdbcache", "colorbleed.usd"] <NEW_LINE> representations = ["abc", "vdb", "usd"] <NEW_LINE> label = "Set frame range" <NEW_LINE> order = 11 <NEW_LINE> icon = "clock-o" <NEW_LINE> color = "white" <NEW_LINE> def load(self, context, name, namespace, data): <NEW_LINE> <INDENT> import hou <NEW_LINE> version = context['version'] <NEW_LINE> version_data = version.get("data", {}) <NEW_LINE> start = version_data.get("startFrame", None) <NEW_LINE> end = version_data.get("endFrame", None) <NEW_LINE> if start is None or end is None: <NEW_LINE> <INDENT> print("Skipping setting frame range because start or " "end frame data is missing..") <NEW_LINE> return <NEW_LINE> <DEDENT> hou.playbar.setFrameRange(start, end) <NEW_LINE> hou.playbar.setPlaybackRange(start, end)
Set Maya frame range
62598f9f2ae34c7f260aaeea
class CmdQuell(COMMAND_DEFAULT_CLASS): <NEW_LINE> <INDENT> key = "@quell" <NEW_LINE> aliases = ["@unquell"] <NEW_LINE> locks = "cmd:pperm(Player)" <NEW_LINE> help_category = "General" <NEW_LINE> account_caller = True <NEW_LINE> def _recache_locks(self, account): <NEW_LINE> <INDENT> if self.session: <NEW_LINE> <INDENT> char = self.session.puppet <NEW_LINE> if char: <NEW_LINE> <INDENT> char.locks.reset() <NEW_LINE> <DEDENT> <DEDENT> account.locks.reset() <NEW_LINE> <DEDENT> def func(self): <NEW_LINE> <INDENT> account = self.account <NEW_LINE> permstr = account.is_superuser and " (superuser)" or "(%s)" % (", ".join(account.permissions.all())) <NEW_LINE> if self.cmdstring in ('unquell', '@unquell'): <NEW_LINE> <INDENT> if not account.attributes.get('_quell'): <NEW_LINE> <INDENT> self.msg("Already using normal Account permissions %s." % permstr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> account.attributes.remove('_quell') <NEW_LINE> self.msg("Account permissions %s restored." % permstr) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if account.attributes.get('_quell'): <NEW_LINE> <INDENT> self.msg("Already quelling Account %s permissions." % permstr) <NEW_LINE> return <NEW_LINE> <DEDENT> account.attributes.add('_quell', True) <NEW_LINE> puppet = self.session.puppet <NEW_LINE> if puppet: <NEW_LINE> <INDENT> cpermstr = "(%s)" % ", ".join(puppet.permissions.all()) <NEW_LINE> cpermstr = "Quelling to current puppet's permissions %s." % cpermstr <NEW_LINE> cpermstr += "\n(Note: If this is higher than Account permissions %s," " the lowest of the two will be used.)" % permstr <NEW_LINE> cpermstr += "\nUse @unquell to return to normal permission usage." <NEW_LINE> self.msg(cpermstr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.msg("Quelling Account permissions%s. Use @unquell to get them back." % permstr) <NEW_LINE> <DEDENT> <DEDENT> self._recache_locks(account)
use character's permissions instead of account's Usage: quell unquell Normally the permission level of the Account is used when puppeting a Character/Object to determine access. This command will switch the lock system to make use of the puppeted Object's permissions instead. This is useful mainly for testing. Hierarchical permission quelling only work downwards, thus an Account cannot use a higher-permission Character to escalate their permission level. Use the unquell command to revert back to normal operation.
62598f9f63b5f9789fe84f7f
class CatalogSource(TypedDict): <NEW_LINE> <INDENT> apiVersion: str <NEW_LINE> kind: str <NEW_LINE> metadata: ObjectMeta <NEW_LINE> spec: CatalogSourceSpec
Notes ----- https://docs.openshift.com/container-platform/latest/rest_api/operatorhub_apis/catalogsource-operators-coreos-com-v1alpha1.html#catalogsource-operators-coreos-com-v1alpha1
62598f9f3c8af77a43b67e44
class ControllerRedirectionError(ChulaException): <NEW_LINE> <INDENT> def msg(self): <NEW_LINE> <INDENT> return "Unable to redirect as requested"
Exception indicating that the controller was unable to perform the requested redirect.
62598f9fa79ad16197769e6e
class SparkJobAvailableForm(forms.Form): <NEW_LINE> <INDENT> identifier = forms.CharField(required=True)
A form used in the views that checks for the availability of identifiers.
62598f9f92d797404e388a6b
class Config(object): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> SECRET = os.getenv('SECRET') <NEW_LINE> SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL') <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> PROPAGATE_ERRORS = True <NEW_LINE> PROPAGATE_EXCEPTIONS = True <NEW_LINE> JWT_ACCESS_TOKEN_EXPIRES = timedelta(hours=48) <NEW_LINE> JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY') <NEW_LINE> JWT_BLACKLIST_ENABLED = True <NEW_LINE> JWT_BLACKLIST_TOKEN_CHECKS = ['access'] <NEW_LINE> MAIL_USE_TLS = True <NEW_LINE> MAIL_DEBUG = False <NEW_LINE> MAIL_PORT = os.getenv('MAIL_PORT') <NEW_LINE> MAIL_SERVER = os.getenv('MAIL_SERVER') <NEW_LINE> MAIL_USERNAME = os.getenv('MAIL_USERNAME') <NEW_LINE> MAIL_PASSWORD = os.getenv('MAIL_PASSWORD')
Base configuration
62598f9fd6c5a102081e1f4f
class Target(abc.ABC): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def request(self): <NEW_LINE> <INDENT> print('普通请求')
这是客户期待的接口,目标可以是具体的或抽象的类,也可以是接口
62598f9ff7d966606f747df2
class Entry(models.Model): <NEW_LINE> <INDENT> topic = models.ForeignKey(Topic) <NEW_LINE> text = models.TextField() <NEW_LINE> date_added = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> verbose_name_plural = 'entries' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.text[0:50] + "..."
docstring for Entry
62598f9f097d151d1a2c0e32
class OutOfContextCWSearchView(BaseOutOfContextView): <NEW_LINE> <INDENT> __select__ = EntityView.__select__ & is_instance("CWSearch") <NEW_LINE> def entity_description(self, entity): <NEW_LINE> <INDENT> desc = {} <NEW_LINE> desc["Tile"] = entity.title <NEW_LINE> desc["RQL"] = entity.path <NEW_LINE> desc["Expiration data"] = entity.expiration_date <NEW_LINE> desc["Type"] = entity.rset_type <NEW_LINE> return desc
CWSearch secondary rendering.
62598f9f7b25080760ed72b2
class Action(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.priority = 100 <NEW_LINE> from .messagetoaction import treatedAction <NEW_LINE> self.actionNumber = treatedAction <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> selfPriority = (self.priority, self.actionNumber) <NEW_LINE> otherPriority = (other.priority, other.actionNumber) <NEW_LINE> return selfPriority < otherPriority <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s action no %d" % (self.__class__.__name__, self.actionNumber)
General class, defining priority of the actions
62598f9ff548e778e596b3b7
class Interruption(Event): <NEW_LINE> <INDENT> def __init__(self, process, cause): <NEW_LINE> <INDENT> self.env = process.env <NEW_LINE> self.callbacks = [self._interrupt] <NEW_LINE> self._value = Interrupt(cause) <NEW_LINE> self._ok = False <NEW_LINE> self._defused = True <NEW_LINE> if process._value is not PENDING: <NEW_LINE> <INDENT> raise RuntimeError('%s has terminated and cannot be interrupted.' % process) <NEW_LINE> <DEDENT> if process is self.env.active_process: <NEW_LINE> <INDENT> raise RuntimeError('A process is not allowed to interrupt itself.') <NEW_LINE> <DEDENT> self.process = process <NEW_LINE> self.env.schedule(self, URGENT) <NEW_LINE> <DEDENT> def _interrupt(self, event): <NEW_LINE> <INDENT> if self.process._value is not PENDING: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.process._target.callbacks.remove(self.process._resume) <NEW_LINE> self.process._resume(self)
Immediately schedules an :class:`Interrupt` exception with the given *cause* to be thrown into *process*. This event is automatically triggered when it is created.
62598f9f2ae34c7f260aaeeb
class IPSubnet(object): <NEW_LINE> <INDENT> swagger_types = { 'ip_addresses': 'list[str]', 'prefix_length': 'int' } <NEW_LINE> attribute_map = { 'ip_addresses': 'ip_addresses', 'prefix_length': 'prefix_length' } <NEW_LINE> def __init__(self, ip_addresses=None, prefix_length=None): <NEW_LINE> <INDENT> self._ip_addresses = None <NEW_LINE> self._prefix_length = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.ip_addresses = ip_addresses <NEW_LINE> self.prefix_length = prefix_length <NEW_LINE> <DEDENT> @property <NEW_LINE> def ip_addresses(self): <NEW_LINE> <INDENT> return self._ip_addresses <NEW_LINE> <DEDENT> @ip_addresses.setter <NEW_LINE> def ip_addresses(self, ip_addresses): <NEW_LINE> <INDENT> if ip_addresses is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `ip_addresses`, must not be `None`") <NEW_LINE> <DEDENT> self._ip_addresses = ip_addresses <NEW_LINE> <DEDENT> @property <NEW_LINE> def prefix_length(self): <NEW_LINE> <INDENT> return self._prefix_length <NEW_LINE> <DEDENT> @prefix_length.setter <NEW_LINE> def prefix_length(self, prefix_length): <NEW_LINE> <INDENT> if prefix_length is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `prefix_length`, must not be `None`") <NEW_LINE> <DEDENT> self._prefix_length = prefix_length <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> 
<DEDENT> <DEDENT> if issubclass(IPSubnet, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, IPSubnet): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598f9f627d3e7fe0e06cb5
class InformationValueNotEqual(InformationUnit): <NEW_LINE> <INDENT> def set_information(self): <NEW_LINE> <INDENT> subjects = self.information_obj.get_topic_subject() <NEW_LINE> self.information = "Value1 " + subjects[0] <NEW_LINE> self.information += " must not be equal to value2 " <NEW_LINE> self.information += subjects[1] <NEW_LINE> self.check_status_and_add_information(self.information_obj.get_status())
Small InformationUnit class which contains information in human language.
62598f9fa219f33f346c6624
class _BlastDb(object): <NEW_LINE> <INDENT> def set_peek(self, dataset, is_multi_byte=False): <NEW_LINE> <INDENT> if not dataset.dataset.purged: <NEW_LINE> <INDENT> dataset.peek = "BLAST database (multiple files)" <NEW_LINE> dataset.blurb = "BLAST database (multiple files)" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dataset.peek = 'file does not exist' <NEW_LINE> dataset.blurb = 'file purged from disk' <NEW_LINE> <DEDENT> <DEDENT> def display_peek(self, dataset): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return dataset.peek <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return "BLAST database (multiple files)" <NEW_LINE> <DEDENT> <DEDENT> def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd): <NEW_LINE> <INDENT> if filename is not None and filename != "index": <NEW_LINE> <INDENT> return Data.display_data(self, trans, data, preview, filename, to_ext, size, offset, **kwd) <NEW_LINE> <DEDENT> if self.file_ext == "blastdbn": <NEW_LINE> <INDENT> title = "This is a nucleotide BLAST database" <NEW_LINE> <DEDENT> elif self.file_ext == "blastdbp": <NEW_LINE> <INDENT> title = "This is a protein BLAST database" <NEW_LINE> <DEDENT> elif self.file_ext == "blastdbd": <NEW_LINE> <INDENT> title = "This is a domain BLAST database" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = "This is a BLAST database." 
<NEW_LINE> <DEDENT> msg = "" <NEW_LINE> try: <NEW_LINE> <INDENT> handle = open(data.file_name, "rU") <NEW_LINE> msg = handle.read().strip() <NEW_LINE> handle.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not msg: <NEW_LINE> <INDENT> msg = title <NEW_LINE> <DEDENT> return "<html><head><title>%s</title></head><body><pre>%s</pre></body></html>" % (title, msg) <NEW_LINE> <DEDENT> def merge(split_files, output_file): <NEW_LINE> <INDENT> raise NotImplementedError("Merging BLAST databases is non-trivial (do this via makeblastdb?)") <NEW_LINE> <DEDENT> def split(cls, input_datasets, subdir_generator_function, split_params): <NEW_LINE> <INDENT> if split_params is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> raise NotImplementedError("Can't split BLAST databases")
Base class for BLAST database datatype.
62598f9f32920d7e50bc5e60
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'optimize06.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename, {'constant_memory': True, 'in_memory': False}) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> ordinals = list(range(0, 34)) <NEW_LINE> ordinals.extend(range(35, 128)) <NEW_LINE> for i in ordinals: <NEW_LINE> <INDENT> worksheet.write_string(i, 0, chr(i)) <NEW_LINE> <DEDENT> workbook.close() <NEW_LINE> self.assertExcelEqual()
Test file created by XlsxWriter against a file created by Excel.
62598f9f9c8ee82313040073
class Command(object): <NEW_LINE> <INDENT> def __init__(self, command_type: CommandTypeABC, execute_fn: Optional[Callable[[Glif], Items]] = None, apply_fn: Optional[Callable[[Glif, Items], Items]] = None, itemsFromArgs: Optional[Items] = None): <NEW_LINE> <INDENT> self.command_type = command_type <NEW_LINE> self.execute_fn = execute_fn <NEW_LINE> self.apply_fn = apply_fn <NEW_LINE> self.itemsFromArgs = itemsFromArgs <NEW_LINE> <DEDENT> def execute(self, glif: Glif) -> Items: <NEW_LINE> <INDENT> if self.itemsFromArgs: <NEW_LINE> <INDENT> return self._internal_apply(glif, self.itemsFromArgs) <NEW_LINE> <DEDENT> elif self.execute_fn: <NEW_LINE> <INDENT> return self.execute_fn(glif) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Items([]).with_errors([f'No input was provided for command {self.command_type.get_main_name()}']) <NEW_LINE> <DEDENT> <DEDENT> def apply(self, glif: Glif, items: Items) -> Items: <NEW_LINE> <INDENT> if self.itemsFromArgs and self.itemsFromArgs.items: <NEW_LINE> <INDENT> return self.itemsFromArgs.with_errors([f'No input was expected for command {self.command_type.get_main_name()}']) <NEW_LINE> <DEDENT> return self._internal_apply(glif, items) <NEW_LINE> <DEDENT> def _internal_apply(self, glif: Glif, items: Items) -> Items: <NEW_LINE> <INDENT> if not self.apply_fn: <NEW_LINE> <INDENT> return items.with_errors([f'No input was expected for the command {self.command_type.get_main_name()}']) <NEW_LINE> <DEDENT> return self.apply_fn(glif, items)
A command that may be executed or applied to items.
62598f9f85dfad0860cbf97a
class ServerDensityWebhook(WebhookBase): <NEW_LINE> <INDENT> def incoming(self, path, query_string, payload): <NEW_LINE> <INDENT> if payload['fixed']: <NEW_LINE> <INDENT> severity = 'ok' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> severity = 'critical' <NEW_LINE> <DEDENT> return Alert( resource=payload['item_name'], event=payload['alert_type'], environment='Production', severity=severity, service=[payload['item_type']], group=payload['alert_section'], value=payload['configured_trigger_value'], text='Alert created for {}:{}'.format(payload['item_type'], payload['item_name']), tags=['cloud'] if payload['item_cloud'] else [], attributes={ 'alertId': payload['alert_id'], 'itemId': payload['item_id'] }, origin='ServerDensity', event_type='serverDensityAlert', raw_data=payload )
Server Density notification webhook See https://support.serverdensity.com/hc/en-us/articles/360001067183-Setting-up-webhooks
62598f9f1b99ca400228f433
class Deck: <NEW_LINE> <INDENT> def __init__(self, cards, seed=None): <NEW_LINE> <INDENT> self.cards = cards <NEW_LINE> self.random = random.Random() <NEW_LINE> if seed is None: <NEW_LINE> <INDENT> seed = time() <NEW_LINE> <DEDENT> self.random.seed(seed) <NEW_LINE> logging.getLogger('history').info("Deck's random seed is: {}".format(seed)) <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> return len(self.cards) <NEW_LINE> <DEDENT> def draw(self, count): <NEW_LINE> <INDENT> drawn = [] <NEW_LINE> while len(drawn) < count: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> drawn.append(self.cards.pop()) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return drawn <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> self.random.shuffle(self.cards) <NEW_LINE> <DEDENT> def add(self, cards): <NEW_LINE> <INDENT> self.cards.extend(cards)
A deck holds an ordered set of cards and can pop or shuffle them
62598f9fac7a0e7691f72316
class LFApplier(BaseLFApplier): <NEW_LINE> <INDENT> def apply( self, data_points: Union[DataPoints, np.ndarray], progress_bar: bool = True, fault_tolerant: bool = False, return_meta: bool = False, ) -> Union[np.ndarray, Tuple[np.ndarray, ApplierMetadata]]: <NEW_LINE> <INDENT> labels = [] <NEW_LINE> f_caller = _FunctionCaller(fault_tolerant) <NEW_LINE> for i, x in tqdm(enumerate(data_points), disable=(not progress_bar)): <NEW_LINE> <INDENT> labels.append(apply_lfs_to_data_point(x, i, self._lfs, f_caller)) <NEW_LINE> <DEDENT> L = self._numpy_from_row_data(labels) <NEW_LINE> if return_meta: <NEW_LINE> <INDENT> return L, ApplierMetadata(f_caller.fault_counts) <NEW_LINE> <DEDENT> return L
LF applier for a list of data points (e.g. ``SimpleNamespace``) or a NumPy array. Parameters ---------- lfs LFs that this applier executes on examples Example ------- >>> from snorkel.labeling import labeling_function >>> @labeling_function() ... def is_big_num(x): ... return 1 if x.num > 42 else 0 >>> applier = LFApplier([is_big_num]) >>> from types import SimpleNamespace >>> applier.apply([SimpleNamespace(num=10), SimpleNamespace(num=100)]) array([[0], [1]]) >>> @labeling_function() ... def is_big_num_np(x): ... return 1 if x[0] > 42 else 0 >>> applier = LFApplier([is_big_num_np]) >>> applier.apply(np.array([[10], [100]])) array([[0], [1]])
62598f9f6aa9bd52df0d4cd7
class Deck: <NEW_LINE> <INDENT> def __init__(self, cardFamily): <NEW_LINE> <INDENT> self.__cardFamily = cardFamily <NEW_LINE> self.__deck = [] <NEW_LINE> for s in cardFamily.seeds: <NEW_LINE> <INDENT> for v in cardFamily.values: <NEW_LINE> <INDENT> self.addCard(Card(v, s)) <NEW_LINE> <DEDENT> <DEDENT> self.shuffleDeck() <NEW_LINE> <DEDENT> @property <NEW_LINE> def deck(self): <NEW_LINE> <INDENT> return self.__deck <NEW_LINE> <DEDENT> @property <NEW_LINE> def cardFamily(self): <NEW_LINE> <INDENT> return self.__cardFamily <NEW_LINE> <DEDENT> def addCard(self, card): <NEW_LINE> <INDENT> self.__deck.append(card) <NEW_LINE> <DEDENT> def shuffleDeck(self): <NEW_LINE> <INDENT> for iter in range(0, random.randrange(1, 20)): <NEW_LINE> <INDENT> for pcard1 in range(0, random.randrange(1, 39)): <NEW_LINE> <INDENT> for pcard2 in range(0, random.randrange(1, 39)): <NEW_LINE> <INDENT> self.swapCards(pcard1, pcard2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def validateCard(self, card) -> bool: <NEW_LINE> <INDENT> if card.seed in self.cardFamily.validSeeds: <NEW_LINE> <INDENT> if card.value in self.cardFamily.validValues: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def printDeck(self): <NEW_LINE> <INDENT> for c in self.deck: <NEW_LINE> <INDENT> print(c.name + " ", end='') <NEW_LINE> <DEDENT> <DEDENT> def swapCards(self, pcard1, pcard2): <NEW_LINE> <INDENT> card1 = self.deck[pcard1] <NEW_LINE> self.__deck[pcard1] = self.deck[pcard2] <NEW_LINE> self.__deck[pcard2] = card1
The deck is an ordered sequence of cards. First card (bottom card) has index 1, last card (top card) has index 40 Attributes ---------- Methods -------
62598f9f4527f215b58e9cef
class Entity(entity.Entity): <NEW_LINE> <INDENT> _domain = None <NEW_LINE> def __init__(self, endpoint, in_clusters, out_clusters, manufacturer, model, application_listener, unique_id, **kwargs): <NEW_LINE> <INDENT> self._device_state_attributes = {} <NEW_LINE> ieee = endpoint.device.ieee <NEW_LINE> ieeetail = ''.join(['%02x' % (o, ) for o in ieee[-4:]]) <NEW_LINE> if manufacturer and model is not None: <NEW_LINE> <INDENT> self.entity_id = "{}.{}_{}_{}_{}{}".format( self._domain, slugify(manufacturer), slugify(model), ieeetail, endpoint.endpoint_id, kwargs.get('entity_suffix', ''), ) <NEW_LINE> self._device_state_attributes['friendly_name'] = "{} {}".format( manufacturer, model, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.entity_id = "{}.zha_{}_{}{}".format( self._domain, ieeetail, endpoint.endpoint_id, kwargs.get('entity_suffix', ''), ) <NEW_LINE> <DEDENT> self._endpoint = endpoint <NEW_LINE> self._in_clusters = in_clusters <NEW_LINE> self._out_clusters = out_clusters <NEW_LINE> self._state = None <NEW_LINE> self._unique_id = unique_id <NEW_LINE> self._in_listeners = {} <NEW_LINE> self._out_listeners = {} <NEW_LINE> application_listener.register_entity(ieee, self) <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> for cluster_id, cluster in self._in_clusters.items(): <NEW_LINE> <INDENT> cluster.add_listener(self._in_listeners.get(cluster_id, self)) <NEW_LINE> <DEDENT> for cluster_id, cluster in self._out_clusters.items(): <NEW_LINE> <INDENT> cluster.add_listener(self._out_listeners.get(cluster_id, self)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return self._device_state_attributes <NEW_LINE> <DEDENT> def attribute_updated(self, attribute, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def zdo_command(self, tsn, command_id, args): <NEW_LINE> <INDENT> 
pass
A base class for ZHA entities.
62598f9f76e4537e8c3ef3c2
class DelReserve(BrowserView): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> self.portal = api.portal.get() <NEW_LINE> context = self.context <NEW_LINE> request = self.request <NEW_LINE> id = request.form.get('id') <NEW_LINE> if not id: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> sqlStr = "DELETE FROM `reg_course` WHERE id = %s and isReserve = 1" % id <NEW_LINE> sqlInstance = SqlObj() <NEW_LINE> sqlInstance.execSql(sqlStr)
刪除預約
62598f9f0c0af96317c5618d
class ItemDetailHandler(ItemBaseHandler): <NEW_LINE> <INDENT> @authentication_required <NEW_LINE> def get(self, resource_id): <NEW_LINE> <INDENT> pref_model = self.get_model_by_id_or_error(resource_id) <NEW_LINE> result = self.model_to_rest_resource(pref_model, self.cleaned_params.get('verbose')) <NEW_LINE> self.serve_success(result)
Handler for a single Preference
62598f9f21a7993f00c65d8f
class CastAlohomoraSerializer(serializers.Serializer): <NEW_LINE> <INDENT> user = None <NEW_LINE> username = serializers.CharField() <NEW_LINE> new_password = serializers.CharField(default='') <NEW_LINE> def validate_username(self, username): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.user = get_user(username=username) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> raise serializers.ValidationError('Invalid username') <NEW_LINE> <DEDENT> return username <NEW_LINE> <DEDENT> def validate_new_password(self, new_password): <NEW_LINE> <INDENT> if not new_password: <NEW_LINE> <INDENT> allowed_chars = list(string.ascii_uppercase + string.digits) <NEW_LINE> allowed_chars.remove('0') <NEW_LINE> allowed_chars.remove('O') <NEW_LINE> allowed_chars.remove('1') <NEW_LINE> allowed_chars.remove('I') <NEW_LINE> allowed_chars.remove('5') <NEW_LINE> allowed_chars.remove('S') <NEW_LINE> password_length = 8 <NEW_LINE> new_password = ''.join( random.SystemRandom().choice(allowed_chars) for _ in range(password_length) ) <NEW_LINE> <DEDENT> return new_password
Stores the user whose Alohomora access is being hijacked by a maintainer
62598f9f38b623060ffa8e9e
class RDSWaiter: <NEW_LINE> <INDENT> def __init__(self, client, db_instance_id, pg_engine_version, sleep_time=60): <NEW_LINE> <INDENT> self.engine_version = pg_engine_version <NEW_LINE> self.instance_id = db_instance_id <NEW_LINE> self.sleep_time = sleep_time <NEW_LINE> self.client = client <NEW_LINE> self.rds_waiter = self.client.get_waiter("db_instance_available") <NEW_LINE> _operation_method = self.rds_waiter._operation_method <NEW_LINE> def wait_with_status_reporting(**kwargs): <NEW_LINE> <INDENT> print("Polling: {} for availability".format(self.instance_id)) <NEW_LINE> response = _operation_method(**kwargs) <NEW_LINE> print( "Status of: {} is: {}".format( self.instance_id, response["DBInstances"][0]["DBInstanceStatus"] ) ) <NEW_LINE> return response <NEW_LINE> <DEDENT> self.rds_waiter._operation_method = wait_with_status_reporting <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.rds_waiter.wait(DBInstanceIdentifier=self.instance_id) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> print("Upgrading {} to: {}".format(self.instance_id, self.engine_version)) <NEW_LINE> time.sleep(self.sleep_time) <NEW_LINE> self.rds_waiter.wait(DBInstanceIdentifier=self.instance_id) <NEW_LINE> print( "Successfully upgraded {} to: {}".format( self.instance_id, self.engine_version ) )
Context manager that provides the waiting functionality when modifying/upgrading an RDSInstance >>> from models import rds_client >>> from moto import mock_rds2; mock_rds2().start() >>> from test_data.utils import make_rds_instance >>> make_rds_instance() RDSInstance id: test-rds-id, status: available, engine: postgres, engine_version: 9.3.14 >>> with RDSWaiter(rds_client, "test-rds-id", "9.4.18", sleep_time=0): ... print("Upgrading soon!") Polling: test-rds-id for availability Status of: test-rds-id is: available Upgrading soon! Upgrading test-rds-id to: 9.4.18 Polling: test-rds-id for availability Status of: test-rds-id is: available Successfully upgraded test-rds-id to: 9.4.18
62598f9f56ac1b37e6301ff6
class AutolinkWihtNamePattern(AutolinkPattern): <NEW_LINE> <INDENT> def handleMatch(self, m): <NEW_LINE> <INDENT> el = super(AutolinkWihtNamePattern, self).handleMatch(m) <NEW_LINE> el.text = util.AtomicString(m.group(3)) <NEW_LINE> return el
Return a link Element given an autolink (`<http://example/com|Please click>`).
62598f9f6fb2d068a7693d3a
class ImagesViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Images.objects.all() <NEW_LINE> serializer_class = ImagesSerializer <NEW_LINE> parser_classes = (JSONParser, FormParser, MultiPartParser) <NEW_LINE> filter_backends = [DjangoFilterBackend, ] <NEW_LINE> filter_fields = ['ad'] <NEW_LINE> @action(detail=True, methods=['POST']) <NEW_LINE> def uploadImage(self, request, pk=None): <NEW_LINE> <INDENT> ad = self.get_object() <NEW_LINE> image = ad.image <NEW_LINE> serializer = ImagesSerializer(image, data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response(serializer.data, status=200) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(serializer.errors, status=400)
Картинки в объявлении для prod.urls
62598f9fa79ad16197769e70
class Shipment(InfoObject): <NEW_LINE> <INDENT> artifact_type = 'label' <NEW_LINE> def __init__(self, xml=None, **kwargs): <NEW_LINE> <INDENT> if xml is not None: <NEW_LINE> <INDENT> self._from_xml(xml) <NEW_LINE> <DEDENT> super(Shipment, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def _from_xml(self, xml): <NEW_LINE> <INDENT> for child in xml.getchildren(): <NEW_LINE> <INDENT> if child.tag == "shipment-id": <NEW_LINE> <INDENT> self.id = child.text <NEW_LINE> <DEDENT> elif child.tag == "shipment-status": <NEW_LINE> <INDENT> self.status = child.text <NEW_LINE> <DEDENT> elif child.tag == "links": <NEW_LINE> <INDENT> self.links = dict((link['rel'], link) for link in map(lambda l: dict(l.attrib), child.findall("link"))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attrname = child.tag.replace("-", "_") <NEW_LINE> setattr(self, attrname, child.text)
Shipment class, is the return value of the CreateShipment service. It contains * tracking pin * return tracking pin * [shipment ]id * [shipment ]status * links is a dict of string --> dict where the keys are the rel attribute of each link (see the CreateShipment docs in the canadapost site. See the CreateShipment docstring for a link) and the keys have an 'href' which is the link, the same 'rel' value and some other attributes, depending on each link
62598f9fd6c5a102081e1f51
class LogicalNetlist( namedtuple( 'LogicalNetlist', 'name property_map top_instance_name top_instance libraries')): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def read_from_capnp(f, interchange, *args, **kwargs): <NEW_LINE> <INDENT> return interchange.read_logical_netlist(f, *args, **kwargs) <NEW_LINE> <DEDENT> def convert_to_capnp(self, interchange, indexed_strings=None): <NEW_LINE> <INDENT> return interchange.output_logical_netlist( name=self.name, libraries=self.libraries, top_instance=self.top_instance, top_instance_name=self.top_instance_name, property_map=self.property_map, indexed_strings=indexed_strings) <NEW_LINE> <DEDENT> def get_master_cell_list(self): <NEW_LINE> <INDENT> master_cell_list = {} <NEW_LINE> for lib in self.libraries.values(): <NEW_LINE> <INDENT> for cell in lib.cells.values(): <NEW_LINE> <INDENT> assert cell.name not in master_cell_list <NEW_LINE> master_cell_list[cell.name] = cell <NEW_LINE> <DEDENT> <DEDENT> return master_cell_list <NEW_LINE> <DEDENT> def yield_leaf_cells(self): <NEW_LINE> <INDENT> master_cell_list = self.get_master_cell_list() <NEW_LINE> for leaf_cell in yield_leaf_cells( master_cell_list, self.top_instance_name, self.top_instance): <NEW_LINE> <INDENT> yield leaf_cell
Object that represents a logical netlist. name (str) - Name of the logical netlist property_map (dict) - Top level properties for the netlist itself. This is seperate from the properties on the top level instance, which can be found in the top_instance field. top_instance_name (str) - Name of top level cell instance top_instance (CellInstance) - Top level cell instance libraries (dict of str to Library) - Cell libraries used in logical netlist. All cells referenced in the top_instance and its children must be found in one of the libraries.
62598f9f097d151d1a2c0e34
class Return(Parser, SingleMix("_x")): <NEW_LINE> <INDENT> def parse(self, string): return self._x, string
Always returns specified value. type: a -> Parser a
62598f9f7b25080760ed72b4
class Subject(object): <NEW_LINE> <INDENT> _sort_key = None <NEW_LINE> _order = 1 <NEW_LINE> def __init__(self, sid='', dset='', atrs=None): <NEW_LINE> <INDENT> self.sid = sid <NEW_LINE> self.dset = dset <NEW_LINE> self.atrs = None <NEW_LINE> self.ddir = '.' <NEW_LINE> self.dfile = '' <NEW_LINE> self.maxlinelen = 0 <NEW_LINE> dir, file = os.path.split(dset) <NEW_LINE> if dir: self.ddir = dir <NEW_LINE> self.dfile = file <NEW_LINE> self.atrs = VO.VarsObject('subject %s' % sid) <NEW_LINE> if atrs != None: self.atrs.merge(atrs) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> natr = self.atrs.count()-1 <NEW_LINE> print("Subject %s, dset %s, natr = %d" % (self.sid, self.dset, natr)) <NEW_LINE> print(" ddir = %s\n dfile = %s\n" % (self.ddir, self.dfile)) <NEW_LINE> if natr > 0: <NEW_LINE> <INDENT> self.atrs.show(' attributes: ')
a simple subject object holding an ID, dataset name, and an attribute dictionary
62598f9f67a9b606de545dd6
class Gpu: <NEW_LINE> <INDENT> def __init__( self, gpu_type=none, count=none): <NEW_LINE> <INDENT> self.type = gpu_type <NEW_LINE> self.count = count
# Arguments gpu_type: str count: int
62598f9ff7d966606f747df4
class Var(Node): <NEW_LINE> <INDENT> def __init__(self, name: str = None) -> None: <NEW_LINE> <INDENT> super().__init__(Empty) <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def __hash__(self) -> int: <NEW_LINE> <INDENT> return hash(('var', self.name, self.value)) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f'Var({self.name}={self.value})' <NEW_LINE> <DEDENT> def assign(self, value: PrimaryType) -> 'Var': <NEW_LINE> <INDENT> self.value = value <NEW_LINE> return self <NEW_LINE> <DEDENT> def render(self, ctx: Ctx) -> str: <NEW_LINE> <INDENT> assert self.value is not None <NEW_LINE> return cast(str, _update_ctx(ctx, self, str(self.value)))
A variable that can be assigned a value.
62598f9fd7e4931a7ef3bea5
class TextInfo: <NEW_LINE> <INDENT> def __init__( self, text_id: str, has_chunks: bool, local_path: str, chunk_separator: str = "\n", url: str = "", ): <NEW_LINE> <INDENT> self.text_id = text_id <NEW_LINE> self.has_chunks = has_chunks <NEW_LINE> self.local_path = local_path <NEW_LINE> self.chunk_separator = chunk_separator <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "text info: id {0}, path {1}".format(self.text_id, self.local_path) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> info = {self.text_id: {}} <NEW_LINE> info[self.text_id]["has_chunks"] = self.has_chunks <NEW_LINE> info[self.text_id]["local_path"] = self.local_path <NEW_LINE> info[self.text_id]["chunk_separator"] = self.chunk_separator <NEW_LINE> info[self.text_id]["url"] = self.url <NEW_LINE> update_text_info_db(info=info) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_text_id(cls, text_id: str): <NEW_LINE> <INDENT> info = get_text_info(text_id) <NEW_LINE> if info is None: <NEW_LINE> <INDENT> raise KeyError( "text with id " + text_id + "not found in text info database" ) <NEW_LINE> <DEDENT> tinfo = TextInfo( text_id=text_id, has_chunks=info["has_chunks"], local_path=info["local_path"], chunk_separator=info["chunk_separator"], url=info["url"], ) <NEW_LINE> return tinfo <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_info(cls, info: dict, text_id: str): <NEW_LINE> <INDENT> tinfo = TextInfo( text_id=text_id, has_chunks=info["has_chunks"], local_path=info["local_path"], chunk_separator=info["chunk_separator"], url=info["url"], ) <NEW_LINE> return tinfo
! Text info database member specified by several parameters.
62598f9fdd821e528d6d8d40
class ClosedH5FileTestCase(TempFileMixin, TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ClosedH5FileTestCase, self).setUp() <NEW_LINE> self.fnode = filenode.new_node(self.h5file, where='/', name='test') <NEW_LINE> self.h5file.close() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> warnings.filterwarnings('ignore', category=UserWarning) <NEW_LINE> try: <NEW_LINE> <INDENT> self.fnode.close() <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> warnings.filterwarnings('default', category=UserWarning) <NEW_LINE> <DEDENT> self.fnode = None <NEW_LINE> super(ClosedH5FileTestCase, self).tearDown() <NEW_LINE> <DEDENT> def test00_Write(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, self.fnode.write, 'data') <NEW_LINE> <DEDENT> def test01_Attrs(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, getattr, self.fnode, 'attrs')
Tests accessing a file node in a closed PyTables file.
62598f9fdd821e528d6d8d41
class DbProcessingFactory(ProcessingFactory): <NEW_LINE> <INDENT> @classproperty <NEW_LINE> def transition_exception_mapper(cls): <NEW_LINE> <INDENT> return DbTransitionAction <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def before_object(eng, objects, obj): <NEW_LINE> <INDENT> obj.save(status=obj.known_statuses.RUNNING, id_workflow=eng.db_obj.uuid) <NEW_LINE> super(DbProcessingFactory, DbProcessingFactory).before_object( eng, objects, obj ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def after_object(eng, objects, obj): <NEW_LINE> <INDENT> obj.save(status=obj.known_statuses.COMPLETED, id_workflow=eng.db_obj.uuid) <NEW_LINE> super(DbProcessingFactory, DbProcessingFactory).after_object( eng, objects, obj ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def before_processing(eng, objects): <NEW_LINE> <INDENT> eng.save(WorkflowStatus.RUNNING) <NEW_LINE> super(DbProcessingFactory, DbProcessingFactory).before_processing( eng, objects ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def after_processing(eng, objects): <NEW_LINE> <INDENT> if eng.has_completed: <NEW_LINE> <INDENT> eng.save(WorkflowStatus.COMPLETED) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> eng.save(WorkflowStatus.HALTED)
Processing factory for persistence requirements.
62598f9f97e22403b383ad18
class MasterRep(object): <NEW_LINE> <INDENT> def __init__(self, name, gtid_mode, exe_gtid, filename, pos): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.gtid_mode = gtid_mode <NEW_LINE> self.exe_gtid = exe_gtid <NEW_LINE> self.file = filename <NEW_LINE> self.pos = pos
Class: MasterRep Description: Class stub holder for mysql_class.MasterRep class. Methods: __init__ -> Class initialization.
62598f9f99cbb53fe6830cdf
class OnlineSpiderPartnerView(View): <NEW_LINE> <INDENT> answer = None <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.kwargs['promocode'] = request.GET.get('promo') <NEW_LINE> return super().dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_form(self): <NEW_LINE> <INDENT> form_class = OnlinePartnerForm <NEW_LINE> kwargs = dict( initial=self.kwargs, ) <NEW_LINE> if self.request.GET.get('last_name') or self.request.GET.get('last_name') == '': <NEW_LINE> <INDENT> kwargs.update(dict( data=self.request.GET )) <NEW_LINE> form = form_class(**kwargs) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> person = form.save() <NEW_LINE> mlm_agent = Agent.create_pretender(person) <NEW_LINE> send_mail('Новый претендент', 'Персона: %s' % person, 'admin@pravkaatlanta.ru', ['tayursky@gmail.com'], fail_silently=False ) <NEW_LINE> self.answer = 'В ближайшее время, мы вам перезвоним.' <NEW_LINE> <DEDENT> <DEDENT> return form_class(**kwargs) <NEW_LINE> <DEDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = self.get_form() <NEW_LINE> context = dict( title='Регистрация в партнерской программе', form=RemoteForm(form).as_dict() if form else None, answer=self.answer ) <NEW_LINE> return JsonResponse(context)
Онлайн регистрация партнера
62598f9f7d43ff2487427308
class ConsentPolicy(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = "ConsentPolicy" <NEW_LINE> def __init__(self, jsondict=None, strict=True, **kwargs): <NEW_LINE> <INDENT> self.authority = None <NEW_LINE> self.uri = None <NEW_LINE> super(ConsentPolicy, self).__init__(jsondict=jsondict, strict=strict, **kwargs) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(ConsentPolicy, self).elementProperties() <NEW_LINE> js.extend([ ("authority", "authority", str, False, None, False), ("uri", "uri", str, False, None, False), ]) <NEW_LINE> return js
Policies covered by this consent. The references to the policies that are included in this consent scope. Policies may be organizational, but are often defined jurisdictionally, or in law.
62598f9f4a966d76dd5eecee
class DeviceListAnnounceRequest(DeviceRedirectionPDU): <NEW_LINE> <INDENT> def __init__(self, deviceList: List[DeviceAnnounce]): <NEW_LINE> <INDENT> super().__init__(DeviceRedirectionComponent.RDPDR_CTYP_CORE, DeviceRedirectionPacketId.PAKID_CORE_DEVICELIST_ANNOUNCE) <NEW_LINE> self.deviceList = deviceList
https://msdn.microsoft.com/en-us/library/cc241355.aspx
62598f9f442bda511e95c268
class SculptGeometryPanel(bpy.types.Panel): <NEW_LINE> <INDENT> bl_label = "Geometry" <NEW_LINE> bl_idname = "OBJECT_PT_sculpt_geometry" <NEW_LINE> bl_space_type = 'VIEW_3D' <NEW_LINE> bl_region_type = 'TOOLS' <NEW_LINE> bl_category = 'Sculpt' <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_smooth", text="Smooth") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_laplacian_smooth", text="Laplacian Smooth") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_decimate", text="Decimate") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_displace", text="Displace") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_subdivide_faces", text="Subdivide") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_subdivide_faces_smooth", text="Subdivide Smooth") <NEW_LINE> row = layout.row(align=True) <NEW_LINE> row.alignment = 'EXPAND' <NEW_LINE> row.operator("sculpt.geometry_beautify_faces", text="Beautify")
UI panel for the various Sculpt->Edit->Sculpt buttons
62598f9f32920d7e50bc5e63
class Menu(models.Model): <NEW_LINE> <INDENT> title = models.CharField(verbose_name='菜单', max_length=32) <NEW_LINE> icon = models.CharField(verbose_name='图标', max_length=32) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title
菜单表 菜单名|菜单图标
62598f9fe1aae11d1e7ce72b
class BM25(ScoringFunction): <NEW_LINE> <INDENT> def __init__(self, k=1, b=.5): <NEW_LINE> <INDENT> self.k = k <NEW_LINE> self.b = b <NEW_LINE> <DEDENT> def score(self, query_vector, index): <NEW_LINE> <INDENT> bm25 = defaultdict(float) <NEW_LINE> for query_term in query_vector: <NEW_LINE> <INDENT> for posting in index.index[query_term]: <NEW_LINE> <INDENT> score = idf(query_term, index) * ( ((self.k + 1) * posting[1]) / (self.k * ((1 - self.b) + (self.b * (index.doc_lengths[posting[0]]) / index.mean_doc_length) + posting[1]))) <NEW_LINE> bm25[posting[0]] = score <NEW_LINE> <DEDENT> <DEDENT> return bm25 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'BM25 k=%d b=%.2f' % (self.k, self.b)
See lecture notes for definition of BM25. log10(3) * (2*2) / (1(.5 + .5(4/3.333)) + 2) = log10(3) * 4 / 3.1 = .6156... >>> idx = index.Index(['a a b c', 'c d e', 'c e f']) >>> bm = BM25(k=1, b=.5) >>> bm.score({'a': 1.}, idx)[1] # doctest:+ELLIPSIS 0.61564032...
62598f9fd58c6744b42dc1d9
class SystersUserAccountAdapter(DefaultAccountAdapter): <NEW_LINE> <INDENT> def get_login_redirect_url(self, request): <NEW_LINE> <INDENT> return reverse('user', args=[request.user.username]) <NEW_LINE> <DEDENT> def get_signup_redirect_url(self, request): <NEW_LINE> <INDENT> return reverse('user', args=[request.user.username])
Custom account adapter with different than default redirect URLs
62598f9f6fb2d068a7693d3b
class ChildrenListView(ListView): <NEW_LINE> <INDENT> model = Child <NEW_LINE> template_name = 'adminPortal/child_list.html' <NEW_LINE> def this_day(self): <NEW_LINE> <INDENT> return date.today() <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> self.parent = get_object_or_404(User, id=self.kwargs['pid']) <NEW_LINE> queryset = Child.objects.filter(parent=self.parent) <NEW_LINE> return queryset
Data about this Admin's children
62598f9f8e71fb1e983bb8c4
class GetUploadFederationTokenRequest(AbstractModel): <NEW_LINE> <INDENT> pass
GetUploadFederationToken请求参数结构体
62598f9f5f7d997b871f92e6