code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class BaseProd(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, pub_key: str = '', version_major: int = current_version_major, version_minor: int = current_version_minor, version_patch: int = current_version_patch, name: str = '', signature: str = ''): <NEW_LINE> <INDENT> if not pub_key: <NEW_LINE> <INDENT> self._generate_key_pair() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pub_key = pub_key <NEW_LINE> self.priv_key = '' <NEW_LINE> <DEDENT> self.version_major = version_major <NEW_LINE> self.version_minor = version_minor <NEW_LINE> self.version_patch = version_patch <NEW_LINE> self.name = name <NEW_LINE> self.signature = signature <NEW_LINE> <DEDENT> def _generate_key_pair(self): <NEW_LINE> <INDENT> _pair = generate_key_pair() <NEW_LINE> self.pub_key = _pair['pub_key'] <NEW_LINE> self.priv_key = _pair['priv_key'] <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def is_valid(self) -> bool: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _payload(self) -> str: <NEW_LINE> <INDENT> pass | The Base class for Producer and Product | 62598f9f1f5feb6acb162a38 |
class TableError(Error): <NEW_LINE> <INDENT> pass | On operation on a table failed. | 62598f9f24f1403a926857bd |
class django_salted_md5_test(HandlerCase, _DjangoHelper): <NEW_LINE> <INDENT> handler = hash.django_salted_md5 <NEW_LINE> max_django_version = (1,9) <NEW_LINE> django_has_encoding_glitch = True <NEW_LINE> known_correct_hashes = [ ("password", 'md5$123abcdef$c8272612932975ee80e8a35995708e80'), ("test", 'md5$3OpqnFAHW5CT$54b29300675271049a1ebae07b395e20'), (UPASS_USD, 'md5$c2e86$92105508419a81a6babfaecf876a2fa0'), (UPASS_TABLE, 'md5$d9eb8$01495b32852bffb27cf5d4394fe7a54c'), ] <NEW_LINE> known_unidentified_hashes = [ 'sha1$aa$bb', ] <NEW_LINE> known_malformed_hashes = [ 'md5$aa$bb', ] <NEW_LINE> class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): <NEW_LINE> <INDENT> def random_salt_size(self): <NEW_LINE> <INDENT> handler = self.handler <NEW_LINE> default = handler.default_salt_size <NEW_LINE> assert handler.min_salt_size == 0 <NEW_LINE> lower = 1 <NEW_LINE> upper = handler.max_salt_size or default*4 <NEW_LINE> return self.randintgauss(lower, upper, default, default*.5) | test django_salted_md5 | 62598f9fe5267d203ee6b723 |
class UpdateGroupCall(TLObject): <NEW_LINE> <INDENT> __slots__: List[str] = ["chat_id", "call"] <NEW_LINE> ID = 0xa45eb99b <NEW_LINE> QUALNAME = "types.UpdateGroupCall" <NEW_LINE> def __init__(self, *, chat_id: int, call: "raw.base.GroupCall") -> None: <NEW_LINE> <INDENT> self.chat_id = chat_id <NEW_LINE> self.call = call <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(data: BytesIO, *args: Any) -> "UpdateGroupCall": <NEW_LINE> <INDENT> chat_id = Int.read(data) <NEW_LINE> call = TLObject.read(data) <NEW_LINE> return UpdateGroupCall(chat_id=chat_id, call=call) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> data = BytesIO() <NEW_LINE> data.write(Int(self.ID, False)) <NEW_LINE> data.write(Int(self.chat_id)) <NEW_LINE> data.write(self.call.write()) <NEW_LINE> return data.getvalue() | This object is a constructor of the base type :obj:`~pyrogram.raw.base.Update`.
Details:
- Layer: ``122``
- ID: ``0xa45eb99b``
Parameters:
chat_id: ``int`` ``32-bit``
call: :obj:`GroupCall <pyrogram.raw.base.GroupCall>` | 62598f9f7047854f4633f1fa |
class ResponseBuilder(object): <NEW_LINE> <INDENT> base_response = eval(RAW_RESPONSE) <NEW_LINE> @classmethod <NEW_LINE> def create_response(self, message=None, end_session=False, card_obj=None, reprompt_message=None, is_ssml=None): <NEW_LINE> <INDENT> response = self.base_response <NEW_LINE> if message: <NEW_LINE> <INDENT> response['response'] = self.create_speech(message, is_ssml) <NEW_LINE> <DEDENT> response['response']['shouldEndSession'] = end_session <NEW_LINE> if card_obj: <NEW_LINE> <INDENT> response['response']['card'] = card_obj <NEW_LINE> <DEDENT> if reprompt_message: <NEW_LINE> <INDENT> response['reprompt'] = self.create_speech(reprompt_message, is_ssml) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_speech(cls, message=None, is_ssml=False): <NEW_LINE> <INDENT> data = {} <NEW_LINE> if is_ssml: <NEW_LINE> <INDENT> data['type'] = "SSML" <NEW_LINE> data['ssml'] = message <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data['type'] = "PlainText" <NEW_LINE> data['text'] = message <NEW_LINE> <DEDENT> return {"outputSpeech" : data } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_card(self, title=None, subtitle=None, content=None, card_type="Simple"): <NEW_LINE> <INDENT> card = {"type": card_type} <NEW_LINE> if title: card["title"] = title <NEW_LINE> if subtitle: card["subtitle"] = subtitle <NEW_LINE> if content: card["content"] = content <NEW_LINE> return card | Simple class to help users to build responses | 62598f9f6e29344779b00472 |
class Token: <NEW_LINE> <INDENT> def __init__(self, parent: 'GcdConnector', credentials: Credentials): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.credentials = credentials <NEW_LINE> self._lock = asyncio.Lock() <NEW_LINE> <DEDENT> async def get(self): <NEW_LINE> <INDENT> async with self._lock: <NEW_LINE> <INDENT> if not self.credentials.valid: <NEW_LINE> <INDENT> await self.credentials.refresh(self.parent._session) <NEW_LINE> <DEDENT> return self.credentials.token <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def valid(self): <NEW_LINE> <INDENT> return self.credentials.valid | Wrapper around Credentials to replace aiogcd Token | 62598f9f66656f66f7d5a207 |
class CreateLists: <NEW_LINE> <INDENT> def org_list(file): <NEW_LINE> <INDENT> olist = [] <NEW_LINE> olist.append('') <NEW_LINE> o = open(file) <NEW_LINE> x = csv.reader(o) <NEW_LINE> for org in x: <NEW_LINE> <INDENT> org = str(org) <NEW_LINE> org = org.replace("'", "") <NEW_LINE> org = org.replace("[", "") <NEW_LINE> org = org.replace("]", "") <NEW_LINE> org = org.replace(" ", "_") <NEW_LINE> olist.append(org) <NEW_LINE> <DEDENT> print("This is the list of organisms: " + "\n") <NEW_LINE> print(olist) | Use this class to create various lists need for the related scripts. | 62598f9f2ae34c7f260aaef7 |
class CustomPropertiesType(GeneratedsSuper): <NEW_LINE> <INDENT> subclass = None <NEW_LINE> superclass = None <NEW_LINE> def __init__(self, Property=None): <NEW_LINE> <INDENT> if Property is None: <NEW_LINE> <INDENT> self.Property = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.Property = Property <NEW_LINE> <DEDENT> <DEDENT> def factory(*args_, **kwargs_): <NEW_LINE> <INDENT> if CustomPropertiesType.subclass: <NEW_LINE> <INDENT> return CustomPropertiesType.subclass(*args_, **kwargs_) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CustomPropertiesType(*args_, **kwargs_) <NEW_LINE> <DEDENT> <DEDENT> factory = staticmethod(factory) <NEW_LINE> def get_Property(self): return self.Property <NEW_LINE> def set_Property(self, Property): self.Property = Property <NEW_LINE> def add_Property(self, value): self.Property.append(value) <NEW_LINE> def insert_Property(self, index, value): self.Property[index] = value <NEW_LINE> def hasContent_(self): <NEW_LINE> <INDENT> if ( self.Property ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def export(self, lwrite, level, namespace_='cyboxCommon:', name_='CustomPropertiesType', namespacedef_='', pretty_print=True): <NEW_LINE> <INDENT> if pretty_print: <NEW_LINE> <INDENT> eol_ = '\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> eol_ = '' <NEW_LINE> <DEDENT> showIndent(lwrite, level, pretty_print) <NEW_LINE> lwrite('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) <NEW_LINE> already_processed = set() <NEW_LINE> self.exportAttributes(lwrite, level, already_processed, namespace_, name_='CustomPropertiesType') <NEW_LINE> if self.hasContent_(): <NEW_LINE> <INDENT> lwrite('>%s' % (eol_, )) <NEW_LINE> self.exportChildren(lwrite, level + 1, namespace_, name_, pretty_print=pretty_print) <NEW_LINE> showIndent(lwrite, level, pretty_print) <NEW_LINE> lwrite('</%s%s>%s' % (namespace_, name_, eol_)) <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> lwrite('/>%s' % (eol_, )) <NEW_LINE> <DEDENT> <DEDENT> def exportAttributes(self, lwrite, level, already_processed, namespace_='cyboxCommon:', name_='CustomPropertiesType'): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def exportChildren(self, lwrite, level, namespace_='cyboxCommon:', name_='CustomPropertiesType', fromsubclass_=False, pretty_print=True): <NEW_LINE> <INDENT> if pretty_print: <NEW_LINE> <INDENT> eol_ = '\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> eol_ = '' <NEW_LINE> <DEDENT> for Property_ in self.Property: <NEW_LINE> <INDENT> Property_.export(lwrite, level, 'cyboxCommon:', name_='Property', pretty_print=pretty_print) <NEW_LINE> <DEDENT> <DEDENT> def build(self, node): <NEW_LINE> <INDENT> self.__sourcenode__ = node <NEW_LINE> already_processed = set() <NEW_LINE> self.buildAttributes(node, node.attrib, already_processed) <NEW_LINE> for child in node: <NEW_LINE> <INDENT> nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] <NEW_LINE> self.buildChildren(child, node, nodeName_) <NEW_LINE> <DEDENT> <DEDENT> def buildAttributes(self, node, attrs, already_processed): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): <NEW_LINE> <INDENT> if nodeName_ == 'Property': <NEW_LINE> <INDENT> obj_ = PropertyType.factory() <NEW_LINE> obj_.build(child_) <NEW_LINE> self.Property.append(obj_) | The CustomPropertiesType enables the specification of a set of
custom Object Properties that may not be defined in existing
Properties schemas. | 62598f9f8e71fb1e983bb8cd |
class ProxyCaller: <NEW_LINE> <INDENT> def __init__(self, c_path=os.path.join(syspaths.CONFIG_DIR, "proxy"), mopts=None): <NEW_LINE> <INDENT> import salt.minion <NEW_LINE> self.opts = mopts or salt.config.proxy_config(c_path) <NEW_LINE> self.sminion = salt.minion.SProxyMinion(self.opts) <NEW_LINE> <DEDENT> def cmd(self, fun, *args, **kwargs): <NEW_LINE> <INDENT> func = self.sminion.functions[fun] <NEW_LINE> data = {"arg": args, "fun": fun} <NEW_LINE> data.update(kwargs) <NEW_LINE> executors = getattr(self.sminion, "module_executors", []) or self.opts.get( "module_executors", ["direct_call"] ) <NEW_LINE> if isinstance(executors, str): <NEW_LINE> <INDENT> executors = [executors] <NEW_LINE> <DEDENT> for name in executors: <NEW_LINE> <INDENT> fname = "{}.execute".format(name) <NEW_LINE> if fname not in self.sminion.executors: <NEW_LINE> <INDENT> raise SaltInvocationError("Executor '{}' is not available".format(name)) <NEW_LINE> <DEDENT> return_data = self.sminion.executors[fname]( self.opts, data, func, args, kwargs ) <NEW_LINE> if return_data is not None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return return_data | ``ProxyCaller`` is the same interface used by the :command:`salt-call`
with the args ``--proxyid <proxyid>`` command-line tool on the Salt Proxy
Minion.
Importing and using ``ProxyCaller`` must be done on the same machine as a
Salt Minion and it must be done using the same user that the Salt Minion is
running as.
Usage:
.. code-block:: python
import salt.client
caller = salt.client.ProxyCaller()
caller.cmd('test.ping')
Note, a running master or minion daemon is not required to use this class.
Running ``salt-call --local`` simply sets :conf_minion:`file_client` to
``'local'``. The same can be achieved at the Python level by including that
setting in a minion config file.
.. code-block:: python
import salt.client
import salt.config
__opts__ = salt.config.proxy_config('/etc/salt/proxy', minion_id='quirky_edison')
__opts__['file_client'] = 'local'
caller = salt.client.ProxyCaller(mopts=__opts__)
.. note::
To use this for calling proxies, the :py:func:`is_proxy functions
<salt.utils.platform.is_proxy>` requires that ``--proxyid`` be an
argument on the commandline for the script this is used in, or that the
string ``proxy`` is in the name of the script. | 62598f9f01c39578d7f12b94 |
class ModelInheritanceTestCase(ModelsBaseTestCase): <NEW_LINE> <INDENT> def test_abstract(self): <NEW_LINE> <INDENT> from django.db import models <NEW_LINE> class CommonInfo(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> age = models.PositiveIntegerField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> <DEDENT> class Student(CommonInfo): <NEW_LINE> <INDENT> home_group = models.CharField(max_length=5) <NEW_LINE> <DEDENT> self.resetDB() <NEW_LINE> alias = self.adapter.DjangoClassAlias(Student) <NEW_LINE> x = Student() <NEW_LINE> attrs = alias.getEncodableAttributes(x) <NEW_LINE> self.assertEquals(attrs, { 'age': None, 'home_group': '', 'id': None, 'name': '' }) <NEW_LINE> <DEDENT> def test_concrete(self): <NEW_LINE> <INDENT> from django.db import models <NEW_LINE> class Place(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=50) <NEW_LINE> address = models.CharField(max_length=80) <NEW_LINE> <DEDENT> class Restaurant(Place): <NEW_LINE> <INDENT> serves_hot_dogs = models.BooleanField() <NEW_LINE> serves_pizza = models.BooleanField() <NEW_LINE> <DEDENT> self.resetDB() <NEW_LINE> alias = self.adapter.DjangoClassAlias(Place) <NEW_LINE> x = Place() <NEW_LINE> attrs = alias.getEncodableAttributes(x) <NEW_LINE> self.assertEquals(attrs, { 'id': None, 'name': '', 'address': '' }) <NEW_LINE> alias = self.adapter.DjangoClassAlias(Restaurant) <NEW_LINE> x = Restaurant() <NEW_LINE> attrs = alias.getEncodableAttributes(x) <NEW_LINE> self.assertEquals(attrs, { 'id': None, 'name': '', 'address': '', 'serves_hot_dogs': False, 'serves_pizza': False }) | Tests for L{Django model inheritance<http://docs.djangoproject.com/en/dev/topics/db/models/#model-inheritance>} | 62598f9feab8aa0e5d30bb9d |
class MigrationItem(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'MigrationItemProperties'}, } <NEW_LINE> def __init__( self, *, location: Optional[str] = None, properties: Optional["MigrationItemProperties"] = None, **kwargs ): <NEW_LINE> <INDENT> super(MigrationItem, self).__init__(location=location, **kwargs) <NEW_LINE> self.properties = properties | Migration item.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:ivar type: Resource Type.
:vartype type: str
:param location: Resource Location.
:type location: str
:param properties: The migration item properties.
:type properties: ~azure.mgmt.recoveryservicessiterecovery.models.MigrationItemProperties | 62598f9f99cbb53fe6830ce9 |
class OutgoingAckProtocolEntity(AckProtocolEntity): <NEW_LINE> <INDENT> def __init__(self, _id, _class, _type, to, participant = None): <NEW_LINE> <INDENT> super(OutgoingAckProtocolEntity, self).__init__(_id, _class) <NEW_LINE> self.setOutgoingData(_type, to, participant) <NEW_LINE> <DEDENT> def setOutgoingData(self, _type, _to, _participant): <NEW_LINE> <INDENT> self._type = _type <NEW_LINE> self._to = _to <NEW_LINE> self._participant = _participant <NEW_LINE> <DEDENT> def toProtocolTreeNode(self): <NEW_LINE> <INDENT> node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode() <NEW_LINE> if self._type: <NEW_LINE> <INDENT> node.setAttribute("type", self._type) <NEW_LINE> <DEDENT> node.setAttribute("to", self._to) <NEW_LINE> if self._participant: <NEW_LINE> <INDENT> node.setAttribute("participant", self._participant) <NEW_LINE> <DEDENT> return node <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> out = super(OutgoingAckProtocolEntity, self).__str__() <NEW_LINE> out += "Type: %s\n" % self._type <NEW_LINE> out += "To: %s\n" % self._to <NEW_LINE> if self._participant: <NEW_LINE> <INDENT> out += "Participant: %s\n" % self._participant <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromProtocolTreeNode(node): <NEW_LINE> <INDENT> entity = AckProtocolEntity.fromProtocolTreeNode(node) <NEW_LINE> entity.__class__ = OutgoingAckProtocolEntity <NEW_LINE> entity.setOutgoingData( node.getAttributeValue("type"), node.getAttributeValue("to"), node.getAttributeValue("participant") ) <NEW_LINE> return entity | <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
<ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
</ack> | 62598f9f07f4c71912baf262 |
class ISmartLinkControlPanelForm(IPloneControlPanelForm): <NEW_LINE> <INDENT> pass | Interface for configuration panel inside SmartLink | 62598f9fadb09d7d5dc0a3a1 |
class MemeSimGUI(): <NEW_LINE> <INDENT> def __init__(self, master): <NEW_LINE> <INDENT> self._master = master <NEW_LINE> self._master.title("Welcome to the MemeSimGUI") <NEW_LINE> self._master.geometry('950x500') <NEW_LINE> self._frame = tk.Frame(self._master) <NEW_LINE> self._memelbl = tk.Label(self._frame, text="A meme:") <NEW_LINE> self._memelbl.pack(fill=tk.X, padx=10, pady=10) <NEW_LINE> self._memebtn = tk.Button(self._frame, text="Click me", command= self._clicked) <NEW_LINE> self._memebtn.pack(fill=tk.X, padx=(10,750), pady=10) <NEW_LINE> self._robot_pos_txt = tk.Text(self._frame, height=5) <NEW_LINE> self._robot_pos_txt.insert(tk.INSERT, "Robot Positions:\n\n\n\n") <NEW_LINE> self._robot_pos_txt.pack(fill=tk.X, padx=10, pady=10) <NEW_LINE> self._balance_lbl = tk.Label(self._frame, text="<value>", anchor='w') <NEW_LINE> self._balance_lbl.pack(fill=tk.X, padx=10, pady=10) <NEW_LINE> self._closebtn = tk.Button(self._frame, text="Close", command=self.close) <NEW_LINE> self._closebtn.pack(fill=tk.X, padx=(10,750), pady=10) <NEW_LINE> self._frame.pack(fill=tk.X) <NEW_LINE> self._master.protocol("WM_DELETE_WINDOW", self.close) <NEW_LINE> self.is_closing = False <NEW_LINE> self.button_callback = None <NEW_LINE> <DEDENT> def set_button_callback(self, f): <NEW_LINE> <INDENT> self.button_callback = f <NEW_LINE> <DEDENT> def _clicked(self): <NEW_LINE> <INDENT> if not self.button_callback is None: <NEW_LINE> <INDENT> self.button_callback() <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.is_closing = True <NEW_LINE> <DEDENT> def show_meme(self, meme): <NEW_LINE> <INDENT> self._memelbl.configure(text=meme.genomestring()) <NEW_LINE> <DEDENT> def show_location(self, id, x, y, phi): <NEW_LINE> <INDENT> newtext = "Robot: {0:d} at ({1:.1f}, {2:.1f}), {3:.1f} degrees\n".format(id, x, y, phi/3.14159265*180) <NEW_LINE> self._robot_pos_txt.replace("{0}.0".format((id-1)%3 + 2), "{0}.0".format((id-1)%3 + 3), newtext) <NEW_LINE> <DEDENT> def show_balance(self, 
b): <NEW_LINE> <INDENT> self._balance_lbl.configure(text="Bank account: {0:.2f} euro.".format(float(b))) | Class that handles all GUI related activity. On instantiation, provide a reference to the tkinter root. | 62598f9f56b00c62f0fb26c7 |
class LoadButton(Button): <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> super(LoadButton, self).__init__(x, y, 'Load Game', ['Load a game which you saved previously']) <NEW_LINE> <DEDENT> def activate(self): <NEW_LINE> <INDENT> name = get_input("Enter the name of the saved game.") <NEW_LINE> if name.strip() == '': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> path = get_save_path(name) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(path, 'r') as f: <NEW_LINE> <INDENT> saved_level = int(f.read()) <NEW_LINE> level.current_level = saved_level <NEW_LINE> level.levels[level.current_level].begin() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> show_message("No save file with that name was found.") | A button for loading a saved game. | 62598f9fbaa26c4b54d4f0c6 |
class InfoType2(InfoType) : <NEW_LINE> <INDENT> infoType = "2" <NEW_LINE> @property <NEW_LINE> def protocols_Id(self): <NEW_LINE> <INDENT> return ["2"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def dmgDevice_Id(self): <NEW_LINE> <INDENT> if self.data['header']['protocol'] in self.protocols_Id: <NEW_LINE> <INDENT> return "{0}.{1}".format(self.data['infos']['id'], self.data['infos']['subType']) <NEW_LINE> <DEDENT> return "" <NEW_LINE> <DEDENT> def get_RFP_data_to_sensor(self, sensor): <NEW_LINE> <INDENT> try : <NEW_LINE> <INDENT> qualifier = int(self.data['infos']['qualifier']) <NEW_LINE> if self.data['infos']['subType'] == "0" : <NEW_LINE> <INDENT> if sensor['data_type'] == 'DT_OpenClose' : <NEW_LINE> <INDENT> return 1 if qualifier & 1 else 0 <NEW_LINE> <DEDENT> elif sensor['reference'] == 'alarm' : <NEW_LINE> <INDENT> return 1 if qualifier & 2 else 0 <NEW_LINE> <DEDENT> elif sensor['reference'] == 'low_battery' : <NEW_LINE> <INDENT> return 1 if qualifier & 4 else 0 <NEW_LINE> <DEDENT> <DEDENT> elif self.data['infos']['subType'] == 1 : <NEW_LINE> <INDENT> if sensor['reference'] == 'button_1' and (qualifier and 0x08): return 1 <NEW_LINE> elif sensor['reference'] == 'button_2' and (qualifier and 0x10) : return 1 <NEW_LINE> elif sensor['reference'] == 'button_3' and (qualifier and 0x20) : return 1 <NEW_LINE> elif sensor['reference'] == 'button_3' and (qualifier and 0x40) : return 1 <NEW_LINE> <DEDENT> <DEDENT> except : <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_Available_Sensors(self): <NEW_LINE> <INDENT> if self.data['infos']['subType'] == "0" : <NEW_LINE> <INDENT> return [["tamper", "alarm", "low_battery", "rf_quality"]] <NEW_LINE> <DEDENT> elif self.data['infos']['subType'] == "1" : <NEW_LINE> <INDENT> return [["button_1", "button_2", "button_3", "button_4", "rf_quality"]] <NEW_LINE> <DEDENT> return [] <NEW_LINE> <DEDENT> def get_Available_Commands(self): <NEW_LINE> <INDENT> if self.data['infos']['subType'] == "1" : 
<NEW_LINE> <INDENT> return [["button_1", "button_2", "button_3", "button_4"]] <NEW_LINE> <DEDENT> return [] | Info Type for VISONIC protocol | 62598f9fa8370b77170f01fb |
class Attention(Layer): <NEW_LINE> <INDENT> def __init__(self, step_dim, W_regularizer=None, b_regularizer=None, W_constraint=None, b_constraint=None, bias=True, **kwargs): <NEW_LINE> <INDENT> self.supports_masking = True <NEW_LINE> self.init = initializers.get('glorot_uniform') <NEW_LINE> self.W_regularizer = regularizers.get(W_regularizer) <NEW_LINE> self.b_regularizer = regularizers.get(b_regularizer) <NEW_LINE> self.W_constraint = constraints.get(W_constraint) <NEW_LINE> self.b_constraint = constraints.get(b_constraint) <NEW_LINE> self.bias = bias <NEW_LINE> self.step_dim = step_dim <NEW_LINE> self.features_dim = 0 <NEW_LINE> super(Attention, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> assert len(input_shape) == 3 <NEW_LINE> self.W = self.add_weight(shape=(input_shape[-1],), initializer=self.init, name=f'{self.name}_W', regularizer=self.W_regularizer, constraint=self.W_constraint) <NEW_LINE> self.features_dim = input_shape[-1] <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> self.b = self.add_weight(shape=(input_shape[1],), initializer='zero', name=f'{self.name}_b', regularizer=self.b_regularizer, constraint=self.b_constraint) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.b = None <NEW_LINE> <DEDENT> self.built = True <NEW_LINE> <DEDENT> def compute_mask(self, input, input_mask=None): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def call(self, x, mask=None): <NEW_LINE> <INDENT> features_dim = self.features_dim <NEW_LINE> step_dim = self.step_dim <NEW_LINE> e = K.reshape(K.dot(K.reshape(x, (-1, features_dim)), K.reshape(self.W, (features_dim, 1))), (-1, step_dim)) <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> e += self.b <NEW_LINE> <DEDENT> e = K.tanh(e) <NEW_LINE> a = K.exp(e) <NEW_LINE> if mask is not None: <NEW_LINE> <INDENT> a *= K.cast(mask, K.floatx()) <NEW_LINE> <DEDENT> a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx()) <NEW_LINE> a = K.expand_dims(a) <NEW_LINE> c = K.sum(a * 
x, axis=1) <NEW_LINE> return c <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return input_shape[0], self.features_dim | 定义注意力层 | 62598f9f4527f215b58e9cfb |
class CourseStructureTestCase(TransformerRegistryTestMixin, ModuleStoreTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(CourseStructureTestCase, self).setUp() <NEW_LINE> self.password = 'test' <NEW_LINE> self.user = UserFactory.create(password=self.password) <NEW_LINE> self.staff = UserFactory.create(password=self.password, is_staff=True) <NEW_LINE> <DEDENT> def create_block_id(self, block_type, block_ref): <NEW_LINE> <INDENT> return '{}_{}'.format(block_type, block_ref) <NEW_LINE> <DEDENT> def build_xblock(self, block_hierarchy, block_map, parent): <NEW_LINE> <INDENT> block_type = block_hierarchy['#type'] <NEW_LINE> block_ref = block_hierarchy['#ref'] <NEW_LINE> factory = (CourseFactory if block_type == 'course' else ItemFactory) <NEW_LINE> kwargs = {key: value for key, value in block_hierarchy.iteritems() if key[0] != '#'} <NEW_LINE> if block_type != 'course': <NEW_LINE> <INDENT> kwargs['category'] = block_type <NEW_LINE> kwargs['publish_item'] = True, <NEW_LINE> <DEDENT> if parent: <NEW_LINE> <INDENT> kwargs['parent'] = parent <NEW_LINE> <DEDENT> xblock = factory.create( display_name=self.create_block_id(block_type, block_ref), **kwargs ) <NEW_LINE> block_map[block_ref] = xblock <NEW_LINE> for child_hierarchy in block_hierarchy.get('#children', []): <NEW_LINE> <INDENT> self.build_xblock(child_hierarchy, block_map, xblock) <NEW_LINE> <DEDENT> <DEDENT> def add_parents(self, block_hierarchy, block_map): <NEW_LINE> <INDENT> parents = block_hierarchy.get('#parents', []) <NEW_LINE> if parents: <NEW_LINE> <INDENT> block_key = block_map[block_hierarchy['#ref']].location <NEW_LINE> course = modulestore().get_item(block_map['course'].location) <NEW_LINE> if block_key in course.children: <NEW_LINE> <INDENT> course.children.remove(block_key) <NEW_LINE> block_map['course'] = update_block(course) <NEW_LINE> <DEDENT> for parent_ref in parents: <NEW_LINE> <INDENT> parent_block = modulestore().get_item(block_map[parent_ref].location) <NEW_LINE> 
parent_block.children.append(block_key) <NEW_LINE> block_map[parent_ref] = update_block(parent_block) <NEW_LINE> <DEDENT> <DEDENT> for child_hierarchy in block_hierarchy.get('#children', []): <NEW_LINE> <INDENT> self.add_parents(child_hierarchy, block_map) <NEW_LINE> <DEDENT> <DEDENT> def build_course(self, course_hierarchy): <NEW_LINE> <INDENT> block_map = {} <NEW_LINE> for block_hierarchy in course_hierarchy: <NEW_LINE> <INDENT> self.build_xblock(block_hierarchy, block_map, parent=None) <NEW_LINE> <DEDENT> for block_hierarchy in course_hierarchy: <NEW_LINE> <INDENT> self.add_parents(block_hierarchy, block_map) <NEW_LINE> <DEDENT> publish_course(block_map['course']) <NEW_LINE> return block_map <NEW_LINE> <DEDENT> def get_block_key_set(self, blocks, *refs): <NEW_LINE> <INDENT> xblocks = (blocks[ref] for ref in refs) <NEW_LINE> return set([xblock.location for xblock in xblocks]) | Helper for test cases that need to build course structures. | 62598f9f8e71fb1e983bb8ce |
class ClusterStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.ClusterInfo = channel.unary_stream( '/Cluster/ClusterInfo', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=ClusterDescription.FromString, ) | Cluster service gives some descriptions about the cluster where the service
is running. | 62598f9f8e7ae83300ee8eb7 |
class LogPage: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'batch', (LogBatch, LogBatch.thrift_spec), None, ), (2, TType.STRUCT, 'next_page_token', (LogPageToken, LogPageToken.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, batch=None, next_page_token=None,): <NEW_LINE> <INDENT> self.batch = batch <NEW_LINE> self.next_page_token = next_page_token <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.batch = LogBatch() <NEW_LINE> self.batch.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.next_page_token = LogPageToken() <NEW_LINE> self.next_page_token.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('LogPage') <NEW_LINE> if self.batch != None: <NEW_LINE> <INDENT> 
oprot.writeFieldBegin('batch', TType.STRUCT, 1) <NEW_LINE> self.batch.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.next_page_token != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('next_page_token', TType.STRUCT, 2) <NEW_LINE> self.next_page_token.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- batch
- next_page_token | 62598f9f596a897236127a93 |
class Meta: <NEW_LINE> <INDENT> verbose_name_plural='Tags' | > when user clicks on tags, call Meta then the class funcs | 62598f9f91af0d3eaad39c23 |
class RichTextWidget(BaseWidget, patextfield_RichTextWidget): <NEW_LINE> <INDENT> _base = TextareaWidget <NEW_LINE> implementsOnly(IRichTextWidget) <NEW_LINE> pattern_options = BaseWidget.pattern_options.copy() <NEW_LINE> @property <NEW_LINE> def pattern(self): <NEW_LINE> <INDENT> registry = getUtility(IRegistry) <NEW_LINE> try: <NEW_LINE> <INDENT> records = registry.forInterface(IEditingSchema, check=False, prefix='plone') <NEW_LINE> return records.default_editor.lower() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return 'tinymce' <NEW_LINE> <DEDENT> <DEDENT> def _base_args(self): <NEW_LINE> <INDENT> args = super(RichTextWidget, self)._base_args() <NEW_LINE> args['name'] = self.name <NEW_LINE> properties = getToolByName(self.context, 'portal_properties') <NEW_LINE> charset = properties.site_properties.getProperty('default_charset', 'utf-8') <NEW_LINE> value = self.value and self.value.raw_encoded or '' <NEW_LINE> args['value'] = (self.request.get( self.field.getName(), value)).decode(charset) <NEW_LINE> args.setdefault('pattern_options', {}) <NEW_LINE> merged = dict_merge(get_tinymce_options(self.context, self.field, self.request), args['pattern_options']) <NEW_LINE> args['pattern_options'] = merged['pattern_options'] <NEW_LINE> return args <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> if self.mode != 'display': <NEW_LINE> <INDENT> rendered = '' <NEW_LINE> allowed_mime_types = self.allowedMimeTypes() <NEW_LINE> if not allowed_mime_types or len(allowed_mime_types) <= 1: <NEW_LINE> <INDENT> rendered = super(RichTextWidget, self).render() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> base_args = self._base_args() <NEW_LINE> pattern_options = base_args['pattern_options'] <NEW_LINE> del base_args['pattern'] <NEW_LINE> del base_args['pattern_options'] <NEW_LINE> textarea_widget = self._base(None, None, **base_args) <NEW_LINE> textarea_widget.klass = '' <NEW_LINE> mt_pattern_name = '{}{}'.format( self._base._klass_prefix, 
'textareamimetypeselector' ) <NEW_LINE> value_mime_type = self.value.mimeType if self.value else self.field.default_mime_type <NEW_LINE> mt_select = etree.Element('select') <NEW_LINE> mt_select.attrib['id'] = '{}_text_format'.format(self.id) <NEW_LINE> mt_select.attrib['name'] = '{}.mimeType'.format(self.name) <NEW_LINE> mt_select.attrib['class'] = mt_pattern_name <NEW_LINE> mt_select.attrib['{}{}'.format('data-', mt_pattern_name)] = json.dumps({ 'textareaName': self.name, 'widgets': { 'text/html': { 'pattern': self.pattern, 'patternOptions': pattern_options } } }) <NEW_LINE> for mt in allowed_mime_types: <NEW_LINE> <INDENT> opt = etree.Element('option') <NEW_LINE> opt.attrib['value'] = mt <NEW_LINE> if value_mime_type == mt: <NEW_LINE> <INDENT> opt.attrib['selected'] = 'selected' <NEW_LINE> <DEDENT> opt.text = mt <NEW_LINE> mt_select.append(opt) <NEW_LINE> <DEDENT> rendered = '{}\n{}'.format( textarea_widget.render(), etree.tostring(mt_select) ) <NEW_LINE> <DEDENT> return rendered <NEW_LINE> <DEDENT> if not self.value: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> if isinstance(self.value, RichTextValue): <NEW_LINE> <INDENT> return self.value.output <NEW_LINE> <DEDENT> return super(RichTextWidget, self).render() | TinyMCE widget for z3c.form. | 62598f9f1f037a2d8b9e3eff |
class InProgressWithETA(InProgress): <NEW_LINE> <INDENT> def __init__(self, size: int, started: datetime, rate: SummaryStat, *args, **kwargs) -> None: <NEW_LINE> <INDENT> rate_mean, rate_stderr = rate <NEW_LINE> self._eta = started + timedelta(seconds=size / rate_mean), int(rate_stderr) <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def eta(self) -> Tuple[datetime, int]: <NEW_LINE> <INDENT> return self._eta <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> eta, stderr = self._eta <NEW_LINE> return datetime.strftime(eta, ISO8601_UTC) + f" +/- {stderr}" | Interrupt raised when data fetching is in progress, with ETA | 62598f9f009cb60464d0133c |
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class BGPVPNDriverDBMixin(BGPVPNDriverBase): <NEW_LINE> <INDENT> def __init__(self, service_plugin): <NEW_LINE> <INDENT> super(BGPVPNDriverDBMixin, self).__init__(service_plugin) <NEW_LINE> self.bgpvpn_db = bgpvpn_db.BGPVPNPluginDb() <NEW_LINE> <DEDENT> def create_bgpvpn(self, context, bgpvpn): <NEW_LINE> <INDENT> bgpvpn = self.bgpvpn_db.create_bgpvpn( context, bgpvpn) <NEW_LINE> self.create_bgpvpn_postcommit(context, bgpvpn) <NEW_LINE> return bgpvpn <NEW_LINE> <DEDENT> def get_bgpvpns(self, context, filters=None, fields=None): <NEW_LINE> <INDENT> return self.bgpvpn_db.get_bgpvpns(context, filters, fields) <NEW_LINE> <DEDENT> def get_bgpvpn(self, context, id, fields=None): <NEW_LINE> <INDENT> return self.bgpvpn_db.get_bgpvpn(context, id, fields) <NEW_LINE> <DEDENT> def update_bgpvpn(self, context, id, bgpvpn): <NEW_LINE> <INDENT> old_bgpvpn = self.get_bgpvpn(context, id) <NEW_LINE> bgpvpn = self.bgpvpn_db.update_bgpvpn( context, id, bgpvpn) <NEW_LINE> self.update_bgpvpn_postcommit(context, old_bgpvpn, bgpvpn) <NEW_LINE> return bgpvpn <NEW_LINE> <DEDENT> def delete_bgpvpn(self, context, id): <NEW_LINE> <INDENT> bgpvpn = self.bgpvpn_db.delete_bgpvpn(context, id) <NEW_LINE> self.delete_bgpvpn_postcommit(context, bgpvpn) <NEW_LINE> <DEDENT> def associate_network(self, context, id, network_id): <NEW_LINE> <INDENT> self.bgpvpn_db.associate_network(context, id, network_id) <NEW_LINE> self.associate_network_postcommit(context, id, network_id) <NEW_LINE> <DEDENT> def disassociate_network(self, context, id, network_id): <NEW_LINE> <INDENT> self.bgpvpn_db.disassociate_network(context, id, network_id) <NEW_LINE> self.disassociate_network_postcommit(context, id, network_id) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def create_bgpvpn_postcommit(self, context, bgpvpn): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def update_bgpvpn_postcommit(self, context, old_bgpvpn, bgpvpn): <NEW_LINE> <INDENT> 
pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def delete_bgpvpn_postcommit(self, context, bgpvpn): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def associate_network_postcommit(self, context, bgpvpn_id, network_id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def disassociate_network_postcommit(self, context, bgpvpn_id, network_id): <NEW_LINE> <INDENT> pass | BGPVPNDriverDB Mixin to provision the database on behalf of the driver
That driver interface persists BGPVPN data in its database and forwards
the result to postcommit methods | 62598f9f3eb6a72ae038a459 |
class PPrint(BaseAdapter): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PPrint, self).__init__(*args, **kwargs) <NEW_LINE> self.pp = pprint.PrettyPrinter(indent=4) <NEW_LINE> <DEDENT> def process(self, item): <NEW_LINE> <INDENT> self.pp.pprint(item) | Just prints payload to stdout.
| 62598f9f4428ac0f6e658343 |
class Beat(HubService): <NEW_LINE> <INDENT> def __init__(self, dmd, instance): <NEW_LINE> <INDENT> HubService.__init__(self, dmd, instance) <NEW_LINE> self.beat() <NEW_LINE> <DEDENT> def beat(self): <NEW_LINE> <INDENT> secs = time.time() <NEW_LINE> for listener in self.listeners: <NEW_LINE> <INDENT> d = listener.callRemote('beat', secs) <NEW_LINE> d.addErrback(self.error) <NEW_LINE> <DEDENT> reactor.callLater(1, self.beat) <NEW_LINE> <DEDENT> def error(self, reason, unused): <NEW_LINE> <INDENT> reason.printTraceback() | Example service which sends a simple heartbeat to keep a client
connection alive. | 62598f9f1f5feb6acb162a3a |
class CategoriesSelectMultiple(forms.CheckboxSelectMultiple): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(self.__class__, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def render(self, name, value, attrs=None): <NEW_LINE> <INDENT> value = value or [] <NEW_LINE> has_id = attrs and 'id' in attrs <NEW_LINE> final_attrs = self.build_attrs(attrs, name=name) <NEW_LINE> choices = [] <NEW_LINE> other = None <NEW_LINE> miscs = Category.objects.filter(misc=True).values_list('id', flat=True) <NEW_LINE> for c in self.choices: <NEW_LINE> <INDENT> if c[0] in miscs: <NEW_LINE> <INDENT> other = (c[0], _("My add-on doesn't fit into any of the categories")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> choices.append(c) <NEW_LINE> <DEDENT> <DEDENT> choices = list(enumerate(choices)) <NEW_LINE> choices_size = len(choices) <NEW_LINE> groups = [choices] <NEW_LINE> if other: <NEW_LINE> <INDENT> groups.append([(choices_size, other)]) <NEW_LINE> <DEDENT> str_values = set([force_unicode(v) for v in value]) <NEW_LINE> output = [] <NEW_LINE> for (k, group) in enumerate(groups): <NEW_LINE> <INDENT> cls = 'addon-misc-category' if k == 1 else 'addon-categories' <NEW_LINE> output.append(u'<ul class="%s">' % cls) <NEW_LINE> for i, (option_value, option_label) in group: <NEW_LINE> <INDENT> if has_id: <NEW_LINE> <INDENT> final_attrs = dict(final_attrs, id='%s_%s' % ( attrs['id'], i)) <NEW_LINE> label_for = u' for="%s"' % final_attrs['id'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label_for = '' <NEW_LINE> <DEDENT> cb = forms.CheckboxInput( final_attrs, check_test=lambda value: value in str_values) <NEW_LINE> option_value = force_unicode(option_value) <NEW_LINE> rendered_cb = cb.render(name, option_value) <NEW_LINE> option_label = conditional_escape(force_unicode(option_label)) <NEW_LINE> output.append(u'<li><label%s>%s %s</label></li>' % ( label_for, rendered_cb, option_label)) <NEW_LINE> <DEDENT> output.append(u'</ul>') <NEW_LINE> <DEDENT> return 
mark_safe(u'\n'.join(output)) | Widget that formats the Categories checkboxes. | 62598f9f627d3e7fe0e06cc3 |
class Env(object): <NEW_LINE> <INDENT> action_space = None <NEW_LINE> observation_space = None <NEW_LINE> def step(self, action): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def render(self, mode='human'): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> return | The main OpenAI Gym class. It encapsulates an environment with
arbitrary behind-the-scenes dynamics. An environment can be
partially or fully observed.
The main API methods that users of this class need to know are:
step
reset
render
close
seed
And set the following attributes:
action_space: The Space object corresponding to valid actions
observation_space: The Space object corresponding to valid observations
reward_range: A tuple corresponding to the min and max possible rewards
Note: a default reward range set to [-inf,+inf] already exists. Set it if you want a narrower range.
The methods are accessed publicly as "step", "reset", etc. The
non-underscored versions are wrapper methods to which we may add
functionality over time. | 62598f9fe5267d203ee6b726 |
class DteWork(Work, MergeDdb): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_scf_task(cls, scf_task, ddk_tolerance=None, manager=None): <NEW_LINE> <INDENT> if not isinstance(scf_task, ScfTask): <NEW_LINE> <INDENT> raise TypeError("task `%s` does not inherit from ScfTask" % scf_task) <NEW_LINE> <DEDENT> new = cls(manager=manager) <NEW_LINE> multi_ddk = scf_task.input.make_ddk_inputs(tolerance=ddk_tolerance) <NEW_LINE> ddk_tasks = [] <NEW_LINE> for ddk_inp in multi_ddk: <NEW_LINE> <INDENT> ddk_task = new.register_ddk_task(ddk_inp, deps={scf_task: "WFK"}) <NEW_LINE> ddk_tasks.append(ddk_task) <NEW_LINE> <DEDENT> multi_dde = scf_task.input.make_dde_inputs(use_symmetries=False) <NEW_LINE> dde_tasks = [] <NEW_LINE> dde_deps = {ddk_task: "DDK" for ddk_task in ddk_tasks} <NEW_LINE> dde_deps.update({scf_task: "WFK"}) <NEW_LINE> for dde_inp in multi_dde: <NEW_LINE> <INDENT> dde_task = new.register_dde_task(dde_inp, deps=dde_deps) <NEW_LINE> dde_tasks.append(dde_task) <NEW_LINE> <DEDENT> dte_deps = {scf_task: "WFK DEN"} <NEW_LINE> dte_deps.update({dde_task: "1WF 1DEN" for dde_task in dde_tasks}) <NEW_LINE> multi_dte = scf_task.input.make_dte_inputs() <NEW_LINE> dte_tasks = [] <NEW_LINE> for dte_inp in multi_dte: <NEW_LINE> <INDENT> dte_task = new.register_dte_task(dte_inp, deps=dte_deps) <NEW_LINE> dte_tasks.append(dte_task) <NEW_LINE> <DEDENT> return new <NEW_LINE> <DEDENT> def on_all_ok(self): <NEW_LINE> <INDENT> out_ddb = self.merge_ddb_files() <NEW_LINE> return self.Results(node=self, returncode=0, message="DDB merge done") | Work for the computation of the third derivative of the energy.
This work consists of DDK tasks and electric field perturbation.
It provides the callback method (on_all_ok) that calls mrgddb to merge the partial DDB files produced
.. rubric:: Inheritance Diagram
.. inheritance-diagram:: DteWork | 62598f9f63b5f9789fe84f8d |
class KNXBinarySensor(BinarySensorDevice): <NEW_LINE> <INDENT> def __init__(self, device): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.automations = [] <NEW_LINE> <DEDENT> @callback <NEW_LINE> def async_register_callbacks(self): <NEW_LINE> <INDENT> async def after_update_callback(device): <NEW_LINE> <INDENT> self.async_write_ha_state() <NEW_LINE> <DEDENT> self.device.register_device_updated_cb(after_update_callback) <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> self.async_register_callbacks() <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.device.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self.hass.data[DATA_KNX].connected <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_class(self): <NEW_LINE> <INDENT> return self.device.device_class <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self.device.is_on() | Representation of a KNX binary sensor. | 62598f9f090684286d5935e6 |
class EmailCommseqStatResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'error': 'Error', 'metadata': 'ResponseMetadata', 'stats': 'EmailCommseqStat', 'success': 'bool', 'warning': 'Warning' } <NEW_LINE> attribute_map = { 'error': 'error', 'metadata': 'metadata', 'stats': 'stats', 'success': 'success', 'warning': 'warning' } <NEW_LINE> def __init__(self, error=None, metadata=None, stats=None, success=None, warning=None): <NEW_LINE> <INDENT> self._error = None <NEW_LINE> self._metadata = None <NEW_LINE> self._stats = None <NEW_LINE> self._success = None <NEW_LINE> self._warning = None <NEW_LINE> self.discriminator = None <NEW_LINE> if error is not None: <NEW_LINE> <INDENT> self.error = error <NEW_LINE> <DEDENT> if metadata is not None: <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> <DEDENT> if stats is not None: <NEW_LINE> <INDENT> self.stats = stats <NEW_LINE> <DEDENT> if success is not None: <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> if warning is not None: <NEW_LINE> <INDENT> self.warning = warning <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def error(self): <NEW_LINE> <INDENT> return self._error <NEW_LINE> <DEDENT> @error.setter <NEW_LINE> def error(self, error): <NEW_LINE> <INDENT> self._error = error <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata <NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> @property <NEW_LINE> def stats(self): <NEW_LINE> <INDENT> return self._stats <NEW_LINE> <DEDENT> @stats.setter <NEW_LINE> def stats(self, stats): <NEW_LINE> <INDENT> self._stats = stats <NEW_LINE> <DEDENT> @property <NEW_LINE> def success(self): <NEW_LINE> <INDENT> return self._success <NEW_LINE> <DEDENT> @success.setter <NEW_LINE> def success(self, success): <NEW_LINE> <INDENT> self._success = success <NEW_LINE> <DEDENT> @property <NEW_LINE> def warning(self): <NEW_LINE> <INDENT> 
return self._warning <NEW_LINE> <DEDENT> @warning.setter <NEW_LINE> def warning(self, warning): <NEW_LINE> <INDENT> self._warning = warning <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(EmailCommseqStatResponse, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, EmailCommseqStatResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598f9f60cbc95b06364165 |
class Dataset(object): <NEW_LINE> <INDENT> def __init__(self, dim, contents=None): <NEW_LINE> <INDENT> self._dim = dim <NEW_LINE> if contents == None: <NEW_LINE> <INDENT> self._contents = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [] <NEW_LINE> for x in contents: <NEW_LINE> <INDENT> mycopy = [] <NEW_LINE> for y in x: <NEW_LINE> <INDENT> y = y <NEW_LINE> mycopy.append(y) <NEW_LINE> <DEDENT> result.append(mycopy) <NEW_LINE> <DEDENT> self._contents = result <NEW_LINE> <DEDENT> <DEDENT> def getDimension(self): <NEW_LINE> <INDENT> return self._dim <NEW_LINE> <DEDENT> def getSize(self): <NEW_LINE> <INDENT> length = len(self._contents) <NEW_LINE> return length <NEW_LINE> pass <NEW_LINE> <DEDENT> def getContents(self): <NEW_LINE> <INDENT> return self._contents <NEW_LINE> pass <NEW_LINE> <DEDENT> def getPoint(self, i): <NEW_LINE> <INDENT> mycopy = [] <NEW_LINE> for x in self._contents[i]: <NEW_LINE> <INDENT> x=x <NEW_LINE> mycopy.append(x) <NEW_LINE> print (mycopy) <NEW_LINE> <DEDENT> return mycopy <NEW_LINE> <DEDENT> def addPoint(self,point): <NEW_LINE> <INDENT> mycopy = [] <NEW_LINE> for x in point: <NEW_LINE> <INDENT> x=x <NEW_LINE> mycopy.append(x) <NEW_LINE> <DEDENT> self._contents.append(mycopy) <NEW_LINE> pass | A class representing a dataset for k-means clustering.
The data is stored as a list of lists of numbers (ints or floats). Each component
list is a data point.
INSTANCE ATTRIBUTES:
_dimension: the point dimension for this dataset
[int > 0. Value never changes after initialization]
_contents: the dataset contents
[a list of lists of numbers (float or int), possibly empty.
EXTRA INVARIANTS:
The number of columns in _contents is equal to _dimension. That is, for every
item _contents[i] in the list _contents, len(_contents[i]) == dimension.
None of the attributes should be accessed directly outside of the class Dataset
(e.g. in the methods of class Cluster or KMeans). Instead, this class has getter and
setter style methods (with the appropriate preconditions) for modifying these values. | 62598f9f8c0ade5d55dc359b |
class PkgRepoOptionsBundle(OptionsBundle): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PkgRepoOptionsBundle, self).__init__() <NEW_LINE> self.opt_remove_missing.description += _('; defaults to false') <NEW_LINE> d = _('distribution releases (suites or codenames) to sync; defaults to stable') <NEW_LINE> self.opt_releases = PulpCliOption('--releases', d, required=False) <NEW_LINE> d = _('components to sync') <NEW_LINE> self.opt_components = PulpCliOption('--components', d, required=False) <NEW_LINE> d = _('Comma separated list of architectures') <NEW_LINE> self.opt_architectures = PulpCliOption('--architectures', d, required=False) <NEW_LINE> d = _('Require that Release files are signed and verified') <NEW_LINE> self.opt_require_signature = PulpCliOption('--require-signature', d, required=False) <NEW_LINE> d = _('fingerprints of gpg-keys to verify releases signature against') <NEW_LINE> self.opt_allowed_keys = PulpCliOption('--allowed-keys', d, required=False) | Contains small modifications to the default option descriptions,
and additional options. | 62598f9fa79ad16197769e7d |
class Float(Base): <NEW_LINE> <INDENT> fmt = '<f' <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value | Convert raw bytes to a floating point number.
| 62598f9f4e4d56256637223c |
class DetailView(generic.DetailView): <NEW_LINE> <INDENT> model = User <NEW_LINE> context_object_name = "user_object" <NEW_LINE> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return get_object_or_404(User, username=self.kwargs["pk"]) | User Detail View | 62598f9f97e22403b383ad24 |
@tf_export('keras.layers.GaussianDropout') <NEW_LINE> class GaussianDropout(Layer): <NEW_LINE> <INDENT> def __init__(self, rate, **kwargs): <NEW_LINE> <INDENT> super(GaussianDropout, self).__init__(**kwargs) <NEW_LINE> self.supports_masking = True <NEW_LINE> self.rate = rate <NEW_LINE> self._can_use_graph_functions = True <NEW_LINE> <DEDENT> def call(self, inputs, training=None): <NEW_LINE> <INDENT> if 0 < self.rate < 1: <NEW_LINE> <INDENT> def noised(): <NEW_LINE> <INDENT> stddev = np.sqrt(self.rate / (1.0 - self.rate)) <NEW_LINE> return inputs * K.random_normal( shape=array_ops.shape(inputs), mean=1.0, stddev=stddev) <NEW_LINE> <DEDENT> return K.in_train_phase(noised, inputs, training=training) <NEW_LINE> <DEDENT> return inputs <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'rate': self.rate} <NEW_LINE> base_config = super(GaussianDropout, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items())) <NEW_LINE> <DEDENT> @tf_utils.shape_type_conversion <NEW_LINE> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return input_shape | Apply multiplicative 1-centered Gaussian noise.
As it is a regularization layer, it is only active at training time.
Arguments:
rate: float, drop probability (as with `Dropout`).
The multiplicative noise will have
standard deviation `sqrt(rate / (1 - rate))`.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input. | 62598f9f2c8b7c6e89bd35df |
class Customer(BaseModel): <NEW_LINE> <INDENT> customer_id = CharField(primary_key=True, max_length=30) <NEW_LINE> first_name = CharField(max_length=30) <NEW_LINE> last_name = CharField(max_length=40) <NEW_LINE> home_address = CharField(max_length=100) <NEW_LINE> phone_number = CharField(max_length=20) <NEW_LINE> email_address = CharField(max_length=100) <NEW_LINE> status = BooleanField() <NEW_LINE> credit_limit = FloatField() | This creates a customer model | 62598f9f3cc13d1c6d465584 |
class PluginsRegistry(collections.Mapping): <NEW_LINE> <INDENT> def __init__(self, plugins): <NEW_LINE> <INDENT> self._plugins = collections.OrderedDict(sorted(six.iteritems(plugins))) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_all(cls): <NEW_LINE> <INDENT> plugins = {} <NEW_LINE> entry_points = itertools.chain( pkg_resources.iter_entry_points( constants.SETUPTOOLS_PLUGINS_ENTRY_POINT), pkg_resources.iter_entry_points( constants.OLD_SETUPTOOLS_PLUGINS_ENTRY_POINT),) <NEW_LINE> for entry_point in entry_points: <NEW_LINE> <INDENT> plugin_ep = PluginEntryPoint(entry_point) <NEW_LINE> assert plugin_ep.name not in plugins, ( "PREFIX_FREE_DISTRIBUTIONS messed up") <NEW_LINE> if interfaces.IPluginFactory.providedBy(plugin_ep.plugin_cls): <NEW_LINE> <INDENT> plugins[plugin_ep.name] = plugin_ep <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning( "%r does not provide IPluginFactory, skipping", plugin_ep) <NEW_LINE> <DEDENT> <DEDENT> return cls(plugins) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return self._plugins[name] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._plugins) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._plugins) <NEW_LINE> <DEDENT> def init(self, config): <NEW_LINE> <INDENT> return [plugin_ep.init(config) for plugin_ep in six.itervalues(self._plugins)] <NEW_LINE> <DEDENT> def filter(self, pred): <NEW_LINE> <INDENT> return type(self)(dict((name, plugin_ep) for name, plugin_ep in six.iteritems(self._plugins) if pred(plugin_ep))) <NEW_LINE> <DEDENT> def visible(self): <NEW_LINE> <INDENT> return self.filter(lambda plugin_ep: not plugin_ep.hidden) <NEW_LINE> <DEDENT> def ifaces(self, *ifaces_groups): <NEW_LINE> <INDENT> return self.filter(lambda p_ep: p_ep.ifaces(*ifaces_groups)) <NEW_LINE> <DEDENT> def verify(self, ifaces): <NEW_LINE> <INDENT> return self.filter(lambda p_ep: p_ep.verify(ifaces)) <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> 
return [plugin_ep.prepare() for plugin_ep in six.itervalues(self._plugins)] <NEW_LINE> <DEDENT> def available(self): <NEW_LINE> <INDENT> return self.filter(lambda p_ep: p_ep.available) <NEW_LINE> <DEDENT> def find_init(self, plugin): <NEW_LINE> <INDENT> candidates = [plugin_ep for plugin_ep in six.itervalues(self._plugins) if plugin_ep.initialized and plugin_ep.init() is plugin] <NEW_LINE> assert len(candidates) <= 1 <NEW_LINE> if candidates: <NEW_LINE> <INDENT> return candidates[0] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{0}({1})".format( self.__class__.__name__, ','.join( repr(p_ep) for p_ep in six.itervalues(self._plugins))) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if not self._plugins: <NEW_LINE> <INDENT> return "No plugins" <NEW_LINE> <DEDENT> return "\n\n".join(str(p_ep) for p_ep in six.itervalues(self._plugins)) | Plugins registry. | 62598f9f925a0f43d25e7e55 |
class CephPrefix(CephArgtype): <NEW_LINE> <INDENT> def __init__(self, prefix=''): <NEW_LINE> <INDENT> self.prefix = prefix <NEW_LINE> <DEDENT> def valid(self, s, partial=False): <NEW_LINE> <INDENT> if partial: <NEW_LINE> <INDENT> if self.prefix.startswith(s): <NEW_LINE> <INDENT> self.val = s <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if (s == self.prefix): <NEW_LINE> <INDENT> self.val = s <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> raise ArgumentPrefix("no match for {0}".format(s)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.prefix | CephPrefix: magic type for "all the first n fixed strings" | 62598f9f07f4c71912baf264 |
class Roles(IntEnum): <NEW_LINE> <INDENT> ADMIN = 10 <NEW_LINE> REGISTERED = 5 <NEW_LINE> UNREGISTERED = 1 <NEW_LINE> SIGNED_OUT = 0 | Enumeration of possible roles (access levels) | 62598f9f66673b3332c301e0
class DescribeAutoScalingInstancesResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.AutoScalingInstanceSet = None <NEW_LINE> self.TotalCount = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("AutoScalingInstanceSet") is not None: <NEW_LINE> <INDENT> self.AutoScalingInstanceSet = [] <NEW_LINE> for item in params.get("AutoScalingInstanceSet"): <NEW_LINE> <INDENT> obj = Instance() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.AutoScalingInstanceSet.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> self.RequestId = params.get("RequestId") | DescribeAutoScalingInstances返回参数结构体
| 62598f9fadb09d7d5dc0a3a3 |
class WMS13GetMapTIFFDatasetTestCase(wmsbase.WMS13GetMapTestCase): <NEW_LINE> <INDENT> layers = ("mosaic_MER_FRS_1PNPDE20060822_092058_000001972050_00308_23408_0077_RGB_reduced",) <NEW_LINE> bbox = (8.5, 32.2, 25.4, 46.3) <NEW_LINE> frmt = "image/tiff" | Test a GetMap request with a dataset series. | 62598f9f76e4537e8c3ef3d0 |
class SiteWeight(models.Model): <NEW_LINE> <INDENT> site_type = models.ForeignKey(SiteType, related_name='weights') <NEW_LINE> sysclass = models.IntegerField(choices=[(1, "C1"), (2, "C2"), (3, "C3"), (4, "C4"), (5, "C5"), (6, "C6"), (7, "High Sec"), (8, "Low Sec"), (9, "Null Sec")]) <NEW_LINE> raw_points = models.IntegerField() | Represents the raw points available for a site type / system class combo | 62598f9f5f7d997b871f92ec |
class RegOrgUnitExternalID(models.Model): <NEW_LINE> <INDENT> org_unit = models.ForeignKey(RegOrgUnit) <NEW_LINE> identifier_type_id = models.CharField(max_length=4) <NEW_LINE> identifier_value = models.CharField(max_length=255, null=True) <NEW_LINE> is_void = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'reg_org_units_external_ids' | Model for Organisational units external IDs. | 62598f9f0a50d4780f7051f3 |
class UserViewSet(ModelViewSet): <NEW_LINE> <INDENT> queryset = get_user_model().objects.filter(is_active=True) <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> filter_backends = (SearchFilter,OrderingFilter,DjangoFilterBackend) <NEW_LINE> filter_fields=('gender','is_admin') <NEW_LINE> search_fields = ('username', 'phone', 'email') <NEW_LINE> ordering_fields = ('update_time', 'create_time') <NEW_LINE> pagination_class = CustomLimitOffsetPagination <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> dept_id=self.request.query_params.get('dept_id',None) <NEW_LINE> if hasattr(dept_id,'isnumeric') and dept_id.isnumeric():dept_id = int(dept_id) <NEW_LINE> if dept_id and dept_id not in set(self.request.dept_ids): <NEW_LINE> <INDENT> raise ValidationError({'error':'您没有权限查看该分院下用户'}) <NEW_LINE> <DEDENT> if dept_id: <NEW_LINE> <INDENT> dept_id=Department.objects.get(id=dept_id).get_descendants(include_self=True).values_list('id',flat=True) <NEW_LINE> return get_user_model().objects.filter(department__in=dept_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return get_user_model().objects.filter(department__in=self.request.dept_ids) <NEW_LINE> <DEDENT> <DEDENT> @atomic() <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super().create(request,*args,**kwargs) <NEW_LINE> <DEDENT> @atomic() <NEW_LINE> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super().update(request,*args,**kwargs) <NEW_LINE> <DEDENT> @atomic() <NEW_LINE> def destroy(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super().destroy(request,*args,**kwargs) | 所有用户视图 | 62598f9fcc0a2c111447ae26 |
class ComputeVmPropertiesFragment(Model): <NEW_LINE> <INDENT> _attribute_map = { 'statuses': {'key': 'statuses', 'type': '[ComputeVmInstanceViewStatusFragment]'}, 'os_type': {'key': 'osType', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'network_interface_id': {'key': 'networkInterfaceId', 'type': 'str'}, 'os_disk_id': {'key': 'osDiskId', 'type': 'str'}, 'data_disk_ids': {'key': 'dataDiskIds', 'type': '[str]'}, 'data_disks': {'key': 'dataDisks', 'type': '[ComputeDataDiskFragment]'}, } <NEW_LINE> def __init__(self, statuses=None, os_type=None, vm_size=None, network_interface_id=None, os_disk_id=None, data_disk_ids=None, data_disks=None): <NEW_LINE> <INDENT> self.statuses = statuses <NEW_LINE> self.os_type = os_type <NEW_LINE> self.vm_size = vm_size <NEW_LINE> self.network_interface_id = network_interface_id <NEW_LINE> self.os_disk_id = os_disk_id <NEW_LINE> self.data_disk_ids = data_disk_ids <NEW_LINE> self.data_disks = data_disks | Properties of a virtual machine returned by the Microsoft.Compute API.
:param statuses: Gets the statuses of the virtual machine.
:type statuses: list of :class:`ComputeVmInstanceViewStatusFragment
<azure.mgmt.devtestlabs.models.ComputeVmInstanceViewStatusFragment>`
:param os_type: Gets the OS type of the virtual machine.
:type os_type: str
:param vm_size: Gets the size of the virtual machine.
:type vm_size: str
:param network_interface_id: Gets the network interface ID of the virtual
machine.
:type network_interface_id: str
:param os_disk_id: Gets OS disk blob uri for the virtual machine.
:type os_disk_id: str
:param data_disk_ids: Gets data disks blob uri for the virtual machine.
:type data_disk_ids: list of str
:param data_disks: Gets all data disks attached to the virtual machine.
:type data_disks: list of :class:`ComputeDataDiskFragment
<azure.mgmt.devtestlabs.models.ComputeDataDiskFragment>` | 62598f9fe1aae11d1e7ce731 |
class StackdriverDeleteNotificationChannelOperator(BaseOperator): <NEW_LINE> <INDENT> template_fields = ( 'name', 'impersonation_chain', ) <NEW_LINE> ui_color = "#e5ffcc" <NEW_LINE> @apply_defaults <NEW_LINE> def __init__( self, *, name: str, retry: Optional[str] = DEFAULT, timeout: Optional[float] = DEFAULT, metadata: Optional[str] = None, gcp_conn_id: str = 'google_cloud_default', project_id: Optional[str] = None, delegate_to: Optional[str] = None, impersonation_chain: Optional[Union[str, Sequence[str]]] = None, **kwargs, ) -> None: <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.retry = retry <NEW_LINE> self.timeout = timeout <NEW_LINE> self.metadata = metadata <NEW_LINE> self.gcp_conn_id = gcp_conn_id <NEW_LINE> self.project_id = project_id <NEW_LINE> self.delegate_to = delegate_to <NEW_LINE> self.impersonation_chain = impersonation_chain <NEW_LINE> self.hook = None <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> self.log.info('Delete Notification Channel: Project id: %s Name: %s', self.project_id, self.name) <NEW_LINE> if self.hook is None: <NEW_LINE> <INDENT> self.hook = StackdriverHook( gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to, impersonation_chain=self.impersonation_chain, ) <NEW_LINE> <DEDENT> self.hook.delete_notification_channel( name=self.name, retry=self.retry, timeout=self.timeout, metadata=self.metadata ) | Deletes a notification channel.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:StackdriverDeleteNotificationChannelOperator`
:param name: The alerting policy to delete. The format is:
``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``.
:type name: str
:param retry: A retry object used to retry requests. If ``None`` is
specified, requests will be retried using a default configuration.
:type retry: str
:param timeout: The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
:type timeout: float
:param metadata: Additional metadata that is provided to the method.
:type metadata: str
:param gcp_conn_id: (Optional) The connection ID used to connect to Google
Cloud Platform.
:type gcp_conn_id: str
:param project_id: The project from which notification channel needs to be deleted.
:type project_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]] | 62598f9f462c4b4f79dbb825 |
class Resource(resource.Resource): <NEW_LINE> <INDENT> def __init__(self, *args, get=None, put=None, post=None, delete=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> if get is not None: <NEW_LINE> <INDENT> self.render_get = get <NEW_LINE> <DEDENT> if put is not None: <NEW_LINE> <INDENT> self.render_put = put <NEW_LINE> <DEDENT> if post is not None: <NEW_LINE> <INDENT> self.render_post = post <NEW_LINE> <DEDENT> if delete is not None: <NEW_LINE> <INDENT> self.render_delete = delete | Generic resource class | 62598f9f097d151d1a2c0e42 |
class SectionListHeader(SectionList): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.header = {} <NEW_LINE> <DEDENT> def read_lines(self, lines): <NEW_LINE> <INDENT> sections = [] <NEW_LINE> for line_no, line in enumerate(lines): <NEW_LINE> <INDENT> if ( re.sub(r"^\s*|\s*$", "", line) == "Primary Contact" or re.sub(r"^\s*|\s*$", "", line) == "Secondary Contact" ): <NEW_LINE> <INDENT> sections.append(line_no) <NEW_LINE> <DEDENT> <DEDENT> self.header = Section() <NEW_LINE> self.header.read_lines(lines[0 : sections[0]] + lines[-3:]) <NEW_LINE> for sec_no, subsection in enumerate(sections): <NEW_LINE> <INDENT> s = self.subsection_type() <NEW_LINE> if subsection == sections[-1]: <NEW_LINE> <INDENT> s.read_lines(lines[subsection:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s.read_lines(lines[subsection : sections[sec_no + 1]]) <NEW_LINE> <DEDENT> if self.section_type == "subsectionheader": <NEW_LINE> <INDENT> s.subtitle = str(sec_no + 1) <NEW_LINE> <DEDENT> self._subsections.append(s) | class for sections with subsections (Primary and secondary contacts)
and header values | 62598f9fcc0a2c111447ae27 |
class TestInspector(GitSweepTestCase, InspectorTestCase): <NEW_LINE> <INDENT> def test_no_branches(self): <NEW_LINE> <INDENT> self.assertEqual([], self.inspector.merged_refs()) <NEW_LINE> <DEDENT> def test_filtered_refs(self): <NEW_LINE> <INDENT> for i in range(1, 4): <NEW_LINE> <INDENT> self.command('git checkout -b branch{0}'.format(i)) <NEW_LINE> self.command('git checkout master') <NEW_LINE> <DEDENT> refs = self.inspector._filtered_remotes( self.inspector.repo.remotes[0]) <NEW_LINE> self.assertEqual(['branch1', 'branch2', 'branch3'], [i.remote_head for i in refs]) <NEW_LINE> <DEDENT> def test_one_branch_no_commits(self): <NEW_LINE> <INDENT> self.command('git checkout -b branch1') <NEW_LINE> self.command('git checkout master') <NEW_LINE> self.assertEqual(['branch1'], self.merged_refs()) <NEW_LINE> <DEDENT> def test_one_branch_one_commit(self): <NEW_LINE> <INDENT> self.command('git checkout -b branch1') <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git checkout master') <NEW_LINE> self.assertEqual([], self.merged_refs()) <NEW_LINE> <DEDENT> def test_one_merged_branch(self): <NEW_LINE> <INDENT> self.command('git checkout -b branch1') <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git checkout master') <NEW_LINE> self.command('git merge branch1') <NEW_LINE> self.assertEqual(['branch1'], self.merged_refs()) <NEW_LINE> <DEDENT> def test_commit_in_master(self): <NEW_LINE> <INDENT> self.command('git checkout -b branch1') <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git checkout master') <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git merge branch1') <NEW_LINE> self.assertEqual(['branch1'], self.merged_refs()) <NEW_LINE> <DEDENT> def test_large_set_of_changes(self): <NEW_LINE> <INDENT> for i in range(1, 6): <NEW_LINE> <INDENT> self.command('git checkout -b branch{0}'.format(i)) <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git checkout master') <NEW_LINE> self.make_commit() <NEW_LINE> self.command('git merge 
branch{0}'.format(i)) <NEW_LINE> <DEDENT> self.assertEqual( ['branch1', 'branch2', 'branch3', 'branch4', 'branch5'], self.merged_refs()) | Inspector can find merged branches and present them for cleaning. | 62598f9f67a9b606de545de3 |
class HealthOfficerRegisterSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = HealthOfficer <NEW_LINE> fields = ( 'user', ) <NEW_LINE> def create(self,validated_data): <NEW_LINE> <INDENT> new_health_officer = HealthOfficer( user = validated_data['user'], ) <NEW_LINE> new_health_officer.save() <NEW_LINE> return new_health_officer | Description: SERializer to be used during the registering of a health officer. | 62598f9ffff4ab517ebcd608 |
class PendingQuestTable(tables.Table): <NEW_LINE> <INDENT> owner = UserColumn(accessor='relation.owner') <NEW_LINE> title = tables.LinkColumn('quests:detail', args=[A('pk')]) <NEW_LINE> description = DescriptionColumn() <NEW_LINE> rating = RatingColumn() <NEW_LINE> accept = AcceptColumn(accessor="pk", orderable=False) <NEW_LINE> decline = DeclineColumn(accessor="pk", orderable=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Quest <NEW_LINE> attrs = {"class": "paleblue"} <NEW_LINE> sequence = ("title", "description", "...", "owner", "accept", "decline") <NEW_LINE> fields = ("title", "description", "rating") | Table layout for showing quests pending for a user. | 62598f9f3eb6a72ae038a45b |
class LoginFormMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> if request.method == 'POST' and 'login-modal' in request.POST: <NEW_LINE> <INDENT> form = AuthenticationForm(data=request.POST, prefix="login") <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> from django.contrib.auth import login <NEW_LINE> login(request, form.get_user()) <NEW_LINE> <DEDENT> request.method = 'GET' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = AuthenticationForm(request, prefix="login") <NEW_LINE> <DEDENT> request.login_form = form | Middleware to load login form on every page | 62598f9f57b8e32f52508029 |
class ParameterStore: <NEW_LINE> <INDENT> def __init__(self, region, role): <NEW_LINE> <INDENT> self.client = role.client('ssm', region_name=region, config=SSM_CONFIG) <NEW_LINE> <DEDENT> def put_parameter(self, name, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> current_value = self.fetch_parameter(name) <NEW_LINE> assert current_value == value <NEW_LINE> LOGGER.debug('No need to update parameter %s with value %s since they are the same', name, value) <NEW_LINE> <DEDENT> except (ParameterNotFoundError, AssertionError): <NEW_LINE> <INDENT> LOGGER.debug('Putting SSM Parameter %s with value %s', name, value) <NEW_LINE> self.client.put_parameter( Name=name, Description=PARAMETER_DESCRIPTION, Value=value, Type='String', Overwrite=True ) <NEW_LINE> <DEDENT> <DEDENT> def delete_parameter(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> LOGGER.debug('Deleting Parameter %s', name) <NEW_LINE> return self.client.delete_parameter( Name=name ) <NEW_LINE> <DEDENT> except self.client.exceptions.ParameterNotFound: <NEW_LINE> <INDENT> LOGGER.debug('Attempted to delete Parameter %s but it was not found', name) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> def fetch_parameters_by_path(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> LOGGER.debug('Fetching Parameters from path %s', path) <NEW_LINE> return paginator(self.client.get_parameters_by_path, Path=path, Recursive=True, WithDecryption=False ) <NEW_LINE> <DEDENT> except self.client.exceptions.ParameterNotFound: <NEW_LINE> <INDENT> raise ParameterNotFoundError( 'Parameter Path {0} Not Found'.format(path) ) <NEW_LINE> <DEDENT> <DEDENT> def fetch_parameter(self, name, with_decryption=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> LOGGER.debug('Fetching Parameter %s', name) <NEW_LINE> response = self.client.get_parameter( Name=name, WithDecryption=with_decryption ) <NEW_LINE> return response['Parameter']['Value'] <NEW_LINE> <DEDENT> except self.client.exceptions.ParameterNotFound: <NEW_LINE> <INDENT> 
raise ParameterNotFoundError( 'Parameter {0} Not Found'.format(name) ) | Class used for modeling Parameters
| 62598f9f24f1403a926857bf |
class PlaceOrderView(LoginRequiredMixin,View): <NEW_LINE> <INDENT> def post(self,request): <NEW_LINE> <INDENT> sku_ids = request.POST.getlist('sku_ids') <NEW_LINE> count = request.POST.get('count') <NEW_LINE> if not sku_ids: <NEW_LINE> <INDENT> return redirect(reverse('cart:info')) <NEW_LINE> <DEDENT> skus = [] <NEW_LINE> total_sku_amount = 0 <NEW_LINE> total_count = 0 <NEW_LINE> trans_cost = 10 <NEW_LINE> if count is None: <NEW_LINE> <INDENT> redis_conn = get_redis_connection('default') <NEW_LINE> user_id = request.user.id <NEW_LINE> cart_dict = redis_conn.hgetall('cart_%s' % user_id) <NEW_LINE> for sku_id in sku_ids: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sku =GoodsSKU.objects.get(id=sku_id) <NEW_LINE> <DEDENT> except GoodsSKU.DoesNotExist: <NEW_LINE> <INDENT> return redirect(reverse('cart:info')) <NEW_LINE> <DEDENT> sku_count = cart_dict[sku_id.encode()] <NEW_LINE> sku_count = int(sku_count) <NEW_LINE> amount = sku_count * sku.price <NEW_LINE> sku.count = sku_count <NEW_LINE> sku.amount = amount <NEW_LINE> skus.append(sku) <NEW_LINE> total_count += sku_count <NEW_LINE> total_sku_amount += amount <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for sku_id in sku_ids: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sku = GoodsSKU.objects.get(id=sku_id) <NEW_LINE> <DEDENT> except GoodsSKU.DoesNotExist: <NEW_LINE> <INDENT> return redirect(reverse('cart:info')) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> sku_count = int(count) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return redirect(reverse('goods:detail',args=(sku_id,))) <NEW_LINE> <DEDENT> if sku_count >sku.stock: <NEW_LINE> <INDENT> return redirect(reverse('goods:detail', args=(sku_id,))) <NEW_LINE> <DEDENT> amount = sku_count * sku.price <NEW_LINE> sku.count = sku_count <NEW_LINE> sku.amount = amount <NEW_LINE> skus.append(sku) <NEW_LINE> total_count += sku_count <NEW_LINE> total_sku_amount += amount <NEW_LINE> <DEDENT> <DEDENT> total_amount = total_sku_amount + trans_cost <NEW_LINE> 
try: <NEW_LINE> <INDENT> address = Address.objects.filter(user=request.user).latest('create_time') <NEW_LINE> <DEDENT> except Address.DoesNotExist: <NEW_LINE> <INDENT> address =None <NEW_LINE> <DEDENT> context={ 'skus':skus, 'total_amount':total_amount, 'total_count':total_count, 'total_sku_amount':total_sku_amount, 'address':address, 'sku_ids':sku_ids, 'trans_cost':trans_cost } <NEW_LINE> return render(request,'place_order.html', context) | 的订单确认 | 62598f9f7b25080760ed72c2 |
class EmailView(UpdateAPIView): <NEW_LINE> <INDENT> permission_classes = [IsAuthenticated] <NEW_LINE> serializer_class = EmailSerializer <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | PUT /users/emails
保存邮箱 | 62598f9fb7558d5895463448 |
class ImageCodeCheckSerializer(serializers.Serializer): <NEW_LINE> <INDENT> image_code_id=serializers.UUIDField() <NEW_LINE> text=serializers.CharField(max_length=4,min_length=4) <NEW_LINE> def validate(self, attrs): <NEW_LINE> <INDENT> image_code_id = attrs['image_code_id'] <NEW_LINE> text = attrs['text'] <NEW_LINE> redis_conn=get_redis_connection('verify_codes') <NEW_LINE> image_code_server=redis_conn.get('img_%s' % image_code_id) <NEW_LINE> if image_code_server is None: <NEW_LINE> <INDENT> raise serializers.ValidationError('无效图片验证码') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> redis_conn.delete('img_%s' % image_code_id) <NEW_LINE> <DEDENT> except RedisError as e: <NEW_LINE> <INDENT> logger.error(e) <NEW_LINE> <DEDENT> image_code_server=image_code_server.decode() <NEW_LINE> if text.lower() != image_code_server.lower(): <NEW_LINE> <INDENT> raise serializers.ValidationError('输入图片验证码有误') <NEW_LINE> <DEDENT> mobile = self.context['view'].kwargs['mobile'] <NEW_LINE> send_flag= redis_conn.get('send_flag_%s' % mobile) <NEW_LINE> if send_flag: <NEW_LINE> <INDENT> raise serializers.ValidationError('发送短信验证码过于频繁') <NEW_LINE> <DEDENT> return attrs | 图片验证码序列化器 | 62598f9f99cbb53fe6830cec |
class TokenCertificate(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'expiry': {'key': 'expiry', 'type': 'iso-8601'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'encoded_pem_certificate': {'key': 'encodedPemCertificate', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: Optional[Union[str, "TokenCertificateName"]] = None, expiry: Optional[datetime.datetime] = None, thumbprint: Optional[str] = None, encoded_pem_certificate: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(TokenCertificate, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.expiry = expiry <NEW_LINE> self.thumbprint = thumbprint <NEW_LINE> self.encoded_pem_certificate = encoded_pem_certificate | The properties of a certificate used for authenticating a token.
:ivar name: Possible values include: "certificate1", "certificate2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCertificateName
:ivar expiry: The expiry datetime of the certificate.
:vartype expiry: ~datetime.datetime
:ivar thumbprint: The thumbprint of the certificate.
:vartype thumbprint: str
:ivar encoded_pem_certificate: Base 64 encoded string of the public certificate1 in PEM format
that will be used for authenticating the token.
:vartype encoded_pem_certificate: str | 62598f9f379a373c97d98e30 |
class PositionalEncoding(nn.Module): <NEW_LINE> <INDENT> def __init__(self, d_model, max_len=10000, dropout=0.1): <NEW_LINE> <INDENT> super(PositionalEncoding, self).__init__() <NEW_LINE> if d_model % 2 != 0: <NEW_LINE> <INDENT> d_model += 1 <NEW_LINE> <DEDENT> pe = torch.zeros([max_len, d_model]) <NEW_LINE> pos = torch.arange(0, max_len).unsqueeze(1) <NEW_LINE> i = torch.arange(0, d_model, 2) <NEW_LINE> denom = torch.pow(10000, i / d_model) <NEW_LINE> pe[:, 0::2] = torch.sin(pos / denom) <NEW_LINE> pe[:, 1::2] = torch.cos(pos / denom) <NEW_LINE> self.register_buffer('pe', pe) <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> <DEDENT> def forward(self, seq): <NEW_LINE> <INDENT> seq = seq + self.pe[:seq.shape[1], :seq.shape[2]] <NEW_LINE> return self.dropout(seq) | Encode relative positional information into the input sequence tensor
Attributes
----------
pe : 2d tensor (max_len, d_model - if even OR d_model + 1 - if odd) | 62598f9fd268445f26639a90 |
class button_manuel(Frame): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> Frame.__init__(self, parent, width=768, height=576) <NEW_LINE> self.parent = parent <NEW_LINE> """def Button""" <NEW_LINE> self.Button_open = tkinter.Button(self, text ="OPEN", command = self.parent.open_file) <NEW_LINE> self.Button_open.pack() <NEW_LINE> self.pack(side=LEFT) <NEW_LINE> <DEDENT> """rq : a proteger""" <NEW_LINE> def display_button(self): <NEW_LINE> <INDENT> self.Button_BR1 = tkinter.Button(self, text ="BR1", command = self.Button_BR1) <NEW_LINE> self.Button_BR1.pack() <NEW_LINE> self.Button_BR2 = tkinter.Button(self, text ="BR2", command = self.Button_BR2) <NEW_LINE> self.Button_BR2.pack() <NEW_LINE> self.Button_BR3 = tkinter.Button(self, text ="BR3", command = self.Button_BR3) <NEW_LINE> self.Button_BR3.pack() <NEW_LINE> self.Button_save = tkinter.Button(self, text ="SAVE", command = self.Button_save) <NEW_LINE> self.Button_save.pack() <NEW_LINE> self.Button_open.destroy() <NEW_LINE> <DEDENT> def Button_BR1(self) : <NEW_LINE> <INDENT> self.parent.visual_feedback.current_rect = "BR1" <NEW_LINE> <DEDENT> def Button_BR2(self) : <NEW_LINE> <INDENT> self.parent.visual_feedback.current_rect = "BR2" <NEW_LINE> <DEDENT> def Button_BR3(self) : <NEW_LINE> <INDENT> self.parent.visual_feedback.current_rect = "BR3" <NEW_LINE> <DEDENT> def Button_save(self): <NEW_LINE> <INDENT> print("SAVING : ") <NEW_LINE> file = open("config/config_crop_BR.txt", "w") <NEW_LINE> file.write("Temporium" + "\n") <NEW_LINE> file.write("CONFIGURATION_CROP : BR " + "\n") <NEW_LINE> for item in self.parent.visual_feedback.dict_rect : <NEW_LINE> <INDENT> msg = str(self.parent.visual_feedback.get_rect_image(item)).replace("[","") <NEW_LINE> msg = msg.replace("]", "") <NEW_LINE> file.write(item + " : " + msg + "\n") <NEW_LINE> print(item + " : " + msg) <NEW_LINE> <DEDENT> file.flush() <NEW_LINE> file.close() | Notre fenetre principale.
Tous les widgets sont stockes comme attributs de cette fenetre. | 62598f9f435de62698e9bc0d |
class CaseEdit(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Case edit") <NEW_LINE> verbose_name_plural = _("Case edits") <NEW_LINE> <DEDENT> case = models.ForeignKey(Case, related_name='edits') <NEW_LINE> TYPE_MIGRATION_URL = 'migration-url' <NEW_LINE> TYPE_MIGRATION_REVIEWED = 'migration-reviewed' <NEW_LINE> TYPE_MIGRATION_UNREVIEWED = 'migration-unreviewed' <NEW_LINE> TYPE_MIGRATION_REPORT = 'migration-report' <NEW_LINE> TYPE_DEPLOYMENT_REPORT = 'deployment-report' <NEW_LINE> TYPE_CHOICES = ( (TYPE_MIGRATION_URL, _('Migration URL')), (TYPE_MIGRATION_REVIEWED, _('Migration reviewed')), (TYPE_MIGRATION_UNREVIEWED, _('Migration unreviewed')), (TYPE_MIGRATION_REPORT, _('Migration report')), (TYPE_DEPLOYMENT_REPORT, _('Deployment report')), ) <NEW_LINE> type = models.CharField(max_length=50, choices=TYPE_CHOICES) <NEW_LINE> params = JSONField(default=None) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '{self.case}: {self.type}: {self.params}'.format(self=self) | Bug tracking system case edit. | 62598f9fdd821e528d6d8d4f |
@pytest.mark.django_db <NEW_LINE> class TestArmy(): <NEW_LINE> <INDENT> def test_natural_key(self, army): <NEW_LINE> <INDENT> assert army.natural_key() == (army.name,) | Unit tests for the ``Army`` model. | 62598f9f3cc13d1c6d465586 |
class V1TypedLocalObjectReference(object): <NEW_LINE> <INDENT> openapi_types = { 'api_group': 'str', 'kind': 'str', 'name': 'str' } <NEW_LINE> attribute_map = { 'api_group': 'apiGroup', 'kind': 'kind', 'name': 'name' } <NEW_LINE> def __init__(self, api_group=None, kind=None, name=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration.get_default_copy() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._api_group = None <NEW_LINE> self._kind = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> if api_group is not None: <NEW_LINE> <INDENT> self.api_group = api_group <NEW_LINE> <DEDENT> self.kind = kind <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def api_group(self): <NEW_LINE> <INDENT> return self._api_group <NEW_LINE> <DEDENT> @api_group.setter <NEW_LINE> def api_group(self, api_group): <NEW_LINE> <INDENT> self._api_group = api_group <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and kind is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `kind`, must not be `None`") <NEW_LINE> <DEDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `name`, must not be `None`") <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self, serialize=False): <NEW_LINE> <INDENT> result = {} <NEW_LINE> def convert(x): <NEW_LINE> <INDENT> if hasattr(x, "to_dict"): <NEW_LINE> <INDENT> args = 
getfullargspec(x.to_dict).args <NEW_LINE> if len(args) == 1: <NEW_LINE> <INDENT> return x.to_dict() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x.to_dict(serialize) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> <DEDENT> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> attr = self.attribute_map.get(attr, attr) if serialize else attr <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: convert(x), value )) <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], convert(item[1])), value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = convert(value) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1TypedLocalObjectReference): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1TypedLocalObjectReference): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598f9f9b70327d1c57ebb9 |
class PerspectiveAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ['name'] | Perspective backend definition | 62598f9f442bda511e95c276 |
class PacketFilterRelation(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.PacketFilterConfig = None <NEW_LINE> self.InstanceDetailList = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("PacketFilterConfig") is not None: <NEW_LINE> <INDENT> self.PacketFilterConfig = PacketFilterConfig() <NEW_LINE> self.PacketFilterConfig._deserialize(params.get("PacketFilterConfig")) <NEW_LINE> <DEDENT> if params.get("InstanceDetailList") is not None: <NEW_LINE> <INDENT> self.InstanceDetailList = [] <NEW_LINE> for item in params.get("InstanceDetailList"): <NEW_LINE> <INDENT> obj = InstanceRelation() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.InstanceDetailList.append(obj) <NEW_LINE> <DEDENT> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | 特征过滤相关信息
| 62598f9f01c39578d7f12b98 |
class UserProfile(BaseHandler): <NEW_LINE> <INDENT> @asynchronous <NEW_LINE> @coroutine <NEW_LINE> def post(self, *_args, **_kwargs): <NEW_LINE> <INDENT> _params = self.check_auth(2) <NEW_LINE> if not _params: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> args = self.parse_json_arguments(name=ENFORCED) <NEW_LINE> exists_result = tasks.query_username_exists(username=args.name) <NEW_LINE> if exists_result: <NEW_LINE> <INDENT> return self.fail(3004) <NEW_LINE> <DEDENT> tasks.update_user_name(user_id=_params.user_id, username=args.name) <NEW_LINE> _params.add('user_name', args.name) <NEW_LINE> self.set_parameters(_params[0]) <NEW_LINE> self.success() | Handler account info stuff. | 62598f9f32920d7e50bc5e70 |
class ArgScheme(object): <NEW_LINE> <INDENT> def type(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def serialize_header(self, obj): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def deserialize_header(self, obj): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def serialize_body(self, obj): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def deserialize_body(self, obj): <NEW_LINE> <INDENT> raise NotImplementedError() | ArgScheme defines interface on how to serialize/deserialize
header and body.
Customized ArgScheme subclass must implement all methods::
def type()
def serialize_header(obj)
def deserialize_header(obj)
def serialize_body(obj)
def deserialize_body(obj) | 62598f9f8e71fb1e983bb8d1 |
class ProjectDefinitionTest(test_lib.BaseTestCase): <NEW_LINE> <INDENT> def testIsPython2Only(self): <NEW_LINE> <INDENT> project_definition = projects.ProjectDefinition('test') <NEW_LINE> result = project_definition.IsPython2Only() <NEW_LINE> self.assertFalse(result) | Tests for the project definition. | 62598f9f4a966d76dd5eecfc |
class CustomEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, complex): <NEW_LINE> <INDENT> return OrderedDict([ ("__class__", "complex"), ("real", obj.real), ("imag", obj.imag), ]) <NEW_LINE> <DEDENT> if isinstance(obj, Fraction): <NEW_LINE> <INDENT> return OrderedDict([ ("__class__", "Fraction"), ("numerator", obj.numerator), ("denominator", obj.denominator), ]) <NEW_LINE> <DEDENT> if isinstance(obj, BoundaryCondition): <NEW_LINE> <INDENT> return obj.p <NEW_LINE> <DEDENT> return super(CustomEncoder, self).default(obj) | This custom JSON encoder can handle complex and Fraction and BoundaryCondition types.
| 62598f9f1b99ca400228f43b |
class DotTicker: <NEW_LINE> <INDENT> def __init__(self, pattern='.', max_length=3): <NEW_LINE> <INDENT> self.length = 0 <NEW_LINE> self.pattern = pattern <NEW_LINE> self.max_length = max_length <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = self.pattern * self.length <NEW_LINE> self.tick() <NEW_LINE> return result <NEW_LINE> <DEDENT> def __format__(self, format_spec): <NEW_LINE> <INDENT> return str(self).__format__(format_spec) <NEW_LINE> <DEDENT> def tick(self): <NEW_LINE> <INDENT> if self.length < self.max_length: <NEW_LINE> <INDENT> self.length += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.length = 0 | Simple throbber-like object for showing unknown amounts of progress | 62598f9f56b00c62f0fb26cb |
class AccountPaymentSepaEsTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'account_payment_sepa_es' <NEW_LINE> @with_transaction() <NEW_LINE> def test_sepa_identifier(self): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Party = pool.get('party.party') <NEW_LINE> Identifier = pool.get('party.identifier') <NEW_LINE> party = Party(name='test') <NEW_LINE> party.save() <NEW_LINE> Party.calculate_sepa_creditor_identifier([party]) <NEW_LINE> self.assertIsNone(party.sepa_creditor_identifier_used) <NEW_LINE> sepa = Identifier(party=party, code='ES47690558N', type='eu_at_02') <NEW_LINE> sepa.save() <NEW_LINE> Party.calculate_sepa_creditor_identifier([party]) <NEW_LINE> self.assertEqual(party.sepa_creditor_identifier_used, 'ES47690558N') <NEW_LINE> <DEDENT> @with_transaction() <NEW_LINE> def test_sepa_identifier_used(self): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Party = pool.get('party.party') <NEW_LINE> Identifier = pool.get('party.identifier') <NEW_LINE> party = Party(name='test') <NEW_LINE> party.save() <NEW_LINE> sepa = Identifier(party=party, code='ES23ZZZ47690558N', type='eu_at_02') <NEW_LINE> sepa.save() <NEW_LINE> self.assertEqual(party.sepa_creditor_identifier_used, 'ES23ZZZ47690558N') <NEW_LINE> with Transaction().set_context(kind='receivable', suffix='001'): <NEW_LINE> <INDENT> party = Party(party.id) <NEW_LINE> self.assertEqual(party.sepa_creditor_identifier_used, 'ES2300147690558N') <NEW_LINE> <DEDENT> with Transaction().set_context(kind='payable', suffix='001'): <NEW_LINE> <INDENT> party = Party(party.id) <NEW_LINE> self.assertEqual(party.sepa_creditor_identifier_used, '47690558N001') | Test Account Payment Sepa Es module | 62598f9f76e4537e8c3ef3d2 |
class Trace(object): <NEW_LINE> <INDENT> def __init__(self, points, code, pair, net): <NEW_LINE> <INDENT> self.points = points <NEW_LINE> self.code = code <NEW_LINE> self.pseudoPair = None <NEW_LINE> self.net = net | Trace objects repesent the generated connections between routed pins.
It is classified as one of two types: 1.principal trace, which connects
either a net of size 2 or could be the main connection made to route
a net of 3+. 2. auxiliary trace, which connects a pin to a principal trace
in a net of 3+.
points -- points that make up the points on lines of the trace. | 62598f9f21a7993f00c65d9e |
class Patologia(models.Model): <NEW_LINE> <INDENT> contenedora = models.ForeignKey("rubricas.Patologia", null=True, on_delete=models.SET_NULL) <NEW_LINE> codigo = models.CharField(max_length=20, unique=True, blank=False) <NEW_LINE> nombre = models.CharField(max_length=150, unique=True, blank=False) <NEW_LINE> descripcion = models.TextField(null=True, blank=True, default="") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.codigo + ": " + self.nombre <NEW_LINE> <DEDENT> def buscar_por_codigo(codigo: str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Patologia.objects.get(codigo=codigo) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None | Esta clase sirve para representar una patología que se estudie en uno o muchos cursos | 62598f9fbaa26c4b54d4f0ca |
class Bfa(): <NEW_LINE> <INDENT> def __init__(self, conjunto="abc"): <NEW_LINE> <INDENT> self.__conjunto = conjunto <NEW_LINE> <DEDENT> def subConjuntos(self, s): <NEW_LINE> <INDENT> self.subConjuntosAux("", s) <NEW_LINE> <DEDENT> def subConjuntosAux(self, respuesta, pregunta): <NEW_LINE> <INDENT> if len(pregunta) == 0: <NEW_LINE> <INDENT> print(respuesta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.subConjuntosAux(respuesta, pregunta[1:]) <NEW_LINE> self.subConjuntosAux(respuesta+pregunta[0], pregunta[1:]) <NEW_LINE> <DEDENT> <DEDENT> def permutaciones(self, s): <NEW_LINE> <INDENT> self.permutacionesAux("", s) <NEW_LINE> <DEDENT> def permutacionesAux(self, respuesta, pregunta): <NEW_LINE> <INDENT> if len(pregunta) == 0: <NEW_LINE> <INDENT> print(respuesta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(len(pregunta)): <NEW_LINE> <INDENT> self.permutacionesAux(respuesta + pregunta[i], pregunta[0:i] + pregunta[i+1:]) | Implementacion de fuerza bruta
(Brutal force algoritmo) | 62598f9f44b2445a339b687b |
class TableCell(Element): <NEW_LINE> <INDENT> __slots__ = ['_content', 'alignment', 'rowspan', 'colspan', 'identifier', 'classes', 'attributes'] <NEW_LINE> _children = ['content'] <NEW_LINE> def __init__(self, *args, alignment='AlignDefault', rowspan=1, colspan=1, identifier='', classes=[], attributes={}): <NEW_LINE> <INDENT> self._set_ica(identifier, classes, attributes) <NEW_LINE> self._set_content(args, Block) <NEW_LINE> self.alignment = check_group(alignment, TABLE_ALIGNMENT) <NEW_LINE> self.rowspan = rowspan <NEW_LINE> self.colspan = colspan <NEW_LINE> if (self.rowspan <= 0): <NEW_LINE> <INDENT> raise TypeError('Cell rowspan must be positive') <NEW_LINE> <DEDENT> if (self.colspan <= 0): <NEW_LINE> <INDENT> raise TypeError('Cell colspan must be positive') <NEW_LINE> <DEDENT> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return [self._ica_to_json(), {'t': self.alignment}, self.rowspan, self.colspan, self.content.to_json()] | Table Cell
:param args: elements
:type args: :class:`Block`
:param alignment: row alignment
(either 'AlignLeft', 'AlignRight', 'AlignCenter' or 'AlignDefault').
:type alignment: :class:`str`
:param rowspan: number of rows occupied by a cell (height of a cell)
:type rowspan: :class:`int`
:param colspan: number of columns occupied by a cell (width of a cell)
:type colspan: :class:`int`
:param identifier: element identifier (usually unique)
:type identifier: :class:`str`
:param classes: class names of the element
:type classes: :class:`list` of :class:`str`
:param attributes: additional attributes
:type attributes: :class:`dict`
:Base: :class:`Element`
| 62598f9ffbf16365ca793ed5 |
class Tool(benchexec.tools.template.BaseTool2): <NEW_LINE> <INDENT> REQUIRED_PATHS = ["bin", "lib"] <NEW_LINE> def executable(self, tool_locator): <NEW_LINE> <INDENT> return tool_locator.find_executable("brick", subdir="bin") <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return "BRICK" <NEW_LINE> <DEDENT> def cmdline(self, executable, options, task, rlimits): <NEW_LINE> <INDENT> data_model_param = get_data_model_from_task(task, {ILP32: "--32", LP64: "--64"}) <NEW_LINE> if data_model_param and data_model_param not in options: <NEW_LINE> <INDENT> options += [data_model_param] <NEW_LINE> <DEDENT> return [executable] + options + list(task.input_files_or_identifier) <NEW_LINE> <DEDENT> def version(self, executable): <NEW_LINE> <INDENT> return self._version_from_tool(executable, arg="--version") <NEW_LINE> <DEDENT> def program_files(self, executable): <NEW_LINE> <INDENT> paths = self.REQUIRED_PATHS <NEW_LINE> return [executable] + self._program_files_from_executable( executable, paths, parent_dir=True ) <NEW_LINE> <DEDENT> def determine_result(self, run): <NEW_LINE> <INDENT> status = result.RESULT_ERROR <NEW_LINE> for line in run.output: <NEW_LINE> <INDENT> if line == "VERIFICATION SUCCESSFUL": <NEW_LINE> <INDENT> status = result.RESULT_TRUE_PROP <NEW_LINE> break <NEW_LINE> <DEDENT> elif line == "VERIFICATION FAILED": <NEW_LINE> <INDENT> status = result.RESULT_FALSE_REACH <NEW_LINE> break <NEW_LINE> <DEDENT> elif line == "VERIFICATION UNKNOWN" or line == "VERIFICATION BOUNDED TRUE": <NEW_LINE> <INDENT> status = result.RESULT_UNKNOWN <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return status | Tool info for BRICK
https://github.com/brick-tool-dev/brick-tool | 62598f9f91af0d3eaad39c27 |
class KeyValueType (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_MIXED <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'KeyValueType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/wssplat/schemas/ds.xsd', 164, 2) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __DSAKeyValue = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'DSAKeyValue'), 'DSAKeyValue', '__httpwww_w3_org200009xmldsig_KeyValueType_httpwww_w3_org200009xmldsigDSAKeyValue', False, pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/wssplat/schemas/ds.xsd', 289, 0), ) <NEW_LINE> DSAKeyValue = property(__DSAKeyValue.value, __DSAKeyValue.set, None, None) <NEW_LINE> __RSAKeyValue = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'RSAKeyValue'), 'RSAKeyValue', '__httpwww_w3_org200009xmldsig_KeyValueType_httpwww_w3_org200009xmldsigRSAKeyValue', False, pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/wssplat/schemas/ds.xsd', 306, 0), ) <NEW_LINE> RSAKeyValue = property(__RSAKeyValue.value, __RSAKeyValue.set, None, None) <NEW_LINE> _HasWildcardElement = True <NEW_LINE> _ElementMap.update({ __DSAKeyValue.name() : __DSAKeyValue, __RSAKeyValue.name() : __RSAKeyValue }) <NEW_LINE> _AttributeMap.update({ }) | Complex type {http://www.w3.org/2000/09/xmldsig#}KeyValueType with content type MIXED | 62598f9f596a897236127a97 |
class FieldDescriptorPacket(MysqlPacket): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> MysqlPacket.__init__(self, *args) <NEW_LINE> self.__parse_field_descriptor() <NEW_LINE> <DEDENT> def __parse_field_descriptor(self): <NEW_LINE> <INDENT> self.catalog = self.read_length_coded_string() <NEW_LINE> self.db = self.read_length_coded_string() <NEW_LINE> self.table_name = self.read_length_coded_string() <NEW_LINE> self.org_table = self.read_length_coded_string() <NEW_LINE> self.name = self.read_length_coded_string().decode(self.connection.charset) <NEW_LINE> self.org_name = self.read_length_coded_string() <NEW_LINE> self.advance(1) <NEW_LINE> self.charsetnr = struct.unpack('<H', self.read(2))[0] <NEW_LINE> self.length = struct.unpack('<I', self.read(4))[0] <NEW_LINE> self.type_code = byte2int(self.read(1)) <NEW_LINE> self.flags = struct.unpack('<H', self.read(2))[0] <NEW_LINE> self.scale = byte2int(self.read(1)) <NEW_LINE> self.advance(2) <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> desc = [] <NEW_LINE> desc.append(self.name) <NEW_LINE> desc.append(self.type_code) <NEW_LINE> desc.append(None) <NEW_LINE> desc.append(self.get_column_length()) <NEW_LINE> desc.append(self.get_column_length()) <NEW_LINE> desc.append(self.scale) <NEW_LINE> if self.flags % 2 == 0: <NEW_LINE> <INDENT> desc.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> desc.append(0) <NEW_LINE> <DEDENT> return tuple(desc) <NEW_LINE> <DEDENT> def get_column_length(self): <NEW_LINE> <INDENT> if self.type_code == FIELD_TYPE.VAR_STRING: <NEW_LINE> <INDENT> mblen = MBLENGTH.get(self.charsetnr, 1) <NEW_LINE> return self.length // mblen <NEW_LINE> <DEDENT> return self.length <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('%s %s.%s.%s, type=%s' % (self.__class__, self.db, self.table_name, self.name, self.type_code)) | A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code. | 62598f9f097d151d1a2c0e44 |
class Event(object): <NEW_LINE> <INDENT> def __init__(self, start=None, end=None, name=None, scanpath=None): <NEW_LINE> <INDENT> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.name = name <NEW_LINE> self.scanpath = scanpath <NEW_LINE> <DEDENT> def valid(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.start < self.end <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def duration(self): <NEW_LINE> <INDENT> return (self.end - self.start) | The generic stimulus event, providing start time, end time, and
name. Can also contain viewing data (Is this a good idea?) | 62598f9f1f037a2d8b9e3f03 |
class Socket: <NEW_LINE> <INDENT> def __init__(self, device: Device, index: int) -> None: <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.index = index <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self) -> bool: <NEW_LINE> <INDENT> return self.raw.state == 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw(self) -> SocketResponse: <NEW_LINE> <INDENT> socket_control_response = self.device.raw.socket_control <NEW_LINE> assert socket_control_response is not None <NEW_LINE> return socket_control_response[self.index] <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> state = "on" if self.state else "off" <NEW_LINE> return f"<Socket #{self.index} - name: {self.device.name}, state: {state}>" | Represent a socket. | 62598f9fd53ae8145f9182aa |
class Lockin(Instrument): <NEW_LINE> <INDENT> def __init__(self, resource=None, sim_mode=False, backend="@py", query='GPIB?*::INSTR', name=None, path='./'): <NEW_LINE> <INDENT> Instrument.__init__(self, resource, sim_mode, backend, query, name, path) <NEW_LINE> if not self._name == 'Stanford_Research_Systems-SR830': <NEW_LINE> <INDENT> raise Warning('Using {}'.format(self._name)) <NEW_LINE> <DEDENT> self._inst.write('OUTX 1') <NEW_LINE> self.adquisition = _lockin_adquisition(self) <NEW_LINE> self.input_panel = _lockin_input(self) <NEW_LINE> self.ch1_panel = _lockin_ch1(self) <NEW_LINE> self.ch2_panel = _lockin_ch2(self) <NEW_LINE> self.auto_panel = _lockin_autofuncs(self) <NEW_LINE> self.setup_panel = _lockin_setup(self) <NEW_LINE> self.interface_panel = _lockin_interface(self) <NEW_LINE> self.reference_panel = _lockin_reference(self) <NEW_LINE> self.auxiliar_outs = _lockin_auxout(self) | Class for PyVISA control of Lock-in Amplifier SR830. | 62598f9f1f5feb6acb162a3e |
class cp(Action): <NEW_LINE> <INDENT> expected_param = {"src": [str, list], "dst_dir": str} <NEW_LINE> optional_param = {"new_file_name": str} <NEW_LINE> expected_result = {"job_param": {"result_files": list}} <NEW_LINE> def cp(self, file_path, dst_dir, new_file_name=None): <NEW_LINE> <INDENT> if new_file_name: <NEW_LINE> <INDENT> file_name = new_file_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> file_name = file_path.split("/")[-1] <NEW_LINE> <DEDENT> import ipdb; ipdb.set_trace() <NEW_LINE> dst = "%s/%s" % (dst_dir, file_name) <NEW_LINE> try: <NEW_LINE> <INDENT> shutil.copy2(file_path, dst) <NEW_LINE> <DEDENT> except shutil.Error as e: <NEW_LINE> <INDENT> msg = 'Coping Error: %s' % e <NEW_LINE> self.log_error(msg) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> msg = 'Coping Error: %s' % e.strerror <NEW_LINE> self.log_error(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.out["job_param"]["result_files"].append(file_name) <NEW_LINE> <DEDENT> <DEDENT> def run(self, action_param): <NEW_LINE> <INDENT> self.out["job_param"]["result_files"] = [] <NEW_LINE> src = action_param["src"] <NEW_LINE> dst_dir = action_param["dst_dir"] <NEW_LINE> new_file_name = action_param.get("new_file_name", None) <NEW_LINE> if type(src) == str: <NEW_LINE> <INDENT> self.cp(src, dst_dir, new_file_name) <NEW_LINE> <DEDENT> elif type(src) == list: <NEW_LINE> <INDENT> for file_path in src: <NEW_LINE> <INDENT> self.cp(src, dst_dir, new_file_name) <NEW_LINE> <DEDENT> <DEDENT> return self.result() | This is similar to the Unix command cp -p.
src - can be file path or list of file paths
{"src": "xxx.xxx", "dst_dir": "xxx.xxx"}
| 62598f9fd486a94d0ba2bdf1 |
class FancyStrMixin: <NEW_LINE> <INDENT> showAttributes = () <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> r = ['<', getattr(self, 'fancybasename', self.__class__.__name__)] <NEW_LINE> for attr in self.showAttributes: <NEW_LINE> <INDENT> if isinstance(attr, str): <NEW_LINE> <INDENT> r.append(' %s=%r' % (attr, getattr(self, attr))) <NEW_LINE> <DEDENT> elif len(attr) == 2: <NEW_LINE> <INDENT> attr = cast(Tuple[str, Callable], attr) <NEW_LINE> r.append((' %s=' % (attr[0],)) + attr[1](getattr(self, attr[0]))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attr = cast(Tuple[str, str, str], attr) <NEW_LINE> r.append((' %s=' + attr[2]) % (attr[1], getattr(self, attr[0]))) <NEW_LINE> <DEDENT> <DEDENT> r.append('>') <NEW_LINE> return ''.join(r) <NEW_LINE> <DEDENT> __repr__ = __str__ | Mixin providing a flexible implementation of C{__str__}.
C{__str__} output will begin with the name of the class, or the contents
of the attribute C{fancybasename} if it is set.
The body of C{__str__} can be controlled by overriding C{showAttributes} in
a subclass. Set C{showAttributes} to a sequence of strings naming
attributes, or sequences of C{(attributeName, callable)}, or sequences of
C{(attributeName, displayName, formatCharacter)}. In the second case, the
callable is passed the value of the attribute and its return value used in
the output of C{__str__}. In the final case, the attribute is looked up
using C{attributeName}, but the output uses C{displayName} instead, and
renders the value of the attribute using C{formatCharacter}, e.g. C{"%.3f"}
might be used for a float. | 62598f9fbd1bec0571e14fd1 |
class QtValueMap: <NEW_LINE> <INDENT> def __init__(self, mapping): <NEW_LINE> <INDENT> self.mapping = mapping <NEW_LINE> <DEDENT> def __contains__(self, qwidget): <NEW_LINE> <INDENT> return qwidget.__class__ in self.mapping <NEW_LINE> <DEDENT> def __getitem__(self, qwidget): <NEW_LINE> <INDENT> return getattr(qwidget, self.mapping[qwidget.__class__]) | Maps a Qt Widget class to its corresponding attribute | 62598f9fa8ecb03325871029 |
class ShowIpBgpRouteDistributer(MetaParser): <NEW_LINE> <INDENT> cli_command = ['show ip bgp {route}', 'show ip bgp {address_family}'] <NEW_LINE> def cli(self, route=None, address_family=None, output=None): <NEW_LINE> <INDENT> if route: <NEW_LINE> <INDENT> cmd = self.cli_command[0].format(route=route) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd = self.cli_command[1].format(address_family=address_family) <NEW_LINE> <DEDENT> if not output: <NEW_LINE> <INDENT> output = self.device.execute(cmd) <NEW_LINE> <DEDENT> if route or '.' in address_family: <NEW_LINE> <INDENT> parser = ShowIpBgpAllDetail(self.device) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parser = ShowIpBgp(self.device) <NEW_LINE> <DEDENT> self.schema = parser.schema <NEW_LINE> return parser.parse(output=output) | Parser for:
* 'show ip bgp {route}'
* 'show ip bgp {address_family}' | 62598f9fe5267d203ee6b729 |
class ModelReferenced1(Document): <NEW_LINE> <INDENT> ref_param1 = StringField(required=True) <NEW_LINE> ref_param2 = StringField(default="Referenced second param") | Class to be referenced | 62598f9f63b5f9789fe84f91 |
class ScaleFileSerializerV6(ScaleFileBaseSerializerV6): <NEW_LINE> <INDENT> from batch.serializers import BatchBaseSerializerV6 <NEW_LINE> from job.job_type_serializers import JobTypeBaseSerializerV6 <NEW_LINE> from recipe.serializers import RecipeTypeBaseSerializerV6 <NEW_LINE> workspace = WorkspaceBaseSerializer() <NEW_LINE> data_type_tags = serializers.ListField(child=serializers.CharField()) <NEW_LINE> media_type = serializers.CharField() <NEW_LINE> file_type = serializers.CharField() <NEW_LINE> file_size = serializers.IntegerField() <NEW_LINE> file_path = serializers.CharField() <NEW_LINE> is_deleted = serializers.BooleanField() <NEW_LINE> url = serializers.URLField() <NEW_LINE> created = serializers.DateTimeField() <NEW_LINE> deleted = serializers.DateTimeField() <NEW_LINE> data_started = serializers.DateTimeField() <NEW_LINE> data_ended = serializers.DateTimeField() <NEW_LINE> source_started = serializers.DateTimeField() <NEW_LINE> source_ended = serializers.DateTimeField() <NEW_LINE> source_sensor_class = serializers.CharField() <NEW_LINE> source_sensor = serializers.CharField() <NEW_LINE> source_collection = serializers.CharField() <NEW_LINE> source_task = serializers.CharField() <NEW_LINE> last_modified = serializers.DateTimeField() <NEW_LINE> geometry = WktField() <NEW_LINE> center_point = WktField() <NEW_LINE> countries = serializers.StringRelatedField(many=True, read_only=True) <NEW_LINE> job_type = JobTypeBaseSerializerV6() <NEW_LINE> job = ModelIdSerializer() <NEW_LINE> job_exe = ModelIdSerializer() <NEW_LINE> job_output = serializers.CharField() <NEW_LINE> recipe_type = RecipeTypeBaseSerializerV6() <NEW_LINE> recipe = ModelIdSerializer() <NEW_LINE> recipe_node = serializers.CharField() <NEW_LINE> batch = BatchBaseSerializerV6() <NEW_LINE> is_superseded = serializers.BooleanField() <NEW_LINE> superseded = serializers.DateTimeField() | Converts Scale file model fields to REST output | 62598f9f656771135c48949f |
class Schedule(BaseAPI): <NEW_LINE> <INDENT> def __init__(self, api_key: Optional[str] = None, timeout: Optional[int] = None): <NEW_LINE> <INDENT> super().__init__(api_key, timeout) <NEW_LINE> <DEDENT> def schedule_today(self, markets: Optional[Union[int, List[int]]] = None, bookmakers: Optional[Union[int, List[int]]] = None, league_ids: Optional[Union[int, List[int]]] = None, includes: Optional[Union[str, List[str]]] = None, filters: Optional[dict] = None, df: bool = False, df_cols: Optional[Union[str, List[str]]] = None): <NEW_LINE> <INDENT> params = {"leagues": league_ids, "markets": markets, "bookmakers": bookmakers} <NEW_LINE> schedule = self.make_request(endpoint="livescores", includes=includes, params=params, filters=filters) <NEW_LINE> if df: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> df_schedule = self._to_df(schedule, cols=df_cols) <NEW_LINE> return df_schedule <NEW_LINE> <DEDENT> except NotJSONNormalizable: <NEW_LINE> <INDENT> log.info("Not JSON-normalizable, returning JSON.") <NEW_LINE> return schedule <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return schedule | Schedule (today) Class | 62598f9f851cf427c66b80e4 |
class Rollback(object): <NEW_LINE> <INDENT> __slots__ = '_rollbacks' <NEW_LINE> def __init__(self, rollback=None): <NEW_LINE> <INDENT> if rollback is None: <NEW_LINE> <INDENT> self._rollbacks = [] <NEW_LINE> <DEDENT> elif isinstance(rollback, (list, tuple)): <NEW_LINE> <INDENT> self._rollbacks = rollback <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rollbacks = [rollback] <NEW_LINE> <DEDENT> <DEDENT> def merge(self, *others): <NEW_LINE> <INDENT> self._rollbacks.extend(others) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, *_): <NEW_LINE> <INDENT> for rollback in self._rollbacks: <NEW_LINE> <INDENT> rollback() <NEW_LINE> <DEDENT> del self._rollbacks[:] <NEW_LINE> <DEDENT> rollback = __call__ = __exit__ | When called, rollbacks all the patches and changes the :func:`weave` has done. | 62598f9f236d856c2adc9348 |
class L3_interfacesArgs(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> argument_spec = { 'config': { 'elements': 'dict', 'options': { 'ipv4': { 'mutually_exclusive': [['addresses', 'anycast_addresses']], 'options': { 'addresses': { 'elements': 'dict', 'options': { 'address': {'type': 'str'}, 'secondary': {'default': 'False', 'type': 'bool'} }, 'type': 'list' }, 'anycast_addresses': {'elements': 'str', 'type': 'list'}, }, 'type': 'dict' }, 'ipv6': { 'options': { 'addresses': { 'elements': 'dict', 'options': { 'address': {'type': 'str'} }, 'type': 'list' }, 'enabled': {'type': 'bool'} }, 'type': 'dict' }, 'name': {'required': True, 'type': 'str'} }, 'type': 'list' }, 'state': { 'choices': ['merged', 'deleted'], 'default': 'merged', 'type': 'str' } } | The arg spec for the sonic_l3_interfaces module
| 62598f9f6e29344779b00478 |
class ApacheLogParserError(Exception): <NEW_LINE> <INDENT> pass | Appache log parsing error
| 62598f9f97e22403b383ad28 |
class TestUserMetadataSplitter(object): <NEW_LINE> <INDENT> pass | >>> splitRun = test_splitter('UserMetadataSplitter', 'dataK.dbs', create_config(config_dict={'dataset': {'split metadata': 'NOTHING'}}))
file1: A, file2: B, file3: C, filex: D
AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDD => 40
---------------------------------------- => 0,40
>>> splitRun = test_splitter('UserMetadataSplitter', 'dataK.dbs', create_config(config_dict={'dataset': {'split metadata': 'KEY1'}}))
file1: A, file2: B, file3: C, filex: D
AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDD => 40
---------------------------------------- => 0,40
>>> splitRun = test_splitter('UserMetadataSplitter', 'dataK.dbs', create_config(config_dict={'dataset': {'split metadata': 'KEY2'}}))
filex: D
DDDDDDDDDD => 10
---------- => 0,10
file1: A, file2: B, file3: C
AAAAAAAAAABBBBBBBBBBCCCCCCCCCC => 30
------------------------------ => 0,30
>>> splitRun = test_splitter('UserMetadataSplitter', 'dataK.dbs', create_config(config_dict={'dataset': {'split metadata': 'KEY3'}}))
filex: D
DDDDDDDDDD => 10
---------- => 0,10
file1: A
AAAAAAAAAA => 10
---------- => 0,10
file2: B
BBBBBBBBBB => 10
---------- => 0,10
file3: C
CCCCCCCCCC => 10
---------- => 0,10 | 62598f9f01c39578d7f12b9a |
class HathiChecksumReport(AbsChecksumBuilder): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _format_entry(filename: str, hash_value: str) -> str: <NEW_LINE> <INDENT> return "{} *{}".format(hash_value, filename) <NEW_LINE> <DEDENT> def build(self) -> str: <NEW_LINE> <INDENT> lines = [] <NEW_LINE> for entry in sorted(self._files, key=lambda x: x.filename): <NEW_LINE> <INDENT> lines.append(self._format_entry( filename=entry.filename, hash_value=entry.hash) ) <NEW_LINE> <DEDENT> return "{}\n".format("\n".join(lines)) | Generate a new Checksum report for Hathi. | 62598f9f85dfad0860cbf983 |
class TreebookHandler(BookHandler): <NEW_LINE> <INDENT> def __init__(self, pObject): <NEW_LINE> <INDENT> BookHandler.__init__(self, pObject) <NEW_LINE> <DEDENT> def Save(self): <NEW_LINE> <INDENT> book, obj = self._window, self._pObject <NEW_LINE> expanded = "" <NEW_LINE> for page in range(book.GetPageCount()): <NEW_LINE> <INDENT> if book.IsNodeExpanded(page): <NEW_LINE> <INDENT> if expanded: <NEW_LINE> <INDENT> expanded += PERSIST_SEP <NEW_LINE> <DEDENT> expanded += "%u"%page <NEW_LINE> <DEDENT> <DEDENT> obj.SaveValue(PERSIST_TREEBOOK_EXPANDED_BRANCHES, expanded) <NEW_LINE> return BookHandler.Save(self) <NEW_LINE> <DEDENT> def Restore(self): <NEW_LINE> <INDENT> book, obj = self._window, self._pObject <NEW_LINE> expanded = obj.RestoreValue(PERSIST_TREEBOOK_EXPANDED_BRANCHES) <NEW_LINE> if expanded: <NEW_LINE> <INDENT> indices = expanded.split(PERSIST_SEP) <NEW_LINE> pageCount = book.GetPageCount() <NEW_LINE> for indx in indices: <NEW_LINE> <INDENT> idx = int(indx) <NEW_LINE> if idx >= 0 and idx < pageCount: <NEW_LINE> <INDENT> book.ExpandNode(idx) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return BookHandler.Restore(self) <NEW_LINE> <DEDENT> def GetKind(self): <NEW_LINE> <INDENT> return PERSIST_TREEBOOK_KIND | Supports saving/restoring open tree branches.
This class handles the following wxPython widgets:
- :class:`Treebook` (except for page selection, see :class:`BookHandler` for this). | 62598f9f4a966d76dd5eecfe |
class CNNTargetNetwork(CNN): <NEW_LINE> <INDENT> def __init__(self, state_shape, num_actions, hidden=20, lr=1e-4, tau=0.01): <NEW_LINE> <INDENT> super(CNNTargetNetwork, self).__init__(state_shape, num_actions, hidden, lr) <NEW_LINE> self.tau = tau <NEW_LINE> self._associate = self._register_associate() <NEW_LINE> <DEDENT> def _register_associate(self): <NEW_LINE> <INDENT> tf_vars = tf.trainable_variables() <NEW_LINE> total_vars = len(tf_vars) <NEW_LINE> op_holder = [] <NEW_LINE> for idx,var in enumerate(tf_vars[0:total_vars//2]): <NEW_LINE> <INDENT> op_holder.append(tf_vars[idx+total_vars//2].assign( (var.value()*self.tau) + ((1-self.tau)*tf_vars[idx+total_vars//2].value()))) <NEW_LINE> <DEDENT> return op_holder <NEW_LINE> <DEDENT> def update(self, sess): <NEW_LINE> <INDENT> for op in self._associate: <NEW_LINE> <INDENT> sess.run(op) | Slowly updated target network. Tau indicates the speed of adjustment. If 1,
it is always set to the values of its associate. | 62598f9f07f4c71912baf268 |
class Universe(object): <NEW_LINE> <INDENT> LTP = Ola_pb2.LTP <NEW_LINE> HTP = Ola_pb2.HTP <NEW_LINE> def __init__(self, universe_id, name, merge_mode): <NEW_LINE> <INDENT> self._id = universe_id <NEW_LINE> self._name = name <NEW_LINE> self._merge_mode = merge_mode <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def merge_mode(self): <NEW_LINE> <INDENT> return self._merge_mode <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> return cmp(self._id, other._id) | Represents a universe.
Attributes:
id: the integer universe id
name: the name of this universe
merge_mode: the merge mode this universe is using | 62598f9f66673b3332c301e4 |
class TestBase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> app = create_app("test") <NEW_LINE> self.app = app <NEW_LINE> self.db = db <NEW_LINE> with self.app.app_context(): <NEW_LINE> <INDENT> db.create_all() <NEW_LINE> <DEDENT> self.client = self.app.test_client(use_cookies=False) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> with self.app.app_context(): <NEW_LINE> <INDENT> self.db.session.remove() <NEW_LINE> self.db.drop_all() <NEW_LINE> <DEDENT> if "sqlite" in self.app.config.get("SQLALCHEMY_DATABASE_URI", ""): <NEW_LINE> <INDENT> if os.path.exists(os.path.join(basedir, "test.db")): <NEW_LINE> <INDENT> os.unlink(os.path.join(basedir, "test.db")) | Base class for testing the fulcrum API | 62598f9f91f36d47f2230daf |
class ChanceScheduler(driver.Scheduler): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ChanceScheduler, self).__init__(*args, **kwargs) <NEW_LINE> self.compute_rpcapi = compute_rpcapi.ComputeAPI() <NEW_LINE> <DEDENT> def _filter_hosts(self, request_spec, hosts, filter_properties): <NEW_LINE> <INDENT> ignore_hosts = filter_properties.get('ignore_hosts', []) <NEW_LINE> hosts = [host for host in hosts if host not in ignore_hosts] <NEW_LINE> return hosts <NEW_LINE> <DEDENT> def _schedule(self, context, topic, request_spec, filter_properties): <NEW_LINE> <INDENT> elevated = context.elevated() <NEW_LINE> hosts = self.hosts_up(elevated, topic) <NEW_LINE> if not hosts: <NEW_LINE> <INDENT> msg = _("Is the appropriate service running?") <NEW_LINE> raise exception.NoValidHost(reason=msg) <NEW_LINE> <DEDENT> hosts = self._filter_hosts(request_spec, hosts, filter_properties) <NEW_LINE> if not hosts: <NEW_LINE> <INDENT> msg = _("Could not find another compute") <NEW_LINE> raise exception.NoValidHost(reason=msg) <NEW_LINE> <DEDENT> return random.choice(hosts) <NEW_LINE> <DEDENT> def select_hosts(self, context, request_spec, filter_properties): <NEW_LINE> <INDENT> hosts = [self._schedule(context, CONF.compute_topic, request_spec, filter_properties) for instance_uuid in request_spec.get('instance_uuids', [])] <NEW_LINE> if not hosts: <NEW_LINE> <INDENT> raise exception.NoValidHost(reason="") <NEW_LINE> <DEDENT> return hosts <NEW_LINE> <DEDENT> def select_destinations(self, context, request_spec, filter_properties): <NEW_LINE> <INDENT> num_instances = request_spec['num_instances'] <NEW_LINE> dests = [] <NEW_LINE> for i in range(num_instances): <NEW_LINE> <INDENT> host = self._schedule(context, CONF.compute_topic, request_spec, filter_properties) <NEW_LINE> host_state = dict(host=host, nodename=None, limits=None) <NEW_LINE> dests.append(host_state) <NEW_LINE> <DEDENT> if len(dests) < num_instances: <NEW_LINE> <INDENT> raise 
exception.NoValidHost(reason='') <NEW_LINE> <DEDENT> return dests <NEW_LINE> <DEDENT> def schedule_run_instance(self, context, request_spec, admin_password, injected_files, requested_networks, is_first_time, filter_properties): <NEW_LINE> <INDENT> instance_uuids = request_spec.get('instance_uuids') <NEW_LINE> for num, instance_uuid in enumerate(instance_uuids): <NEW_LINE> <INDENT> request_spec['instance_properties']['launch_index'] = num <NEW_LINE> try: <NEW_LINE> <INDENT> host = self._schedule(context, CONF.compute_topic, request_spec, filter_properties) <NEW_LINE> updated_instance = driver.instance_update_db(context, instance_uuid) <NEW_LINE> self.compute_rpcapi.run_instance(context, instance=updated_instance, host=host, requested_networks=requested_networks, injected_files=injected_files, admin_password=admin_password, is_first_time=is_first_time, request_spec=request_spec, filter_properties=filter_properties) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> driver.handle_schedule_error(context, ex, instance_uuid, request_spec) | Implements Scheduler as a random node selector. | 62598f9f0c0af96317c5619e |
class InvalidMarket(Exception): <NEW_LINE> <INDENT> pass | Markets can only have one reference product | 62598f9f8e7ae83300ee8ebd |
class MRI_ESM(NetCDF_Gridded): <NEW_LINE> <INDENT> priority = 100 <NEW_LINE> def _add_available_aux_coords(self, cube, filenames): <NEW_LINE> <INDENT> from iris.aux_factory import HybridPressureFactory <NEW_LINE> from iris.coords import AuxCoord <NEW_LINE> from cis.data_io.netcdf import read <NEW_LINE> ps_filenames = [f.replace('concbc', 'ps_TL95L80_192x48NH_3hr') for f in filenames] <NEW_LINE> hybrid_a = read(ps_filenames[0], 'a')['a'] <NEW_LINE> hybrid_b = read(ps_filenames[0], 'b')['b'] <NEW_LINE> hybrid_a_coord = AuxCoord(points=hybrid_a[:], long_name='vertical coordinate formula term: a(k)', units='Pa') <NEW_LINE> hybrid_b_coord = AuxCoord(points=hybrid_b[:], long_name='vertical coordinate formula term: b(k)', units='1') <NEW_LINE> surface_pressure_cube = _get_cubes(ps_filenames, 'ps', callback=self.load_multiple_files_callback).concatenate_cube() <NEW_LINE> surface_pressure = AuxCoord(points=surface_pressure_cube.data, standard_name='surface_air_pressure', long_name='surface pressure', units='Pa') <NEW_LINE> hybrid_a_coord.convert_units('hPa') <NEW_LINE> surface_pressure.convert_units('hPa') <NEW_LINE> cube.add_aux_coord(surface_pressure, (0, 2, 3)) <NEW_LINE> cube.add_aux_coord(hybrid_a_coord, (1,)) <NEW_LINE> cube.add_aux_coord(hybrid_b_coord, (1,)) <NEW_LINE> cube.add_aux_factory(HybridPressureFactory(delta=hybrid_a_coord, sigma=hybrid_b_coord, surface_air_pressure=surface_pressure)) | Plugin for reading ECHAM-HAM NetCDF output files. **Air pressure is converted to hPa** | 62598f9f0a50d4780f7051f7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.